gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package dev.flutter.plugins.integration_test;

import android.annotation.TargetApi;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.view.Choreographer;
import android.view.PixelCopy;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.android.FlutterSurfaceView;
import io.flutter.embedding.android.FlutterView;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.Result;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.StringBuilder;

/**
 * FlutterDeviceScreenshot is a utility class that allows to capture a screenshot
 * that includes both Android views and the Flutter UI.
 *
 * <p>To take screenshots, the rendering surface must be changed to {@code FlutterImageView},
 * since surfaces like {@code FlutterSurfaceView} and {@code FlutterTextureView} are opaque
 * when the view hierarchy is rendered to a bitmap.
 *
 * <p>It's also necessary to ask the framework to schedule a frame, and then add a listener
 * that waits for that frame to be presented by the Android framework.
 */
@TargetApi(19)
class FlutterDeviceScreenshot {
  /**
   * Finds the {@code FlutterView} added to the {@code activity} view hierarchy.
   *
   * <p>This assumes that there's only one {@code FlutterView} per activity, which
   * is always the case.
   *
   * @param activity typically, {@code FlutterActivity}.
   * @return the Flutter view, or {@code null} if the activity has no view with
   *     {@code FlutterActivity.FLUTTER_VIEW_ID}.
   */
  @Nullable
  private static FlutterView getFlutterView(@NonNull Activity activity) {
    return (FlutterView) activity.findViewById(FlutterActivity.FLUTTER_VIEW_ID);
  }

  /**
   * Whether the app is run with instrumentation.
   *
   * @return true if the app is running with instrumentation.
   */
  static boolean hasInstrumentation() {
    // TODO(egarciad): InstrumentationRegistry requires the uiautomator dependency.
    // However, Flutter adds test dependencies to release builds.
    // As a result, disable screenshots with instrumentation until the issue is fixed.
    // https://github.com/flutter/flutter/issues/56591
    return false;
  }

  /**
   * Captures a screenshot using ui automation.
   *
   * <p>Currently a stub: returns an empty array (see {@link #hasInstrumentation()}).
   *
   * @return byte array containing the screenshot.
   */
  static byte[] captureWithUiAutomation() throws IOException {
    return new byte[0];
  }

  // Whether the flutter surface is already converted to an image.
  private static boolean flutterSurfaceConvertedToImage = false;

  /**
   * Converts the Flutter surface to an image view.
   * This allows to render the view hierarchy to a bitmap since
   * {@code FlutterSurfaceView} and {@code FlutterTextureView} cannot be rendered to a bitmap.
   *
   * @param activity typically {@code FlutterActivity}.
   */
  static void convertFlutterSurfaceToImage(@NonNull Activity activity) {
    final FlutterView flutterView = getFlutterView(activity);
    // No-op when the view is missing or the surface was already converted.
    if (flutterView != null && !flutterSurfaceConvertedToImage) {
      flutterView.convertToImageView();
      flutterSurfaceConvertedToImage = true;
    }
  }

  /**
   * Restores the original Flutter surface.
   * The new surface will either be {@code FlutterSurfaceView} or {@code FlutterTextureView}.
   *
   * @param activity typically {@code FlutterActivity}.
   */
  static void revertFlutterImage(@NonNull Activity activity) {
    final FlutterView flutterView = getFlutterView(activity);
    if (flutterView != null && flutterSurfaceConvertedToImage) {
      // Clear the flag only once the surface swap has actually completed.
      flutterView.revertImageView(() -> {
        flutterSurfaceConvertedToImage = false;
      });
    }
  }

  // Handlers used to capture a view.
  private static Handler backgroundHandler;
  private static Handler mainHandler;

  /**
   * Captures a screenshot by drawing the view to a Canvas.
   *
   * <p>{@code convertFlutterSurfaceToImage} must be called prior to capturing the view,
   * otherwise the result is an error.
   *
   * @param activity this is {@link FlutterActivity}.
   * @param methodChannel the method channel to call into Dart.
   * @param result the result for the method channel that will contain the byte array.
   */
  static void captureView(
      @NonNull Activity activity, @NonNull MethodChannel methodChannel, @NonNull Result result) {
    final FlutterView flutterView = getFlutterView(activity);
    if (flutterView == null) {
      result.error("Could not copy the pixels", "FlutterView is null", null);
      return;
    }
    if (!flutterSurfaceConvertedToImage) {
      result.error("Could not copy the pixels", "Flutter surface must be converted to image first", null);
      return;
    }

    // Ask the framework to schedule a new frame.
    methodChannel.invokeMethod("scheduleFrame", null);

    // Lazily create the handlers; they are reused across captures.
    if (backgroundHandler == null) {
      final HandlerThread screenshotBackgroundThread = new HandlerThread("screenshot");
      screenshotBackgroundThread.start();
      backgroundHandler = new Handler(screenshotBackgroundThread.getLooper());
    }
    if (mainHandler == null) {
      mainHandler = new Handler(Looper.getMainLooper());
    }
    takeScreenshot(backgroundHandler, mainHandler, flutterView, result);
  }

  /**
   * Waits for the next Android frame.
   *
   * @param r a callback run when the next frame is produced.
   */
  private static void waitForAndroidFrame(Runnable r) {
    Choreographer.getInstance()
        .postFrameCallback(
            new Choreographer.FrameCallback() {
              @Override
              public void doFrame(long frameTimeNanos) {
                r.run();
              }
            });
  }

  /**
   * Waits until a Flutter frame is rendered by the Android OS.
   *
   * @param backgroundHandler the handler associated to a background thread.
   * @param mainHandler the handler associated to the platform thread.
   * @param view the flutter view.
   * @param result the result that contains the byte array.
   */
  private static void takeScreenshot(
      @NonNull Handler backgroundHandler,
      @NonNull Handler mainHandler,
      @NonNull FlutterView view,
      @NonNull Result result) {
    final boolean acquired = view.acquireLatestImageViewFrame();
    // The next frame may have already been committed.
    // The next frame is guaranteed to have the Flutter image, hence the double
    // waitForAndroidFrame nesting before sampling the view.
    waitForAndroidFrame(
        () -> {
          waitForAndroidFrame(
              () -> {
                if (acquired) {
                  FlutterDeviceScreenshot.convertViewToBitmap(view, result, backgroundHandler);
                } else {
                  // No frame was available yet; retry until one can be acquired.
                  takeScreenshot(backgroundHandler, mainHandler, view, result);
                }
              });
        });
  }

  /**
   * Renders {@code FlutterView} to a Bitmap.
   *
   * If successful, The byte array is provided in the result.
   *
   * @param flutterView the Flutter view.
   * @param result the result that contains the byte array.
   * @param backgroundHandler a background handler to avoid blocking the platform thread.
   */
  private static void convertViewToBitmap(
      @NonNull FlutterView flutterView, @NonNull Result result, @NonNull Handler backgroundHandler) {
    // Pre-O devices lack PixelCopy, so fall back to drawing the view onto a Canvas.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
      final Bitmap bitmap =
          Bitmap.createBitmap(
              flutterView.getWidth(), flutterView.getHeight(), Bitmap.Config.RGB_565);
      final Canvas canvas = new Canvas(bitmap);
      flutterView.draw(canvas);

      final ByteArrayOutputStream output = new ByteArrayOutputStream();
      bitmap.compress(Bitmap.CompressFormat.PNG, /*quality=*/ 100, output);
      result.success(output.toByteArray());
      return;
    }

    final Bitmap bitmap =
        Bitmap.createBitmap(
            flutterView.getWidth(), flutterView.getHeight(), Bitmap.Config.ARGB_8888);

    // Copy only the region of the window occupied by the Flutter view.
    final int[] flutterViewLocation = new int[2];
    flutterView.getLocationInWindow(flutterViewLocation);
    final int flutterViewLeft = flutterViewLocation[0];
    final int flutterViewTop = flutterViewLocation[1];

    final Rect flutterViewRect =
        new Rect(
            flutterViewLeft,
            flutterViewTop,
            flutterViewLeft + flutterView.getWidth(),
            flutterViewTop + flutterView.getHeight());

    final Activity flutterActivity = (Activity) flutterView.getContext();
    PixelCopy.request(
        flutterActivity.getWindow(),
        flutterViewRect,
        bitmap,
        (int copyResult) -> {
          // The PixelCopy callback runs on backgroundHandler's thread; results
          // must be posted back to the platform (main) thread.
          final Handler mainHandler = new Handler(Looper.getMainLooper());
          if (copyResult == PixelCopy.SUCCESS) {
            final ByteArrayOutputStream output = new ByteArrayOutputStream();
            bitmap.compress(Bitmap.CompressFormat.PNG, /*quality=*/ 100, output);
            mainHandler.post(
                () -> {
                  result.success(output.toByteArray());
                });
          } else {
            mainHandler.post(
                () -> {
                  result.error("Could not copy the pixels", "result was " + copyResult, null);
                });
          }
        },
        backgroundHandler);
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugins;

import com.google.common.collect.*;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.info.PluginInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.CloseableIndexComponent;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.*;

import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;

/**
 * Discovers, loads and keeps track of plugins: jvm plugins configured via
 * {@code plugin.types}, jvm plugins found in the plugins directory, and
 * site plugins (directories containing a {@code _site} folder).
 */
public class PluginsService extends AbstractComponent {
    // Properties file looked up inside a plugin's classpath / _site directory.
    private static final String ES_PLUGIN_PROPERTIES = "es-plugin.properties";

    private final Environment environment;

    /**
     * We keep around a list of jvm plugins
     */
    private final ImmutableList<Tuple<PluginInfo, Plugin>> plugins;
    // Per-plugin cached onModule(SomeModule) reflection handles.
    private final ImmutableMap<Plugin, List<OnModuleReference>> onModuleReferences;

    // Cached result of info(), refreshed at most every refreshInterval.
    private PluginsInfo cachedPluginsInfo;
    private final TimeValue refreshInterval;
    private long lastRefresh;

    // Pairs a plugin's onModule(...) method with the Module subtype it accepts.
    static class OnModuleReference {
        public final Class<? extends Module> moduleClass;
        public final Method onModuleMethod;

        OnModuleReference(Class<? extends Module> moduleClass, Method onModuleMethod) {
            this.moduleClass = moduleClass;
            this.onModuleMethod = onModuleMethod;
        }
    }

    /**
     * Constructs a new PluginService
     * @param settings The settings of the system
     * @param environment The environment of the system
     */
    public PluginsService(Settings settings, Environment environment) {
        super(settings);
        this.environment = environment;

        ImmutableList.Builder<Tuple<PluginInfo, Plugin>> tupleBuilder = ImmutableList.builder();

        // first we load all the default plugins from the settings
        String[] defaultPluginsClasses = settings.getAsArray("plugin.types");
        for (String pluginClass : defaultPluginsClasses) {
            Plugin plugin = PluginUtils.loadPlugin(pluginClass, settings, settings.getClassLoader());
            PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), hasSite(plugin.name()), true,
                    PluginInfo.VERSION_NOT_AVAILABLE, false);
            if (logger.isTraceEnabled()) {
                logger.trace("plugin loaded from settings [{}]", pluginInfo);
            }
            tupleBuilder.add(new Tuple<PluginInfo, Plugin>(pluginInfo, plugin));
        }

        // now, find all the ones that are in the classpath
        tupleBuilder.addAll(loadPlugins());
        this.plugins = tupleBuilder.build();

        // We need to build a List of jvm and site plugins for checking mandatory plugins
        Map<String, Plugin> jvmPlugins = Maps.newHashMap();
        List<String> sitePlugins = Lists.newArrayList();

        for (Tuple<PluginInfo, Plugin> tuple : this.plugins) {
            jvmPlugins.put(tuple.v2().name(), tuple.v2());
            if (tuple.v1().isSite()) {
                sitePlugins.add(tuple.v1().getName());
            }
        }

        // we load site plugins
        ImmutableList<Tuple<PluginInfo, Plugin>> tuples = loadSitePlugins();
        for (Tuple<PluginInfo, Plugin> tuple : tuples) {
            sitePlugins.add(tuple.v1().getName());
        }

        // Checking expected plugins
        String[] mandatoryPlugins = settings.getAsArray("plugin.mandatory", null);
        if (mandatoryPlugins != null) {
            Set<String> missingPlugins = Sets.newHashSet();
            for (String mandatoryPlugin : mandatoryPlugins) {
                if (!jvmPlugins.containsKey(mandatoryPlugin) && !sitePlugins.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) {
                    missingPlugins.add(mandatoryPlugin);
                }
            }
            if (!missingPlugins.isEmpty()) {
                throw new ElasticsearchException("Missing mandatory plugins [" + Strings.collectionToDelimitedString(missingPlugins, ", ") + "]");
            }
        }

        logger.info("loaded {}, sites {}", jvmPlugins.keySet(), sitePlugins);

        // Collect every plugin's onModule(SomeModule) hooks via reflection; only
        // single-parameter methods whose parameter extends Module are accepted.
        MapBuilder<Plugin, List<OnModuleReference>> onModuleReferences = MapBuilder.newMapBuilder();
        for (Plugin plugin : jvmPlugins.values()) {
            List<OnModuleReference> list = Lists.newArrayList();
            for (Method method : plugin.getClass().getDeclaredMethods()) {
                if (!method.getName().equals("onModule")) {
                    continue;
                }
                if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
                    logger.warn("Plugin: {} implementing onModule with no parameters or more than one parameter", plugin.name());
                    continue;
                }
                Class moduleClass = method.getParameterTypes()[0];
                if (!Module.class.isAssignableFrom(moduleClass)) {
                    logger.warn("Plugin: {} implementing onModule by the type is not of Module type {}", plugin.name(), moduleClass);
                    continue;
                }
                method.setAccessible(true);
                list.add(new OnModuleReference(moduleClass, method));
            }
            if (!list.isEmpty()) {
                onModuleReferences.put(plugin, list);
            }
        }
        this.onModuleReferences = onModuleReferences.immutableMap();

        this.refreshInterval = componentSettings.getAsTime("info_refresh_interval", TimeValue.timeValueSeconds(10));
    }

    /**
     * @return the loaded jvm plugins together with their info.
     */
    public ImmutableList<Tuple<PluginInfo, Plugin>> plugins() {
        return plugins;
    }

    // Runs processModule on each of the given modules.
    public void processModules(Iterable<Module> modules) {
        for (Module module : modules) {
            processModule(module);
        }
    }

    /**
     * Lets every plugin process the given module, and invokes any matching
     * onModule(...) hook discovered at construction time.
     */
    public void processModule(Module module) {
        for (Tuple<PluginInfo, Plugin> plugin : plugins()) {
            plugin.v2().processModule(module);
            // see if there are onModule references
            List<OnModuleReference> references = onModuleReferences.get(plugin.v2());
            if (references != null) {
                for (OnModuleReference reference : references) {
                    if (reference.moduleClass.isAssignableFrom(module.getClass())) {
                        try {
                            reference.onModuleMethod.invoke(plugin.v2(), module);
                        } catch (Exception e) {
                            logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v2().name());
                        }
                    }
                }
            }
        }
    }

    /**
     * @return node settings merged with every plugin's additional settings.
     */
    public Settings updatedSettings() {
        ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder()
                .put(this.settings);
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            builder.put(plugin.v2().additionalSettings());
        }
        return builder.build();
    }

    // Aggregates module classes contributed by all plugins.
    public Collection<Class<? extends Module>> modules() {
        List<Class<? extends Module>> modules = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().modules());
        }
        return modules;
    }

    // Aggregates module instances contributed by all plugins for the given settings.
    public Collection<Module> modules(Settings settings) {
        List<Module> modules = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().modules(settings));
        }
        return modules;
    }

    // Aggregates node-level lifecycle services contributed by all plugins.
    public Collection<Class<? extends LifecycleComponent>> services() {
        List<Class<? extends LifecycleComponent>> services = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            services.addAll(plugin.v2().services());
        }
        return services;
    }

    // Aggregates index-level module classes contributed by all plugins.
    public Collection<Class<? extends Module>> indexModules() {
        List<Class<? extends Module>> modules = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().indexModules());
        }
        return modules;
    }

    // Aggregates index-level module instances contributed by all plugins.
    public Collection<Module> indexModules(Settings settings) {
        List<Module> modules = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().indexModules(settings));
        }
        return modules;
    }

    // Aggregates index-level services contributed by all plugins.
    public Collection<Class<? extends CloseableIndexComponent>> indexServices() {
        List<Class<? extends CloseableIndexComponent>> services = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            services.addAll(plugin.v2().indexServices());
        }
        return services;
    }

    // Aggregates shard-level module classes contributed by all plugins.
    public Collection<Class<? extends Module>> shardModules() {
        List<Class<? extends Module>> modules = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().shardModules());
        }
        return modules;
    }

    // Aggregates shard-level module instances contributed by all plugins.
    public Collection<Module> shardModules(Settings settings) {
        List<Module> modules = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().shardModules(settings));
        }
        return modules;
    }

    // Aggregates shard-level services contributed by all plugins.
    public Collection<Class<? extends CloseableIndexComponent>> shardServices() {
        List<Class<? extends CloseableIndexComponent>> services = Lists.newArrayList();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            services.addAll(plugin.v2().shardServices());
        }
        return services;
    }

    /**
     * Get information about plugins (jvm and site plugins).
     * Information are cached for 10 seconds by default. Modify `plugins.info_refresh_interval` property if needed.
     * Setting `plugins.info_refresh_interval` to `-1` will cause infinite caching.
     * Setting `plugins.info_refresh_interval` to `0` will disable caching.
     * @return List of plugins information
     */
    synchronized public PluginsInfo info() {
        if (refreshInterval.millis() != 0) {
            // Serve from cache while it is fresh (or forever when interval < 0).
            if (cachedPluginsInfo != null &&
                    (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) {
                if (logger.isTraceEnabled()) {
                    logger.trace("using cache to retrieve plugins info");
                }
                return cachedPluginsInfo;
            }
            lastRefresh = System.currentTimeMillis();
        }

        if (logger.isTraceEnabled()) {
            logger.trace("starting to fetch info on plugins");
        }
        cachedPluginsInfo = new PluginsInfo();

        // We first add all JvmPlugins
        for (Tuple<PluginInfo, Plugin> plugin : this.plugins) {
            if (logger.isTraceEnabled()) {
                logger.trace("adding jvm plugin [{}]", plugin.v1());
            }
            cachedPluginsInfo.add(plugin.v1());
        }

        // We reload site plugins (in case of some changes)
        for (Tuple<PluginInfo, Plugin> plugin : loadSitePlugins()) {
            if (logger.isTraceEnabled()) {
                logger.trace("adding site plugin [{}]", plugin.v1());
            }
            cachedPluginsInfo.add(plugin.v1());
        }

        return cachedPluginsInfo;
    }

    /**
     * Loads jvm plugins found in the plugins directory, either into an isolated
     * classloader per plugin or into the shared elasticsearch classloader
     * (depending on the `plugins.isolation` setting and per-plugin properties).
     */
    private List<Tuple<PluginInfo,Plugin>> loadPlugins() {
        File pluginsFile = environment.pluginsFile();
        if (!isAccessibleDirectory(pluginsFile, logger)) {
            return Collections.emptyList();
        }

        List<Tuple<PluginInfo, Plugin>> pluginData = Lists.newArrayList();

        boolean defaultIsolation = settings.getAsBoolean("plugins.isolation", Boolean.TRUE);
        ClassLoader esClassLoader = settings.getClassLoader();

        // Reflection handle used to append plugin jars to the shared classloader
        // (only needed for non-isolated plugins); looked up lazily, at most once.
        Method addURL = null;
        boolean discoveredAddUrl = false;

        File[] pluginsFiles = pluginsFile.listFiles();
        if (pluginsFiles != null) {
            for (File pluginRoot : pluginsFiles) {
                if (isAccessibleDirectory(pluginRoot, logger)) {
                    try {
                        logger.trace("--- adding plugin [" + pluginRoot.getAbsolutePath() + "]");
                        // check isolation
                        List<File> pluginClassPath = PluginUtils.pluginClassPathAsFiles(pluginRoot);
                        List<URL> pluginProperties = PluginUtils.lookupPluginProperties(pluginClassPath);
                        boolean isolated = PluginUtils.lookupIsolation(pluginProperties, defaultIsolation);

                        if (isolated) {
                            logger.trace("--- creating isolated space for plugin [" + pluginRoot.getAbsolutePath() + "]");
                            PluginClassLoader pcl = new PluginClassLoader(PluginUtils.convertFileToUrl(pluginClassPath), esClassLoader);
                            pluginData.addAll(loadPlugin(pluginClassPath, pluginProperties, pcl, true));
                        } else {
                            if (!discoveredAddUrl) {
                                discoveredAddUrl = true;
                                // Walk up the classloader hierarchy until a declared addURL is found.
                                Class<?> esClassLoaderClass = esClassLoader.getClass();
                                while (!esClassLoaderClass.equals(Object.class)) {
                                    try {
                                        addURL = esClassLoaderClass.getDeclaredMethod("addURL", URL.class);
                                        addURL.setAccessible(true);
                                        break;
                                    } catch (NoSuchMethodException e) {
                                        // no method, try the parent
                                        esClassLoaderClass = esClassLoaderClass.getSuperclass();
                                    }
                                }
                            }
                            if (addURL == null) {
                                // NOTE(review): without addURL the plugin is skipped silently at debug level.
                                logger.debug("failed to find addURL method on classLoader [" + esClassLoader + "] to add methods");
                            } else {
                                for (File file : pluginClassPath) {
                                    addURL.invoke(esClassLoader, file.toURI().toURL());
                                }
                                pluginData.addAll(loadPlugin(pluginClassPath, pluginProperties, esClassLoader, false));
                            }
                        }
                    } catch (Throwable e) {
                        logger.warn("failed to add plugin [" + pluginRoot.getAbsolutePath() + "]", e);
                    }
                }
            }
        } else {
            logger.debug("failed to list plugins from {}. Check your right access.", pluginsFile.getAbsolutePath());
        }

        return pluginData;
    }

    /**
     * Instantiates the plugins declared in the given es-plugin.properties URLs
     * using the supplied classloader; failures are logged and skipped.
     */
    private Collection<? extends Tuple<PluginInfo, Plugin>> loadPlugin(List<File> pluginClassPath, List<URL> properties, ClassLoader classLoader, boolean isolation) throws Exception {
        List<Tuple<PluginInfo, Plugin>> plugins = Lists.newArrayList();

        Enumeration<URL> entries = Collections.enumeration(properties);
        while (entries.hasMoreElements()) {
            URL pluginUrl = entries.nextElement();
            Properties pluginProps = new Properties();
            InputStream is = null;
            try {
                is = pluginUrl.openStream();
                pluginProps.load(is);
                String pluginClassName = pluginProps.getProperty("plugin");
                if (pluginClassName == null) {
                    throw new IllegalArgumentException("No plugin class specified");
                }
                String pluginVersion = pluginProps.getProperty("version", PluginInfo.VERSION_NOT_AVAILABLE);
                Plugin plugin = PluginUtils.loadPlugin(pluginClassName, settings, classLoader);

                // Is it a site plugin as well? Does it have also an embedded _site structure
                File siteFile = new File(new File(environment.pluginsFile(), plugin.name()), "_site");
                boolean isSite = isAccessibleDirectory(siteFile, logger);
                if (logger.isTraceEnabled()) {
                    logger.trace("found a jvm plugin [{}], [{}]{}", plugin.name(), plugin.description(), isSite ? ": with _site structure" : "");
                }

                PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), isSite, true, pluginVersion, isolation);

                plugins.add(new Tuple<PluginInfo, Plugin>(pluginInfo, plugin));
            } catch (Throwable e) {
                logger.warn("failed to load plugin from [" + pluginUrl + "]", e);
            } finally {
                IOUtils.closeWhileHandlingException(is);
            }
        }

        return plugins;
    }

    /**
     * Scans the plugins directory for site-only plugins (directories with a
     * {@code _site} folder that are not already known as jvm plugins).
     */
    private ImmutableList<Tuple<PluginInfo,Plugin>> loadSitePlugins() {
        ImmutableList.Builder<Tuple<PluginInfo, Plugin>> sitePlugins = ImmutableList.builder();
        List<String> loadedJvmPlugins = new ArrayList<String>();

        // Already known jvm plugins are ignored
        for(Tuple<PluginInfo, Plugin> tuple : plugins) {
            if (tuple.v1().isSite()) {
                loadedJvmPlugins.add(tuple.v1().getName());
            }
        }

        // Let's try to find all _site plugins we did not already found
        File pluginsFile = environment.pluginsFile();

        if (!pluginsFile.exists() || !pluginsFile.isDirectory()) {
            return sitePlugins.build();
        }

        for (File pluginFile : pluginsFile.listFiles()) {
            if (!loadedJvmPlugins.contains(pluginFile.getName())) {
                File sitePluginDir = new File(pluginFile, "_site");
                if (isAccessibleDirectory(sitePluginDir, logger)) {
                    // We have a _site plugin. Let's try to get more information on it
                    String name = pluginFile.getName();
                    String version = PluginInfo.VERSION_NOT_AVAILABLE;
                    String description = PluginInfo.DESCRIPTION_NOT_AVAILABLE;

                    // We check if es-plugin.properties exists in plugin/_site dir
                    File pluginPropFile = new File(sitePluginDir, ES_PLUGIN_PROPERTIES);
                    if (pluginPropFile.exists()) {
                        Properties pluginProps = new Properties();
                        InputStream is = null;
                        try {
                            is = new FileInputStream(pluginPropFile.getAbsolutePath());
                            pluginProps.load(is);
                            description = pluginProps.getProperty("description", PluginInfo.DESCRIPTION_NOT_AVAILABLE);
                            version = pluginProps.getProperty("version", PluginInfo.VERSION_NOT_AVAILABLE);
                        } catch (Exception e) {
                            // Can not load properties for this site plugin. Ignoring.
                            logger.debug("can not load {} file.", e, ES_PLUGIN_PROPERTIES);
                        } finally {
                            IOUtils.closeWhileHandlingException(is);
                        }
                    }

                    if (logger.isTraceEnabled()) {
                        logger.trace("found a site plugin name [{}], version [{}], description [{}]", name, version, description);
                    }
                    sitePlugins.add(new Tuple<PluginInfo, Plugin>(new PluginInfo(name, description, true, false, version, false), null));
                }
            }
        }

        return sitePlugins.build();
    }

    /**
     * @param name plugin name
     * @return if this jvm plugin has also a _site structure
     */
    private boolean hasSite(String name) {
        // Let's try to find all _site plugins we did not already found
        File pluginsFile = environment.pluginsFile();

        if (!pluginsFile.exists() || !pluginsFile.isDirectory()) {
            return false;
        }

        File sitePluginDir = new File(pluginsFile, name + "/_site");
        return isAccessibleDirectory(sitePluginDir, logger);
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.admin.client.resource;

import org.keycloak.representations.idm.UserRepresentation;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;

/**
 * JAX-RS client interface for the Keycloak admin "users" endpoint: searching,
 * listing, counting, creating and addressing individual users.
 */
public interface UsersResource {

    /**
     * Search for users matching the given field filters.
     *
     * @param username a value contained in username
     * @param firstName a value contained in first name
     * @param lastName a value contained in last name
     * @param email a value contained in email
     * @param firstResult the position of the first result to retrieve
     * @param maxResults the maximum number of results to retrieve
     * @return a list of {@link UserRepresentation}
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("username") String username,
                                    @QueryParam("firstName") String firstName,
                                    @QueryParam("lastName") String lastName,
                                    @QueryParam("email") String email,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults);

    /**
     * Search for users matching the given field filters, optionally restricted
     * to enabled/disabled users and/or brief representations.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("username") String username,
                                    @QueryParam("firstName") String firstName,
                                    @QueryParam("lastName") String lastName,
                                    @QueryParam("email") String email,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults,
                                    @QueryParam("enabled") Boolean enabled,
                                    @QueryParam("briefRepresentation") Boolean briefRepresentation);

    /**
     * Search for users based on the given filters.
     *
     * @param username a value contained in username
     * @param firstName a value contained in first name
     * @param lastName a value contained in last name
     * @param email a value contained in email
     * @param emailVerified whether the email has been verified
     * @param idpAlias the alias of the Identity Provider
     * @param idpUserId the userId at the Identity Provider
     * @param firstResult the position of the first result to retrieve
     * @param maxResults the maximum number of results to retrieve
     * @param enabled only return enabled or disabled users
     * @param briefRepresentation Only return basic information (only guaranteed to return id, username, created, first
     *                            and last name, email, enabled state, email verification state, federation link, and access.
     *                            Note that it means that namely user attributes, required actions, and not before are not returned.)
     * @return a list of {@link UserRepresentation}
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("username") String username,
                                    @QueryParam("firstName") String firstName,
                                    @QueryParam("lastName") String lastName,
                                    @QueryParam("email") String email,
                                    @QueryParam("emailVerified") Boolean emailVerified,
                                    @QueryParam("idpAlias") String idpAlias,
                                    @QueryParam("idpUserId") String idpUserId,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults,
                                    @QueryParam("enabled") Boolean enabled,
                                    @QueryParam("briefRepresentation") Boolean briefRepresentation);

    /**
     * Same as the full search overload, without the Identity Provider filters.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("username") String username,
                                    @QueryParam("firstName") String firstName,
                                    @QueryParam("lastName") String lastName,
                                    @QueryParam("email") String email,
                                    @QueryParam("emailVerified") Boolean emailVerified,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults,
                                    @QueryParam("enabled") Boolean enabled,
                                    @QueryParam("briefRepresentation") Boolean briefRepresentation);

    /**
     * Search for users by email-verification state only.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("emailVerified") Boolean emailVerified,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults,
                                    @QueryParam("enabled") Boolean enabled,
                                    @QueryParam("briefRepresentation") Boolean briefRepresentation);

    /**
     * Search for users whose username contains the given value.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("username") String username);

    /**
     * Search for users by username, optionally requiring an exact match.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("username") String username,
                                    @QueryParam("exact") Boolean exact);

    /**
     * Search for users whose username or email matches the value provided by {@code search}. The {@code search}
     * argument also allows finding users by specific attributes as follows:
     *
     * <ul>
     *     <li><i>id:</i> - Find users by identifier. For instance, <i>id:aa497859-bbf5-44ac-bf1a-74dbffcaf197</i></li>
     * </ul>
     *
     * @param search the value to search. It can be the username, email or any of the supported options to query based on user attributes
     * @param firstResult the position of the first result to retrieve
     * @param maxResults the maximum number of results to retrieve
     * @return a list of {@link UserRepresentation}
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("search") String search,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults);

    /**
     * Search for users whose username or email matches the value provided by {@code search}. The {@code search}
     * argument also allows finding users by specific attributes as follows:
     *
     * <ul>
     *     <li><i>id:</i> - Find users by identifier. For instance, <i>id:aa497859-bbf5-44ac-bf1a-74dbffcaf197</i></li>
     * </ul>
     *
     * @param search the value to search. It can be the username, email or any of the supported options to query based on user attributes
     * @param firstResult the position of the first result to retrieve
     * @param maxResults the maximum number of results to retrieve
     * @param briefRepresentation Only return basic information (only guaranteed to return id, username, created, first and last name,
     *                            email, enabled state, email verification state, federation link, and access.
     *                            Note that it means that namely user attributes, required actions, and not before are not returned.)
     * @return a list of {@link UserRepresentation}
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> search(@QueryParam("search") String search,
                                    @QueryParam("first") Integer firstResult,
                                    @QueryParam("max") Integer maxResults,
                                    @QueryParam("briefRepresentation") Boolean briefRepresentation);

    /**
     * Returns a page of all viewable users.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> list(@QueryParam("first") Integer firstResult,
                                  @QueryParam("max") Integer maxResults);

    /**
     * Returns all viewable users.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    List<UserRepresentation> list();

    /**
     * Creates a new user from the given representation.
     */
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    Response create(UserRepresentation userRepresentation);

    /**
     * Returns the number of users that can be viewed.
     *
     * @return number of users
     */
    @Path("count")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    Integer count();

    /**
     * Returns the number of users that can be viewed and match the given search criteria.
     * If none is specified this is equivalent to {@link #count()}.
     *
     * @param search criteria to search for
     * @return number of users matching the search criteria
     */
    @Path("count")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    Integer count(@QueryParam("search") String search);

    /**
     * Returns the number of users that can be viewed and match the given filters.
     * If none of the filters is specified this is equivalent to {@link #count()}.
     *
     * @param last last name field of a user
     * @param first first name field of a user
     * @param email email field of a user
     * @param username username field of a user
     * @return number of users matching the given filters
     */
    @Path("count")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    Integer count(@QueryParam("lastName") String last,
                  @QueryParam("firstName") String first,
                  @QueryParam("email") String email,
                  @QueryParam("username") String username);

    /**
     * Returns the number of users that can be viewed and match the given filters.
     * If none of the filters is specified this is equivalent to {@link #count()}.
     *
     * @param last last name field of a user
     * @param first first name field of a user
     * @param email email field of a user
     * @param emailVerified emailVerified field of a user
     * @param username username field of a user
     * @return number of users matching the given filters
     */
    @Path("count")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    Integer count(@QueryParam("lastName") String last,
                  @QueryParam("firstName") String first,
                  @QueryParam("email") String email,
                  @QueryParam("emailVerified") Boolean emailVerified,
                  @QueryParam("username") String username);

    /**
     * Returns the number of users with the given status for emailVerified.
     * If none of the filters is specified this is equivalent to {@link #count()}.
     *
     * @param emailVerified emailVerified field of a user
     * @return number of users matching the given filters
     */
    @Path("count")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    Integer countEmailVerified(@QueryParam("emailVerified") Boolean emailVerified);

    /**
     * Returns the sub-resource for a single user.
     */
    @Path("{id}")
    UserResource get(@PathParam("id") String id);

    /**
     * Deletes the user with the given id.
     */
    @Path("{id}")
    @DELETE
    Response delete(@PathParam("id") String id);
}
package controlP5;

/**
 * controlP5 is a processing gui library.
 *
 * 2006-2012 by Andreas Schlegel
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General
 * Public License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
 * Boston, MA 02111-1307 USA
 *
 * @author Andreas Schlegel (http://www.sojamo.de)
 * @modified 12/23/2012
 * @version 2.0.4
 *
 */

import java.lang.reflect.Method;
import java.lang.reflect.Field;
import java.security.AccessControlException;

/**
 * The ControllerPlug is used to do all the reflection procedures to link a controller to a variable or function inside your main
 * application.
 *
 * @example use/ControlP5plugTo
 */
public class ControllerPlug {

    // Target object whose method or field the controller is plugged into.
    private Object _myObject;

    // Name of the target method or field on _myObject.
    private String _myName;

    // Resolved method handle when the plug targets a method or event callback.
    private Method _myMethod;

    // Resolved field handle when the plug targets a field.
    private Field _myField;

    // One of ControlP5Constants.METHOD / EVENT / FIELD, or INVALID until set.
    private int _myType = ControlP5Constants.INVALID;

    // Declared parameter type of the resolved method, or the field's type; null for no-arg methods.
    private Class<?> _myParameterClass;

    private int _myParameterType = -1;

    // Last value read from the target field (fields only).
    private Object _myValue = null;

    // Parameter types accepted when resolving a METHOD plug.
    private Class<?>[] _myAcceptClassList;

    // Parameter type used when resolving an EVENT plug; defaults to ControlEvent.
    private Class<?> _myEventMethodParameter = ControlEvent.class;

    /**
     * Creates a plug and immediately resolves the target member via reflection.
     *
     * @param theObject the object holding the target method or field
     * @param theName the name of the target method or field
     * @param theType ControlP5Constants.METHOD, EVENT or FIELD
     * @param theParameterType the controller's parameter type id
     * @param theAcceptClassList parameter types accepted for METHOD resolution
     */
    public ControllerPlug(final Object theObject, final String theName, final int theType, final int theParameterType, Class<?>[] theAcceptClassList) {
        set(theObject, theName, theType, theParameterType, theAcceptClassList);
    }

    /**
     * Creates a plug whose event-callback parameter type is {@code param} instead
     * of the default ControlEvent.
     */
    ControllerPlug(Class<?> param, final Object theObject, final String theName, final int theType, final int theParameterType) {
        setEventMethodParameter(param);
        set(theObject, theName, theType, theParameterType, null);
    }

    /** Overrides the parameter type used when resolving EVENT plugs. */
    void setEventMethodParameter(Class<?> theClass) {
        _myEventMethodParameter = theClass;
    }

    /** Re-resolves this plug against a new target object, keeping all other settings. */
    protected void set(Object theObject) {
        set(theObject, getName(), getType(), getParameterType(), getAcceptClassList());
    }

    /**
     * Resolves the plug target via reflection and caches the Method/Field handle.
     *
     * <p>For METHOD plugs, a declared method named {@code theName} is searched; a
     * one-argument overload is only accepted if its parameter type appears in
     * {@code theAcceptClassList}, otherwise a zero-argument overload is used.
     * For EVENT plugs, a public method taking the event parameter type is looked up.
     * For FIELD plugs, the field is located, made accessible where permitted, and
     * its current value is cached.
     *
     * @param theObject the object holding the target member
     * @param theName the member name
     * @param theType ControlP5Constants.METHOD, EVENT or FIELD
     * @param theParameterType the controller's parameter type id
     * @param theAcceptClassList accepted parameter types for METHOD resolution
     */
    public void set(final Object theObject, final String theName, final int theType, final int theParameterType, final Class<?>[] theAcceptClassList) {
        _myObject = theObject;
        _myName = theName;
        _myType = theType;
        _myParameterType = theParameterType;
        _myAcceptClassList = theAcceptClassList;
        Class<?> myClass = theObject.getClass();
        /* check for methods */
        if (_myType == ControlP5Constants.METHOD) {
            try {
                Method[] myMethods = myClass.getDeclaredMethods();
                for (int i = 0; i < myMethods.length; i++) {
                    if ((myMethods[i].getName()).equals(theName)) {
                        if (myMethods[i].getParameterTypes().length == 1) {
                            // accept the one-arg overload only if its parameter type is allowed
                            for (int j = 0; j < _myAcceptClassList.length; j++) {
                                if (myMethods[i].getParameterTypes()[0] == _myAcceptClassList[j]) {
                                    _myParameterClass = myMethods[i].getParameterTypes()[0];
                                    break;
                                }
                            }
                        } else if (myMethods[i].getParameterTypes().length == 0) {
                            _myParameterClass = null;
                            break;
                        }
                        break;
                    }
                }
                Class<?>[] myArgs = (_myParameterClass == null) ? new Class[] {} : new Class[] { _myParameterClass };
                _myMethod = myClass.getDeclaredMethod(_myName, myArgs);
                _myMethod.setAccessible(true);
            } catch (SecurityException e) {
                printSecurityWarning(e);
            } catch (NoSuchMethodException e) {
                // CallbackEvent plugs are attempted speculatively, so stay quiet for those
                if (_myParameterClass != CallbackEvent.class) {
                    ControlP5.logger().warning(" plug() failed. If function " + theName + " does exist, make it public. " + e);
                }
            }
            /* check for controlEvent */
        } else if (_myType == ControlP5Constants.EVENT) {
            try {
                _myMethod = _myObject.getClass().getMethod(_myName, new Class[] { _myEventMethodParameter });
                _myMethod.setAccessible(true);
                _myParameterClass = _myEventMethodParameter;
            } catch (SecurityException e) {
                printSecurityWarning(e);
            } catch (NoSuchMethodException e) {
                if (_myEventMethodParameter != CallbackEvent.class) {
                    ControlP5.logger().warning(" plug() failed " + _myParameterClass + ". If function " + theName + " does exist, make it public. " + e);
                }
            }
            /* check for fields */
        } else if (_myType == ControlP5Constants.FIELD) {
            Field[] myFields = ControlBroadcaster.getFieldsFor(myClass);
            for (int i = 0; i < myFields.length; i++) {
                if (myFields[i].getName().equals(_myName)) {
                    _myParameterClass = myFields[i].getType();
                }
            }
            if (_myParameterClass != null) {
                /**
                 * note. when running in applet mode. for some reason setAccessible(true) works for methods but not for fields.
                 * theAccessControlException is thrown. therefore, make fields in your code public.
                 */
                try {
                    _myField = myClass.getDeclaredField(_myName);
                    try {
                        _myField.setAccessible(true);
                    } catch (java.security.AccessControlException e) {
                        printSecurityWarning(e);
                    }
                    try {
                        _myValue = (_myField.get(theObject));
                    } catch (Exception ex) {
                        printSecurityWarning(ex);
                    }
                } catch (NoSuchFieldException e) {
                    ControlP5.logger().warning(e.toString());
                }
            }
        }
    }

    /**
     * Logs a warning when reflection access is denied; an AccessControlException
     * marks the sketch as running in applet mode.
     */
    private void printSecurityWarning(Exception e) {
        // AccessControlException required for applets.
        if (e.getClass().equals(AccessControlException.class)) {
            ControlP5.isApplet = true;
            ControlP5.logger().warning("You are probably running in applet mode.\n" + "make sure fields and methods in your code are public.\n" + e);
        }
    }

    /** @return the cached value read from the plugged field, or null. */
    protected Object getValue() {
        return _myValue;
    }

    /** @return the target object this plug reflects into. */
    protected Object getObject() {
        return _myObject;
    }

    /** @return the target member name. */
    protected String getName() {
        return _myName;
    }

    /** @return the plug type (METHOD, EVENT or FIELD). */
    protected int getType() {
        return _myType;
    }

    /** @return the controller's parameter type id. */
    protected int getParameterType() {
        return _myParameterType;
    }

    /** @return the parameter types accepted for METHOD resolution. */
    protected Class<?>[] getAcceptClassList() {
        return _myAcceptClassList;
    }

    /** @return the resolved parameter (or field) type; null for no-arg methods. */
    protected Class<?> getClassType() {
        return _myParameterClass;
    }

    /** @return true if this plug is of the given type. */
    protected boolean checkType(int theType) {
        return _myType == theType;
    }

    /** @return true if this plug targets the given member name. */
    protected boolean checkName(String theName) {
        return (_myName.equals(theName));
    }

    /**
     * Converts a controller's float value into the boxed type expected by the
     * plugged member: Float, Integer (truncated) or Boolean (true above 0.5).
     * Uses autoboxing instead of the deprecated boxing constructors
     * (new Float/Integer/Boolean), which produce identical values.
     *
     * @return the boxed value, or null if the parameter type is unsupported
     */
    private Object get(float theValue) {
        if (_myParameterClass == float.class) {
            return theValue;
        } else if (_myParameterClass == int.class) {
            return (int) theValue;
        } else if (_myParameterClass == boolean.class) {
            return theValue > 0.5;
        } else {
            return null;
        }
    }

    /** @return the value to assign to a plugged field for the given controller value. */
    protected Object getFieldParameter(float theValue) {
        return get(theValue);
    }

    /** @return a single-element argument array for invoking a plugged method. */
    protected Object[] getMethodParameter(float theValue) {
        return new Object[] { get(theValue) };
    }

    /** @return the resolved method handle, or null. */
    protected Method getMethod() {
        return _myMethod;
    }

    /** @return the resolved field handle, or null. */
    protected Field getField() {
        return _myField;
    }

    /**
     * Checks whether {@code theObject} declares a method with the given name and
     * argument types, without resolving or caching it.
     *
     * @return true if such a declared method exists
     */
    static public boolean checkPlug(Object theObject, String thePlugName, Class<?>[] theArgs) {
        try {
            theObject.getClass().getDeclaredMethod(thePlugName, theArgs);
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    /** @deprecated use {@link #getClassType()} instead. */
    @Deprecated protected Class<?> classType() {
        return _myParameterClass;
    }

    /** @deprecated use {@link #getValue()} instead. */
    @Deprecated protected Object value() {
        return _myValue;
    }

    /** @deprecated use {@link #getObject()} instead. */
    @Deprecated protected Object object() {
        return _myObject;
    }

    /** @deprecated use {@link #getName()} instead. */
    @Deprecated protected String name() {
        return _myName;
    }

    /** @deprecated use {@link #getType()} instead. */
    @Deprecated protected int type() {
        return _myType;
    }

    /** @deprecated use {@link #getParameterType()} instead. */
    @Deprecated protected int parameterType() {
        return _myParameterType;
    }

    /** @deprecated use {@link #getAcceptClassList()} instead. */
    @Deprecated protected Class<?>[] acceptClassList() {
        return _myAcceptClassList;
    }
}
package ch.spacebase.openclassic.server.player;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.net.SocketAddress;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.zip.GZIPOutputStream;

import ch.spacebase.openclassic.api.OpenClassic;
import ch.spacebase.openclassic.api.Position;
import ch.spacebase.openclassic.api.block.Blocks;
import ch.spacebase.openclassic.api.block.custom.CustomBlock;
import ch.spacebase.openclassic.api.data.NBTData;
import ch.spacebase.openclassic.api.event.EventFactory;
import ch.spacebase.openclassic.api.event.player.PlayerTeleportEvent;
import ch.spacebase.openclassic.api.level.Level;
import ch.spacebase.openclassic.api.network.msg.IdentificationMessage;
import ch.spacebase.openclassic.api.network.msg.LevelDataMessage;
import ch.spacebase.openclassic.api.network.msg.LevelFinalizeMessage;
import ch.spacebase.openclassic.api.network.msg.LevelInitializeMessage;
import ch.spacebase.openclassic.api.network.msg.PlayerChatMessage;
import ch.spacebase.openclassic.api.network.msg.PlayerDespawnMessage;
import ch.spacebase.openclassic.api.network.msg.PlayerSpawnMessage;
import ch.spacebase.openclassic.api.network.msg.PlayerTeleportMessage;
import ch.spacebase.openclassic.api.network.msg.custom.LevelColorMessage;
import ch.spacebase.openclassic.api.permissions.Group;
import ch.spacebase.openclassic.api.player.Player;
import ch.spacebase.openclassic.api.player.Session;
import ch.spacebase.openclassic.api.plugin.RemotePluginInfo;
import ch.spacebase.openclassic.api.util.Constants;

import ch.spacebase.openclassic.server.ClassicServer;

/**
 * Server-side implementation of a connected {@link Player}: tracks position,
 * identity, permissions and per-player NBT data, and handles movement,
 * level transfers and chunked level streaming to the client.
 */
public class ServerPlayer implements Player {

    // Protocol-level entity id; assigned from the session registry size on join.
    private byte playerId;
    private Position pos;
    private String name;
    private String displayName;
    private ServerSession session;
    // Block id used for right-click placement; 0 means "place what is held".
    private byte placeMode = 0;
    //private int airTicks = 0;
    private ClientInfo client = new ClientInfo(this);
    // Persistent per-player data, loaded from players/<name>.nbt.
    private NBTData data;
    // Names of players this player cannot see; copy-on-write since it is read
    // from the network thread while mutated from hide/show calls.
    private List<String> hidden = new CopyOnWriteArrayList<String>();
    // Set when a teleport was issued so the movement handler can skip stale input.
    public boolean teleported = false;
    // Guards against two concurrent level transfers for the same session.
    private boolean sendingLevel = false;

    /**
     * Creates a player bound to a network session and loads its persisted data.
     *
     * @param name the login name
     * @param pos the initial position
     * @param session the owning network session
     */
    public ServerPlayer(String name, Position pos, ServerSession session) {
        this.name = name;
        this.displayName = name;
        this.pos = pos;
        this.session = session;
        this.data = new NBTData(this.name);
        this.data.load(OpenClassic.getServer().getDirectory().getPath() + "/players/" + this.name + ".nbt");
        session.setPlayer(this);
        this.playerId = (byte) (((ClassicServer) OpenClassic.getGame()).getSessionRegistry().size());
    }

    public Session getSession() {
        return this.session;
    }

    public byte getPlayerId() {
        return this.playerId;
    }

    public Position getPosition() {
        return this.pos;
    }

    public void setPosition(Position pos) {
        this.pos = pos;
    }

    public String getName() {
        return this.name;
    }

    public String getDisplayName() {
        return this.displayName;
    }

    public void setDisplayName(String name) {
        this.displayName = name;
    }

    public byte getPlaceMode() {
        return this.placeMode;
    }

    public void setPlaceMode(int type) {
        this.placeMode = (byte) type;
    }

    /** Teleports to the given position, including its level, yaw and pitch. */
    public void moveTo(Position pos) {
        this.moveTo(pos.getLevel(), pos.getX(), pos.getY(), pos.getZ(), pos.getYaw(), pos.getPitch());
    }

    /** Teleports within the current level, keeping the current yaw and pitch. */
    public void moveTo(float x, float y, float z) {
        this.moveTo(this.pos.getLevel(), x, y, z);
    }

    /** Teleports within the current level with an explicit yaw and pitch. */
    public void moveTo(float x, float y, float z, float yaw, float pitch) {
        this.moveTo(this.pos.getLevel(), x, y, z, yaw, pitch);
    }

    /** Teleports to the given level, keeping the current yaw and pitch. */
    public void moveTo(Level level, float x, float y, float z) {
        // FIX: previously delegated with this.pos.getLevel(), silently ignoring
        // the level argument and making cross-level use of this overload a no-op.
        this.moveTo(level, x, y, z, this.pos.getYaw(), this.pos.getPitch());
    }

    /**
     * Core teleport: fires a cancellable PlayerTeleportEvent, then either
     * performs a cross-level transfer (despawn + resend level) or sends
     * in-level teleport packets to this player and everyone else.
     */
    public void moveTo(Level level, float x, float y, float z, float yaw, float pitch) {
        Position to = new Position(level, x, y, z, yaw, pitch);
        Level old = this.pos.getLevel();
        PlayerTeleportEvent event = EventFactory.callEvent(new PlayerTeleportEvent(this, this.getPosition(), to));
        if(event.isCancelled()) return;
        this.pos = event.getTo();
        this.teleported = true;
        if(!old.getName().equals(this.pos.getLevel().getName())) {
            // Cross-level move: swap level membership and stream the new level.
            this.pos.getLevel().addPlayer(this);
            old.removePlayer(this.getName());
            old.sendToAllExcept(this, new PlayerDespawnMessage(this.getPlayerId()));
            this.session.send(new IdentificationMessage(Constants.PROTOCOL_VERSION, "Sending to " + this.pos.getLevel().getName() + "...", "", this.getGroup().hasPermission("openclassic.commands.solid") ? Constants.OP : Constants.NOT_OP));
            this.sendLevel(this.pos.getLevel());
        } else {
            // In-level move: id -1 addresses the receiving client's own player.
            this.getSession().send(new PlayerTeleportMessage((byte) -1, this.getPosition().getX(), this.getPosition().getY(), this.getPosition().getZ(), (byte) this.getPosition().getYaw(), (byte) this.getPosition().getPitch()));
            // 0.59375 is the classic-protocol eye-height offset applied for remote viewers.
            this.getPosition().getLevel().sendToAllExcept(this, new PlayerTeleportMessage(this.getPlayerId(), this.getPosition().getX(), this.getPosition().getY() + 0.59375, this.getPosition().getZ(), (byte) this.getPosition().getYaw(), (byte) this.getPosition().getPitch()));
        }
    }

    public Group getGroup() {
        return OpenClassic.getServer().getPermissionManager().getPlayerGroup(this.getName());
    }

    public void setGroup(Group group) {
        OpenClassic.getServer().getPermissionManager().setPlayerGroup(this.getName(), group);
    }

    public SocketAddress getAddress() {
        return this.session.getAddress();
    }

    /** @return the remote IP without the leading slash or port. */
    public String getIp() {
        return this.session.getAddress().toString().replace("/", "").split(":")[0];
    }

    @Override
    public boolean hasPermission(String permission) {
        return this.getGroup() != null && this.getGroup().hasPermission(permission);
    }

    @Override
    public String getCommandPrefix() {
        return "/";
    }

    public void disconnect(String reason) {
        this.session.disconnect(reason);
    }

    public void tick() {
        // Experimental
        /*
        if(!OpenClassic.getServer().getConfig().getBoolean("options.allow-flight", false)) {
            if(this.pos.getLevel().getBlockTypeAt(this.pos.getBlockX(), this.pos.getBlockY() - 2, this.pos.getBlockZ()) == BlockType.AIR) {
                this.airTicks++;
            } else if(this.airTicks != 0) {
                this.airTicks = 0;
            }
            if(this.airTicks > 300) {
                this.session.disconnect("Flying is not allowed on this server.");
            }
        }
        */
    }

    /** Detaches this player from its level and releases session references. */
    public void destroy() {
        this.getPosition().getLevel().removePlayer(this.getName());
        this.playerId = 0;
        this.pos = null;
        this.session = null;
    }

    @Override
    public void sendMessage(String message) {
        this.getSession().send(new PlayerChatMessage(this.getPlayerId(), message));
    }

    /**
     * Streams the given level to the client asynchronously: gzips the block
     * array (substituting fallbacks for custom blocks on vanilla clients),
     * sends it in 1024-byte chunks with a progress byte, finalizes the level,
     * moves the player to spawn and exchanges spawn packets with other players.
     */
    public void sendLevel(final Level level) {
        final Player player = this;
        OpenClassic.getGame().getScheduler().scheduleAsyncTask(OpenClassic.getGame(), new Runnable() {
            @Override
            public void run() {
                // Wait for any in-flight transfer for this session to finish.
                while(sendingLevel) {
                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                sendingLevel = true;
                try {
                    session.send(new LevelInitializeMessage());
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    GZIPOutputStream gzip = new GZIPOutputStream(out);
                    DataOutputStream dataOut = new DataOutputStream(gzip);
                    byte[] b = level.getBlocks();
                    if(!hasCustomClient()) {
                        // Vanilla clients cannot render custom blocks; substitute fallbacks.
                        for(int index = 0; index < b.length; index++) {
                            if(Blocks.fromId(b[index]) instanceof CustomBlock) {
                                b[index] = ((CustomBlock) Blocks.fromId(b[index])).getFallback().getId();
                            }
                        }
                    }
                    dataOut.writeInt(b.length);
                    dataOut.write(b);
                    dataOut.close();
                    gzip.close();
                    byte[] data = out.toByteArray();
                    out.close();
                    // FIX: was integer division (data.length / 1024), which yields 0
                    // for payloads under 1024 bytes and produced a division-by-zero
                    // progress value; ceil also makes progress reach 255 correctly.
                    double numChunks = Math.ceil(data.length / 1024.0);
                    double sent = 0;
                    for (int chunkStart = 0; chunkStart < data.length; chunkStart += 1024) {
                        byte[] chunkData = new byte[1024];
                        short length = 1024;
                        if (data.length - chunkStart < length) length = (short) (data.length - chunkStart);
                        System.arraycopy(data, chunkStart, chunkData, 0, length);
                        session.send(new LevelDataMessage(length, chunkData, (byte) ((sent / numChunks) * 255)));
                        sent++;
                    }
                    session.send(new LevelFinalizeMessage(level.getWidth(), level.getHeight(), level.getDepth()));
                    moveTo(level.getSpawn());
                    level.sendToAllExcept(player, new PlayerSpawnMessage(player.getPlayerId(), player.getName(), player.getPosition().getX(), player.getPosition().getY(), player.getPosition().getZ(), (byte) player.getPosition().getYaw(), (byte) player.getPosition().getPitch()));
                    for (Player p : level.getPlayers()) {
                        if(p.getPlayerId() == getPlayerId()) continue;
                        session.send(new PlayerSpawnMessage(p.getPlayerId(), p.getName(), p.getPosition().getX(), p.getPosition().getY(), p.getPosition().getZ(), (byte) p.getPosition().getYaw(), (byte) p.getPosition().getPitch()));
                    }
                    if(hasCustomClient()) {
                        session.send(new LevelColorMessage("sky", level.getSkyColor()));
                        session.send(new LevelColorMessage("fog", level.getFogColor()));
                        session.send(new LevelColorMessage("cloud", level.getCloudColor()));
                    }
                } catch (Exception e) {
                    session.disconnect("Failed to send level!");
                    OpenClassic.getLogger().severe("Failed to send level " + level.getName() + " to player " + getName() + "!");
                    e.printStackTrace();
                }
                sendingLevel = false;
            }
        });
    }

    public ClientInfo getClientInfo() {
        return this.client;
    }

    @Override
    public boolean hasCustomClient() {
        return this.client.isCustom();
    }

    @Override
    public String getClientVersion() {
        return this.client.getVersion();
    }

    @Override
    public NBTData getData() {
        return this.data;
    }

    @Override
    public List<RemotePluginInfo> getPlugins() {
        return this.client.getPlugins();
    }

    @Override
    public void chat(String message) {
        this.session.messageReceived(new PlayerChatMessage((byte) -1, message));
    }

    /** Despawns the given player for this client and records it as hidden. */
    @Override
    public void hidePlayer(Player player) {
        this.getSession().send(new PlayerDespawnMessage(player.getPlayerId()));
        this.hidden.add(player.getName());
    }

    /** Removes the given player from the hidden set and respawns it for this client. */
    @Override
    public void showPlayer(Player player) {
        this.hidden.remove(player.getName());
        this.getSession().send(new PlayerSpawnMessage(player.getPlayerId(), player.getName(), player.getPosition().getX(), player.getPosition().getY(), player.getPosition().getZ(), (byte) player.getPosition().getYaw(), (byte) player.getPosition().getPitch()));
    }

    /**
     * @return true if this player can see the given player, i.e. the player is
     *         NOT in the hidden set.
     */
    @Override
    public boolean canSee(Player player) {
        // FIX: previously returned hidden.contains(...), which is the inverse of
        // "can see" — hidePlayer() adds to the set, showPlayer() removes from it.
        return !this.hidden.contains(player.getName());
    }

    @Override
    public String getLanguage() {
        return this.client.getLanguage().equals("") ? OpenClassic.getGame().getLanguage() : this.client.getLanguage();
    }
}
/*
 * Copyright (c) 2009-2014, Peter Abeles. All Rights Reserved.
 *
 * This file is part of Efficient Java Matrix Library (EJML).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ejml.alg.dense.decomposition.bidiagonal;

import org.ejml.data.DenseMatrix64F;
import org.ejml.data.RowD1Matrix64F;
import org.ejml.interfaces.decomposition.BidiagonalDecomposition;
import org.ejml.ops.CommonOps;
import org.ejml.ops.MatrixFeatures;
import org.ejml.ops.RandomMatrices;
import org.ejml.ops.SpecializedOps;
import org.ejml.simple.SimpleMatrix;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@code BidiagonalDecompositionRow_D64}, checked against the
 * naive reference implementation and against the individual Householder
 * update steps for U and V.
 *
 * @author Peter Abeles
 */
public class TestBidiagonalDecompositionRow_D64 extends GenericBidiagonalCheck {

    /**
     * See if the naive implementation and this version produce the same results.
     */
    @Test
    public void testAgainstNaive() {
        // sweep a range of small rectangular shapes, both tall and wide
        for( int i = 1; i <= 5; i++ ) {
            for( int j = 1; j <= 5; j++ ) {
                checkNaive(i,j);
            }
        }
    }

    /**
     * Decomposes a random m-by-n matrix with both the row implementation and
     * the naive reference, then checks that B, U and V agree and that
     * U*B*V^T reconstructs the original matrix.
     */
    private void checkNaive(int m, int n) {
        SimpleMatrix A = SimpleMatrix.wrap(RandomMatrices.createRandom(m,n,rand));

        BidiagonalDecompositionRow_D64 decomp = new BidiagonalDecompositionRow_D64();
        BidiagonalDecompositionNaive_D64 naive = new BidiagonalDecompositionNaive_D64();

        // copy() because the row decomposition modifies its input in place
        assertTrue(decomp.decompose(A.getMatrix().copy()));
        assertTrue(naive.decompose(A.getMatrix()));

        SimpleMatrix U = SimpleMatrix.wrap(decomp.getU(null,false,false));
        SimpleMatrix B = SimpleMatrix.wrap(decomp.getB(null,false));
        SimpleMatrix V = SimpleMatrix.wrap(decomp.getV(null,false,false));

//        U.print();
//        B.print();
//        naive.getB().print();
//        V.print();
//        naive.getV().print();

//        naive.getVTran().print();

        assertTrue(naive.getB().isIdentical(B,1e-8));
        assertTrue(naive.getU().isIdentical(U,1e-8));
        assertTrue(naive.getV().isIdentical(V,1e-8));

        // check the decomposition
        DenseMatrix64F foundA = U.mult(B).mult(V.transpose()).getMatrix();

//        A.print();
//        foundA.print();

        assertTrue(MatrixFeatures.isIdentical(A.getMatrix(),foundA,1e-8));
    }

    /**
     * Verifies each column step of computeU(): applying the Householder
     * reflector built from the stored vector and gamma must zero the column
     * below the diagonal while leaving the remaining row entries equal to the
     * packed UBV storage.
     */
    @Test
    public void testComputeU() {
        int m = 7;
        int n = 5;

        DenseMatrix64F A = RandomMatrices.createRandom(m,n,rand);

        DebugBidiagonal alg = new DebugBidiagonal(A);

        DenseMatrix64F B = new DenseMatrix64F(A);

        DenseMatrix64F C = new DenseMatrix64F(m,n);
        DenseMatrix64F u = new DenseMatrix64F(m,1);

        RowD1Matrix64F UBV = alg.getUBV();

        for( int i = 0; i < n; i++ ) {
            alg.computeU(i);

            // extract the reflector vector stored below the diagonal; the pivot
            // element is implicitly 1
            SpecializedOps.subvector(UBV,i+1,i,m-i-1,false,i+1,u);
            u.data[i] = 1;
            DenseMatrix64F Q = SpecializedOps.createReflector(u,alg.getGammasU()[i]);

            CommonOps.mult(Q,B,C);

//            u.print();
//            B.print();
//            UBV.print();
//            C.print();

            B.set(C);

            // make sure everything is as expected
            for( int j = i+1; j < m; j++ ) {
                assertEquals(0,C.get(j,i),1e-8);
            }

            for( int j = i+1; j < n; j++ ) {
                assertEquals(UBV.get(i,j),C.get(i,j),1e-8);
            }
            // reset the pivot so the next extraction starts clean
            u.data[i] = 0;
        }
    }

    /**
     * Verifies each row step of computeV(): applying the reflector from the
     * right must zero the row entries past the superdiagonal while leaving the
     * remaining column entries equal to the packed UBV storage.
     */
    @Test
    public void testComputeV() {
        int m = 7;
        int n = 5;

        DenseMatrix64F A = RandomMatrices.createRandom(m,n,rand);

        DebugBidiagonal alg = new DebugBidiagonal(A);

        DenseMatrix64F B = new DenseMatrix64F(A);

        DenseMatrix64F C = new DenseMatrix64F(m,n);
        DenseMatrix64F u = new DenseMatrix64F(n,1);

        RowD1Matrix64F UBV = alg.getUBV();

//        A.print();

        for( int i = 0; i < n-2; i++ ) {
            alg.computeV(i);

            // extract the reflector vector stored in the row past the
            // superdiagonal; the pivot at i+1 is implicitly 1
            u.zero();
            SpecializedOps.subvector(UBV,i,i+2,n-i-2,true,i+2,u);
            u.data[i+1] = 1;
            DenseMatrix64F Q = SpecializedOps.createReflector(u,alg.getGammasV()[i]);

//            Q.print();

            CommonOps.mult(B,Q,C);

//            u.print();
//            B.print();
//            UBV.print();
//            C.print();

            B.set(C);

            // make sure everything is as expected
            for( int j = i+2; j < n; j++ ) {
                assertEquals(0,C.get(i,j),1e-8);
            }

            for( int j = i+2; j < m; j++ ) {
                assertEquals(UBV.get(j,i),C.get(j,i),1e-8);
            }
            // NOTE(review): this clears index i, but the seeded pivot was at
            // i+1; harmless because u.zero() runs at the top of each
            // iteration — confirm whether u.data[i+1] = 0 was intended.
            u.data[i] = 0;
        }
    }

    /** Factory hook used by the shared GenericBidiagonalCheck tests. */
    @Override
    protected BidiagonalDecomposition<DenseMatrix64F> createQRDecomposition() {
        return new BidiagonalDecompositionRow_D64();
    }

    /**
     * Test-only subclass that widens visibility of the protected per-step
     * computeU/computeV hooks and allows construction from a fixed matrix.
     */
    private static class DebugBidiagonal extends BidiagonalDecompositionRow_D64 {

        public DebugBidiagonal( DenseMatrix64F A ) {
            init(A.copy());
        }

        @Override
        protected void computeU(int k) {
            super.computeU(k);
        }

        @Override
        protected void computeV(int k) {
            super.computeV(k);
        }
    }
}
package com.taxonic.carml.util; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import com.google.common.collect.Sets; import java.io.StringWriter; import java.util.Objects; import java.util.Set; import java.util.function.Consumer; import java.util.function.UnaryOperator; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Model; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.Statement; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.impl.LinkedHashModel; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.model.util.ModelBuilder; import org.eclipse.rdf4j.model.util.ModelCollector; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.Rio; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) class ModelsTest { private static final ValueFactory VALUE_FACTORY = SimpleValueFactory.getInstance(); private static final IRI DEFAULT_IRI = VALUE_FACTORY.createIRI("http://example.com/default"); private static final UnaryOperator<Resource> DEFAULT_GRAPH_MODIFIER = graph -> graph.equals(DEFAULT_IRI) ? 
null : graph; @Mock private Consumer<Statement> statementConsumer1; @Mock private Consumer<Statement> statementConsumer2; @Test void givenDeepModel_whenDescribeResource_thenReturnDeepDescription() { // Given Model model = Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("toDescribeDeep.ttl")), RDFFormat.TURTLE); IRI resource = VALUE_FACTORY.createIRI("http://example.com/Fourth"); // When Model described = Models.describeResource(model, resource); // Then assertThat(described, isIsomorphicWith(Models .parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("describedDeep.ttl")), RDFFormat.TURTLE))); } @Test void givenShallowModel_whenDescribeResource_thenReturnShallowDescription() { // Given Model model = Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("toDescribeShallow.ttl")), RDFFormat.TURTLE); IRI resource = VALUE_FACTORY.createIRI("http://example.com/Fourth"); // When Model described = Models.describeResource(model, resource); // Then assertThat(described, isIsomorphicWith(Models.parse( Objects.requireNonNull(ModelsTest.class.getResourceAsStream("describedShallow.ttl")), RDFFormat.TURTLE))); } @Test void givenDeepModel_whenReverseDescribeResource_thenReturnDeepReverseDescription() { // Given Model model = Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("toDescribeDeep.ttl")), RDFFormat.TURTLE); IRI resource = VALUE_FACTORY.createIRI("http://example.com/Fourth"); // When Model described = Models.reverseDescribeResource(model, resource); // Then assertThat(described, isIsomorphicWith(Models.parse( Objects.requireNonNull(ModelsTest.class.getResourceAsStream("reverseDescribedDeep.ttl")), RDFFormat.TURTLE))); } @Test void givenShallowModel_whenReverseDescribeResource_thenReturnShallowReverseDescription() { // Given Model model = Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("toDescribeShallow.ttl")), RDFFormat.TURTLE); IRI resource = 
VALUE_FACTORY.createIRI("http://example.com/Fourth"); // When Model described = Models.reverseDescribeResource(model, resource); // Then assertThat(described, isIsomorphicWith( Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("reverseDescribedShallow.ttl")), RDFFormat.TURTLE))); } @Test void givenDeepModel_whenSymmetricDescribeResource_thenReturnDeepSymmetricDescription() { // Given Model model = Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("toDescribeDeep.ttl")), RDFFormat.TURTLE); IRI resource = VALUE_FACTORY.createIRI("http://example.com/Fourth"); // When Model described = Models.symmetricDescribeResource(model, resource); // Then assertThat(described, isIsomorphicWith(Models.parse( Objects.requireNonNull(ModelsTest.class.getResourceAsStream("symmetricDescribedDeep.ttl")), RDFFormat.TURTLE))); } @Test void givenShallowModel_whenSymmetricDescribeResource_thenReturnShallowSymmetricDescription() { // Given Model model = Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("toDescribeShallow.ttl")), RDFFormat.TURTLE); IRI resource = VALUE_FACTORY.createIRI("http://example.com/Fourth"); // When Model described = Models.symmetricDescribeResource(model, resource); // Then assertThat(described, isIsomorphicWith( Models.parse(Objects.requireNonNull(ModelsTest.class.getResourceAsStream("symmetricDescribedShallow.ttl")), RDFFormat.TURTLE))); } @Test void givenValues_whenAllArgsCreateStatement_thenReturnStatement() { // Given Value subjectValue = VALUE_FACTORY.createIRI("http://example.com/subject"); Value predicateValue = VALUE_FACTORY.createIRI("http://example.com/predicate"); Value objectValue = VALUE_FACTORY.createLiteral("object"); Value graphValue = VALUE_FACTORY.createIRI("http://example.com/graph"); // When Statement statement = Models.createStatement(subjectValue, predicateValue, objectValue, graphValue, DEFAULT_GRAPH_MODIFIER, VALUE_FACTORY, statementConsumer1, statementConsumer2); // Then 
Statement expected = VALUE_FACTORY.createStatement(VALUE_FACTORY.createIRI("http://example.com/subject"), VALUE_FACTORY.createIRI("http://example.com/predicate"), VALUE_FACTORY.createLiteral("object"), VALUE_FACTORY.createIRI("http://example.com/graph")); assertThat(statement, is(expected)); verify(statementConsumer1).accept(any()); verify(statementConsumer2).accept(any()); } @Test void givenValuesWithGraphMatchingModifier_whenAllArgsCreateStatement_thenReturnStatement() { // Given Value subjectValue = VALUE_FACTORY.createIRI("http://example.com/subject"); Value predicateValue = VALUE_FACTORY.createIRI("http://example.com/predicate"); Value objectValue = VALUE_FACTORY.createLiteral("object"); Value graphValue = VALUE_FACTORY.createIRI("http://example.com/default"); // When Statement statement = Models.createStatement(subjectValue, predicateValue, objectValue, graphValue, DEFAULT_GRAPH_MODIFIER, VALUE_FACTORY, statementConsumer1, statementConsumer2); // Then Statement expected = VALUE_FACTORY.createStatement(VALUE_FACTORY.createIRI("http://example.com/subject"), VALUE_FACTORY.createIRI("http://example.com/predicate"), VALUE_FACTORY.createLiteral("object")); assertThat(statement, is(expected)); verify(statementConsumer1).accept(any()); verify(statementConsumer2).accept(any()); } @Test void givenValues_whenCreateStatement_thenReturnStatement() { // Given Value subjectValue = VALUE_FACTORY.createIRI("http://example.com/subject"); Value predicateValue = VALUE_FACTORY.createIRI("http://example.com/predicate"); Value objectValue = VALUE_FACTORY.createLiteral("object"); Value graphValue = VALUE_FACTORY.createIRI("http://example.com/graph"); // When Statement statement = Models.createStatement(subjectValue, predicateValue, objectValue, graphValue); // Then Statement expected = VALUE_FACTORY.createStatement(VALUE_FACTORY.createIRI("http://example.com/subject"), VALUE_FACTORY.createIRI("http://example.com/predicate"), VALUE_FACTORY.createLiteral("object"), 
VALUE_FACTORY.createIRI("http://example.com/graph")); assertThat(statement, is(expected)); }

    // ------------------------------------------------------------------
    // Fixture helpers — shared by the tests below to replace the long,
    // thrice-duplicated ModelBuilder chains that spelled out the same
    // 2 x 2 x 3 cartesian-product expectation by hand.
    // ------------------------------------------------------------------

    /** Shorthand for {@code VALUE_FACTORY.createIRI(uri)}. */
    private static IRI iri(String uri) {
        return VALUE_FACTORY.createIRI(uri);
    }

    /**
     * Builds the expected cartesian-product model for the standard fixture of
     * subjects {subject1, subject2}, predicates {predicate1, predicate2} and
     * objects {object1, object2, object3} — once per supplied graph, or in the
     * default graph when no graphs are given.
     */
    private static Model expectedCartesianModel(Resource... graphs) {
        ModelBuilder builder = new ModelBuilder();
        if (graphs.length == 0) {
            addCartesianTriples(builder);
        } else {
            for (Resource graph : graphs) {
                builder.namedGraph(graph);
                addCartesianTriples(builder);
            }
        }
        return builder.build();
    }

    /** Adds all 2 x 2 x 3 subject/predicate/object combinations to {@code builder}. */
    private static void addCartesianTriples(ModelBuilder builder) {
        for (int s = 1; s <= 2; s++) {
            builder.subject(iri("http://example.com/subject" + s));
            for (int p = 1; p <= 2; p++) {
                for (int o = 1; o <= 3; o++) {
                    builder.add(iri("http://example.com/predicate" + p),
                            VALUE_FACTORY.createLiteral("object" + o));
                }
            }
        }
    }

    /** A {@code null} graph must yield a plain (default-graph) statement. */
    @Test
    void givenValuesWithoutGraph_whenCreateStatement_thenReturnStatement() {
        // Given
        Value subjectValue = iri("http://example.com/subject");
        Value predicateValue = iri("http://example.com/predicate");
        Value objectValue = VALUE_FACTORY.createLiteral("object");

        // When
        Statement statement = Models.createStatement(subjectValue, predicateValue, objectValue, null);

        // Then
        Statement expected = VALUE_FACTORY.createStatement(iri("http://example.com/subject"),
                iri("http://example.com/predicate"), VALUE_FACTORY.createLiteral("object"));
        assertThat(statement, is(expected));
    }

    /** A literal subject is rejected — subjects must be {@code Resource}s. */
    @Test
    void givenIncorrectSubject_whenCreateStatement_thenThrowException() {
        // Given
        Value subjectValue = VALUE_FACTORY.createLiteral("subject");
        Value predicateValue = iri("http://example.com/predicate");
        Value objectValue = VALUE_FACTORY.createLiteral("object");
        Value graphValue = iri("http://example.com/graph");

        // When
        Throwable modelsException = assertThrows(ModelsException.class,
                () -> Models.createStatement(subjectValue, predicateValue, objectValue, graphValue));

        // Then
        assertThat(modelsException.getMessage(),
                is("Expected subjectValue `\"subject\"` to be instance of Resource, "
                        + "but was org.eclipse.rdf4j.model.impl.SimpleLiteral"));
    }

    /** A literal predicate is rejected — predicates must be {@code IRI}s. */
    @Test
    void givenIncorrectPredicate_whenCreateStatement_thenThrowException() {
        // Given
        Value subjectValue = iri("http://example.com/subject");
        Value predicateValue = VALUE_FACTORY.createLiteral("predicate");
        Value objectValue = VALUE_FACTORY.createLiteral("object");
        Value graphValue = iri("http://example.com/graph");

        // When
        Throwable modelsException = assertThrows(ModelsException.class,
                () -> Models.createStatement(subjectValue, predicateValue, objectValue, graphValue));

        // Then
        assertThat(modelsException.getMessage(),
                is("Expected predicateValue `\"predicate\"` to be instance of IRI, "
                        + "but was org.eclipse.rdf4j.model.impl.SimpleLiteral"));
    }

    /** A literal graph is rejected — graphs must be {@code Resource}s. */
    @Test
    void givenIncorrectGraph_whenCreateStatement_thenThrowException() {
        // Given
        Value subjectValue = iri("http://example.com/subject");
        Value predicateValue = iri("http://example.com/predicate");
        Value objectValue = VALUE_FACTORY.createLiteral("object");
        Value graphValue = VALUE_FACTORY.createLiteral("graph");

        // When
        Throwable modelsException = assertThrows(ModelsException.class,
                () -> Models.createStatement(subjectValue, predicateValue, objectValue, graphValue));

        // Then
        assertThat(modelsException.getMessage(),
                is("Expected graphValue `\"graph\"` to be instance of Resource, but "
                        + "was org.eclipse.rdf4j.model.impl.SimpleLiteral"));
    }

    /** Full-argument overload: every graph gets the complete cartesian product. */
    @Test
    void givenValueSets_whenAllArgsStreamCartesianProductStatements_thenReturnStatementStream() {
        // Given
        Set<Resource> subjects = Set.of(iri("http://example.com/subject1"),
                iri("http://example.com/subject2"));
        Set<IRI> predicates = Set.of(iri("http://example.com/predicate1"),
                iri("http://example.com/predicate2"));
        Set<? extends Value> objects = Set.of(VALUE_FACTORY.createLiteral("object1"),
                VALUE_FACTORY.createLiteral("object2"), VALUE_FACTORY.createLiteral("object3"));
        Set<Resource> graphs = Set.of(iri("http://example.com/graph1"),
                iri("http://example.com/graph2"));

        // When
        Stream<Statement> statementStream = Models.streamCartesianProductStatements(subjects, predicates,
                objects, graphs, DEFAULT_GRAPH_MODIFIER, VALUE_FACTORY, statementConsumer1, statementConsumer2);

        // Then
        Model expected = expectedCartesianModel(iri("http://example.com/graph1"),
                iri("http://example.com/graph2"));
        assertThat(statementStream.collect(ModelCollector.toModel()), is(expected));
        // 2 subjects * 2 predicates * 3 objects * 2 graphs = 24 statements per consumer.
        verify(statementConsumer1, times(24)).accept(any());
        verify(statementConsumer2, times(24)).accept(any());
    }

    /** With no graphs, the product lands in the default graph exactly once. */
    @Test
    void givenValueSetsWithoutGraphs_whenAllArgsStreamCartesianProductStatements_thenReturnStatementStream() {
        // Given
        Set<Resource> subjects = Set.of(iri("http://example.com/subject1"),
                iri("http://example.com/subject2"));
        Set<IRI> predicates = Set.of(iri("http://example.com/predicate1"),
                iri("http://example.com/predicate2"));
        Set<? extends Value> objects = Set.of(VALUE_FACTORY.createLiteral("object1"),
                VALUE_FACTORY.createLiteral("object2"), VALUE_FACTORY.createLiteral("object3"));
        Set<Resource> graphs = Set.of();

        // When
        Stream<Statement> statementStream = Models.streamCartesianProductStatements(subjects, predicates,
                objects, graphs, DEFAULT_GRAPH_MODIFIER, VALUE_FACTORY, statementConsumer1, statementConsumer2);

        // Then
        Model expected = expectedCartesianModel();
        assertThat(statementStream.collect(ModelCollector.toModel()), is(expected));
        // 2 subjects * 2 predicates * 3 objects (no graph dimension) = 12 per consumer.
        verify(statementConsumer1, times(12)).accept(any());
        verify(statementConsumer2, times(12)).accept(any());
    }

    /** Convenience overload (no modifier/factory/consumers) yields the same product. */
    @Test
    void givenValueSets_whenStreamCartesianProductStatements_thenReturnStatementStream() {
        // Given
        Set<Resource> subjects = Set.of(iri("http://example.com/subject1"),
                iri("http://example.com/subject2"));
        Set<IRI> predicates = Set.of(iri("http://example.com/predicate1"),
                iri("http://example.com/predicate2"));
        Set<? extends Value> objects = Set.of(VALUE_FACTORY.createLiteral("object1"),
                VALUE_FACTORY.createLiteral("object2"), VALUE_FACTORY.createLiteral("object3"));
        Set<Resource> graphs = Set.of(iri("http://example.com/graph1"),
                iri("http://example.com/graph2"));

        // When
        Stream<Statement> statementStream = Models.streamCartesianProductStatements(subjects, predicates,
                objects, graphs);

        // Then
        Model expected = expectedCartesianModel(iri("http://example.com/graph1"),
                iri("http://example.com/graph2"));
        assertThat(statementStream.collect(ModelCollector.toModel()), is(expected));
    }

    /** An empty triple component makes the cartesian product undefined — must throw. */
    @Test
    void givenEmptyTripleValueSet_whenCreateStatement_thenThrowException() {
        // Given
        Set<Resource> subjects = Set.of();
        Set<IRI> predicates = Set.of(iri("http://example.com/predicate1"),
                iri("http://example.com/predicate2"));
        Set<? extends Value> objects = Set.of(VALUE_FACTORY.createLiteral("object1"),
                VALUE_FACTORY.createLiteral("object2"), VALUE_FACTORY.createLiteral("object3"));
        Set<Resource> graphs = Set.of();

        // When
        Throwable modelsException = assertThrows(ModelsException.class,
                () -> Models.streamCartesianProductStatements(subjects, predicates, objects, graphs,
                        DEFAULT_GRAPH_MODIFIER, VALUE_FACTORY, statementConsumer1, statementConsumer2));

        // Then
        assertThat(modelsException.getMessage(),
                is("Could not create cartesian product statements because at least "
                        + "one of subjects, predicates or objects was empty."));
    }

    /**
     * Hamcrest matcher comparing models by RDF isomorphism rather than set
     * equality; on mismatch, renders the missing and surplus statements as
     * Turtle for readable failure output.
     */
    static Matcher<Model> isIsomorphicWith(final Model expected) {
        return new TypeSafeMatcher<>() {
            @Override
            protected boolean matchesSafely(Model actual) {
                return org.eclipse.rdf4j.model.util.Models.isomorphic(actual, expected);
            }

            @Override
            public void describeTo(Description description) {
                description.appendText(String.format("Model with %s statements.", expected.size()));
            }

            @Override
            protected void describeMismatchSafely(final Model item, final Description mismatchDescription) {
                mismatchDescription.appendText(String.format("Model with %s statements.%n%n", item.size()));

                Sets.SetView<Statement> statementsMissing = Sets.difference(expected, item);
                mismatchDescription.appendText(String.format("Statements expected but missing:%n%n"));
                mismatchDescription.appendText(modelToString(new LinkedHashModel(statementsMissing)));

                Sets.SetView<Statement> surplusStatements = Sets.difference(item, expected);
                mismatchDescription.appendText(String.format("Statements that were not expected:%n%n"));
                mismatchDescription.appendText(modelToString(new LinkedHashModel(surplusStatements)));
            }

            /** Serializes {@code model} to Turtle with platform-consistent line endings. */
            private String modelToString(final Model model) {
                model.setNamespace("ex", "http://example.org/");
                StringWriter stringWriter = new StringWriter();
                Rio.write(model, stringWriter, RDFFormat.TURTLE);
                return stringWriter.toString()
                        .replace("\r\n", System.lineSeparator())
                        .replace("\r", System.lineSeparator());
            }
        };
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Created by IntelliJ IDEA.
 * User: max
 * Date: Jun 4, 2002
 * Time: 8:27:13 PM
 * To change template for new class use
 * Code Style | Class Templates options (Tools | IDE Options).
 */
package com.intellij.openapi.editor.impl;

import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.FoldingGroup;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.FoldingListener;
import com.intellij.openapi.editor.ex.FoldingModelEx;
import com.intellij.openapi.editor.ex.PrioritizedDocumentListener;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;

import java.awt.*;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;

/**
 * Editor folding model: owns all {@link FoldRegion}s of one {@link EditorImpl},
 * delegates offset lookups to a {@link FoldRegionsTree}, and enforces that all
 * structural mutations happen inside a batch folding operation
 * ({@link #runBatchFoldingOperation(Runnable)}), after which listeners are
 * notified and the caret/selection/scroll position is restored.
 *
 * <p>Mutating methods assert the EDT; read methods assert read access.
 */
public class FoldingModelImpl implements FoldingModelEx, PrioritizedDocumentListener {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.editor.impl.EditorFoldingModelImpl");

  // Listener set is copy-on-write so notification iterates a stable snapshot.
  private final Set<FoldingListener> myListeners = new CopyOnWriteArraySet<FoldingListener>();

  private boolean myIsFoldingEnabled;
  private final EditorImpl myEditor;
  // Offset-indexed storage of all regions; rebuilt after each batch operation.
  private final FoldRegionsTree myFoldTree;
  private TextAttributes myFoldTextAttributes;
  // True while inside runBatchFoldingOperation(); mutations are only legal then.
  private boolean myIsBatchFoldingProcessing;
  private boolean myDoNotCollapseCaret;
  // Set when any region was added/removed/toggled during the current batch.
  private boolean myFoldRegionsProcessed;

  // Logical caret position captured when a region containing the caret is
  // collapsed, so the caret can be restored on expand (X = column, Y = line).
  private int mySavedCaretX;
  private int mySavedCaretY;
  // Vertical pixel distance of the caret from the viewport top, captured at
  // batch start and used to re-scroll after folding changes; -1 disables it.
  private int mySavedCaretShift;
  private boolean myCaretPositionSaved;
  private final MultiMap<FoldingGroup, FoldRegion> myGroups = new MultiMap<FoldingGroup, FoldRegion>();

  public FoldingModelImpl(EditorImpl editor) {
    myEditor = editor;
    myIsFoldingEnabled = true;
    myIsBatchFoldingProcessing = false;
    myDoNotCollapseCaret = false;
    // The tree queries this model's state back through these hooks.
    myFoldTree = new FoldRegionsTree() {
      protected boolean isFoldingEnabled() {
        return FoldingModelImpl.this.isFoldingEnabled();
      }

      protected boolean isBatchFoldingProcessing() {
        return myIsBatchFoldingProcessing;
      }
    };
    myFoldRegionsProcessed = false;
    refreshSettings();
  }

  /** Returns all regions registered under the given group (live view of the multimap). */
  @NotNull
  public List<FoldRegion> getGroupedRegions(@NotNull FoldingGroup group) {
    return (List<FoldRegion>)myGroups.get(group);
  }

  /**
   * Returns the group's region with the smallest start offset.
   * {@code child} is only used to enrich the assertion message when the group
   * unexpectedly has no registered regions.
   */
  @NotNull
  public FoldRegion getFirstRegion(@NotNull FoldingGroup group, FoldRegion child) {
    final List<FoldRegion> regions = getGroupedRegions(group);
    if (regions.isEmpty()) {
      final boolean inAll = Arrays.asList(getAllFoldRegions()).contains(child);
      throw new AssertionError("Folding group without children; the known child is in all: " + inAll);
    }

    FoldRegion main = regions.get(0);
    for (int i = 1; i < regions.size(); i++) {
      FoldRegion region = regions.get(i);
      if (main.getStartOffset() > region.getStartOffset()) {
        main = region;
      }
    }
    return main;
  }

  /** Returns the maximum end offset over the group's still-valid regions (0 if none). */
  public int getEndOffset(@NotNull FoldingGroup group) {
    final List<FoldRegion> regions = getGroupedRegions(group);
    int endOffset = 0;
    for (FoldRegion region : regions) {
      if (region.isValid()) {
        endOffset = Math.max(endOffset, region.getEndOffset());
      }
    }
    return endOffset;
  }

  /** Re-reads the placeholder text attributes from the editor's color scheme. */
  public void refreshSettings() {
    myFoldTextAttributes = myEditor.getColorsScheme().getAttributes(EditorColors.FOLDED_TEXT_ATTRIBUTES);
  }

  public boolean isFoldingEnabled() {
    return myIsFoldingEnabled;
  }

  /** True if the offset lies inside a currently collapsed top-level region. */
  public boolean isOffsetCollapsed(int offset) {
    assertReadAccess();
    return getCollapsedRegionAtOffset(offset) != null;
  }

  private void assertIsDispatchThread() {
    ApplicationManagerEx.getApplicationEx().assertIsDispatchThread(myEditor.getComponent());
  }

  private static void assertReadAccess() {
    ApplicationManagerEx.getApplicationEx().assertReadAccessAllowed();
  }

  public void setFoldingEnabled(boolean isEnabled) {
    assertIsDispatchThread();
    myIsFoldingEnabled = isEnabled;
  }

  /**
   * Creates and registers a non-grouped, expandable region.
   * Returns null (disposing the region) if registration is rejected.
   */
  public FoldRegion addFoldRegion(int startOffset, int endOffset, @NotNull String placeholderText) {
    FoldRegion region = createFoldRegion(startOffset, endOffset, placeholderText, null, false);
    if (region == null) return null;
    if (!addFoldRegion(region)) {
      region.dispose();
      return null;
    }

    return region;
  }

  /**
   * Registers an existing region. Must be called inside a batch operation;
   * returns false (and logs an error) otherwise, or if the tree rejects the
   * region. Listeners are notified on success.
   */
  public boolean addFoldRegion(@NotNull final FoldRegion region) {
    assertIsDispatchThread();
    if (!isFoldingEnabled()) {
      return false;
    }
    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be added or removed inside batchFoldProcessing() only.");
      return false;
    }

    myFoldRegionsProcessed = true;
    if (myFoldTree.addRegion(region)) {
      final FoldingGroup group = region.getGroup();
      if (group != null) {
        myGroups.putValue(group, region);
      }
      for (FoldingListener listener : myListeners) {
        listener.onFoldRegionStateChange(region);
      }
      return true;
    }

    return false;
  }

  public void runBatchFoldingOperation(@NotNull Runnable operation) {
    runBatchFoldingOperation(operation, false);
  }

  /**
   * Runs {@code operation} with batch-processing enabled. Nested calls are
   * supported: only the outermost call captures the caret scroll shift and
   * triggers {@link #notifyBatchFoldingProcessingDone()} (and only when some
   * region actually changed).
   */
  private void runBatchFoldingOperation(final Runnable operation, final boolean dontCollapseCaret) {
    assertIsDispatchThread();
    boolean oldDontCollapseCaret = myDoNotCollapseCaret;
    myDoNotCollapseCaret |= dontCollapseCaret;
    boolean oldBatchFlag = myIsBatchFoldingProcessing;
    if (!oldBatchFlag) {
      // Remember how far (in px) the caret line is from the viewport top so we
      // can keep it visually in place after regions collapse/expand.
      mySavedCaretShift = myEditor.visibleLineNumberToYPosition(myEditor.getCaretModel().getVisualPosition().line) - myEditor.getScrollingModel().getVerticalScrollOffset();
    }

    myIsBatchFoldingProcessing = true;
    // Invalidate the tree's insertion-point cache around the operation.
    myFoldTree.myCachedLastIndex = -1;
    operation.run();
    myFoldTree.myCachedLastIndex = -1;

    if (!oldBatchFlag) {
      if (myFoldRegionsProcessed) {
        notifyBatchFoldingProcessingDone();
        myFoldRegionsProcessed = false;
      }
      myIsBatchFoldingProcessing = false;
    }
    myDoNotCollapseCaret = oldDontCollapseCaret;
  }

  /** Like {@link #runBatchFoldingOperation(Runnable)} but regions containing the caret stay expanded. */
  public void runBatchFoldingOperationDoNotCollapseCaret(@NotNull final Runnable operation) {
    runBatchFoldingOperation(operation, true);
  }

  /** Disables post-batch vertical re-scrolling until the next batch captures a new shift. */
  public void flushCaretShift() {
    mySavedCaretShift = -1;
  }

  @NotNull
  public FoldRegion[] getAllFoldRegions() {
    assertReadAccess();
    return myFoldTree.fetchAllRegions();
  }

  /** Outermost collapsed region covering the offset, or null. */
  public FoldRegion getCollapsedRegionAtOffset(int offset) {
    return myFoldTree.fetchOutermost(offset);
  }

  int getLastTopLevelIndexBefore (int offset) {
    return myFoldTree.getLastTopLevelIndexBefore(offset);
  }

  /**
   * Returns the collapsed region whose placeholder is rendered at point
   * {@code p}, or null when the point is on ordinary (non-folded) text.
   */
  public FoldRegion getFoldingPlaceholderAt(Point p) {
    assertReadAccess();
    LogicalPosition pos = myEditor.xyToLogicalPosition(p);
    int line = pos.line;

    if (line >= myEditor.getDocument().getLineCount()) return null;

    //leftmost folded block position
    if (myEditor.xyToVisualPosition(p).equals(myEditor.logicalToVisualPosition(pos))) return null;

    int offset = myEditor.logicalPositionToOffset(pos);

    return myFoldTree.fetchOutermost(offset);
  }

  /**
   * Unregisters and disposes a region (batch operation required). The region
   * is expanded first so the text it covered becomes visible again.
   */
  public void removeFoldRegion(@NotNull final FoldRegion region) {
    assertIsDispatchThread();

    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be added or removed inside batchFoldProcessing() only.");
    }

    region.setExpanded(true);
    final FoldingGroup group = region.getGroup();
    if (group != null) {
      myGroups.removeValue(group, region);
    }

    myFoldTree.removeRegion(region);
    myFoldRegionsProcessed = true;
    region.dispose();
  }

  public void dispose() {
    myGroups.clear();
    myFoldTree.clear();
  }

  /**
   * Expands a region (batch operation required). If the caret was saved when
   * this exact region collapsed over it, the caret is moved back to the saved
   * logical position.
   */
  public void expandFoldRegion(FoldRegion region) {
    assertIsDispatchThread();
    if (region.isExpanded() || region.shouldNeverExpand()) return;

    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be collapsed or expanded inside batchFoldProcessing() only.");
    }

    if (myCaretPositionSaved) {
      int savedOffset = myEditor.logicalPositionToOffset(new LogicalPosition(mySavedCaretY, mySavedCaretX));

      FoldRegion[] allCollapsed = myFoldTree.fetchCollapsedAt(savedOffset);
      if (allCollapsed.length == 1 && allCollapsed[0] == region) {
        LogicalPosition pos = new LogicalPosition(mySavedCaretY, mySavedCaretX);
        myEditor.getCaretModel().moveToLogicalPosition(pos);
      }
    }

    myFoldRegionsProcessed = true;
    ((FoldRegionImpl) region).setExpandedInternal(true);
    notifyListenersOnFoldRegionStateChange(region);
  }

  /**
   * Collapses a region (batch operation required). Saves the caret position if
   * the caret is inside the region (unless do-not-collapse-caret mode aborts
   * the collapse), and drops any selection that would straddle the fold.
   */
  public void collapseFoldRegion(FoldRegion region) {
    assertIsDispatchThread();
    if (!region.isExpanded()) return;

    if (!myIsBatchFoldingProcessing) {
      LOG.error("Fold regions must be collapsed or expanded inside batchFoldProcessing() only.");
    }

    LogicalPosition caretPosition = myEditor.getCaretModel().getLogicalPosition();
    int caretOffset = myEditor.logicalPositionToOffset(caretPosition);

    if (FoldRegionsTree.contains(region, caretOffset)) {
      if (myDoNotCollapseCaret) return;

      if (!myCaretPositionSaved) {
        mySavedCaretX = caretPosition.column;
        mySavedCaretY = caretPosition.line;
        myCaretPositionSaved = true;
      }
    }

    int selectionStart = myEditor.getSelectionModel().getSelectionStart();
    int selectionEnd = myEditor.getSelectionModel().getSelectionEnd();

    if (FoldRegionsTree.contains(region, selectionStart-1) || FoldRegionsTree.contains(region, selectionEnd)) myEditor.getSelectionModel().removeSelection();

    myFoldRegionsProcessed = true;
    ((FoldRegionImpl) region).setExpandedInternal(false);
    notifyListenersOnFoldRegionStateChange(region);
  }

  private void notifyBatchFoldingProcessingDone() {
    doNotifyBatchFoldingProcessingDone();
  }

  /**
   * Finalizes a batch: rebuilds the tree, notifies listeners, repaints, then
   * repositions the caret (preferring the saved pre-collapse position, else
   * the start of the collapsed region under the caret), restores the
   * selection, and re-scrolls to keep the caret's viewport offset.
   */
  private void doNotifyBatchFoldingProcessingDone() {
    myFoldTree.rebuild();

    for (FoldingListener listener : myListeners) {
      listener.onFoldProcessingEnd();
    }

    myEditor.updateCaretCursor();
    myEditor.recalculateSizeAndRepaint();
    if (myEditor.getGutterComponentEx().isFoldingOutlineShown()) {
      myEditor.getGutterComponentEx().repaint();
    }

    LogicalPosition caretPosition = myEditor.getCaretModel().getLogicalPosition();
    // There is a possible case that caret position is already visual position aware. But visual position depends on number of folded
    // logical lines as well, hence, we can't be sure that target logical position defines correct visual position because fold
    // regions have just changed. Hence, we use 'raw' logical position instead.
    if (caretPosition.visualPositionAware) {
      caretPosition = new LogicalPosition(caretPosition.line, caretPosition.column);
    }
    int caretOffset = myEditor.logicalPositionToOffset(caretPosition);

    boolean hasBlockSelection = myEditor.getSelectionModel().hasBlockSelection();
    int selectionStart = myEditor.getSelectionModel().getSelectionStart();
    int selectionEnd = myEditor.getSelectionModel().getSelectionEnd();

    int column = -1;
    int line = -1;
    int offsetToUse = -1;

    FoldRegion collapsed = myFoldTree.fetchOutermost(caretOffset);
    if (myCaretPositionSaved) {
      int savedOffset = myEditor.logicalPositionToOffset(new LogicalPosition(mySavedCaretY, mySavedCaretX));
      FoldRegion collapsedAtSaved = myFoldTree.fetchOutermost(savedOffset);
      if (collapsedAtSaved == null) {
        // Saved spot is visible again: go straight back to it.
        column = mySavedCaretX;
        line = mySavedCaretY;
      }
      else {
        // Saved spot is still folded: park the caret at that fold's start.
        offsetToUse = collapsedAtSaved.getStartOffset();
      }
    }

    if (collapsed != null && column == -1) {
      line = collapsed.getDocument().getLineNumber(collapsed.getStartOffset());
      column = myEditor.offsetToLogicalPosition(collapsed.getStartOffset()).column;
    }

    // moveToOffset/moveToLogicalPosition may clear the saved flag; preserve it.
    boolean oldCaretPositionSaved = myCaretPositionSaved;

    if (offsetToUse >= 0) {
      myEditor.getCaretModel().moveToOffset(offsetToUse);
    }
    else if (column != -1) {
      myEditor.getCaretModel().moveToLogicalPosition(new LogicalPosition(line, column));
    }
    else {
      myEditor.getCaretModel().moveToLogicalPosition(caretPosition);
    }

    myCaretPositionSaved = oldCaretPositionSaved;

    if (!hasBlockSelection && selectionStart < myEditor.getDocument().getTextLength()) {
      myEditor.getSelectionModel().setSelection(selectionStart, selectionEnd);
    }

    if (mySavedCaretShift > 0) {
      myEditor.getScrollingModel().disableAnimation();
      int scrollTo = myEditor.visibleLineNumberToYPosition(myEditor.getCaretModel().getVisualPosition().line) - mySavedCaretShift;
      myEditor.getScrollingModel().scrollVertically(scrollTo);
      myEditor.getScrollingModel().enableAnimation();
    }
  }

  public void rebuild() {
    myFoldTree.rebuild();
  }

  private void updateCachedOffsets() {
    myFoldTree.updateCachedOffsets();
  }

  public int getFoldedLinesCountBefore(int offset) {
    return myFoldTree.getFoldedLinesCountBefore(offset);
  }

  public FoldRegion[] fetchTopLevel() {
    return myFoldTree.fetchTopLevel();
  }

  public FoldRegion fetchOutermost(int offset) {
    return myFoldTree.fetchOutermost(offset);
  }

  public FoldRegion[] fetchCollapsedAt(int offset) {
    return myFoldTree.fetchCollapsedAt(offset);
  }

  public boolean intersectsRegion (int startOffset, int endOffset) {
    return myFoldTree.intersectsRegion(startOffset, endOffset);
  }

  public FoldRegion[] fetchVisible() {
    return myFoldTree.fetchVisible();
  }

  public int getLastCollapsedRegionBefore(int offset) {
    return myFoldTree.getLastTopLevelIndexBefore(offset);
  }

  public TextAttributes getPlaceholderAttributes() {
    return myFoldTextAttributes;
  }

  /** Forgets the saved caret position (used by expand-time restoration). */
  public void flushCaretPosition() {
    myCaretPositionSaved = false;
  }

  public void beforeDocumentChange(DocumentEvent event) {
  }

  /** During bulk updates the tree is dropped wholesale; otherwise offsets are refreshed incrementally. */
  public void documentChanged(DocumentEvent event) {
    if (((DocumentEx)event.getDocument()).isInBulkUpdate()) {
      myFoldTree.clear();
    } else {
      updateCachedOffsets();
    }
  }

  public int getPriority() {
    return EditorDocumentPriorities.FOLD_MODEL;
  }

  /** Creates a region attached to the document; caller still must register it via addFoldRegion. */
  public FoldRegion createFoldRegion(int startOffset, int endOffset, @NotNull String placeholder, FoldingGroup group, boolean neverExpands) {
    FoldRegionImpl region = new FoldRegionImpl(myEditor, startOffset, endOffset, placeholder, group, neverExpands);
    region.registerInDocument();
    LOG.assertTrue(region.isValid());
    return region;
  }

  @Override
  public boolean addListener(@NotNull FoldingListener listener) {
    return myListeners.add(listener);
  }

  @Override
  public boolean removeListener(@NotNull FoldingListener listener) {
    return myListeners.remove(listener);
  }

  private void notifyListenersOnFoldRegionStateChange(@NotNull FoldRegion foldRegion) {
    for (FoldingListener listener : myListeners) {
      listener.onFoldRegionStateChange(foldRegion);
    }
  }
}
/*
 * Copyright 2014 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package br.com.devfest.norte.wear;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.NotificationManagerCompat;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.WindowManager;
import android.widget.ImageView;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;

import br.com.devfest.norte.R;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;

import br.com.devfest.norte.wear.fragments.FeedbackFragment;
import br.com.devfest.norte.wear.fragments.RadioFragment;
import br.com.devfest.norte.wear.fragments.StarFragment;
import br.com.devfest.norte.wear.fragments.SubmitFragment;
import br.com.devfest.norte.wear.utils.Utils;

import static br.com.devfest.norte.wear.utils.Utils.LOGD;
import static br.com.devfest.norte.wear.utils.Utils.makeLogTag;

/**
 * The main activity that builds all pages of a session feedback.
 *
 * <p>Shows one question per {@link ViewPager} page (a star question plus three
 * radio questions, then a submit page). Answering a question schedules an
 * automatic flip to the next page after {@link #PAGE_FLIP_DELAY_MS}. On
 * submission, the collected answers are serialized to JSON and pushed to the
 * phone through the Wearable Data API.
 */
public class PagerActivity extends Activity
        implements OnQuestionAnsweredListener, GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener {

    private static final long PAGE_FLIP_DELAY_MS = 2000; // in millis

    private ViewPager mViewPager;
    private static final String TAG = makeLogTag("PagerActivity");
    private List<FeedbackFragment> mFragments = new ArrayList<FeedbackFragment>();
    // Timer/task pair driving the delayed automatic page flip; see renewTimer().
    private Timer mTimer;
    private TimerTask mTimerTask;
    private Handler mHandler;
    // Maps question index -> selected response index for answered questions only.
    private Map<Integer, Integer> responses = new HashMap<Integer, Integer>();
    private GoogleApiClient mGoogleApiClient;
    private String mSessionId;
    private ImageView[] indicators = null;
    // Persisted answers loaded at startup; -1 marks an unanswered question.
    private int[] mSavedResponses;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_pager);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        setupViews();
        if (getIntent().hasExtra(HomeListenerService.KEY_SESSION_ID)) {
            mSessionId = getIntent().getExtras().getString(HomeListenerService.KEY_SESSION_ID);
            LOGD(TAG, "Session received from service: " + mSessionId);
        }

        // if there is any prior persisted answers for this feedback, lets load them.
        mSavedResponses = Utils.getPersistedResponses(this, mSessionId);
        Utils.saveSessionId(this, mSessionId);
        for (int i = 0; i < 4; i++) {
            LOGD(TAG, "Response " + i + ": " + mSavedResponses[i]);
            if (mSavedResponses[i] > -1) {
                responses.put(i, mSavedResponses[i]);
            }
        }

        final PagerAdapter adapter = new PagerAdapter(getFragmentManager());
        StarFragment fragment0 = StarFragment.newInstance(0, mSavedResponses[0]);
        RadioFragment fragment1 = RadioFragment.newInstance(1, mSavedResponses[1]);
        RadioFragment fragment2 = RadioFragment.newInstance(2, mSavedResponses[2]);
        RadioFragment fragment3 = RadioFragment.newInstance(3, mSavedResponses[3]);
        mFragments.add(fragment0);
        mFragments.add(fragment1);
        mFragments.add(fragment2);
        mFragments.add(fragment3);

        for (FeedbackFragment f : mFragments) {
            f.setOnQuestionListener(this);
            adapter.addFragment(f);
        }
        adapter.addFragment(new SubmitFragment(this));

        mViewPager.setAdapter(adapter);
        mViewPager.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int i, float v, int i2) {
            }

            @Override
            public void onPageSelected(int i) {
                // A manual (or automatic) page change cancels any pending auto-flip.
                clearTimer();
                setIndicator(i);
                if (i == 0) {
                    mFragments.get(i).reshowQuestion();
                }
            }

            @Override
            public void onPageScrollStateChanged(int i) {
            }
        });

        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Wearable.API)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
    }

    @Override
    protected void onStart() {
        mGoogleApiClient.connect();
        super.onStart();
    }

    @Override
    protected void onStop() {
        // FIX: cancel any pending auto page-flip. Previously the Timer kept
        // running after the activity was stopped and its task could still post
        // a setCurrentItem() on an activity that is no longer visible.
        clearTimer();
        mGoogleApiClient.disconnect();
        super.onStop();
    }

    @Override
    public void onQuestionAnswered(int questionNumber, int responseNumber) {
        LOGD(TAG, "Question Answered: " + questionNumber + " -> " + responseNumber);
        Utils.saveResponse(this, questionNumber, responseNumber);
        // Flip to the next question automatically after a short delay.
        renewTimer(questionNumber + 1);
        responses.put(questionNumber, responseNumber);
    }

    @Override
    public void submit() {
        // do the submission
        Intent finishIntent = new Intent(this, FinishActivity.class);
        try {
            PutDataRequest dataRequest = buildDataRequest();
            if (mGoogleApiClient.isConnected()) {
                Wearable.DataApi.putDataItem(mGoogleApiClient, dataRequest)
                        .setResultCallback(new ResultCallback<DataApi.DataItemResult>() {
                            @Override
                            public void onResult(DataApi.DataItemResult dataItemResult) {
                                if (!dataItemResult.getStatus().isSuccess()) {
                                    LOGD(TAG, "Failed to send back responses, status: "
                                            + dataItemResult.getStatus());
                                }
                            }
                        });
            } else {
                Log.e(TAG, "submit() Failed to send data to phone since there was no Google API "
                        + "client connectivity");
            }
        } catch (JSONException e) {
            Log.e(TAG, "Failed to build a json from responses", e);
        }

        // remove local notification upon submission
        NotificationManagerCompat.from(this)
                .cancel(mSessionId, HomeListenerService.NOTIFICATION_ID);

        // clear persisted local data
        Utils.clearResponses(this);
        startActivity(finishIntent);
        finish();
    }

    /**
     * Builds a {@link com.google.android.gms.wearable.PutDataRequest} which holds a JSON
     * representation of the feedback collected.
     */
    private PutDataRequest buildDataRequest() throws JSONException {
        PutDataMapRequest putDataMapRequest = PutDataMapRequest
                .create(HomeListenerService.PATH_RESPONSE);
        DataMap dataMap = putDataMapRequest.getDataMap();
        JSONArray jsonArray = new JSONArray();
        if (!responses.isEmpty()) {
            // Element 0 carries the session id; subsequent elements carry
            // {"q": questionIndex, "a": answerIndex} pairs.
            JSONObject sessionObj = new JSONObject();
            sessionObj.put("s", mSessionId);
            jsonArray.put(0, sessionObj);
            int i = 1;
            for (Integer key : responses.keySet()) {
                JSONObject obj = new JSONObject();
                obj.put("q", key);
                obj.put("a", responses.get(key));
                jsonArray.put(i++, obj);
            }
        }
        String response = jsonArray.toString();
        LOGD(TAG, "JSON representation of the response: " + response);
        dataMap.putString("response", response);
        return putDataMapRequest.asPutDataRequest();
    }

    /**
     * Renews the timer that causes pages to flip upon providing an answer.
     *
     * @param targetPage page to flip to once the delay elapses.
     */
    private void renewTimer(final int targetPage) {
        clearTimer();
        mTimerTask = new TimerTask() {
            @Override
            public void run() {
                // TimerTask runs off the main thread; hop back for UI work.
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        mViewPager.setCurrentItem(targetPage);
                    }
                });
            }
        };
        mTimer = new Timer();
        mTimer.schedule(mTimerTask, PAGE_FLIP_DELAY_MS);
    }

    /**
     * Clears the timer that flips the pages.
     */
    private void clearTimer() {
        // FIX: cancel and drop both the task and the timer; previously the
        // cancelled Timer stayed referenced (and the task was never cancelled),
        // keeping dead objects alive between flips.
        if (null != mTimerTask) {
            mTimerTask.cancel();
            mTimerTask = null;
        }
        if (null != mTimer) {
            mTimer.cancel();
            mTimer = null;
        }
    }

    @Override
    public void onConnected(Bundle bundle) {
        LOGD(TAG, "onConnected() Connected to Google Api Service");
    }

    @Override
    public void onConnectionSuspended(int i) {
    }

    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
        Log.e(TAG, "onConnectionFailed(): Connection to Google Api Service failed with result: "
                + connectionResult);
    }

    private void setupViews() {
        mViewPager = (ViewPager) findViewById(R.id.pager);
        indicators = new ImageView[5];
        indicators[0] = (ImageView) findViewById(R.id.indicator_0);
        indicators[1] = (ImageView) findViewById(R.id.indicator_1);
        indicators[2] = (ImageView) findViewById(R.id.indicator_2);
        indicators[3] = (ImageView) findViewById(R.id.indicator_3);
        indicators[4] = (ImageView) findViewById(R.id.indicator_4);
    }

    /**
     * Sets the page indicator for the ViewPager: dot {@code i} is filled,
     * all others are empty.
     */
    private void setIndicator(int i) {
        for (int k = 0; k < indicators.length; k++) {
            indicators[k].setImageResource(
                    i == k ? R.drawable.page_dot_full : R.drawable.page_dot_empty);
        }
    }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.08.09 at 09:22:32 PM IST // package com.pacificmetrics.ims.apip.qti.item; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.NormalizedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import com.pacificmetrics.orca.export.ims.IMSInteractionInf; /** * * The ChoiceInteraction is the complexType for the information in a choice interaction. The choice interaction presents a set of choices to the candidate. The candidate's task is to select one or more of the choices, up to a maximum of maxChoices. There is no corresponding minimum number of choices. The interaction is always initialized with no choices selected. * * * <p>Java class for ChoiceInteraction.Type complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="ChoiceInteraction.Type"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}prompt" minOccurs="0"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}simpleChoice" maxOccurs="unbounded"/> * &lt;/sequence> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}xmllang.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}class.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}minChoices.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}orientation.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}responseIdentifier.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}shuffle.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}maxChoices.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}label.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}xmlbase.ChoiceInteraction.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}id.ChoiceInteraction.Attr"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "ChoiceInteraction.Type", propOrder = { "prompt", "simpleChoices" }) @XmlRootElement(name = "choiceInteraction") public class ChoiceInteraction implements IMSInteractionInf { protected Prompt prompt; @XmlElement(name = "simpleChoice", required = true) protected 
List<SimpleChoice> simpleChoices; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") protected String lang; @XmlAttribute(name = "class") protected List<String> clazzs; @XmlAttribute(name = "minChoices") @XmlSchemaType(name = "nonNegativeInteger") protected BigInteger minChoices; @XmlAttribute(name = "orientation") protected String orientation; @XmlAttribute(name = "responseIdentifier", required = true) @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String responseIdentifier; @XmlAttribute(name = "shuffle") protected Boolean shuffle; @XmlAttribute(name = "maxChoices") @XmlSchemaType(name = "nonNegativeInteger") protected BigInteger maxChoices; @XmlAttribute(name = "label") @XmlJavaTypeAdapter(NormalizedStringAdapter.class) @XmlSchemaType(name = "normalizedString") protected String label; @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anyURI") protected String base; @XmlAttribute(name = "id") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID protected String id; /** * Gets the value of the prompt property. * * @return * possible object is * {@link Prompt } * */ public Prompt getPrompt() { return prompt; } /** * Sets the value of the prompt property. * * @param value * allowed object is * {@link Prompt } * */ public void setPrompt(Prompt value) { this.prompt = value; } /** * Gets the value of the simpleChoices property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the simpleChoices property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getSimpleChoices().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link SimpleChoice } * * */ public List<SimpleChoice> getSimpleChoices() { if (simpleChoices == null) { simpleChoices = new ArrayList<SimpleChoice>(); } return this.simpleChoices; } /** * Gets the value of the lang property. * * @return * possible object is * {@link String } * */ public String getLang() { return lang; } /** * Sets the value of the lang property. * * @param value * allowed object is * {@link String } * */ public void setLang(String value) { this.lang = value; } /** * Gets the value of the clazzs property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the clazzs property. * * <p> * For example, to add a new item, do as follows: * <pre> * getClazzs().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getClazzs() { if (clazzs == null) { clazzs = new ArrayList<String>(); } return this.clazzs; } /** * Gets the value of the minChoices property. * * @return * possible object is * {@link BigInteger } * */ public BigInteger getMinChoices() { if (minChoices == null) { return new BigInteger("0"); } else { return minChoices; } } /** * Sets the value of the minChoices property. * * @param value * allowed object is * {@link BigInteger } * */ public void setMinChoices(BigInteger value) { this.minChoices = value; } /** * Gets the value of the orientation property. * * @return * possible object is * {@link String } * */ public String getOrientation() { return orientation; } /** * Sets the value of the orientation property. 
* * @param value * allowed object is * {@link String } * */ public void setOrientation(String value) { this.orientation = value; } /** * Gets the value of the responseIdentifier property. * * @return * possible object is * {@link String } * */ public String getResponseIdentifier() { return responseIdentifier; } /** * Sets the value of the responseIdentifier property. * * @param value * allowed object is * {@link String } * */ public void setResponseIdentifier(String value) { this.responseIdentifier = value; } /** * Gets the value of the shuffle property. * * @return * possible object is * {@link Boolean } * */ public boolean isShuffle() { if (shuffle == null) { return false; } else { return shuffle; } } /** * Sets the value of the shuffle property. * * @param value * allowed object is * {@link Boolean } * */ public void setShuffle(Boolean value) { this.shuffle = value; } /** * Gets the value of the maxChoices property. * * @return * possible object is * {@link BigInteger } * */ public BigInteger getMaxChoices() { if (maxChoices == null) { return new BigInteger("1"); } else { return maxChoices; } } /** * Sets the value of the maxChoices property. * * @param value * allowed object is * {@link BigInteger } * */ public void setMaxChoices(BigInteger value) { this.maxChoices = value; } /** * Gets the value of the label property. * * @return * possible object is * {@link String } * */ public String getLabel() { return label; } /** * Sets the value of the label property. * * @param value * allowed object is * {@link String } * */ public void setLabel(String value) { this.label = value; } /** * Gets the value of the base property. * * @return * possible object is * {@link String } * */ public String getBase() { return base; } /** * Sets the value of the base property. * * @param value * allowed object is * {@link String } * */ public void setBase(String value) { this.base = value; } /** * Gets the value of the id property. 
* * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } }
package info.ineighborhood.cardme; import info.ineighborhood.cardme.io.CompatibilityMode; import info.ineighborhood.cardme.io.FoldingScheme; import info.ineighborhood.cardme.io.VCardWriter; import info.ineighborhood.cardme.util.StringUtil; import info.ineighborhood.cardme.util.Util; import info.ineighborhood.cardme.vcard.EncodingType; import info.ineighborhood.cardme.vcard.VCard; import info.ineighborhood.cardme.vcard.VCardImpl; import info.ineighborhood.cardme.vcard.VCardVersion; import info.ineighborhood.cardme.vcard.errors.VCardError; import info.ineighborhood.cardme.vcard.errors.VCardErrorHandling; import info.ineighborhood.cardme.vcard.features.AddressFeature; import info.ineighborhood.cardme.vcard.features.CategoriesFeature; import info.ineighborhood.cardme.vcard.features.DisplayableNameFeature; import info.ineighborhood.cardme.vcard.features.EmailFeature; import info.ineighborhood.cardme.vcard.features.FormattedNameFeature; import info.ineighborhood.cardme.vcard.features.LabelFeature; import info.ineighborhood.cardme.vcard.features.LogoFeature; import info.ineighborhood.cardme.vcard.features.NameFeature; import info.ineighborhood.cardme.vcard.features.NicknameFeature; import info.ineighborhood.cardme.vcard.features.NoteFeature; import info.ineighborhood.cardme.vcard.features.OrganizationFeature; import info.ineighborhood.cardme.vcard.features.PhotoFeature; import info.ineighborhood.cardme.vcard.features.ProfileFeature; import info.ineighborhood.cardme.vcard.features.SoundFeature; import info.ineighborhood.cardme.vcard.features.SourceFeature; import info.ineighborhood.cardme.vcard.features.TelephoneFeature; import info.ineighborhood.cardme.vcard.types.AddressType; import info.ineighborhood.cardme.vcard.types.BirthdayType; import info.ineighborhood.cardme.vcard.types.CategoriesType; import info.ineighborhood.cardme.vcard.types.ClassType; import info.ineighborhood.cardme.vcard.types.DisplayableNameType; import info.ineighborhood.cardme.vcard.types.EmailType; 
import info.ineighborhood.cardme.vcard.types.ExtendedType; import info.ineighborhood.cardme.vcard.types.FormattedNameType; import info.ineighborhood.cardme.vcard.types.GeographicPositionType; import info.ineighborhood.cardme.vcard.types.LabelType; import info.ineighborhood.cardme.vcard.types.LogoType; import info.ineighborhood.cardme.vcard.types.MailerType; import info.ineighborhood.cardme.vcard.types.NameType; import info.ineighborhood.cardme.vcard.types.NicknameType; import info.ineighborhood.cardme.vcard.types.NoteType; import info.ineighborhood.cardme.vcard.types.OrganizationType; import info.ineighborhood.cardme.vcard.types.PhotoType; import info.ineighborhood.cardme.vcard.types.ProductIdType; import info.ineighborhood.cardme.vcard.types.ProfileType; import info.ineighborhood.cardme.vcard.types.RevisionType; import info.ineighborhood.cardme.vcard.types.RoleType; import info.ineighborhood.cardme.vcard.types.SortStringType; import info.ineighborhood.cardme.vcard.types.SoundType; import info.ineighborhood.cardme.vcard.types.SourceType; import info.ineighborhood.cardme.vcard.types.TelephoneType; import info.ineighborhood.cardme.vcard.types.TimeZoneType; import info.ineighborhood.cardme.vcard.types.TitleType; import info.ineighborhood.cardme.vcard.types.UIDType; import info.ineighborhood.cardme.vcard.types.URLType; import info.ineighborhood.cardme.vcard.types.VersionType; import info.ineighborhood.cardme.vcard.types.media.AudioMediaType; import info.ineighborhood.cardme.vcard.types.media.ImageMediaType; import info.ineighborhood.cardme.vcard.types.parameters.AddressParameterType; import info.ineighborhood.cardme.vcard.types.parameters.EmailParameterType; import info.ineighborhood.cardme.vcard.types.parameters.LabelParameterType; import info.ineighborhood.cardme.vcard.types.parameters.ParameterTypeStyle; import info.ineighborhood.cardme.vcard.types.parameters.TelephoneParameterType; import java.io.File; import java.net.URL; import java.util.Calendar; import 
java.util.List; /** * Copyright (c) 2004, Neighborhood Technologies * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of Neighborhood Technologies nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ /** * * @author George El-Haddad * <br/> * Feb 10, 2010 */ public class TestVCard { /** * run this test from the command line * * @param args */ public static void main(String args[]) { try { testVcard(); } catch(Exception ex) { ex.printStackTrace(); } } private static VCard getFullVCardNoErrors() throws Exception { VCard vcard = new VCardImpl(); vcard.setVersion(new VersionType(VCardVersion.V3_0)); DisplayableNameFeature displayableName = new DisplayableNameType(); displayableName.setName("VCard for John Doe"); vcard.setDisplayableNameFeature(displayableName); ProfileFeature profile = new ProfileType(); profile.setProfile("VCard"); vcard.setProfile(profile); SourceFeature source = new SourceType(); source.setSource("Whatever"); vcard.setSource(source); NameFeature name = new NameType(); name.setFamilyName("Doe"); name.setGivenName("John"); name.addHonorificPrefix("Mr."); name.addHonorificSuffix("I"); name.addAdditionalName("Johny"); vcard.setName(name); FormattedNameFeature formattedName = new FormattedNameType(); formattedName.setFormattedName("John \"Johny\" Doe"); vcard.setFormattedName(formattedName); NicknameFeature nicknames = new NicknameType(); nicknames.addNickname("Johny"); nicknames.addNickname("JayJay"); vcard.setNicknames(nicknames); CategoriesFeature categories = new CategoriesType(); categories.addCategory("Category 1"); categories.addCategory("Category 2"); categories.addCategory("Category 3"); vcard.setCategories(categories); vcard.setSecurityClass(new ClassType("Public")); vcard.setProductId(new ProductIdType("31e78c0d-fb07-479d-b6af-95a9a3f2916f")); vcard.setSortString(new SortStringType("JOHN")); vcard.setMailer(new MailerType("Mozilla Thunderbird")); vcard.setTitle(new TitleType("Generic Accountant")); vcard.setRole(new RoleType("Counting Money")); OrganizationFeature organizations = new OrganizationType(); organizations.addOrganization("IBM"); organizations.addOrganization("SUN"); vcard.setOrganizations(organizations); vcard.setUID(new 
UIDType("c0ff639f-9633-4e57-bcfd-55079cfd9d65")); vcard.addURL(new URLType(new URL("http://www.sun.com"))); vcard.setGeographicPosition(new GeographicPositionType(3.4f, -2.6f)); Calendar birthday = Calendar.getInstance(); birthday.clear(); birthday.set(Calendar.YEAR, 1980); birthday.set(Calendar.MONTH, 4); birthday.set(Calendar.DAY_OF_MONTH, 21); vcard.setBirthday(new BirthdayType(birthday)); vcard.setRevision(new RevisionType(Calendar.getInstance())); vcard.setTimeZone(new TimeZoneType(Calendar.getInstance().getTimeZone())); AddressFeature address1 = new AddressType(); address1.setExtendedAddress(""); address1.setCountryName("U.S.A."); address1.setLocality("New York"); address1.setRegion("New York"); address1.setPostalCode("NYC887"); address1.setPostOfficeBox("25334"); address1.setStreetAddress("South cresent drive, Building 5, 3rd floor"); address1.addAddressParameterType(AddressParameterType.HOME); address1.addAddressParameterType(AddressParameterType.PARCEL); address1.addAddressParameterType(AddressParameterType.PREF); vcard.addAddress(address1); LabelFeature labelForAddress1 = new LabelType(); labelForAddress1.addLabelParameterType(LabelParameterType.HOME); labelForAddress1.addLabelParameterType(LabelParameterType.PARCEL); labelForAddress1.addLabelParameterType(LabelParameterType.PREF); labelForAddress1.setLabel("John Doe\nNew York, NewYork,\nSouth Crecent Drive,\nBuilding 5, floor 3,\nUSA"); vcard.setLabel(labelForAddress1, address1); TelephoneFeature telephone = new TelephoneType(); telephone.setTelephone("+1 (212) 204-34456"); telephone.addTelephoneParameterType(TelephoneParameterType.CELL); telephone.addTelephoneParameterType(TelephoneParameterType.HOME); telephone.setParameterTypeStyle(ParameterTypeStyle.PARAMETER_VALUE_LIST); vcard.addTelephoneNumber(telephone); TelephoneFeature telephone2 = new TelephoneType(); telephone2.setTelephone("00-1-212-555-7777"); telephone2.addTelephoneParameterType(TelephoneParameterType.FAX); 
telephone2.addTelephoneParameterType(TelephoneParameterType.WORK); telephone2.setParameterTypeStyle(ParameterTypeStyle.PARAMETER_LIST); vcard.addTelephoneNumber(telephone2); EmailFeature email = new EmailType(); email.setEmail("john.doe@ibm.com"); email.addEmailParameterType(EmailParameterType.IBMMAIL); email.addEmailParameterType(EmailParameterType.INTERNET); email.addEmailParameterType(EmailParameterType.PREF); vcard.addEmail(email); vcard.addEmail(new EmailType("billy_bob@gmail.com")); NoteFeature note = new NoteType(); note.setNote("THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\nARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE\nLIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\nCONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\nSUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\nINTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\nCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\nARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGE."); vcard.addNote(note); PhotoFeature photo1 = new PhotoType(); photo1.setCompression(true); photo1.setEncodingType(EncodingType.BINARY); photo1.setImageMediaType(ImageMediaType.PNG); byte[] tuxPicture1 = Util.getFileAsBytes(new File("test/images/smallTux.png")); photo1.setPhoto(tuxPicture1); vcard.addPhoto(photo1); LogoFeature logo = new LogoType(); logo.setCompression(false); logo.setEncodingType(EncodingType.BINARY); logo.setImageMediaType(ImageMediaType.PNG); byte[] tuxPicture2 = Util.getFileAsBytes(new File("test/images/smallTux.png")); logo.setLogo(tuxPicture2); vcard.addLogo(logo); SoundFeature sound = new SoundType(); sound.setCompression(true); 
sound.setEncodingType(EncodingType.BINARY); sound.setAudioMediaType(AudioMediaType.OGG); sound.setSoundURI(new File("test/images/smallTux.png").toURI()); vcard.addSound(sound); vcard.addExtendedType(new ExtendedType("X-GENERATOR", "Cardme Generator")); vcard.addExtendedType(new ExtendedType("X-LONG-STRING", "1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890")); ((VCardErrorHandling)vcard).setThrowExceptions(false); return vcard; } private static VCard getFullVCardAllErrors() throws Exception { VCard vcard = new VCardImpl(); vcard.setVersion(new VersionType(VCardVersion.V3_0)); NameFeature name = new NameType(); name.setFamilyName("Doe"); name.setGivenName("John"); name.addHonorificPrefix("Mr."); name.addHonorificSuffix("I"); name.addAdditionalName("Johny"); vcard.setName(name); FormattedNameFeature formattedName = new FormattedNameType(); formattedName.setFormattedName(null); vcard.setFormattedName(formattedName); NicknameFeature nicknames = new NicknameType(); nicknames.addNickname("Johny"); nicknames.addNickname("JayJay"); vcard.setNicknames(nicknames); CategoriesFeature categories = new CategoriesType(); categories.addCategory("Category 1"); categories.addCategory("Category 2"); categories.addCategory("Category 3"); vcard.setCategories(categories); vcard.setSecurityClass(new ClassType()); vcard.setProductId(new ProductIdType()); vcard.setSortString(new SortStringType()); vcard.setMailer(new MailerType()); vcard.setTitle(new TitleType()); vcard.setRole(new RoleType()); OrganizationFeature organizations = new OrganizationType(); organizations.addOrganization("IBM"); organizations.addOrganization("SUN"); vcard.setOrganizations(organizations); vcard.setUID(new UIDType()); vcard.addURL(new URLType()); vcard.setGeographicPosition(new GeographicPositionType()); vcard.setBirthday(new BirthdayType()); vcard.setRevision(new RevisionType()); vcard.setTimeZone(new TimeZoneType()); AddressFeature address1 = new 
AddressType(); vcard.addAddress(address1); LabelFeature labelForAddress1 = new LabelType(); vcard.setLabel(labelForAddress1, address1); TelephoneFeature telephone = new TelephoneType(); vcard.addTelephoneNumber(telephone); TelephoneFeature telephone2 = new TelephoneType(); vcard.addTelephoneNumber(telephone2); EmailFeature email = new EmailType(); vcard.addEmail(email); NoteFeature note = new NoteType(); vcard.addNote(note); PhotoFeature photo1 = new PhotoType(); vcard.addPhoto(photo1); LogoFeature logo = new LogoType(); vcard.addLogo(logo); SoundFeature sound = new SoundType(); vcard.addSound(sound); vcard.addExtendedType(new ExtendedType("X-MISC", "Something")); ((VCardErrorHandling)vcard).setThrowExceptions(false); return vcard; } /** * test vcard interfaces * @throws Exception */ public static void testVcard() throws Exception { // Create VCard Writer VCardWriter vcardWriter = new VCardWriter(); // Set VCard Writer Parameters vcardWriter.setOutputVersion(VCardVersion.V3_0); vcardWriter.setFoldingScheme(FoldingScheme.MIME_DIR); vcardWriter.setCompatibilityMode(CompatibilityMode.RFC2426); // Get a VCard VCard fullNoErrorVCard = getFullVCardNoErrors(); VCard fullAllErrorVCard = getFullVCardAllErrors(); // Set it on the Writer. 
vcardWriter.setVCard(fullNoErrorVCard); // Write the VCard String vcardString = vcardWriter.buildVCardString(); System.out.println("Full VCard No Error"); System.out.println("----------------"); System.out.println(vcardString); if(vcardWriter.hasErrors()) { System.out.println("Errors\n----------------"); List<VCardError> errors = ((VCardErrorHandling)vcardWriter.getVCard()).getErrors(); for(int i = 0; i < errors.size(); i++) { System.out.println(errors.get(i).getErrorMessage()); } } System.out.println("----------------"); // Write the VCard vcardWriter.setVCard(fullAllErrorVCard); vcardString = vcardWriter.buildVCardString(); System.out.println("Full VCard With Errors"); System.out.println("----------------"); System.out.println(vcardString); if(vcardWriter.hasErrors()) { System.out.println("Errors\n----------------"); List<VCardError> errors = ((VCardErrorHandling)vcardWriter.getVCard()).getErrors(); for(int i = 0; i < errors.size(); i++) { System.out.println(StringUtil.formatException(errors.get(i).getError())); } } System.out.println("----------------"); } }
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.iotdata; import static java.util.concurrent.Executors.newFixedThreadPool; import javax.annotation.Generated; import com.amazonaws.services.iotdata.model.*; import com.amazonaws.client.AwsAsyncClientParams; import com.amazonaws.annotation.ThreadSafe; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; import java.util.concurrent.ExecutorService; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; /** * Client for accessing AWS IoT Data Plane asynchronously. Each asynchronous method will return a Java Future object * representing the asynchronous operation; overloads which accept an {@code AsyncHandler} can be used to receive * notification when an asynchronous operation completes. * <p> * <fullname>AWS IoT</fullname> * <p> * AWS IoT-Data enables secure, bi-directional communication between Internet-connected things (such as sensors, * actuators, embedded devices, or smart appliances) and the AWS cloud. It implements a broker for applications and * things to publish messages over HTTP (Publish) and retrieve, update, and delete thing shadows. A thing shadow is a * persistent representation of your things and their state in the AWS cloud. 
 * </p>
 */
@ThreadSafe
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AWSIotDataAsyncClient extends AWSIotDataClient implements AWSIotDataAsync {

    // Matches the service's default maximum number of concurrent connections.
    private static final int DEFAULT_THREAD_POOL_SIZE = 50;

    // Executes all asynchronous requests issued through this client.
    private final java.util.concurrent.ExecutorService executorService;

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane. A credentials provider
     * chain will be used that searches for credentials in this order:
     * <ul>
     * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
     * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
     * <li>Credential profiles file at the default location (~/.aws/credentials) shared by all AWS SDKs and the AWS CLI</li>
     * <li>Instance profile credentials delivered through the Amazon EC2 metadata service</li>
     * </ul>
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing 50 threads (to match the default
     * maximum number of concurrent connections to the service).
     *
     * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#defaultClient()}
     */
    @Deprecated
    public AWSIotDataAsyncClient() {
        this(DefaultAWSCredentialsProviderChain.getInstance());
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane. A credentials provider
     * chain will be used that searches for credentials in this order:
     * <ul>
     * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
     * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
     * <li>Credential profiles file at the default location (~/.aws/credentials) shared by all AWS SDKs and the AWS CLI</li>
     * <li>Instance profile credentials delivered through the Amazon EC2 metadata service</li>
     * </ul>
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing a number of threads equal to the
     * maximum number of concurrent connections configured via {@code ClientConfiguration.getMaxConnections()}.
     *
     * @param clientConfiguration
     *        The client configuration options controlling how this client connects to AWS IoT Data Plane (ex: proxy
     *        settings, retry counts, etc).
     *
     * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withClientConfiguration(ClientConfiguration)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(ClientConfiguration clientConfiguration) {
        this(DefaultAWSCredentialsProviderChain.getInstance(), clientConfiguration, newFixedThreadPool(clientConfiguration.getMaxConnections()));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified AWS
     * account credentials.
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing 50 threads (to match the default
     * maximum number of concurrent connections to the service).
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when authenticating with AWS services.
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentials awsCredentials) {
        this(awsCredentials, newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified AWS
     * account credentials and executor service. Default client settings will be used.
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when authenticating with AWS services.
     * @param executorService
     *        The executor service by which all asynchronous requests will be executed.
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)} and
     *             {@link AWSIotDataAsyncClientBuilder#withExecutorFactory(com.amazonaws.client.builder.ExecutorFactory)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentials awsCredentials, ExecutorService executorService) {
        this(awsCredentials, configFactory.getConfig(), executorService);
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified AWS
     * account credentials, executor service, and client configuration options.
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when authenticating with AWS services.
     * @param clientConfiguration
     *        Client configuration options (ex: max retry limit, proxy settings, etc).
     * @param executorService
     *        The executor service by which all asynchronous requests will be executed.
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)} and
     *             {@link AWSIotDataAsyncClientBuilder#withClientConfiguration(ClientConfiguration)} and
     *             {@link AWSIotDataAsyncClientBuilder#withExecutorFactory(com.amazonaws.client.builder.ExecutorFactory)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentials awsCredentials, ClientConfiguration clientConfiguration, ExecutorService executorService) {
        super(awsCredentials, clientConfiguration);
        this.executorService = executorService;
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified AWS
     * account credentials provider. Default client settings will be used.
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing 50 threads (to match the default
     * maximum number of concurrent connections to the service).
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to authenticate requests with AWS services.
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentialsProvider awsCredentialsProvider) {
        this(awsCredentialsProvider, newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the provided AWS
     * account credentials provider and client configuration options.
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing a number of threads equal to the
     * maximum number of concurrent connections configured via {@code ClientConfiguration.getMaxConnections()}.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to authenticate requests with AWS services.
     * @param clientConfiguration
     *        Client configuration options (ex: max retry limit, proxy settings, etc).
     *
     * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)} and
     *             {@link AWSIotDataAsyncClientBuilder#withClientConfiguration(ClientConfiguration)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentialsProvider awsCredentialsProvider, ClientConfiguration clientConfiguration) {
        this(awsCredentialsProvider, clientConfiguration, newFixedThreadPool(clientConfiguration.getMaxConnections()));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified AWS
     * account credentials provider and executor service. Default client settings will be used.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to authenticate requests with AWS services.
     * @param executorService
     *        The executor service by which all asynchronous requests will be executed.
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)} and
     *             {@link AWSIotDataAsyncClientBuilder#withExecutorFactory(com.amazonaws.client.builder.ExecutorFactory)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentialsProvider awsCredentialsProvider, ExecutorService executorService) {
        this(awsCredentialsProvider, configFactory.getConfig(), executorService);
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified AWS
     * account credentials provider, executor service, and client configuration options.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to authenticate requests with AWS services.
     * @param clientConfiguration
     *        Client configuration options (ex: max retry limit, proxy settings, etc).
     * @param executorService
     *        The executor service by which all asynchronous requests will be executed.
     * @deprecated use {@link AWSIotDataAsyncClientBuilder#withCredentials(AWSCredentialsProvider)} and
     *             {@link AWSIotDataAsyncClientBuilder#withClientConfiguration(ClientConfiguration)} and
     *             {@link AWSIotDataAsyncClientBuilder#withExecutorFactory(com.amazonaws.client.builder.ExecutorFactory)}
     */
    @Deprecated
    public AWSIotDataAsyncClient(AWSCredentialsProvider awsCredentialsProvider, ClientConfiguration clientConfiguration, ExecutorService executorService) {
        super(awsCredentialsProvider, clientConfiguration);
        this.executorService = executorService;
    }

    /**
     * Returns a builder for this client; the non-deprecated way to construct instances.
     */
    public static AWSIotDataAsyncClientBuilder asyncBuilder() {
        return AWSIotDataAsyncClientBuilder.standard();
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on AWS IoT Data Plane using the specified
     * parameters.
     *
     * @param asyncClientParams
     *        Object providing client parameters.
     */
    AWSIotDataAsyncClient(AwsAsyncClientParams asyncClientParams) {
        super(asyncClientParams);
        this.executorService = asyncClientParams.getExecutor();
    }

    /**
     * Returns the executor service used by this client to execute async requests.
     *
     * @return The executor service used by this client to execute async requests.
*/ public ExecutorService getExecutorService() { return executorService; } @Override public java.util.concurrent.Future<DeleteThingShadowResult> deleteThingShadowAsync(DeleteThingShadowRequest request) { return deleteThingShadowAsync(request, null); } @Override public java.util.concurrent.Future<DeleteThingShadowResult> deleteThingShadowAsync(final DeleteThingShadowRequest request, final com.amazonaws.handlers.AsyncHandler<DeleteThingShadowRequest, DeleteThingShadowResult> asyncHandler) { final DeleteThingShadowRequest finalRequest = beforeClientExecution(request); return executorService.submit(new java.util.concurrent.Callable<DeleteThingShadowResult>() { @Override public DeleteThingShadowResult call() throws Exception { DeleteThingShadowResult result = null; try { result = executeDeleteThingShadow(finalRequest); } catch (Exception ex) { if (asyncHandler != null) { asyncHandler.onError(ex); } throw ex; } if (asyncHandler != null) { asyncHandler.onSuccess(finalRequest, result); } return result; } }); } @Override public java.util.concurrent.Future<GetThingShadowResult> getThingShadowAsync(GetThingShadowRequest request) { return getThingShadowAsync(request, null); } @Override public java.util.concurrent.Future<GetThingShadowResult> getThingShadowAsync(final GetThingShadowRequest request, final com.amazonaws.handlers.AsyncHandler<GetThingShadowRequest, GetThingShadowResult> asyncHandler) { final GetThingShadowRequest finalRequest = beforeClientExecution(request); return executorService.submit(new java.util.concurrent.Callable<GetThingShadowResult>() { @Override public GetThingShadowResult call() throws Exception { GetThingShadowResult result = null; try { result = executeGetThingShadow(finalRequest); } catch (Exception ex) { if (asyncHandler != null) { asyncHandler.onError(ex); } throw ex; } if (asyncHandler != null) { asyncHandler.onSuccess(finalRequest, result); } return result; } }); } @Override public java.util.concurrent.Future<PublishResult> 
publishAsync(PublishRequest request) { return publishAsync(request, null); } @Override public java.util.concurrent.Future<PublishResult> publishAsync(final PublishRequest request, final com.amazonaws.handlers.AsyncHandler<PublishRequest, PublishResult> asyncHandler) { final PublishRequest finalRequest = beforeClientExecution(request); return executorService.submit(new java.util.concurrent.Callable<PublishResult>() { @Override public PublishResult call() throws Exception { PublishResult result = null; try { result = executePublish(finalRequest); } catch (Exception ex) { if (asyncHandler != null) { asyncHandler.onError(ex); } throw ex; } if (asyncHandler != null) { asyncHandler.onSuccess(finalRequest, result); } return result; } }); } @Override public java.util.concurrent.Future<UpdateThingShadowResult> updateThingShadowAsync(UpdateThingShadowRequest request) { return updateThingShadowAsync(request, null); } @Override public java.util.concurrent.Future<UpdateThingShadowResult> updateThingShadowAsync(final UpdateThingShadowRequest request, final com.amazonaws.handlers.AsyncHandler<UpdateThingShadowRequest, UpdateThingShadowResult> asyncHandler) { final UpdateThingShadowRequest finalRequest = beforeClientExecution(request); return executorService.submit(new java.util.concurrent.Callable<UpdateThingShadowResult>() { @Override public UpdateThingShadowResult call() throws Exception { UpdateThingShadowResult result = null; try { result = executeUpdateThingShadow(finalRequest); } catch (Exception ex) { if (asyncHandler != null) { asyncHandler.onError(ex); } throw ex; } if (asyncHandler != null) { asyncHandler.onSuccess(finalRequest, result); } return result; } }); } /** * Shuts down the client, releasing all managed resources. This includes forcibly terminating all pending * asynchronous service calls. 
Clients who wish to give pending asynchronous service calls time to complete should * call {@code getExecutorService().shutdown()} followed by {@code getExecutorService().awaitTermination()} prior to * calling this method. */ @Override public void shutdown() { super.shutdown(); executorService.shutdownNow(); } }
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.inventory.base;

import static org.hawkular.inventory.api.filters.With.type;

import java.util.Iterator;

import org.hawkular.inventory.api.Action;
import org.hawkular.inventory.api.Configuration;
import org.hawkular.inventory.api.Relationships;
import org.hawkular.inventory.api.filters.Filter;
import org.hawkular.inventory.api.filters.Related;
import org.hawkular.inventory.api.model.AbstractElement;
import org.hawkular.inventory.api.model.Entity;
import org.hawkular.inventory.api.model.Path;
import org.hawkular.inventory.api.model.Relationship;
import org.hawkular.inventory.base.spi.InventoryBackend;
import org.hawkular.inventory.base.spi.SwitchElementType;

import rx.subjects.Subject;

/**
 * Holds the data needed throughout the construction of inventory traversal.
 *
 * @author Lukas Krejci
 * @since 0.1.0
 */
public final class TraversalContext<BE, E extends AbstractElement<?, ?>> {

    /**
     * The inventory instance we're operating in.
     */
    protected final BaseInventory<BE> inventory;

    /**
     * The query to the "point" right before the entities of interest.
     */
    protected final Query sourcePath;

    /**
     * A query that will select the entities of interest from the {@link #sourcePath}.
     */
    protected final Query selectCandidates;

    /**
     * The inventory backend to be used for querying and persistence.
     */
    protected final InventoryBackend<BE> backend;

    /**
     * The type of the entity currently being sought after.
     */
    protected final Class<E> entityClass;

    /**
     * The user provided configuration.
     */
    protected final Configuration configuration;

    /**
     * The previous context, from which this one was created. Can be null.
     */
    protected final TraversalContext<BE, ?> previous;

    // Registry of observers to which entity/relationship actions are broadcast (see notify(..)).
    private final ObservableContext observableContext;

    // How many times a failed transaction may be retried; read once from configuration.
    private final int transactionRetries;

    /**
     * Creates a root context (no {@link #previous}), reading the transaction retry count from the configuration.
     */
    TraversalContext(BaseInventory<BE> inventory, Query sourcePath, Query selectCandidates,
            InventoryBackend<BE> backend, Class<E> entityClass, Configuration configuration,
            ObservableContext observableContext) {
        this(inventory, sourcePath, selectCandidates, backend, entityClass, configuration, observableContext,
                getTransactionRetries(configuration), null);
    }

    /**
     * Canonical constructor; all other constructors and builders funnel here.
     */
    private TraversalContext(BaseInventory<BE> inventory, Query sourcePath, Query selectCandidates,
            InventoryBackend<BE> backend, Class<E> entityClass, Configuration configuration,
            ObservableContext observableContext, int transactionRetries, TraversalContext<BE, ?> previous) {
        this.inventory = inventory;
        this.sourcePath = sourcePath;
        this.selectCandidates = selectCandidates;
        this.backend = backend;
        this.entityClass = entityClass;
        this.configuration = configuration;
        this.observableContext = observableContext;
        this.transactionRetries = transactionRetries;
        this.previous = previous;
    }

    /**
     * Reads the {@link BaseInventory#TRANSACTION_RETRIES} property from the configuration, defaulting to 5.
     */
    private static int getTransactionRetries(Configuration configuration) {
        String retries = configuration.getProperty(BaseInventory.TRANSACTION_RETRIES, "5");
        return Integer.parseInt(retries);
    }

    /**
     * If the current position in the traversal defines any select candidates, the new context will have its source path
     * composed by appending the select candidates to the current source path.
     *
     * @return a context builder with the modified source path
     */
    Builder<BE, E> proceed() {
        return new Builder<>(this, hop(), Query.filter(), entityClass);
    }

    /**
     * The new context will have the source path composed by appending current select candidates to the current source
     * path and its select candidates will filter for entities related by the provided relationship to the new sources
     * and will have the provided type.
     *
     * @param over       the relationship with which the select candidates will be related to the entities on the source
     *                   path
     * @param entityType the type of the entities related to the entities on the source path
     * @param <T>        the type of the "target" entities
     * @return a context builder with the modified source path, select candidates and type
     */
    <T extends Entity<?, ?>> Builder<BE, T> proceedTo(Relationships.WellKnown over, Class<T> entityType) {
        return new Builder<>(this, hop(), Query.filter(), entityType).hop(Related.by(over), type(entityType));
    }

    /**
     * Virtually identical to {@link #proceedTo(Relationships.WellKnown, Class)} only follows the relationship in the
     * opposite direction.
     *
     * @param over       the relationship to retreat over (i.e. if the current position is the target of the
     *                   relationship, the sought after entity type needs to be the source of the relationship).
     * @param entityType the type of the entities to retreat to
     * @param <T>        the type of the "target" entities
     * @return a context builder with the modified source path, select candidates and type
     */
    <T extends Entity<?, ?>> Builder<BE, T> retreatTo(Relationships.WellKnown over, Class<T> entityType) {
        return new Builder<>(this, hop(), Query.filter(), entityType).hop(Related.asTargetBy(over), type(entityType));
    }

    /**
     * The new context will have the source path composed by appending current select candidates to the current source
     * path. The new context will have select candidates such that it will select the relationships in given direction
     * stemming from the entities on the new source path.
     *
     * @param direction the direction of the relationships to look for
     * @return a context builder with the modified source path, select candidates and type
     */
    Builder<BE, Relationship> proceedToRelationships(Relationships.Direction direction) {
        return new Builder<>(this, hop(), Query.filter(), Relationship.class)
                .hop(new SwitchElementType(direction, false));
    }

    /**
     * An opposite of {@link #proceedToRelationships(Relationships.Direction)}.
     *
     * @param direction  the direction in which to "leave" the relationships
     * @param entityType the type of entities to "hop to"
     * @param <T>        the type of entities to "hop to"
     * @return a context builder with the modified source path, select candidates and type
     */
    <T extends Entity<?, ?>> Builder<BE, T> proceedFromRelationshipsTo(Relationships.Direction direction,
            Class<T> entityType) {
        return new Builder<>(this, hop(), Query.filter(), entityType)
                .hop(new SwitchElementType(direction, true)).where(type(entityType));
    }

    /**
     * @return a new query selecting the select candidates from the source path. The resulting extender
     * is set up to append filter fragments.
     */
    Query.SymmetricExtender select() {
        return sourcePath.extend().filter().withExact(selectCandidates);
    }

    /**
     * @return appends the select candidates to the source path. The only difference between this and {@link #select()}
     * is that this method returns the extender set up to append path fragments.
     */
    Query.SymmetricExtender hop() {
        return sourcePath.extend().path().withExact(selectCandidates).path();
    }

    /**
     * Constructs a new traversal context by replacing the source path with the provided query and clearing out the
     * selected candidates.
     *
     * @param path the source path of the new context
     * @return a new traversal context with the provided source path and empty select candidates, but otherwise
     * identical to this one. The current context becomes the new context's {@link #previous}.
     */
    TraversalContext<BE, E> replacePath(Query path) {
        return new TraversalContext<>(inventory, path, Query.empty(), backend, entityClass, configuration,
                observableContext, transactionRetries, this);
    }

    /**
     * Replaces the source path with a query resolving the provided canonical path, after checking that the path's
     * target element type matches {@link #entityClass}.
     *
     * @param path the canonical path to proceed to
     * @throws IllegalArgumentException if the path targets a different element type than this context's
     */
    TraversalContext<BE, E> proceedTo(Path path) {
        if (!entityClass.equals(path.getSegment().getElementType())) {
            throw new IllegalArgumentException("Path doesn't point to the type of element currently being accessed.");
        }
        return replacePath(Util.extendTo(this, path));
    }

    /**
     * Sends out the notification to the subscribers.
     *
     * @param entity the entity on which the action took place
     * @param action the action (for which the entity and context resolve to the same type)
     * @param <V>    the type of the entity and at the same time the type of the action context
     * @see #notify(Object, Object, Action)
     */
    <V> void notify(V entity, Action<V, V> action) {
        notify(entity, entity, action);
    }

    /**
     * Sends out the notification to the subscribers.
     *
     * @param entity        the entity on which the action occurred
     * @param actionContext the description of the action
     * @param action        the actual action
     * @param <C>           the type of the action description (aka context)
     * @param <V>           the type of the entity on which the action occurred
     */
    <C, V> void notify(V entity, C actionContext, Action<C, V> action) {
        // Only subjects matching both the action and the concrete entity receive the event.
        Iterator<Subject<C, C>> subjects = observableContext.matchingSubjects(action, entity);
        while (subjects.hasNext()) {
            Subject<C, C> s = subjects.next();
            s.onNext(actionContext);
        }
    }

    /**
     * @return the number of times a failed transaction may be retried (see constructor / configuration).
     */
    public int getTransactionRetriesCount() {
        return transactionRetries;
    }

    /**
     * Sends out all the pending notifications in the supplied object.
     *
     * @param entityAndNotifications the list of pending notifications
     */
    void notifyAll(EntityAndPendingNotifications<?> entityAndNotifications) {
        entityAndNotifications.getNotifications().forEach(this::notify);
    }

    /**
     * Another way of sending out a notification.
     *
     * @param notification the notification to send out
     * @param <C>          the type of the action description (aka context)
     * @param <V>          the type of the entity on which the action occurred
     */
    <C, V> void notify(Notification<C, V> notification) {
        notify(notification.getValue(), notification.getActionContext(), notification.getAction());
    }

    /**
     * Builds a new traversal context.
     *
     * @param <BE> the type of the backend elements
     * @param <E>  the type of the inventory element the new context will represent
     */
    public static final class Builder<BE, E extends AbstractElement<?, ?>> {
        // Context the new one is derived from; supplies everything not captured below.
        private final TraversalContext<BE, ?> sourceContext;
        // Extender accumulating the new context's source path.
        private final Query.SymmetricExtender pathExtender;
        // Extender accumulating the new context's select candidates.
        private final Query.SymmetricExtender selectExtender;
        private final Class<E> entityClass;

        public Builder(TraversalContext<BE, ?> sourceContext, Query.SymmetricExtender pathExtender,
                Query.SymmetricExtender selectExtender, Class<E> entityClass) {
            this.sourceContext = sourceContext;
            this.pathExtender = pathExtender;
            this.selectExtender = selectExtender;
            this.entityClass = entityClass;
        }

        /**
         * Appends the sets of filters in succession to the select candidates.
         *
         * <p>NOTE(review): for a single set this delegates to {@link #where(Filter...)} (filter fragments), but for
         * multiple sets each set goes through {@code hop().where(fs)} — confirm this asymmetry is intended.
         *
         * @param filters the sets of filters to apply
         * @return this builder
         * @see #where(Filter[][])
         * @see #where(Filter...)
         */
        public Builder<BE, E> whereAll(Filter[][] filters) {
            if (filters.length == 1) {
                return where(filters[0]);
            } else {
                for (Filter[] fs : filters) {
                    hop().where(fs);
                }
                return this;
            }
        }

        /**
         * Create query branches in the select candidates with each of the provided sets of filters.
         *
         * @param filters the sets of filters, each representing a new branch in the query
         * @return this builder
         */
        public Builder<BE, E> where(Filter[][] filters) {
            selectExtender.filter().with(filters);
            return this;
        }

        /**
         * Appends the provided set of filters to the current select candidates.
         *
         * @param filters the set of filters to append
         * @return this builder
         */
        public Builder<BE, E> where(Filter... filters) {
            selectExtender.filter().with(filters);
            return this;
        }

        /**
         * Create query branches in the select candidates with each of the provided sets of filters.
         * The filters are applied as path fragments.
         *
         * @param filters the sets of the filters to append as path fragments
         * @return this builder
         */
        public Builder<BE, E> hop(Filter[][] filters) {
            selectExtender.path().with(filters);
            return this;
        }

        /**
         * Appends the provided set of filters to the current select candidates.
         * The filters are applied as path fragments.
         *
         * @param filters the set of filters to append as path fragments
         * @return this builder
         */
        public Builder<BE, E> hop(Filter... filters) {
            selectExtender.path().with(filters);
            return this;
        }

        /**
         * @return a new traversal context set up using this builder
         */
        TraversalContext<BE, E> get() {
            return new TraversalContext<>(sourceContext.inventory, pathExtender.get(), selectExtender.get(),
                    sourceContext.backend, entityClass, sourceContext.configuration, sourceContext.observableContext,
                    sourceContext.transactionRetries, sourceContext);
        }

        /**
         * Changes the entity type of the to-be-returned traversal context.
         *
         * @param entityType the type of entities to be returned by traversals using the new context
         * @param <T>        the type
         * @return a new traversal context set up using this builder and querying for entities of the provided type
         */
        <T extends AbstractElement<?, ?>> TraversalContext<BE, T> getting(Class<T> entityType) {
            return new TraversalContext<>(sourceContext.inventory, pathExtender.get(), selectExtender.get(),
                    sourceContext.backend, entityType, sourceContext.configuration, sourceContext.observableContext,
                    sourceContext.transactionRetries, sourceContext);
        }
    }
}
package com.livescribe.web.registration.client;

import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;

import com.livescribe.framework.exception.ClientException;
import com.livescribe.framework.exception.InvalidParameterException;
import com.livescribe.framework.exception.MultipleRecordsFoundException;
import com.livescribe.framework.web.response.ResponseCode;
import com.livescribe.framework.web.response.ServiceResponse;
import com.livescribe.web.registration.BaseTest;
import com.livescribe.web.registration.TestConstants;
import com.livescribe.web.registration.controller.RegistrationData;
import com.livescribe.web.registration.dto.RegistrationDTO;
import com.livescribe.web.registration.dto.RegistrationHistoryDTO;
import com.livescribe.web.registration.dto.WarrantyDTO;
import com.livescribe.web.registration.exception.RegistrationNotFoundException;
import com.livescribe.web.registration.jetty.HttpTestServer;
import com.livescribe.web.registration.response.RegistrationHistoryListResponse;
import com.livescribe.web.registration.response.RegistrationListResponse;
import com.livescribe.web.registration.response.RegistrationResponse;
import com.livescribe.web.registration.response.WarrantyListResponse;
import com.livescribe.web.registration.response.WarrantyResponse;

/**
 * Integration tests for {@link RegistrationClient}, exercised against an embedded
 * {@link HttpTestServer} that plays the registration service endpoint.
 */
public class RegistrationClientTest extends BaseTest implements TestConstants {

    // Well-known fixture values shared across the test cases.
    private static final String PEN_DISPLAY_ID_1 = "AYE-ASX-DWY-UY";
    private static final String PEN_SERIAL_1 = "2594172913044";
    private static final String APP_ID_1 = "com.livescribe.web.KFMTestApp-JMeter-1";
    private static final String APP_ID_2 = "com.livescribe.web.KFMTestApp-JMeter-2";
    private static final String EMAIL_1 = "kfm1@ls.com";

    // Embedded Jetty server standing in for the registration service.
    private HttpTestServer server = null;

    // Client under test.
    private RegistrationClient client;

    // A valid registration payload, rebuilt before each test.
    private RegistrationData regData;

    // A payload with an empty appId, used to provoke InvalidParameterException.
    private RegistrationData errorRegData1;

    // Injected for direct DB-level verification; not every test uses it.
    @Autowired
    private JdbcTemplate jdbcTemplate;

    /**
     * Creates the client under test and the embedded HTTP test server.
     *
     * @throws Exception if the client or the test server cannot be instantiated
     */
    public RegistrationClientTest() throws Exception {
        super();
        logger.debug("Starting up ...");
        this.client = new RegistrationClient();
        this.server = new HttpTestServer();
        logger.debug("Instantiated.");
    }

    /**
     * Starts the test server and builds fresh request payloads before each test.
     */
    @Before
    public void setUp() throws Exception {
        logger.debug("BEFORE - setup()");
        this.server.start();

        regData = new RegistrationData();
        regData.setAppId(APP_ID_1);
        regData.setCountry("United States");
        regData.setDisplayId(PEN_DISPLAY_ID_1);
        regData.setEdition(0);
        // Reuse the shared constant instead of duplicating the literal.
        regData.setEmail(EMAIL_1);
        regData.setFirstName("Jack");
        regData.setLastName("Straw");
        regData.setLocale("en_US");
        // Boolean.FALSE instead of the deprecated new Boolean(false) allocation.
        regData.setOptIn(Boolean.FALSE);
        regData.setPenName("Random Vector Pen #1");
        regData.setPenSerial(PEN_SERIAL_1);

        errorRegData1 = new RegistrationData();
        errorRegData1.setAppId(""); // empty appId is the deliberate invalid parameter
        errorRegData1.setCountry("United States");
        errorRegData1.setDisplayId(PEN_DISPLAY_ID_1);
        errorRegData1.setEdition(0);
        errorRegData1.setEmail("kfm2@ls.com");
        errorRegData1.setFirstName("Jack");
        errorRegData1.setLastName("Straw");
        errorRegData1.setLocale("en_US");
        errorRegData1.setOptIn(Boolean.FALSE);
        errorRegData1.setPenName("Random Vector Pen #2");
        errorRegData1.setPenSerial(PEN_SERIAL_1);

        logger.debug("AFTER - setup()");
    }

    /**
     * Stops the embedded test server after each test.
     */
    @After
    public void tearDown() throws Exception {
        this.server.stop();
    }

    /*------------------------------------------------------------
     * Register Test Cases
     *------------------------------------------------------------*/

    @Test
    public void testRegisterPen_Success() throws Exception {
        logger.debug("---------- START - testRegisterPen_Success()");
        ServiceResponse response = client.register(regData);
        Assert.assertNotNull("The returned response object was 'null'.", response);
        Assert.assertEquals("Incorrect ResponseCode received.", ResponseCode.SUCCESS, response.getResponseCode());
        logger.debug("---------- END - testRegisterPen_Success()");
    }

    @Test
    public void testRegister_Fail_MissingParameter() throws Exception {
        logger.debug("---------- START - testRegister_Fail_MissingParameter()");
        boolean expThrown = false;
        try {
            client.register(errorRegData1);
        } catch (InvalidParameterException e) {
            // Expected: the empty appId must be rejected.
            expThrown = true;
        }
        Assert.assertTrue("The 'InvalidParameterException' was NOT thrown.", expThrown);
        logger.debug("---------- END - testRegister_Fail_MissingParameter()");
    }

    /*------------------------------------------------------------
     * Find Registration Test Cases
     *------------------------------------------------------------*/

    @Test
    public void testFindUniqueRegistration_Success() throws Exception {
        logger.debug("---------- START - testFindUniqueRegistration_Success()");
        RegistrationResponse response = client.findUniqueRegistration(APP_ID_1, PEN_DISPLAY_ID_1, EMAIL_1);
        Assert.assertNotNull("The returned response object was 'null'.", response);
        Assert.assertEquals("Incorrect response returned.", "United States",
                response.getRegistrationDto().getCountry());
        logger.debug("---------- END - testFindUniqueRegistration_Success()");
    }

    @Test
    public void testFindRegistrationByPenSerial_Success() throws Exception {
        logger.debug("---------- START - testFindRegistrationByPenSerial_Success()");
        // NOTE(review): this passes a display ID to a method named "...ByPenSerial" — confirm intended.
        RegistrationListResponse response = client.findRegistrationsListByPenSerial(PEN_DISPLAY_ID_1);
        Assert.assertNotNull("The returned response object was 'null'.", response);
        List<RegistrationDTO> list = response.getRegistrations();
        Assert.assertEquals("Incorrect number of registrations returned.", 3, list.size());
        logger.debug("---------- END - testFindRegistrationByPenSerial_Success()");
    }

    @Test
    public void testFindRegistrationByPenSerial_Fail_RegistrationNotFound() {
        logger.debug("---------- START - testFindRegistrationByPenSerial_Fail_RegistrationNotFound()");
        RegistrationListResponse response = null;
        boolean correctExpThrown = false;
        try {
            response = client.findRegistrationsListByPenSerial(PEN_DISPLAY_ID_NON_EXISTENT);
        } catch (IllegalStateException ise) {
            correctExpThrown = false;
        } catch (InvalidParameterException ipe) {
            correctExpThrown = false;
        } catch (RegistrationNotFoundException rnfe) {
            // The only exception type this test accepts as correct.
            correctExpThrown = true;
        } catch (ClientException ce) {
            correctExpThrown = false;
        } catch (IOException ioe) {
            correctExpThrown = false;
        }

        // Diagnostic dump in case the call unexpectedly succeeded.
        if (response != null) {
            List<RegistrationDTO> list = response.getRegistrations();
            logger.debug("list contains " + list.size() + " registrations.");
            RegistrationDTO reg = list.get(0);
            logger.debug(reg.toString());
        }

        Assert.assertTrue("Incorrect exception thrown.", correctExpThrown);
        Assert.assertNull("The RegistrationListResponse object was NOT 'null'.", response);
        logger.debug("---------- END - testFindRegistrationByPenSerial_Fail_RegistrationNotFound()");
    }

    /*------------------------------------------------------------
     * Find Registration History Test Cases
     *------------------------------------------------------------*/

    @Test
    public void testFindRegistrationHistoryByEmail_Success()
            throws InvalidParameterException, RegistrationNotFoundException, ClientException {
        logger.debug("---------- START - testFindRegistrationHistoryByEmail_Success()");
        RegistrationHistoryListResponse response =
                client.findRegistrationHistoryByEmail(XML_LOADED_REGISTRATION_HISTORY_EMAIL_1);
        Assert.assertNotNull("The returned RegistrationHistoryListResponse was 'null'.", response);
        List<RegistrationHistoryDTO> list = response.getRegistrationHistories();
        Assert.assertNotNull("The returned List was 'null'.", list);
        Assert.assertEquals("Incorrect number of RegistrationHistory objects was returned.", 4, list.size());
        RegistrationHistoryDTO regHistDto = list.get(0);
        Assert.assertEquals("Incorrect email address returned.", XML_LOADED_REGISTRATION_HISTORY_EMAIL_1,
                regHistDto.getEmail());
        logger.debug("---------- END - testFindRegistrationHistoryByEmail_Success()");
    }

    @Test
    public void testFindRegistrationHistoryByPenSerial_Success()
            throws InvalidParameterException, RegistrationNotFoundException, ClientException {
        logger.debug("---------- START - testFindRegistrationHistoryByPenSerial_Success()");
        RegistrationHistoryListResponse response =
                client.findRegistrationHistoryByPenSerial(XML_LOADED_REGISTRATION_HISTORY_PEN_SERIAL_1);
        Assert.assertNotNull("The returned RegistrationHistoryListResponse was 'null'.", response);
        List<RegistrationHistoryDTO> list = response.getRegistrationHistories();
        Assert.assertNotNull("The returned List was 'null'.", list);
        Assert.assertEquals("Incorrect number of RegistrationHistory objects was returned.", 2, list.size());
        RegistrationHistoryDTO regHistDto = list.get(0);
        Assert.assertEquals("Incorrect pen display ID returned.", XML_LOADED_REGISTRATION_HISTORY_PEN_SERIAL_1,
                regHistDto.getDisplayId());
        logger.debug("---------- END - testFindRegistrationHistoryByPenSerial_Success()");
    }

    /*------------------------------------------------------------
     * Find Warranty Test Cases
     *------------------------------------------------------------*/

    @Test
    public void testFindWarrantyByEmail_Success()
            throws InvalidParameterException, RegistrationNotFoundException, ClientException {
        logger.debug("---------- START - testFindWarrantyByEmail_Success()");
        WarrantyListResponse response = client.findWarrantyByEmail(XML_LOADED_WARRANTY_EMAIL_1);
        Assert.assertNotNull("The returned response was 'null'.", response);
        List<WarrantyDTO> list = response.getWarranties();
        Assert.assertEquals("Incorrect number of WarrantyDTO objects returned.", 3, list.size());
        logger.debug("---------- END - testFindWarrantyByEmail_Success()");
    }

    @Test
    public void testFindWarrantyByPenSerial_Success()
            throws InvalidParameterException, RegistrationNotFoundException, MultipleRecordsFoundException,
            ClientException {
        logger.debug("---------- START - testFindWarrantyByPenSerial_Success()");
        WarrantyResponse response = client.findWarrantyByPenSerial(XML_LOADED_WARRANTY_PEN_SERIAL_1);
        Assert.assertNotNull("The returned response was 'null'.", response);
        WarrantyDTO warrantyDto = response.getWarrantyDto();
        Assert.assertNotNull("The returned WarrantyDTO was 'null'.", warrantyDto);
        String email = warrantyDto.getEmail();
        Assert.assertEquals("Incorrect email address returned in WarrantyDTO.", XML_LOADED_WARRANTY_EMAIL_1, email);
        logger.debug("---------- END - testFindWarrantyByPenSerial_Success()");
    }

    /*------------------------------------------------------------
     * Delete Registration Test Cases
     *------------------------------------------------------------*/

    /**
     * NOTE: This test DOES NOT verify that a record was, in fact, deleted. It verifies that the client can
     * parse the "SUCCESS" XML response message that it receives.
     *
     * @throws SQLException declared for symmetry with DB-verifying variants of this test
     */
    @Test
    public void testDeleteByEmail_Success() throws SQLException {
        logger.debug("---------- START - testDeleteByEmail_Success()");
        // Delete the record.
        try {
            client.deleteByEmail(XML_LOADED_REGISTRATION_EMAIL_1);
        } catch (InvalidParameterException ipe) {
            Assert.fail("InvalidParameterException thrown");
        } catch (ClientException ce) {
            Assert.fail("ClientException thrown");
        }
        logger.debug("---------- END - testDeleteByEmail_Success()");
    }

    @Test
    public void testDeleteByEmail_Fail_NoEmailAddress() {
        logger.debug("---------- START - testDeleteByEmail_Fail_NoEmailAddress()");
        // Delete the record with an empty email address; the client must reject it.
        try {
            client.deleteByEmail("");
        } catch (InvalidParameterException ipe) {
            // Expected path: log and finish the test successfully.
            logger.debug(ipe.getMessage());
            logger.debug("---------- END - testDeleteByEmail_Fail_NoEmailAddress()");
            return;
        } catch (ClientException ce) {
            Assert.fail("ClientException thrown");
        }
        Assert.fail("No exception was thrown.");
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.sql.presto; import java.util.concurrent.TimeUnit; import org.apache.bookkeeper.stats.Counter; import org.apache.bookkeeper.stats.NullStatsProvider; import org.apache.bookkeeper.stats.OpStatsLogger; import org.apache.bookkeeper.stats.StatsLogger; import org.apache.bookkeeper.stats.StatsProvider; /** * This class helps to track metrics related to the connector. 
*/ public class PulsarConnectorMetricsTracker implements AutoCloseable{ private final StatsLogger statsLogger; private static final String SCOPE = "split"; // metric names // time spend waiting to get entry from entry queue because it is empty private static final String ENTRY_QUEUE_DEQUEUE_WAIT_TIME = "entry-queue-dequeue-wait-time"; // total time spend waiting to get entry from entry queue per query private static final String ENTRY_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY = "entry-queue-dequeue-wait-time-per-query"; // number of bytes read from bookkeeper private static final String BYTES_READ = "bytes-read"; // total number of bytes read per query private static final String BYTES_READ_PER_QUERY = "bytes-read-per-query"; // time spent derserializing entries private static final String ENTRY_DESERIALIZE_TIME = "entry-deserialize-time"; // time spent derserializing entries per query private static final String ENTRY_DESERIALIZE_TIME_PER_QUERY = "entry-deserialize-time_per_query"; // time spent waiting for message queue enqueue because message queue is full private static final String MESSAGE_QUEUE_ENQUEUE_WAIT_TIME = "message-queue-enqueue-wait-time"; // time spent waiting for message queue enqueue because message queue is full per query private static final String MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_PER_QUERY = "message-queue-enqueue-wait-time-per-query"; private static final String NUM_MESSAGES_DERSERIALIZED = "num-messages-deserialized"; // number of messages deserialized public static final String NUM_MESSAGES_DERSERIALIZED_PER_ENTRY = "num-messages-deserialized-per-entry"; // number of messages deserialized per query public static final String NUM_MESSAGES_DERSERIALIZED_PER_QUERY = "num-messages-deserialized-per-query"; // number of read attempts. 
Will fail if queues are full public static final String READ_ATTEMPTS = "read-attempts"; // number of read attempts per query public static final String READ_ATTEMTPS_PER_QUERY = "read-attempts-per-query"; // latency of reads per batch public static final String READ_LATENCY_PER_BATCH = "read-latency-per-batch"; // total read latency per query public static final String READ_LATENCY_PER_QUERY = "read-latency-per-query"; // number of entries per batch public static final String NUM_ENTRIES_PER_BATCH = "num-entries-per-batch"; // number of entries per query public static final String NUM_ENTRIES_PER_QUERY = "num-entries-per-query"; // time spent waiting to dequeue from message queue because its empty per query public static final String MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY = "message-queue-dequeue-wait-time-per-query"; // time spent deserializing message to record e.g. avro, json, etc public static final String RECORD_DESERIALIZE_TIME = "record-deserialize-time"; // time spent deserializing message to record per query private static final String RECORD_DESERIALIZE_TIME_PER_QUERY = "record-deserialize-time-per-query"; private static final String NUM_RECORD_DESERIALIZED = "num-record-deserialized"; private static final String TOTAL_EXECUTION_TIME = "total-execution-time"; // stats loggers private final OpStatsLogger statsLoggerEntryQueueDequeueWaitTime; private final Counter statsLoggerBytesRead; private final OpStatsLogger statsLoggerEntryDeserializeTime; private final OpStatsLogger statsLoggerMessageQueueEnqueueWaitTime; private final Counter statsLoggerNumMessagesDeserialized; private final OpStatsLogger statsLoggerNumMessagesDeserializedPerEntry; private final OpStatsLogger statsLoggerReadAttempts; private final OpStatsLogger statsLoggerReadLatencyPerBatch; private final OpStatsLogger statsLoggerNumEntriesPerBatch; private final OpStatsLogger statsLoggerRecordDeserializeTime; private final Counter statsLoggerNumRecordDeserialized; private final OpStatsLogger 
statsLoggerTotalExecutionTime; // internal tracking variables private long entryQueueDequeueWaitTimeStartTime; private long entryQueueDequeueWaitTimeSum = 0L; private long bytesReadSum = 0L; private long entryDeserializeTimeStartTime; private long entryDeserializeTimeSum = 0L; private long messageQueueEnqueueWaitTimeStartTime; private long messageQueueEnqueueWaitTimeSum = 0L; private long numMessagesDerserializedSum = 0L; private long numMessagedDerserializedPerBatch = 0L; private long readAttemptsSuccessSum = 0L; private long readAttemptsFailSum = 0L; private long readLatencySuccessSum = 0L; private long readLatencyFailSum = 0L; private long numEntriesPerBatchSum = 0L; private long messageQueueDequeueWaitTimeSum = 0L; private long recordDeserializeTimeStartTime; private long recordDeserializeTimeSum = 0L; public PulsarConnectorMetricsTracker(StatsProvider statsProvider) { this.statsLogger = statsProvider instanceof NullStatsProvider ? null : statsProvider.getStatsLogger(SCOPE); if (this.statsLogger != null) { statsLoggerEntryQueueDequeueWaitTime = statsLogger.getOpStatsLogger(ENTRY_QUEUE_DEQUEUE_WAIT_TIME); statsLoggerBytesRead = statsLogger.getCounter(BYTES_READ); statsLoggerEntryDeserializeTime = statsLogger.getOpStatsLogger(ENTRY_DESERIALIZE_TIME); statsLoggerMessageQueueEnqueueWaitTime = statsLogger.getOpStatsLogger(MESSAGE_QUEUE_ENQUEUE_WAIT_TIME); statsLoggerNumMessagesDeserialized = statsLogger.getCounter(NUM_MESSAGES_DERSERIALIZED); statsLoggerNumMessagesDeserializedPerEntry = statsLogger .getOpStatsLogger(NUM_MESSAGES_DERSERIALIZED_PER_ENTRY); statsLoggerReadAttempts = statsLogger.getOpStatsLogger(READ_ATTEMPTS); statsLoggerReadLatencyPerBatch = statsLogger.getOpStatsLogger(READ_LATENCY_PER_BATCH); statsLoggerNumEntriesPerBatch = statsLogger.getOpStatsLogger(NUM_ENTRIES_PER_BATCH); statsLoggerRecordDeserializeTime = statsLogger.getOpStatsLogger(RECORD_DESERIALIZE_TIME); statsLoggerNumRecordDeserialized = statsLogger.getCounter(NUM_RECORD_DESERIALIZED); 
statsLoggerTotalExecutionTime = statsLogger.getOpStatsLogger(TOTAL_EXECUTION_TIME); } else { statsLoggerEntryQueueDequeueWaitTime = null; statsLoggerBytesRead = null; statsLoggerEntryDeserializeTime = null; statsLoggerMessageQueueEnqueueWaitTime = null; statsLoggerNumMessagesDeserialized = null; statsLoggerNumMessagesDeserializedPerEntry = null; statsLoggerReadAttempts = null; statsLoggerReadLatencyPerBatch = null; statsLoggerNumEntriesPerBatch = null; statsLoggerRecordDeserializeTime = null; statsLoggerNumRecordDeserialized = null; statsLoggerTotalExecutionTime = null; } } public void start_ENTRY_QUEUE_DEQUEUE_WAIT_TIME() { if (statsLogger != null) { entryQueueDequeueWaitTimeStartTime = System.nanoTime(); } } public void end_ENTRY_QUEUE_DEQUEUE_WAIT_TIME() { if (statsLogger != null) { long time = System.nanoTime() - entryQueueDequeueWaitTimeStartTime; entryQueueDequeueWaitTimeSum += time; statsLoggerEntryQueueDequeueWaitTime.registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void register_BYTES_READ(long bytes) { if (statsLogger != null) { bytesReadSum += bytes; statsLoggerBytesRead.add(bytes); } } public void start_ENTRY_DESERIALIZE_TIME() { if (statsLogger != null) { entryDeserializeTimeStartTime = System.nanoTime(); } } public void end_ENTRY_DESERIALIZE_TIME() { if (statsLogger != null) { long time = System.nanoTime() - entryDeserializeTimeStartTime; entryDeserializeTimeSum += time; statsLoggerEntryDeserializeTime.registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void start_MESSAGE_QUEUE_ENQUEUE_WAIT_TIME() { if (statsLogger != null) { messageQueueEnqueueWaitTimeStartTime = System.nanoTime(); } } public void end_MESSAGE_QUEUE_ENQUEUE_WAIT_TIME() { if (statsLogger != null) { long time = System.nanoTime() - messageQueueEnqueueWaitTimeStartTime; messageQueueEnqueueWaitTimeSum += time; statsLoggerMessageQueueEnqueueWaitTime.registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void incr_NUM_MESSAGES_DESERIALIZED_PER_ENTRY() { if 
(statsLogger != null) { numMessagedDerserializedPerBatch++; statsLoggerNumMessagesDeserialized.add(1); } } public void end_NUM_MESSAGES_DESERIALIZED_PER_ENTRY() { if (statsLogger != null) { numMessagesDerserializedSum += numMessagedDerserializedPerBatch; statsLoggerNumMessagesDeserializedPerEntry.registerSuccessfulValue(numMessagedDerserializedPerBatch); numMessagedDerserializedPerBatch = 0L; } } public void incr_READ_ATTEMPTS_SUCCESS() { if (statsLogger != null) { readAttemptsSuccessSum++; statsLoggerReadAttempts.registerSuccessfulValue(1L); } } public void incr_READ_ATTEMPTS_FAIL() { if (statsLogger != null) { readAttemptsFailSum++; statsLoggerReadAttempts.registerFailedValue(1L); } } public void register_READ_LATENCY_PER_BATCH_SUCCESS(long latency) { if (statsLogger != null) { readLatencySuccessSum += latency; statsLoggerReadLatencyPerBatch.registerSuccessfulEvent(latency, TimeUnit.NANOSECONDS); } } public void register_READ_LATENCY_PER_BATCH_FAIL(long latency) { if (statsLogger != null) { readLatencyFailSum += latency; statsLoggerReadLatencyPerBatch.registerFailedEvent(latency, TimeUnit.NANOSECONDS); } } public void incr_NUM_ENTRIES_PER_BATCH_SUCCESS(long delta) { if (statsLogger != null) { numEntriesPerBatchSum += delta; statsLoggerNumEntriesPerBatch.registerSuccessfulValue(delta); } } public void incr_NUM_ENTRIES_PER_BATCH_FAIL(long delta) { if (statsLogger != null) { statsLoggerNumEntriesPerBatch.registerFailedValue(delta); } } public void register_MESSAGE_QUEUE_DEQUEUE_WAIT_TIME(long latency) { if (statsLogger != null) { messageQueueDequeueWaitTimeSum += latency; } } public void start_RECORD_DESERIALIZE_TIME() { if (statsLogger != null) { recordDeserializeTimeStartTime = System.nanoTime(); } } public void end_RECORD_DESERIALIZE_TIME() { if (statsLogger != null) { long time = System.nanoTime() - recordDeserializeTimeStartTime; recordDeserializeTimeSum += time; statsLoggerRecordDeserializeTime.registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public 
void incr_NUM_RECORD_DESERIALIZED() { if (statsLogger != null) { statsLoggerNumRecordDeserialized.add(1); } } public void register_TOTAL_EXECUTION_TIME(long latency) { if (statsLogger != null) { statsLoggerTotalExecutionTime.registerSuccessfulEvent(latency, TimeUnit.NANOSECONDS); } } @Override public void close() { if (statsLogger != null) { // register total entry dequeue wait time for query statsLogger.getOpStatsLogger(ENTRY_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY) .registerSuccessfulEvent(entryQueueDequeueWaitTimeSum, TimeUnit.NANOSECONDS); //register bytes read per query statsLogger.getOpStatsLogger(BYTES_READ_PER_QUERY) .registerSuccessfulValue(bytesReadSum); // register total time spent deserializing entries for query statsLogger.getOpStatsLogger(ENTRY_DESERIALIZE_TIME_PER_QUERY) .registerSuccessfulEvent(entryDeserializeTimeSum, TimeUnit.NANOSECONDS); // register time spent waiting for message queue enqueue because message queue is full per query statsLogger.getOpStatsLogger(MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_PER_QUERY) .registerSuccessfulEvent(messageQueueEnqueueWaitTimeSum, TimeUnit.NANOSECONDS); // register number of messages deserialized per query statsLogger.getOpStatsLogger(NUM_MESSAGES_DERSERIALIZED_PER_QUERY) .registerSuccessfulValue(numMessagesDerserializedSum); // register number of read attempts per query statsLogger.getOpStatsLogger(READ_ATTEMTPS_PER_QUERY) .registerSuccessfulValue(readAttemptsSuccessSum); statsLogger.getOpStatsLogger(READ_ATTEMTPS_PER_QUERY) .registerFailedValue(readAttemptsFailSum); // register total read latency for query statsLogger.getOpStatsLogger(READ_LATENCY_PER_QUERY) .registerSuccessfulEvent(readLatencySuccessSum, TimeUnit.NANOSECONDS); statsLogger.getOpStatsLogger(READ_LATENCY_PER_QUERY) .registerFailedEvent(readLatencyFailSum, TimeUnit.NANOSECONDS); // register number of entries per query statsLogger.getOpStatsLogger(NUM_ENTRIES_PER_QUERY) .registerSuccessfulValue(numEntriesPerBatchSum); // register time spent waiting to read 
for message queue per query statsLogger.getOpStatsLogger(MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY) .registerSuccessfulEvent(messageQueueDequeueWaitTimeSum, TimeUnit.MILLISECONDS); // register time spent deserializing records per query statsLogger.getOpStatsLogger(RECORD_DESERIALIZE_TIME_PER_QUERY) .registerSuccessfulEvent(recordDeserializeTimeSum, TimeUnit.NANOSECONDS); } } }
/* -*-mode:java; c-basic-offset:2; indent-tabs-mode:nil -*- */
/*
Copyright (c) 2002-2014 ymnk, JCraft,Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

  1. Redistributions of source code must retain the above copyright notice,
     this list of conditions and the following disclaimer.

  2. Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in
     the documentation and/or other materials provided with the distribution.

  3. The names of the authors may not be used to endorse or promote products
     derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package com.jcraft.jsch;

import java.net.Socket;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

/**
 * Package-private grab-bag of static helpers used throughout JSch:
 * base64 encode/decode, SSH-style glob matching, shell quoting,
 * fingerprint formatting, timed socket creation, and charset conversion.
 */
class Util{

  // Base64 alphabet; the trailing '=' is included so val() can look it up too.
  private static final byte[] b64 =Util.str2byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=");

  // Maps a base64 character to its 6-bit value; '=' and unknown bytes map to 0.
  private static byte val(byte foo){
    if(foo == '=') return 0;
    for(int j=0; j<b64.length; j++){
      if(foo==b64[j]) return (byte)j;
    }
    return 0;
  }

  /**
   * Decodes base64 data from buf[start .. start+length) into raw bytes.
   * Stops at the first '=' padding byte. Malformed input (length not a
   * multiple of 4, truncated groups) surfaces as a JSchException.
   */
  static byte[] fromBase64(byte[] buf, int start, int length) throws JSchException {
    try {
      byte[] foo=new byte[length];
      int j=0;
      for (int i=start;i<start+length;i+=4){
        foo[j]=(byte)((val(buf[i])<<2)|((val(buf[i+1])&0x30)>>>4));
        if(buf[i+2]==(byte)'='){ j++; break;}
        foo[j+1]=(byte)(((val(buf[i+1])&0x0f)<<4)|((val(buf[i+2])&0x3c)>>>2));
        if(buf[i+3]==(byte)'='){ j+=2; break;}
        foo[j+2]=(byte)(((val(buf[i+2])&0x03)<<6)|(val(buf[i+3])&0x3f));
        j+=3;
      }
      byte[] bar=new byte[j];
      System.arraycopy(foo, 0, bar, 0, j);
      return bar;
    }
    catch(ArrayIndexOutOfBoundsException e) {
      throw new JSchException("fromBase64: invalid base64 data", e);
    }
  }

  /**
   * Encodes buf[start .. start+length) as base64 with '=' padding.
   * Processes whole 3-byte groups first, then the 1- or 2-byte tail.
   */
  static byte[] toBase64(byte[] buf, int start, int length){

    byte[] tmp=new byte[length*2];   // generous upper bound for the encoded size
    int i,j,k;

    int foo=(length/3)*3+start;      // end index of the last complete 3-byte group
    i=0;
    for(j=start; j<foo; j+=3){
      k=(buf[j]>>>2)&0x3f;
      tmp[i++]=b64[k];
      k=(buf[j]&0x03)<<4|(buf[j+1]>>>4)&0x0f;
      tmp[i++]=b64[k];
      k=(buf[j+1]&0x0f)<<2|(buf[j+2]>>>6)&0x03;
      tmp[i++]=b64[k];
      k=buf[j+2]&0x3f;
      tmp[i++]=b64[k];
    }

    foo=(start+length)-foo;          // 0, 1 or 2 leftover bytes
    if(foo==1){
      k=(buf[j]>>>2)&0x3f;
      tmp[i++]=b64[k];
      k=((buf[j]&0x03)<<4)&0x3f;
      tmp[i++]=b64[k];
      tmp[i++]=(byte)'=';
      tmp[i++]=(byte)'=';
    }
    else if(foo==2){
      k=(buf[j]>>>2)&0x3f;
      tmp[i++]=b64[k];
      k=(buf[j]&0x03)<<4|(buf[j+1]>>>4)&0x0f;
      tmp[i++]=b64[k];
      k=((buf[j+1]&0x0f)<<2)&0x3f;
      tmp[i++]=b64[k];
      tmp[i++]=(byte)'=';
    }

    byte[] bar=new byte[i];
    System.arraycopy(tmp, 0, bar, 0, i);
    return bar;

    // return sun.misc.BASE64Encoder().encode(buf);
  }

  /**
   * Splits foo on occurrences of the separator string.
   * NOTE(review): after a match, start advances by index+1 (one char), not by
   * split.length() — presumably all callers pass single-character separators;
   * confirm before reusing with longer ones.
   */
  static String[] split(String foo, String split){
    if(foo==null)
      return null;
    byte[] buf=Util.str2byte(foo);
    java.util.Vector bar=new java.util.Vector();
    int start=0;
    int index;
    while(true){
      index=foo.indexOf(split, start);
      if(index>=0){
        bar.addElement(Util.byte2str(buf, start, index-start));
        start=index+1;
        continue;
      }
      bar.addElement(Util.byte2str(buf, start, buf.length-start));
      break;
    }
    String[] result=new String[bar.size()];
    for(int i=0; i<result.length; i++){
      result[i]=(String)(bar.elementAt(i));
    }
    return result;
  }

  /** Glob-matches name against pattern ('*', '?', '\' escapes, UTF-8 aware). */
  static boolean glob(byte[] pattern, byte[] name){
    return glob0(pattern, 0, name, 0);
  }

  // Entry wrapper that handles dotfiles: a leading '.' in name only matches a
  // literal leading '.' in the pattern (so '*' does not match hidden files).
  static private boolean glob0(byte[] pattern, int pattern_index,
                               byte[] name, int name_index){
    if(name.length>0 && name[0]=='.'){
      if(pattern.length>0 && pattern[0]=='.'){
        if(pattern.length==2 && pattern[1]=='*') return true;
        return glob(pattern, pattern_index+1, name, name_index+1);
      }
      return false;
    }
    return glob(pattern, pattern_index, name, name_index);
  }

  // Core recursive matcher. '*' tries every suffix of name; '\' escapes the
  // next pattern byte; '?' consumes one (possibly multi-byte UTF-8) character.
  static private boolean glob(byte[] pattern, int pattern_index,
                              byte[] name, int name_index){
    //System.err.println("glob: "+new String(pattern)+", "+pattern_index+" "+new String(name)+", "+name_index);

    int patternlen=pattern.length;
    if(patternlen==0)
      return false;

    int namelen=name.length;
    int i=pattern_index;
    int j=name_index;

    while(i<patternlen && j<namelen){
      if(pattern[i]=='\\'){
        if(i+1==patternlen)
          return false;
        i++;
        if(pattern[i]!=name[j])
          return false;
        i+=skipUTF8Char(pattern[i]);
        j+=skipUTF8Char(name[j]);
        continue;
      }

      if(pattern[i]=='*'){
        // collapse consecutive '*'
        while(i<patternlen){
          if(pattern[i]=='*'){
            i++;
            continue;
          }
          break;
        }
        if(patternlen==i)
          return true;   // trailing '*' matches the rest

        byte foo=pattern[i];
        if(foo=='?'){
          // '*?' — try matching the remainder at every position of name
          while(j<namelen){
            if(glob(pattern, i, name, j)){
              return true;
            }
            j+=skipUTF8Char(name[j]);
          }
          return false;
        }
        else if(foo=='\\'){
          // '*\x' — scan name for the escaped literal and recurse after it
          if(i+1==patternlen)
            return false;
          i++;
          foo=pattern[i];
          while(j<namelen){
            if(foo==name[j]){
              if(glob(pattern, i+skipUTF8Char(foo),
                      name, j+skipUTF8Char(name[j]))){
                return true;
              }
            }
            j+=skipUTF8Char(name[j]);
          }
          return false;
        }

        // '*c' — scan name for the literal c and recurse from there
        while(j<namelen){
          if(foo==name[j]){
            if(glob(pattern, i, name, j)){
              return true;
            }
          }
          j+=skipUTF8Char(name[j]);
        }
        return false;
      }

      if(pattern[i]=='?'){
        i++;
        j+=skipUTF8Char(name[j]);
        continue;
      }

      if(pattern[i]!=name[j])
        return false;

      i+=skipUTF8Char(pattern[i]);
      j+=skipUTF8Char(name[j]);

      if(!(j<namelen)){         // name is end
        if(!(i<patternlen)){    // pattern is end
          return true;
        }
        if(pattern[i]=='*'){
          break;
        }
      }
      continue;
    }

    if(i==patternlen && j==namelen)
      return true;

    // name exhausted but pattern still has content: OK only if it is all '*'
    if(!(j<namelen) &&  // name is end
       pattern[i]=='*'){
      boolean ok=true;
      while(i<patternlen){
        if(pattern[i++]!='*'){
          ok=false;
          break;
        }
      }
      return ok;
    }

    return false;
  }

  /** Backslash-escapes glob metacharacters ('\', '?', '*') in a path. */
  static String quote(String path){
    byte[] _path=str2byte(path);
    int count=0;
    for(int i=0;i<_path.length; i++){
      byte b=_path[i];
      if(b=='\\' || b=='?' || b=='*')
        count++;
    }
    if(count==0)
      return path;
    byte[] _path2=new byte[_path.length+count];
    for(int i=0, j=0; i<_path.length; i++){
      byte b=_path[i];
      if(b=='\\' || b=='?' || b=='*'){
        _path2[j++]='\\';
      }
      _path2[j++]=b;
    }
    return byte2str(_path2);
  }

  /** Removes backslash escapes from a quoted path string. */
  static String unquote(String path){
    byte[] foo=str2byte(path);
    byte[] bar=unquote(foo);
    if(foo.length==bar.length)
      return path;
    return byte2str(bar);
  }

  /**
   * Removes backslash escapes in place (the input array is mutated),
   * returning a trimmed copy if anything was removed.
   */
  static byte[] unquote(byte[] path){
    int pathlen=path.length;
    int i=0;
    while(i<pathlen){
      if(path[i]=='\\'){
        if(i+1==pathlen)
          break;
        System.arraycopy(path, i+1, path, i, path.length-(i+1));
        pathlen--;
        i++;
        continue;
      }
      i++;
    }
    if(pathlen==path.length)
      return path;
    byte[] foo=new byte[pathlen];
    System.arraycopy(path, 0, foo, 0, pathlen);
    return foo;
  }

  // Hex digits for fingerprint formatting.
  private static String[] chars={
    "0","1","2","3","4","5","6","7","8","9", "a","b","c","d","e","f"
  };

  /**
   * Returns the colon-separated hex digest of data under the given hash,
   * e.g. "ab:cd:...". Returns "???" if hashing fails.
   */
  static String getFingerPrint(HASH hash, byte[] data){
    try{
      hash.init();
      hash.update(data, 0, data.length);
      byte[] foo=hash.digest();
      StringBuffer sb=new StringBuffer();
      int bar;
      for(int i=0; i<foo.length;i++){
        bar=foo[i]&0xff;
        sb.append(chars[(bar>>>4)&0xf]);
        sb.append(chars[(bar)&0xf]);
        if(i+1<foo.length)
          sb.append(":");
      }
      return sb.toString();
    }
    catch(Exception e){
      return "???";
    }
  }

  /**
   * Element-wise array equality.
   * NOTE(review): not constant-time; do not use to compare secrets.
   */
  static boolean array_equals(byte[] foo, byte bar[]){
    int i=foo.length;
    if(i!=bar.length)
      return false;
    for(int j=0; j<i; j++){
      if(foo[j]!=bar[j])
        return false;
    }
    //try{while(true){i--; if(foo[i]!=bar[i])return false;}}catch(Exception e){}
    return true;
  }

  /**
   * Opens a TCP socket to host:port. With timeout==0 connects directly;
   * otherwise connects on a helper thread and joins with the timeout,
   * throwing JSchException if the connection is not established in time.
   */
  static Socket createSocket(String host, int port, int timeout) throws JSchException{
    Socket socket=null;
    if(timeout==0){
      try{
        socket=new Socket(host, port);
        return socket;
      }
      catch(Exception e){
        String message=e.toString();
        if(e instanceof Throwable)
          throw new JSchException(message, (Throwable)e);
        throw new JSchException(message);
      }
    }
    final String _host=host;
    final int _port=port;
    final Socket[] sockp=new Socket[1];
    final Exception[] ee=new Exception[1];
    String message="";
    Thread tmp=new Thread(new Runnable(){
        public void run(){
          sockp[0]=null;
          try{
            sockp[0]=new Socket(_host, _port);
          }
          catch(Exception e){
            ee[0]=e;
            if(sockp[0]!=null && sockp[0].isConnected()){
              try{
                sockp[0].close();
              }
              catch(Exception eee){}
            }
            sockp[0]=null;
          }
        }
      });
    tmp.setName("Opening Socket "+host);
    tmp.start();
    try{
      tmp.join(timeout);
      // join returned (either connected or timed out); assume timeout prefix
      // and overwrite below if the helper thread captured a real exception
      message="timeout: ";
    }
    catch(java.lang.InterruptedException eee){
    }
    if(sockp[0]!=null && sockp[0].isConnected()){
      socket=sockp[0];
    }
    else{
      message+="socket is not established";
      if(ee[0]!=null){
        message=ee[0].toString();
      }
      tmp.interrupt();
      tmp=null;
      throw new JSchException(message, ee[0]);
    }
    return socket;
  }

  /** String → bytes in the given encoding, falling back to the platform default. */
  static byte[] str2byte(String str, String encoding){
    if(str==null)
      return null;
    try{ return str.getBytes(encoding); }
    catch(java.io.UnsupportedEncodingException e){
      return str.getBytes();
    }
  }

  /** String → UTF-8 bytes. */
  static byte[] str2byte(String str){
    return str2byte(str, "UTF-8");
  }

  /** Bytes → String in the given encoding, falling back to the platform default. */
  static String byte2str(byte[] str, String encoding){
    return byte2str(str, 0, str.length, encoding);
  }

  /** Byte range → String in the given encoding, falling back to the platform default. */
  static String byte2str(byte[] str, int s, int l, String encoding){
    try{ return new String(str, s, l, encoding); }
    catch(java.io.UnsupportedEncodingException e){
      return new String(str, s, l);
    }
  }

  /** Bytes → String as UTF-8. */
  static String byte2str(byte[] str){
    return byte2str(str, 0, str.length, "UTF-8");
  }

  /** Byte range → String as UTF-8. */
  static String byte2str(byte[] str, int s, int l){
    return byte2str(str, s, l, "UTF-8");
  }

  /** Formats bytes as colon-separated "0xNN" hex pairs (debug helper). */
  static String toHex(byte[] str){
    StringBuffer sb = new StringBuffer();
    for(int i = 0; i<str.length; i++){
      String foo = Integer.toHexString(str[i]&0xff);
      sb.append("0x"+(foo.length() == 1 ? "0" : "")+foo);
      if(i+1<str.length)
        sb.append(":");
    }
    return sb.toString();
  }

  static final byte[] empty = str2byte("");

  /*
  static byte[] char2byte(char[] foo){
    int len=0;
    for(int i=0; i<foo.length; i++){
      if((foo[i]&0xff00)==0) len++;
      else len+=2;
    }
    byte[] bar=new byte[len];
    for(int i=0, j=0; i<foo.length; i++){
      if((foo[i]&0xff00)==0){
        bar[j++]=(byte)foo[i];
      }
      else{
        bar[j++]=(byte)(foo[i]>>>8);
        bar[j++]=(byte)foo[i];
      }
    }
    return bar;
  }
  */

  /** Zeroes a byte array (used to scrub key material); null-safe. */
  static void bzero(byte[] foo){
    if(foo==null)
      return;
    for(int i=0; i<foo.length; i++)
      foo[i]=0;
  }

  /**
   * Returns the comma-separated subset of str whose elements are NOT listed
   * in not_available, or null if none remain.
   */
  static String diffString(String str, String[] not_available){
    String[] stra=Util.split(str, ",");
    String result=null;
    loop:
    for(int i=0; i<stra.length; i++){
      for(int j=0; j<not_available.length; j++){
        if(stra[i].equals(not_available[j])){
          continue loop;
        }
      }
      if(result==null){ result=stra[i]; }
      else{ result=result+","+stra[i]; }
    }
    return result;
  }

  /**
   * Replaces '~' with the user's home directory. Ignores SecurityException
   * (e.g. in a sandbox) and returns the string unchanged.
   * NOTE(review): String.replace substitutes EVERY '~', not just a leading
   * one — confirm callers never pass paths containing interior tildes.
   */
  static String checkTilde(String str){
    try{
      if(str.startsWith("~")){
        str = str.replace("~", System.getProperty("user.home"));
      }
    }
    catch(SecurityException e){
    }
    return str;
  }

  // Returns the byte length of the UTF-8 character starting with byte b
  // (1 for ASCII/invalid lead bytes, 2 or 3 for multi-byte sequences).
  private static int skipUTF8Char(byte b){
    if((byte)(b&0x80)==0) return 1;
    if((byte)(b&0xe0)==(byte)0xc0) return 2;
    if((byte)(b&0xf0)==(byte)0xe0) return 3;
    return 1;
  }

  /**
   * Reads an entire file (after tilde expansion) into a byte array.
   * NOTE(review): on success the stream is closed twice (in the try body and
   * again in finally); harmless for FileInputStream but worth tidying.
   */
  static byte[] fromFile(String _file) throws IOException {
    _file = checkTilde(_file);
    File file = new File(_file);
    FileInputStream fis = new FileInputStream(_file);
    try {
      byte[] result = new byte[(int)(file.length())];
      int len=0;
      while(true){
        int i=fis.read(result, len, result.length-len);
        if(i<=0)
          break;
        len+=i;
      }
      fis.close();
      return result;
    }
    finally {
      if(fis!=null)
        fis.close();
    }
  }
}
/*
 * Copyright 2019 Immutables Authors and Contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.immutables.criteria.reactor;

import org.immutables.criteria.backend.Backend;
import org.immutables.criteria.expression.ImmutableQuery;
import org.immutables.criteria.expression.Query;
import org.immutables.criteria.repository.MapperFunction3;
import org.immutables.criteria.repository.MapperFunction4;
import org.immutables.criteria.repository.MapperFunction5;
import org.immutables.criteria.repository.Tuple;
import org.immutables.criteria.repository.reactive.ReactiveMapper1;
import org.immutables.criteria.repository.reactive.ReactiveMapper2;
import org.immutables.criteria.repository.reactive.ReactiveMapper3;
import org.immutables.criteria.repository.reactive.ReactiveMapper4;
import org.immutables.criteria.repository.reactive.ReactiveMapper5;
import org.immutables.criteria.repository.reactive.ReactiveMapperTuple;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.Function;

/**
 * Default implementation for the various Reactor (Project Reactor) mappers and
 * their utilities. Each nested {@code MapperN} adapts the corresponding
 * {@code ReactiveMapperN} to the Reactor-flavored fetcher interfaces.
 * {@code distinct()}/{@code limit()}/{@code offset()} are immutable builder-style
 * operations: each returns a NEW mapper over a modified query copy.
 */
final class ReactorMappers {

  /**
   * Single-projection mapper. Unlike Mapper2..5 it wraps a pre-built
   * {@link ReactiveMapper1} plus a {@link ReactorFetcher} delegate rather
   * than re-creating the delegate on every call.
   */
  static class Mapper1<T1> implements ReactorMapper1<T1>, ReactorMapper1.DistinctLimitOffset<T1> {

    private final ReactiveMapper1<T1> mapper;
    private final ReactorFetcher.DistinctLimitOffset<T1> fetcher;

    Mapper1(Query query, Backend.Session session) {
      this(new ReactiveMapper1<>(query, session));
    }

    private Mapper1(ReactiveMapper1<T1> mapper) {
      this(mapper, ReactorFetcherDelegate.fromReactive(mapper));
    }

    private Mapper1(ReactiveMapper1<T1> mapper, ReactorFetcher<T1> fetcher) {
      this.mapper = mapper;
      // NOTE(review): unchecked downcast — assumes fromReactive always yields a
      // DistinctLimitOffset implementation; confirm in ReactorFetcherDelegate.
      this.fetcher = (ReactorFetcher.DistinctLimitOffset<T1>) fetcher;
    }

    /** Re-projects each element as {@code Optional<T1>}. */
    @Override
    public DistinctLimitOffset<Optional<T1>> asOptional() {
      return new Mapper1<>(mapper.asOptional());
    }

    @Override
    public Flux<T1> fetch() {
      return fetcher.fetch();
    }

    @Override
    public Mono<T1> one() {
      return fetcher.one();
    }

    @Override
    public Mono<T1> oneOrNone() {
      return fetcher.oneOrNone();
    }

    @Override
    public Mono<Boolean> exists() {
      return fetcher.exists();
    }

    @Override
    public Mono<Long> count() {
      return fetcher.count();
    }

    @Override
    public LimitOffset<T1> distinct() {
      return new Mapper1<>(mapper, fetcher.distinct());
    }

    @Override
    public Offset<T1> limit(long limit) {
      return new Mapper1<>(mapper, fetcher.limit(limit));
    }

    @Override
    public ReactorMapper1<T1> offset(long offset) {
      return new Mapper1<>(mapper, fetcher.offset(offset));
    }
  }

  /** Two-projection mapper: maps a (T1, T2) tuple into R via BiFunction or Tuple function. */
  static class Mapper2<T1, T2> implements ReactorMapper2<T1, T2>, ReactorMapper2.DistinctLimitOffset<T1, T2> {

    private final ImmutableQuery query;
    private final Backend.Session session;

    Mapper2(Query query, Backend.Session session) {
      this.query = ImmutableQuery.copyOf(query);
      this.session = session;
    }

    @Override
    public <R> ReactorFetcher<R> map(BiFunction<T1, T2, R> mapFn) {
      ReactiveMapper2<T1, T2> delegate = new ReactiveMapper2<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public <R> ReactorFetcher<R> map(Function<? super Tuple, ? extends R> mapFn) {
      ReactiveMapper2<T1, T2> delegate = new ReactiveMapper2<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public LimitOffset<T1, T2> distinct() {
      return new Mapper2<>(query.withDistinct(true), session);
    }

    @Override
    public Offset<T1, T2> limit(long limit) {
      return new Mapper2<>(query.withLimit(limit), session);
    }

    @Override
    public ReactorMapper2<T1, T2> offset(long offset) {
      return new Mapper2<>(query.withOffset(offset), session);
    }
  }

  /** Three-projection mapper; same pattern as Mapper2 with MapperFunction3. */
  static class Mapper3<T1, T2, T3> implements ReactorMapper3<T1, T2, T3>,
          ReactorMapper3.DistinctLimitOffset<T1, T2, T3> {

    private final ImmutableQuery query;
    private final Backend.Session session;

    Mapper3(Query query, Backend.Session session) {
      this.query = ImmutableQuery.copyOf(query);
      this.session = session;
    }

    @Override
    public <R> ReactorFetcher<R> map(MapperFunction3<T1, T2, T3, R> mapFn) {
      ReactiveMapper3<T1, T2, T3> delegate = new ReactiveMapper3<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public <R> ReactorFetcher<R> map(Function<? super Tuple, ? extends R> mapFn) {
      ReactiveMapper3<T1, T2, T3> delegate = new ReactiveMapper3<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public LimitOffset<T1, T2, T3> distinct() {
      return new Mapper3<>(query.withDistinct(true), session);
    }

    @Override
    public Offset<T1, T2, T3> limit(long limit) {
      return new Mapper3<>(query.withLimit(limit), session);
    }

    @Override
    public ReactorMapper3<T1, T2, T3> offset(long offset) {
      return new Mapper3<>(query.withOffset(offset), session);
    }
  }

  /** Four-projection mapper; same pattern with MapperFunction4. */
  static class Mapper4<T1, T2, T3, T4> implements ReactorMapper4<T1, T2, T3, T4>,
          ReactorMapper4.DistinctLimitOffset<T1, T2, T3, T4> {

    private final ImmutableQuery query;
    private final Backend.Session session;

    Mapper4(Query query, Backend.Session session) {
      this.query = ImmutableQuery.copyOf(query);
      this.session = session;
    }

    @Override
    public <R> ReactorFetcher<R> map(MapperFunction4<T1, T2, T3, T4, R> mapFn) {
      ReactiveMapper4<T1, T2, T3, T4> delegate = new ReactiveMapper4<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public <R> ReactorFetcher<R> map(Function<? super Tuple, ? extends R> mapFn) {
      ReactiveMapper4<T1, T2, T3, T4> delegate = new ReactiveMapper4<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public LimitOffset<T1, T2, T3, T4> distinct() {
      return new Mapper4<>(query.withDistinct(true), session);
    }

    @Override
    public Offset<T1, T2, T3, T4> limit(long limit) {
      return new Mapper4<>(query.withLimit(limit), session);
    }

    @Override
    public ReactorMapper4<T1, T2, T3, T4> offset(long offset) {
      return new Mapper4<>(query.withOffset(offset), session);
    }
  }

  /** Five-projection mapper; same pattern with MapperFunction5. */
  static class Mapper5<T1, T2, T3, T4, T5> implements ReactorMapper5<T1, T2, T3, T4, T5>,
          ReactorMapper5.DistinctLimitOffset<T1, T2, T3, T4, T5> {

    private final ImmutableQuery query;
    private final Backend.Session session;

    Mapper5(Query query, Backend.Session session) {
      this.query = ImmutableQuery.copyOf(query);
      this.session = session;
    }

    @Override
    public <R> ReactorFetcher<R> map(MapperFunction5<T1, T2, T3, T4, T5, R> mapFn) {
      ReactiveMapper5<T1, T2, T3, T4, T5> delegate = new ReactiveMapper5<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public <R> ReactorFetcher<R> map(Function<? super Tuple, ? extends R> mapFn) {
      ReactiveMapper5<T1, T2, T3, T4, T5> delegate = new ReactiveMapper5<>(query, session);
      return ReactorFetcherDelegate.fromReactive(delegate.map(mapFn));
    }

    @Override
    public LimitOffset<T1, T2, T3, T4, T5> distinct() {
      return new Mapper5<>(query.withDistinct(true), session);
    }

    @Override
    public Offset<T1, T2, T3, T4, T5> limit(long limit) {
      return new Mapper5<>(query.withLimit(limit), session);
    }

    @Override
    public ReactorMapper5<T1, T2, T3, T4, T5> offset(long offset) {
      return new Mapper5<>(query.withOffset(offset), session);
    }
  }

  /** Untyped tuple mapper: projects via a generic Tuple function. */
  static class MapperTuple implements ReactorMapperTuple, ReactorMapperTuple.DistinctLimitOffset {

    private final ImmutableQuery query;
    private final Backend.Session session;

    MapperTuple(Query query, Backend.Session session) {
      this.query = ImmutableQuery.copyOf(query);
      this.session = session;
    }

    @Override
    public <R> ReactorFetcher<R> map(Function<? super Tuple, ? extends R> mapFn) {
      ReactiveMapperTuple mapper = new ReactiveMapperTuple(query, session);
      return ReactorFetcherDelegate.fromReactive(mapper.map(mapFn));
    }

    @Override
    public LimitOffset distinct() {
      return new MapperTuple(query.withDistinct(true), session);
    }

    @Override
    public Offset limit(long limit) {
      return new MapperTuple(query.withLimit(limit), session);
    }

    @Override
    public ReactorMapperTuple offset(long offset) {
      return new MapperTuple(query.withOffset(offset), session);
    }
  }

  // Utility holder — not instantiable.
  private ReactorMappers() {}
}
/*********************************************************************************************************************** * * Copyright (C) 2010 by the Stratosphere project (http://stratosphere.eu) * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * **********************************************************************************************************************/ package de.tuberlin.cit.livescale.messaging; import static org.junit.Assert.*; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.Map; import java.util.UUID; import org.junit.Before; import org.junit.Test; import de.tuberlin.cit.livescale.messaging.AbstractMessage; import de.tuberlin.cit.livescale.messaging.Message; import de.tuberlin.cit.livescale.messaging.MessageFactory; import de.tuberlin.cit.livescale.messaging.MessageManifest; import de.tuberlin.cit.livescale.messaging.ResponseMessage; import de.tuberlin.cit.livescale.messaging.MessageFactory.InvalidMessageTypeException; import de.tuberlin.cit.livescale.messaging.MessageFactory.MalformedMessageException; import de.tuberlin.cit.livescale.messaging.messages.ClientFavoriteStreamStarted; import de.tuberlin.cit.livescale.messaging.messages.ClientFollowerAnswer; import de.tuberlin.cit.livescale.messaging.messages.ClientRegistrationAnswer; import de.tuberlin.cit.livescale.messaging.messages.ClientStreamRcv; import 
de.tuberlin.cit.livescale.messaging.messages.ClientStreamSend; import de.tuberlin.cit.livescale.messaging.messages.DispatcherGCM; import de.tuberlin.cit.livescale.messaging.messages.DispatcherRegistration; import de.tuberlin.cit.livescale.messaging.messages.DispatcherRequestFollower; import de.tuberlin.cit.livescale.messaging.messages.DispatcherRequestStreamRcv; import de.tuberlin.cit.livescale.messaging.messages.DispatcherRequestStreamSend; import de.tuberlin.cit.livescale.messaging.messages.DispatcherStreamConfirm; import de.tuberlin.cit.livescale.messaging.messages.DispatcherStreamStatus; import de.tuberlin.cit.livescale.messaging.messages.StreamserverNewStream; import de.tuberlin.cit.livescale.messaging.messages.StreamserverRequestStreamStatus; import de.tuberlin.cit.livescale.messaging.messages.TestMessage; /** * This collection tests the serialization process on the message classes * * @author Bernd Louis * */ public class MessagesSerializationTests { private UUID uuid; @Before public void setUp() { this.uuid = UUID.randomUUID(); } /** * Simulates marshaling through the {@link MessageFactory} by first * rendering the {@link Message} and then trying to create a new one from * the provided {@link Map} * * @param m * @return * @throws InvalidMessageTypeException * in case the {@link MessageFactory} was not able to resolve * the Message */ private Message renderAndCreate(Message m) throws InvalidMessageTypeException { return MessageFactory.createMessage(MessageFactory.renderMessage(m)); } /** * Asserts the equality of <code>m1</code> and <code>m2</code>'s Message * UUIDs. * * @param m1 * @param m2 */ private void assertUUIDEquality(Message m1, Message m2) { assertEquals(m1.getUUID(), m2.getUUID()); if (m1 instanceof ResponseMessage && m2 instanceof ResponseMessage) { this.assertResponseRequestUUIDEquality((ResponseMessage) m1, (ResponseMessage) m2); } } /** * Asserts the equality of of <code>m1</code> and <code>m2</code>'s request * UUIDs. 
* * @param m1 * @param m2 */ private void assertResponseRequestUUIDEquality(ResponseMessage m1, ResponseMessage m2) { assertEquals(m1.getRequestMessageUUID(), m2.getRequestMessageUUID()); } @Test public void testDispatcherGCM() throws InvalidMessageTypeException { DispatcherGCM m1 = new DispatcherGCM(); m1.setUsername("uname"); m1.setPassword("password"); m1.setC2dmKey("key"); DispatcherGCM m2 = (DispatcherGCM) this.renderAndCreate(m1); assertEquals(m1.getUsername(), m2.getUsername()); assertEquals(m1.getPassword(), m2.getPassword()); assertEquals(m1.getC2dmKey(), m2.getC2dmKey()); this.assertUUIDEquality(m1, m2); } @Test public void testDispatcherRegistrationSerialization() throws InvalidMessageTypeException { DispatcherRegistration m1 = new DispatcherRegistration(); m1.setUsername("uname"); m1.setPassword("pwd"); DispatcherRegistration m2 = (DispatcherRegistration) this .renderAndCreate(m1); assertEquals(m1.getUsername(), m2.getUsername()); assertEquals(m1.getPassword(), m2.getPassword()); this.assertUUIDEquality(m1, m2); } @Test public void testDispatcherRequestFollower() throws InvalidMessageTypeException { DispatcherRequestFollower m1 = new DispatcherRequestFollower(); m1.setFollowerUsername("uname1"); m1.setFollowRequest(-1); m1.setPassword("pwd"); m1.setUsername("uname2"); DispatcherRequestFollower m2 = (DispatcherRequestFollower) this .renderAndCreate(m1); assertEquals(m1.getFollowerUsername(), m2.getFollowerUsername()); assertEquals(m1.getFollowRequest(), m2.getFollowRequest()); assertEquals(m1.getPassword(), m2.getPassword()); assertEquals(m1.getUsername(), m2.getUsername()); this.assertUUIDEquality(m1, m2); } @Test public void testDispatcherRequestStreamRcv() throws InvalidMessageTypeException { DispatcherRequestStreamRcv m1 = new DispatcherRequestStreamRcv(); m1.setToken("token"); DispatcherRequestStreamRcv m2 = (DispatcherRequestStreamRcv) this .renderAndCreate(m1); assertEquals(m1.getToken(), m2.getToken()); this.assertUUIDEquality(m1, m2); } @Test 
public void testDispatcherRequestStreamSend() throws InvalidMessageTypeException { DispatcherRequestStreamSend m1 = new DispatcherRequestStreamSend(); m1.setUsername("uname"); m1.setPassword("pwd"); DispatcherRequestStreamSend m2 = (DispatcherRequestStreamSend) this .renderAndCreate(m1); assertEquals(m1.getUsername(), m2.getUsername()); assertEquals(m1.getPassword(), m2.getPassword()); this.assertUUIDEquality(m1, m2); } @Test public void testStreamServerNewStream() throws InvalidMessageTypeException { StreamserverNewStream m1 = new StreamserverNewStream(); m1.setReceiveEndpointToken("rectoken"); m1.setSendEndpointToken("stoken"); m1.setUsername("uname"); StreamserverNewStream m2 = (StreamserverNewStream) this .renderAndCreate(m1); assertEquals(m1.getReceiveEndpointToken(), m2.getReceiveEndpointToken()); assertEquals(m1.getSendEndpointToken(), m2.getSendEndpointToken()); assertEquals(m1.getUsername(), m2.getUsername()); assertUUIDEquality(m1, m2); } @Test public void testStreamserverRequestStreamStatus() throws InvalidMessageTypeException { StreamserverRequestStreamStatus m1 = new StreamserverRequestStreamStatus(); m1.setReceiveEndpointToken("recep"); m1.setSendEndpointToken("sendep"); StreamserverRequestStreamStatus m2 = (StreamserverRequestStreamStatus) this .renderAndCreate(m1); assertEquals(m1.getReceiveEndpointToken(), m2.getReceiveEndpointToken()); assertEquals(m1.getSendEndpointToken(), m2.getSendEndpointToken()); this.assertUUIDEquality(m1, m2); } @Test public void testClientFollowerAnswer() throws InvalidMessageTypeException { ClientFollowerAnswer m1 = new ClientFollowerAnswer(this.uuid); m1.setFollowerName("fname"); m1.setFollowerResult(-1); m1.setUsername("uname"); ClientFollowerAnswer m2 = (ClientFollowerAnswer) this .renderAndCreate(m1); assertEquals(m1.getFollowerName(), m2.getFollowerName()); assertEquals(m1.getFollowerResult(), m2.getFollowerResult()); assertEquals(m1.getUsername(), m2.getUsername()); this.assertUUIDEquality(m1, m2); } @Test public 
void testClientRegistrationAnswer() throws InvalidMessageTypeException { ClientRegistrationAnswer m1 = new ClientRegistrationAnswer(this.uuid); m1.setErrorMessage("errormsg"); m1.setOk(true); m1.setUsername("uname"); ClientRegistrationAnswer m2 = (ClientRegistrationAnswer) this .renderAndCreate(m1); assertEquals(m1.getErrorMessage(), m2.getErrorMessage()); assertEquals(m1.isOk(), m2.isOk()); assertEquals(m1.getUsername(), m2.getUsername()); this.assertUUIDEquality(m1, m2); } @Test public void testClientStreamRcv() throws InvalidMessageTypeException { ClientStreamRcv m1 = new ClientStreamRcv(this.uuid); m1.setAddress("address"); m1.setPort(2342); m1.setToken("token"); m1.setUserNameSender("uamesender"); ClientStreamRcv m2 = (ClientStreamRcv) this.renderAndCreate(m1); assertEquals(m1.getAddress(), m2.getAddress()); assertEquals(m1.getPort(), m2.getPort()); assertEquals(m1.getToken(), m2.getToken()); assertEquals(m1.getUserNameSender(), m2.getUserNameSender()); this.assertUUIDEquality(m1, m2); } @Test public void testClientStreamSend() throws InvalidMessageTypeException { ClientStreamSend m1 = new ClientStreamSend(this.uuid); m1.setReceiveEndpointAddress("recep"); m1.setReceiveEndpointPort(2342); m1.setReceiveEndpointToken("rectoken"); m1.setSendEndpointAddress("sendep"); m1.setSendEndpointPort(4269); m1.setSendEndpointToken("stoken"); m1.setUsername("uname"); ClientStreamSend m2 = (ClientStreamSend) this.renderAndCreate(m1); assertEquals(m1.getReceiveEndpointAddress(), m2.getReceiveEndpointAddress()); assertEquals(m1.getReceiveEndpointPort(), m2.getReceiveEndpointPort()); assertEquals(m1.getReceiveEndpointToken(), m2.getReceiveEndpointToken()); assertEquals(m1.getSendEndpointAddress(), m2.getSendEndpointAddress()); assertEquals(m1.getSendEndpointPort(), m2.getSendEndpointPort()); assertEquals(m1.getSendEndpointToken(), m2.getSendEndpointToken()); assertEquals(m1.getUsername(), m2.getUsername()); this.assertUUIDEquality(m1, m2); } @Test public void 
testDispatcherStreamConfirm() throws InvalidMessageTypeException { DispatcherStreamConfirm m1 = new DispatcherStreamConfirm(this.uuid); m1.setReceiveEndpointAddress("recep"); m1.setReceiveEndpointPort(2342); m1.setReceiveEndpointToken("rectoken"); m1.setSendEndpointAddress("sendep"); m1.setSendEndpointPort(4269); m1.setSendEndpointToken("stoken"); m1.setUsername("usern"); DispatcherStreamConfirm m2 = (DispatcherStreamConfirm) this .renderAndCreate(m1); assertEquals(m1.getReceiveEndpointAddress(), m2.getReceiveEndpointAddress()); assertEquals(m1.getReceiveEndpointPort(), m2.getReceiveEndpointPort()); assertEquals(m1.getReceiveEndpointToken(), m2.getReceiveEndpointToken()); assertEquals(m1.getSendEndpointAddress(), m2.getSendEndpointAddress()); assertEquals(m1.getSendEndpointPort(), m2.getSendEndpointPort()); assertEquals(m1.getSendEndpointToken(), m2.getSendEndpointToken()); assertEquals(m1.getUsername(), m2.getUsername()); this.assertUUIDEquality(m1, m2); } @Test public void testDispatcherStreamStatus() throws InvalidMessageTypeException { DispatcherStreamStatus m1 = new DispatcherStreamStatus(this.uuid); m1.setActive(true); m1.setReceiveEndpointToken("recep"); m1.setSendEndpointToken("seep"); DispatcherStreamStatus m2 = (DispatcherStreamStatus) this .renderAndCreate(m1); assertEquals(m1.isActive(), m2.isActive()); assertEquals(m1.getReceiveEndpointToken(), m2.getReceiveEndpointToken()); assertEquals(m1.getSendEndpointToken(), m2.getSendEndpointToken()); this.assertUUIDEquality(m1, m2); } @Test public void testClientFavoriteStreamStarted() throws InvalidMessageTypeException { ClientFavoriteStreamStarted m1 = new ClientFavoriteStreamStarted(); m1.setUsername("leName"); m1.setReceiveToken("recToken"); ClientFavoriteStreamStarted m2 = (ClientFavoriteStreamStarted) this.renderAndCreate(m1); assertEquals(m1.getUsername(), m2.getUsername()); assertEquals(m1.getReceiveToken(), m2.getReceiveToken()); } @Test(expected = MalformedMessageException.class) public void 
testFailOnMissingAbstractMessageToMapSuperCall() { AbstractMessage am = new AbstractMessage() { @Override public void toMap(Map<String, Object> messageMap) { // we're not doing a super call here! so the UUID field // will be missing } }; MessageFactory.renderMessage(am); } @Test(expected = MalformedMessageException.class) public void testFailOnResponseMessageMissingToMapSuperCall() { ResponseMessage rm = new ResponseMessage(UUID.randomUUID()) { @Override public void toMap(Map<String, Object> messageMap) { // as of this writing this is the // same situation as the // testFailOnMissingAbstractMessageToMapSuperCall // Test, but this might change in the future. So we might write // a // test for this just as well. } }; MessageFactory.renderMessage(rm); } @Test public void testDirectMessageManifestSerialization() throws URISyntaxException, InvalidMessageTypeException { TestMessage m1 = new TestMessage(); m1.setFieldFive("m1.5"); m1.setFieldSix("m1.6"); TestMessage m2 = new TestMessage(); m2.setFieldFive("m2.5"); m2.setFieldSix("m2.6"); Collection<Message> msgs = new LinkedList<Message>(); msgs.add(m1); msgs.add(m2); MessageManifest mm1 = new MessageManifest(msgs, new URI("testscheme", "testauthority", "/testpath", "", "")); Map<String, Object> outmap = new HashMap<String, Object>(); mm1.toMap(outmap); MessageManifest mm2 = new MessageManifest(outmap); assertEquals(mm1.getTargetURI(), mm2.getTargetURI()); assertTrue(mm2.getMessages().size() == 2); } @Test public void testIndirectMessageManifestSerialization() throws URISyntaxException, MalformedMessageException, IOException { TestMessage m1 = new TestMessage(); String m1five = "m1.5"; m1.setFieldFive(m1five); String m1six = "m1.6"; m1.setFieldSix(m1six); TestMessage m2 = new TestMessage(); String m2five = "m2.5"; String m2six = "m2.6"; m2.setFieldFive(m2five); m2.setFieldSix(m2six); Collection<Message> msgs = new LinkedList<Message>(); msgs.add(m1); msgs.add(m2); MessageManifest mm1 = new MessageManifest(msgs, new 
URI("testscheme", "testauthority", "/testpath", "", "")); MessageManifest mm2 = MessageFactory.decode(MessageFactory.encode(mm1)); assertEquals(mm1.getTargetURI(), mm2.getTargetURI()); assertTrue(mm2.getMessages().size() == 2); Iterator<Message> it = mm2.getMessages().iterator(); TestMessage out1 = (TestMessage) it.next(); TestMessage out2 = (TestMessage) it.next(); assertEquals(m1five, out1.getFieldFive()); assertEquals(m1six, out1.getFieldSix()); assertEquals(m2five, out2.getFieldFive()); assertEquals(m2six, out2.getFieldSix()); } }
/*
 * RandomGUID
 * @version 1.2.1 11/05/02
 * @author Marc A. Mnich
 *
 * From www.JavaExchange.com, Open Software licensing
 *
 * 11/05/02 -- Performance enhancement from Mike Dubman.
 *             Moved InetAddr.getLocal to static block.  Mike has measured
 *             a 10 fold improvement in run time.
 * 01/29/02 -- Bug fix: Improper seeding of nonsecure Random object
 *             caused duplicate GUIDs to be produced.  Random object
 *             is now only created once per JVM.
 * 01/19/02 -- Modified random seeding and added new constructor
 *             to allow secure random feature.
 * 01/14/02 -- Added random function seeding with JVM run time
 */
package org.prevayler.contrib.facade;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Random;

/**
 * Generates random GUIDs by MD5-hashing a seed built from the local host's
 * name/IP, the current system time in milliseconds, and a large random number.
 * The host and time components make the seed globally unique while the random
 * component ensures the resulting GUIDs have no discernible pattern and cannot
 * be predicted from previously generated values (MD5 is one-way).
 *
 * <p>Two modes are offered: the default mode draws the random component from a
 * {@code java.util.Random} that was seeded once per JVM with a
 * cryptographically strong value from {@link SecureRandom} (decoupling the
 * random component from the time component); the secure mode draws every
 * random component directly from {@link SecureRandom}, which is roughly 3.5x
 * slower but complies with the statistical random number generator tests of
 * FIPS 140-2, section 4.9.1. Thanks to Ashutosh Narhari for the static-method
 * priming suggestion.
 *
 * <p>The seed is assembled as a printable string (see
 * {@link #valueBeforeMD5}) so callers can inspect exactly what was hashed;
 * switch to raw byte[] handling if performance matters.
 *
 * <p>This class is free for all uses.
 */
public class RandomGUID {

    /** The printable seed string ("host:timeMillis:random") that was hashed. */
    public String valueBeforeMD5 = "";

    /** The raw MD5 digest of {@link #valueBeforeMD5}, as lowercase hex. */
    public String valueAfterMD5 = "";

    // Shared per-JVM generators: creating Random more than once per JVM caused
    // duplicate GUIDs (see 01/29/02 fix note above).
    private static Random myRand;
    private static SecureRandom mySecureRand;

    // Cached local host identity; resolving it per-GUID was a 10x slowdown
    // (see 11/05/02 fix note above).
    private static String s_id;

    /*
     * One-time SecureRandom seeding. SecureRandom can take a few seconds to
     * initialize; this block runs only once per JVM instance.
     */
    static {
        mySecureRand = new SecureRandom();
        long secureInitializer = mySecureRand.nextLong();
        myRand = new Random(secureInitializer);
        try {
            s_id = InetAddress.getLocalHost().toString();
        } catch (UnknownHostException e) {
            // Fall back to a fixed marker rather than the literal "null";
            // uniqueness is still provided by the time and random components.
            s_id = "localhost";
        }
    }

    /**
     * Default constructor: lower security, high performance (random component
     * from the once-securely-seeded {@code java.util.Random}).
     */
    public RandomGUID() {
        getRandomGUID(false);
    }

    /**
     * Constructor with security option. Passing {@code true} draws every
     * random component from {@link SecureRandom} (cryptographically strong);
     * {@code false} behaves like the default constructor.
     *
     * @param secure whether to use the secure random generator
     */
    public RandomGUID(boolean secure) {
        getRandomGUID(secure);
    }

    /**
     * Generates the GUID, populating {@link #valueBeforeMD5} and
     * {@link #valueAfterMD5}.
     *
     * @param secure whether the random component comes from SecureRandom
     * @throws IllegalStateException if the MD5 algorithm is unavailable
     *         (every conforming JRE is required to provide MD5)
     */
    private void getRandomGUID(boolean secure) {
        MessageDigest md5;
        try {
            md5 = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // Previously this was only logged and execution fell through to a
            // NullPointerException swallowed by a broad catch, silently
            // leaving empty GUID values. Fail loudly instead.
            throw new IllegalStateException("MD5 algorithm not available", e);
        }

        long time = System.currentTimeMillis();
        long rand = secure ? mySecureRand.nextLong() : myRand.nextLong();

        // This seed string can be as long as you need; the MD5 hash always
        // returns 128 bits. You could even stream a file through the MD5,
        // making the odds of guessing it at least as great as guessing the
        // contents of the file.
        StringBuilder sbValueBeforeMD5 = new StringBuilder();
        sbValueBeforeMD5.append(s_id);
        sbValueBeforeMD5.append(":");
        sbValueBeforeMD5.append(Long.toString(time));
        sbValueBeforeMD5.append(":");
        sbValueBeforeMD5.append(Long.toString(rand));
        valueBeforeMD5 = sbValueBeforeMD5.toString();

        // Explicit charset: the seed is ASCII, so this matches the old
        // platform-default behavior while staying deterministic everywhere.
        md5.update(valueBeforeMD5.getBytes(StandardCharsets.UTF_8));
        byte[] array = md5.digest();

        StringBuilder sb = new StringBuilder(32);
        for (int j = 0; j < array.length; ++j) {
            int b = array[j] & 0xFF;
            if (b < 0x10) {
                sb.append('0'); // zero-pad single hex digits
            }
            sb.append(Integer.toHexString(b));
        }
        valueAfterMD5 = sb.toString();
    }

    /**
     * Returns the GUID in the standard 8-4-4-4-12 format (useful for SQL
     * Server UniqueIdentifiers, etc.), e.g.
     * {@code C2FEEEAC-CFCD-11D1-8B05-00600806D9B6}.
     */
    @Override
    public String toString() {
        String raw = valueAfterMD5.toUpperCase();
        StringBuilder sb = new StringBuilder(36);
        sb.append(raw.substring(0, 8));
        sb.append("-");
        sb.append(raw.substring(8, 12));
        sb.append("-");
        sb.append(raw.substring(12, 16));
        sb.append("-");
        sb.append(raw.substring(16, 20));
        sb.append("-");
        sb.append(raw.substring(20));
        return sb.toString();
    }

    /*
     * Demonstration and self test of class
     */
    public static void main(String[] args) {
        for (int i = 0; i < 100; i++) {
            RandomGUID myGUID = new RandomGUID();
            System.out.println("Seeding String=" + myGUID.valueBeforeMD5);
            System.out.println("rawGUID=" + myGUID.valueAfterMD5);
            System.out.println("RandomGUID=" + myGUID.toString());
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.hints;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.cql3.QueryProcessor;
import org.apache.cassandra.cql3.UntypedResultSet;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.marshal.UUIDType;
import org.apache.cassandra.db.partitions.PartitionUpdate;
import org.apache.cassandra.io.FSWriteError;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.util.DataInputBuffer;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.utils.FBUtilities;

/**
 * A migrator that goes through the legacy system.hints table and writes all the hints to the new hints storage format.
 */
@SuppressWarnings("deprecation")
public final class LegacyHintsMigrator
{
    private static final Logger logger = LoggerFactory.getLogger(LegacyHintsMigrator.class);

    private final File hintsDirectory;
    private final long maxHintsFileSize;

    private final ColumnFamilyStore legacyHintsTable;
    private final int pageSize;

    public LegacyHintsMigrator(File hintsDirectory, long maxHintsFileSize)
    {
        this.hintsDirectory = hintsDirectory;
        this.maxHintsFileSize = maxHintsFileSize;

        legacyHintsTable = Keyspace.open(SystemKeyspace.NAME).getColumnFamilyStore(SystemKeyspace.LEGACY_HINTS);
        pageSize = calculatePageSize(legacyHintsTable);
    }

    // read fewer columns (mutations) per page if they are very large
    private static int calculatePageSize(ColumnFamilyStore legacyHintsTable)
    {
        int size = 128;

        int meanCellCount = legacyHintsTable.getMeanColumns();
        double meanPartitionSize = legacyHintsTable.getMeanPartitionSize();

        if (meanCellCount != 0 && meanPartitionSize != 0)
        {
            int avgHintSize = (int) meanPartitionSize / meanCellCount;
            size = Math.max(2, Math.min(size, (512 << 10) / avgHintSize));
        }

        return size;
    }

    /**
     * Performs the full migration: compacts the legacy table (to drop tombstones and
     * expired hints), rewrites every remaining hint into the new file-based storage,
     * and finally truncates the legacy table.
     */
    public void migrate()
    {
        // nothing to migrate
        if (legacyHintsTable.isEmpty())
            return;
        logger.info("Migrating legacy hints to new storage");

        // major-compact all of the existing sstables to get rid of the tombstones + expired hints
        logger.info("Forcing a major compaction of {}.{} table", SystemKeyspace.NAME, SystemKeyspace.LEGACY_HINTS);
        compactLegacyHints();

        // paginate over legacy hints and write them to the new storage
        logger.info("Migrating legacy hints to the new storage");
        migrateLegacyHints();

        // truncate the legacy hints table
        logger.info("Truncating {}.{} table", SystemKeyspace.NAME, SystemKeyspace.LEGACY_HINTS);
        legacyHintsTable.truncateBlocking();
    }

    private void compactLegacyHints()
    {
        Collection<Descriptor> descriptors = new ArrayList<>();
        legacyHintsTable.getTracker().getUncompacting().forEach(sstable -> descriptors.add(sstable.descriptor));
        if (!descriptors.isEmpty())
            forceCompaction(descriptors);
    }

    private void forceCompaction(Collection<Descriptor> descriptors)
    {
        try
        {
            CompactionManager.instance.submitUserDefined(legacyHintsTable, descriptors, FBUtilities.nowInSeconds()).get();
        }
        catch (InterruptedException e)
        {
            // restore the interrupt status before propagating, per convention
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
        catch (ExecutionException e)
        {
            throw new RuntimeException(e);
        }
    }

    private void migrateLegacyHints()
    {
        ByteBuffer buffer = ByteBuffer.allocateDirect(256 * 1024);
        String query = String.format("SELECT DISTINCT target_id FROM %s.%s",
                                     SystemKeyspace.NAME,
                                     SystemKeyspace.LEGACY_HINTS);
        //noinspection ConstantConditions
        QueryProcessor.executeInternal(query).forEach(row -> migrateLegacyHints(row.getUUID("target_id"), buffer));
        FileUtils.clean(buffer);
    }

    private void migrateLegacyHints(UUID hostId, ByteBuffer buffer)
    {
        String query = String.format("SELECT target_id, hint_id, message_version, mutation, ttl(mutation) AS ttl, writeTime(mutation) AS write_time " +
                                     "FROM %s.%s " +
                                     "WHERE target_id = ?",
                                     SystemKeyspace.NAME,
                                     SystemKeyspace.LEGACY_HINTS);

        // read all the old hints (paged iterator), write them in the new format
        UntypedResultSet rows = QueryProcessor.executeInternalWithPaging(query, pageSize, hostId);
        migrateLegacyHints(hostId, rows, buffer);

        // delete the whole partition in the legacy table; we would truncate the whole table afterwards, but this allows
        // to not lose progress in case of a terminated conversion
        deleteLegacyHintsPartition(hostId);
    }

    private void migrateLegacyHints(UUID hostId, UntypedResultSet rows, ByteBuffer buffer)
    {
        migrateLegacyHints(hostId, rows.iterator(), buffer);
    }

    private void migrateLegacyHints(UUID hostId, Iterator<UntypedResultSet.Row> iterator, ByteBuffer buffer)
    {
        do
        {
            migrateLegacyHintsInternal(hostId, iterator, buffer);
            // if there are hints that didn't fit in the previous file, keep calling the method to write to a new
            // file until we get everything written.
        }
        while (iterator.hasNext());
    }

    private void migrateLegacyHintsInternal(UUID hostId, Iterator<UntypedResultSet.Row> iterator, ByteBuffer buffer)
    {
        HintsDescriptor descriptor = new HintsDescriptor(hostId, System.currentTimeMillis());

        try (HintsWriter writer = HintsWriter.create(hintsDirectory, descriptor))
        {
            try (HintsWriter.Session session = writer.newSession(buffer))
            {
                while (iterator.hasNext())
                {
                    Hint hint = convertLegacyHint(iterator.next());
                    if (hint != null)
                        session.append(hint);

                    // stop writing to this file once the size cap is reached;
                    // the caller loops and opens a fresh file for the remainder
                    if (session.position() >= maxHintsFileSize)
                        break;
                }
            }
        }
        catch (IOException e)
        {
            throw new FSWriteError(e, descriptor.fileName());
        }
    }

    private static Hint convertLegacyHint(UntypedResultSet.Row row)
    {
        Mutation mutation = deserializeLegacyMutation(row);
        if (mutation == null)
            return null;

        long creationTime = row.getLong("write_time"); // milliseconds, not micros, for the hints table
        int expirationTime = FBUtilities.nowInSeconds() + row.getInt("ttl");
        int originalGCGS = expirationTime - (int) TimeUnit.MILLISECONDS.toSeconds(creationTime);

        int gcgs = Math.min(originalGCGS, mutation.smallestGCGS());

        return Hint.create(mutation, creationTime, gcgs);
    }

    private static Mutation deserializeLegacyMutation(UntypedResultSet.Row row)
    {
        try
        {
            Mutation mutation = Mutation.serializer.deserialize(new DataInputBuffer(row.getBlob("mutation"), true),
                                                                row.getInt("message_version"));
            mutation.getPartitionUpdates().forEach(PartitionUpdate::validate);
            return mutation;
        }
        catch (IOException e)
        {
            logger.error("Failed to migrate a hint for {} from legacy {}.{} table: {}",
                         row.getUUID("target_id"),
                         SystemKeyspace.NAME,
                         SystemKeyspace.LEGACY_HINTS,
                         e);
            return null;
        }
        catch (MarshalException e)
        {
            // note: placeholder count fixed to match the four supplied arguments
            logger.warn("Failed to validate a hint for {} from legacy {}.{} table - skipping: {}",
                        row.getUUID("target_id"),
                        SystemKeyspace.NAME,
                        SystemKeyspace.LEGACY_HINTS,
                        e);
            return null;
        }
    }

    private static void deleteLegacyHintsPartition(UUID hostId)
    {
        // intentionally use millis, like the rest of the legacy implementation did, just in case
        Mutation mutation = new Mutation(PartitionUpdate.fullPartitionDelete(SystemKeyspace.LegacyHints,
                                                                            UUIDType.instance.decompose(hostId),
                                                                            System.currentTimeMillis(),
                                                                            FBUtilities.nowInSeconds()));
        mutation.applyUnsafe();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.extensions.sql.meta.provider.pubsublite;

import static org.apache.beam.sdk.util.Preconditions.checkArgumentNotNull;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;

import com.alibaba.fastjson.JSONObject;
import com.google.auto.service.AutoService;
import com.google.auto.value.AutoOneOf;
import com.google.cloud.pubsublite.SubscriptionPath;
import com.google.cloud.pubsublite.TopicPath;
import com.google.cloud.pubsublite.proto.PubSubMessage;
import java.util.Optional;
import org.apache.beam.sdk.extensions.sql.meta.BeamSqlTable;
import org.apache.beam.sdk.extensions.sql.meta.Table;
import org.apache.beam.sdk.extensions.sql.meta.provider.InMemoryMetaTableProvider;
import org.apache.beam.sdk.extensions.sql.meta.provider.TableProvider;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.schemas.Schema.EquivalenceNullablePolicy;
import org.apache.beam.sdk.schemas.Schema.Field;
import org.apache.beam.sdk.schemas.Schema.FieldType;
import org.apache.beam.sdk.schemas.Schema.TypeName;
import org.apache.beam.sdk.schemas.io.DeadLetteredTransform;
import org.apache.beam.sdk.schemas.io.Failure;
import org.apache.beam.sdk.schemas.io.GenericDlq;
import org.apache.beam.sdk.schemas.io.payloads.PayloadSerializer;
import org.apache.beam.sdk.schemas.io.payloads.PayloadSerializers;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.sdk.values.TypeDescriptor;

/**
 * Pub/Sub Lite table provider.
 *
 * <p>Pub/Sub Lite tables may be constructed with:
 *
 * <pre>{@code
 * CREATE EXTERNAL TABLE tableName(
 *     message_key BYTES [NOT NULL],            // optional, always present on read
 *     publish_timestamp TIMESTAMP [NOT NULL],  // optional, readable tables only, always present on read
 *     event_timestamp TIMESTAMP [NOT NULL],    // optional, null if not present in readable table,
 *                                              // unset in message if null in writable table.
 *                                              // NOT NULL enforces field presence on read
 *     attributes ARRAY<ROW<key VARCHAR, values ARRAY<BYTES>>>,  // optional, null values never
 *                                              // present on reads or handled on writes
 *     payload BYTES | ROW<[INSERT SCHEMA HERE]>,
 * )
 * TYPE pubsublite
 * // For writable tables
 * LOCATION 'projects/[PROJECT]/locations/[CLOUD ZONE]/topics/[TOPIC]'
 * // For readable tables
 * LOCATION 'projects/[PROJECT]/locations/[CLOUD ZONE]/subscriptions/[SUBSCRIPTION]'
 * TBLPROPERTIES '{
 *     "deadLetterQueue": "[DLQ_KIND]:[DLQ_ID]",  // optional
 *     "format": "[FORMAT]",                      // optional
 *     // format params
 * }'
 * }</pre>
 */
@AutoService(TableProvider.class)
public class PubsubLiteTableProvider extends InMemoryMetaTableProvider {
  @Override
  public String getTableType() {
    return "pubsublite";
  }

  /**
   * Returns the serializer used to pack/unpack the {@code payload} field, or {@link
   * Optional#empty()} when the payload is raw {@code BYTES} and needs no (de)serialization.
   *
   * @param schema full table schema; must contain a payload field (enforced by {@link
   *     #validateSchema}).
   * @param properties table properties; {@code format} may only be set for ROW payloads and
   *     defaults to {@code json} when absent.
   * @throws IllegalArgumentException if {@code format} is set for a BYTES payload, or the format
   *     is unknown to {@link PayloadSerializers}.
   */
  private static Optional<PayloadSerializer> getSerializer(Schema schema, JSONObject properties) {
    // NOTE: was a string literal "payload"; use the shared constant for consistency with the
    // rest of this class.
    if (schema.getField(RowHandler.PAYLOAD_FIELD).getType().equals(FieldType.BYTES)) {
      checkArgument(
          !properties.containsKey("format"),
          "Must not set the 'format' property if not unpacking payload.");
      return Optional.empty();
    }
    // ROW payload: default to JSON when no explicit format was configured.
    String format = properties.containsKey("format") ? properties.getString("format") : "json";
    return Optional.of(PayloadSerializers.getSerializer(format, schema, properties.getInnerMap()));
  }

  /** Asserts that {@code field} has a type equivalent to {@code type} (nullability weakened). */
  private static void checkFieldHasType(Field field, FieldType type) {
    checkArgument(
        type.equivalent(field.getType(), EquivalenceNullablePolicy.WEAKEN),
        String.format("'%s' field must have schema matching '%s'.", field.getName(), type));
  }

  /**
   * Validates that the table schema only contains the fields Pub/Sub Lite understands and that
   * each has the expected type. The payload field is mandatory; it may be BYTES or a ROW.
   *
   * @throws IllegalArgumentException on any unknown top-level field or type mismatch.
   */
  private static void validateSchema(Schema schema) {
    checkArgument(
        schema.hasField(RowHandler.PAYLOAD_FIELD),
        "Must provide a 'payload' field for Pub/Sub Lite.");
    for (Field field : schema.getFields()) {
      switch (field.getName()) {
        case RowHandler.ATTRIBUTES_FIELD:
          checkFieldHasType(field, RowHandler.ATTRIBUTES_FIELD_TYPE);
          break;
        case RowHandler.EVENT_TIMESTAMP_FIELD:
        case RowHandler.PUBLISH_TIMESTAMP_FIELD:
          checkFieldHasType(field, FieldType.DATETIME);
          break;
        case RowHandler.MESSAGE_KEY_FIELD:
          checkFieldHasType(field, FieldType.BYTES);
          break;
        case RowHandler.PAYLOAD_FIELD:
          checkArgument(
              FieldType.BYTES.equivalent(field.getType(), EquivalenceNullablePolicy.WEAKEN)
                  || field.getType().getTypeName().equals(TypeName.ROW),
              String.format(
                  "'%s' field must either have a 'BYTES NOT NULL' or 'ROW' schema.",
                  field.getName()));
          break;
        default:
          throw new IllegalArgumentException(
              String.format(
                  "'%s' field is invalid at the top level for Pub/Sub Lite.", field.getName()));
      }
    }
  }

  /**
   * Tagged union over the two valid LOCATION kinds: a topic path (writable tables) or a
   * subscription path (readable tables).
   */
  @AutoOneOf(Location.Kind.class)
  abstract static class Location {
    enum Kind {
      TOPIC,
      SUBSCRIPTION
    }

    abstract Kind getKind();

    abstract TopicPath topic();

    abstract SubscriptionPath subscription();

    /**
     * Parses a LOCATION string into a topic or subscription path.
     *
     * @throws IllegalArgumentException if the string is neither a topic nor a subscription path.
     */
    static Location parse(String location) {
      if (location.contains("/topics/")) {
        return AutoOneOf_PubsubLiteTableProvider_Location.topic(TopicPath.parse(location));
      }
      if (location.contains("/subscriptions/")) {
        return AutoOneOf_PubsubLiteTableProvider_Location.subscription(
            SubscriptionPath.parse(location));
      }
      throw new IllegalArgumentException(
          String.format(
              "Location '%s' does not correspond to either a Pub/Sub Lite topic or subscription.",
              location));
    }
  }

  /** Builds a {@link RowHandler}, attaching the payload serializer when one is configured. */
  private static RowHandler getRowHandler(
      Schema schema, Optional<PayloadSerializer> optionalSerializer) {
    if (optionalSerializer.isPresent()) {
      return new RowHandler(schema, optionalSerializer.get());
    }
    return new RowHandler(schema);
  }

  /**
   * Wraps {@code transform} so that failures are routed to the configured dead-letter queue when
   * the {@code deadLetterQueue} property is present; otherwise failures propagate normally via a
   * plain {@link MapElements}.
   */
  private static <InputT, OutputT>
      PTransform<PCollection<? extends InputT>, PCollection<OutputT>> addDlqIfPresent(
          SimpleFunction<InputT, OutputT> transform, JSONObject properties) {
    if (properties.containsKey("deadLetterQueue")) {
      return new DeadLetteredTransform<>(transform, properties.getString("deadLetterQueue"));
    }
    return MapElements.via(transform);
  }

  /**
   * Builds the SQL table for {@code table}: a topic LOCATION yields a writable {@link
   * PubsubLiteTopicTable}, a subscription LOCATION a readable {@link
   * PubsubLiteSubscriptionTable}.
   *
   * @throws IllegalArgumentException on schema/property/location validation failures, including
   *     declaring the read-only {@code publish_timestamp} field on a writable table.
   */
  @Override
  public BeamSqlTable buildBeamSqlTable(Table table) {
    checkArgument(table.getType().equals(getTableType()));
    validateSchema(table.getSchema());
    Optional<PayloadSerializer> serializer =
        getSerializer(table.getSchema(), table.getProperties());
    Location location = Location.parse(checkArgumentNotNull(table.getLocation()));
    RowHandler rowHandler = getRowHandler(table.getSchema(), serializer);
    switch (location.getKind()) {
      case TOPIC:
        checkArgument(
            !table.getSchema().hasField(RowHandler.PUBLISH_TIMESTAMP_FIELD),
            "May not write to publish timestamp, this field is read-only.");
        return new PubsubLiteTopicTable(
            table.getSchema(),
            location.topic(),
            addDlqIfPresent(
                SimpleFunction.fromSerializableFunctionWithOutputType(
                    rowHandler::rowToMessage, TypeDescriptor.of(PubSubMessage.class)),
                table.getProperties()));
      case SUBSCRIPTION:
        return new PubsubLiteSubscriptionTable(
            table.getSchema(),
            location.subscription(),
            addDlqIfPresent(
                SimpleFunction.fromSerializableFunctionWithOutputType(
                    rowHandler::messageToRow, TypeDescriptor.of(Row.class)),
                table.getProperties()));
      default:
        throw new IllegalArgumentException("Invalid kind for location: " + location.getKind());
    }
  }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package git4idea.branch;

import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.vcs.log.VcsLogRangeFilter;
import com.intellij.vcs.log.impl.VcsLogManager;
import com.intellij.vcs.log.impl.VcsProjectLog;
import com.intellij.vcs.log.util.VcsLogUtil;
import com.intellij.vcs.log.visible.filters.VcsLogFilterObject;
import git4idea.GitVcs;
import git4idea.commands.Git;
import git4idea.repo.GitRepository;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;
import java.util.Map;

/**
 * Default {@link GitBrancher} implementation. Each branch operation is delegated to a
 * {@link GitBranchWorker} and runs inside a background task ({@link CommonBackgroundTask}),
 * optionally followed by a callback invoked on the AWT thread.
 */
class GitBrancherImpl implements GitBrancher {

  @NotNull private final Project myProject;
  @NotNull private final Git myGit;

  GitBrancherImpl(@NotNull Project project, @NotNull Git git) {
    myProject = project;
    myGit = git;
  }

  @Override
  public void checkoutNewBranch(@NotNull final String name, @NotNull final List<GitRepository> repositories) {
    new CommonBackgroundTask(myProject, "Checking out new branch " + name, null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).checkoutNewBranch(name, repositories);
      }
    }.runInBackground();
  }

  /** Creates a fresh worker bound to the given progress indicator. */
  private GitBranchWorker newWorker(ProgressIndicator indicator) {
    return new GitBranchWorker(myProject, myGit, new GitBranchUiHandlerImpl(myProject, myGit, indicator));
  }

  @Override
  public void createBranch(@NotNull String name, @NotNull Map<GitRepository, String> startPoints) {
    new CommonBackgroundTask(myProject, "Creating branch " + name, null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).createBranch(name, startPoints);
      }
    }.runInBackground();
  }

  @Override
  public void createNewTag(@NotNull final String name, @NotNull final String reference,
                           @NotNull final List<GitRepository> repositories,
                           @Nullable Runnable callInAwtLater) {
    // BUG FIX: the progress title previously said "Checking out new branch" — a copy-paste
    // from checkoutNewBranch(); this task creates a tag, not a branch.
    new CommonBackgroundTask(myProject, "Creating new tag " + name, callInAwtLater) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).createNewTag(name, reference, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void checkout(@NotNull final String reference, final boolean detach,
                       @NotNull final List<GitRepository> repositories,
                       @Nullable Runnable callInAwtLater) {
    new CommonBackgroundTask(myProject, "Checking out " + reference, callInAwtLater) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).checkout(reference, detach, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void checkoutNewBranchStartingFrom(@NotNull final String newBranchName, @NotNull final String startPoint,
                                            @NotNull final List<GitRepository> repositories,
                                            @Nullable Runnable callInAwtLater) {
    new CommonBackgroundTask(myProject, String.format("Checking out %s from %s", newBranchName, startPoint), callInAwtLater) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).checkoutNewBranchStartingFrom(newBranchName, startPoint, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void deleteBranch(@NotNull final String branchName, @NotNull final List<GitRepository> repositories) {
    new CommonBackgroundTask(myProject, "Deleting " + branchName, null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).deleteBranch(branchName, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void deleteRemoteBranch(@NotNull final String branchName, @NotNull final List<GitRepository> repositories) {
    new CommonBackgroundTask(myProject, "Deleting " + branchName, null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).deleteRemoteBranch(branchName, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void compare(@NotNull final String branchName, @NotNull final List<GitRepository> repositories,
                      @NotNull final GitRepository selectedRepository) {
    if (Registry.is("git.compare.branches.as.tab")) {
      // DEDUPLICATION: this lambda previously inlined the exact body of openComparingLogTab(),
      // leaving the helper as dead code; call the helper instead.
      VcsLogUtil.runWhenLogIsReady(myProject, (log, logManager) -> openComparingLogTab(log, logManager, branchName));
    }
    else {
      new CommonBackgroundTask(myProject, "Comparing with " + branchName, null) {
        @Override public void execute(@NotNull ProgressIndicator indicator) {
          newWorker(indicator).compare(branchName, repositories, selectedRepository);
        }
      }.runInBackground();
    }
  }

  /** Opens a new VCS log tab filtered to the HEAD..branchName range. */
  private static void openComparingLogTab(@NotNull VcsProjectLog log, @NotNull VcsLogManager logManager,
                                          @NotNull String branchName) {
    VcsLogRangeFilter filters = VcsLogFilterObject.fromRange("HEAD", branchName);
    log.getTabsManager().openAnotherLogTab(logManager, VcsLogFilterObject.collection(filters));
  }

  @Override
  public void merge(@NotNull final String branchName, @NotNull final DeleteOnMergeOption deleteOnMerge,
                    @NotNull final List<GitRepository> repositories) {
    new CommonBackgroundTask(myProject, "Merging " + branchName, null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).merge(branchName, deleteOnMerge, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void rebase(@NotNull final List<GitRepository> repositories, @NotNull final String branchName) {
    new CommonBackgroundTask(myProject, "Rebasing onto " + branchName, null) {
      @Override void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).rebase(repositories, branchName);
      }
    }.runInBackground();
  }

  @Override
  public void rebaseOnCurrent(@NotNull final List<GitRepository> repositories, @NotNull final String branchName) {
    new CommonBackgroundTask(myProject, "Rebasing " + branchName + "...", null) {
      @Override void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).rebaseOnCurrent(repositories, branchName);
      }
    }.runInBackground();
  }

  @Override
  public void renameBranch(@NotNull final String currentName, @NotNull final String newName,
                           @NotNull final List<GitRepository> repositories) {
    new CommonBackgroundTask(myProject, "Renaming " + currentName + " to " + newName + "...", null) {
      @Override void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).renameBranch(currentName, newName, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void deleteTag(@NotNull String name, @NotNull List<GitRepository> repositories) {
    new CommonBackgroundTask(myProject, "Deleting tag " + name, null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).deleteTag(name, repositories);
      }
    }.runInBackground();
  }

  @Override
  public void deleteRemoteTag(@NotNull String name, @NotNull Map<GitRepository, String> repositories) {
    new CommonBackgroundTask(myProject, "Deleting tag " + name + " on remote", null) {
      @Override public void execute(@NotNull ProgressIndicator indicator) {
        newWorker(indicator).deleteRemoteTag(name, repositories);
      }
    }.runInBackground();
  }

  /**
   * Executes common operations before/after executing the actual branch operation.
   * After {@link #execute(ProgressIndicator)} finishes, the optional callback is run
   * directly in unit-test mode, otherwise via {@code invokeLater} on the AWT thread.
   */
  private static abstract class CommonBackgroundTask extends Task.Backgroundable {

    @Nullable private final Runnable myCallInAwtAfterExecution;

    private CommonBackgroundTask(@Nullable final Project project, @NotNull final String title,
                                 @Nullable Runnable callInAwtAfterExecution) {
      super(project, title);
      myCallInAwtAfterExecution = callInAwtAfterExecution;
    }

    @Override
    public final void run(@NotNull ProgressIndicator indicator) {
      execute(indicator);
      if (myCallInAwtAfterExecution != null) {
        Application application = ApplicationManager.getApplication();
        if (application.isUnitTestMode()) {
          myCallInAwtAfterExecution.run();
        }
        else {
          application.invokeLater(myCallInAwtAfterExecution, application.getDefaultModalityState());
        }
      }
    }

    abstract void execute(@NotNull ProgressIndicator indicator);

    void runInBackground() {
      GitVcs.runInBackground(this);
    }
  }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.sort;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemException;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Sort the rows in the input-streams based on certain criteria.
 *
 * <p>Implements an external merge sort: rows accumulate in an in-memory buffer; when the buffer
 * fills (or free memory drops below a configured limit) the buffer is sorted and spilled to a
 * temp file. At end-of-input, the spilled files are merged back in sorted order via
 * {@link #getBuffer()}. Optionally supports a pre-sorted "group sort" mode that sorts each
 * group independently.
 *
 * @author Matt
 * @since 29-apr-2003
 */
public class SortRows extends BaseStep implements StepInterface {
  private static Class<?> PKG = SortRows.class; // for i18n

  private SortRowsMeta meta;
  private SortRowsData data;

  public SortRows( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
    meta = (SortRowsMeta) getStepMeta().getStepMetaInterface();
    data = (SortRowsData) stepDataInterface;
  }

  /**
   * Adds one row to the in-memory sort buffer, converting binary-storage key fields to native
   * types first. Triggers an in-memory sort (no files yet, end of input) or a spill-to-disk
   * ({@link #sortExternalRows()}) when the buffer is full or memory is low.
   *
   * @param rowMeta metadata describing {@code r}
   * @param r the row to add; {@code null} signals end of input
   * @return always {@code true} (callers treat {@code false} as "stop")
   */
  private boolean addBuffer( RowMetaInterface rowMeta, Object[] r ) throws KettleException {
    if ( r != null ) {
      // Do we need to convert binary string keys?
      //
      for ( int i = 0; i < data.fieldnrs.length; i++ ) {
        if ( data.convertKeysToNative[i] ) {
          int index = data.fieldnrs[i];
          r[index] = rowMeta.getValueMeta( index ).convertBinaryStringToNativeType( (byte[]) r[index] );
        }
      }

      // Save row
      //
      data.buffer.add( r );
    }
    if ( data.files.size() == 0 && r == null ) { // No more records: sort buffer
      quickSort( data.buffer );
    }

    // Check the free memory every 1000 rows...
    //
    data.freeCounter++;
    if ( data.sortSize <= 0 && data.freeCounter >= 1000 ) {
      data.freeMemoryPct = Const.getPercentageFreeMemory();
      data.freeCounter = 0;

      if ( log.isDetailed() ) {
        data.memoryReporting++;
        if ( data.memoryReporting >= 10 ) {
          if ( log.isDetailed() ) {
            logDetailed( "Available memory : " + data.freeMemoryPct + "%" );
          }
          data.memoryReporting = 0;
        }
      }
    }

    boolean doSort = data.buffer.size() == data.sortSize; // Buffer is full: sort & dump to disk
    doSort |= data.files.size() > 0 && r == null && data.buffer.size() > 0; // No more records: join from disk
    doSort |=
      data.freeMemoryPctLimit > 0 && data.freeMemoryPct < data.freeMemoryPctLimit
        && data.buffer.size() >= data.minSortSize;

    // time to sort the buffer and write the data to disk...
    //
    if ( doSort ) {
      sortExternalRows();
    }

    return true;
  }

  /**
   * Sorts the in-memory buffer and spills it to a (optionally gzip-compressed) temp file.
   * With "only pass unique rows", key-duplicates are detected on the sorted buffer (by index)
   * and skipped while writing. Records the number of rows written per file in
   * {@code data.bufferSizes}, then clears the buffer.
   *
   * @throws KettleException wrapping any I/O failure on the temp file
   */
  private void sortExternalRows() throws KettleException {
    // First sort the rows in buffer[]
    quickSort( data.buffer );

    // Then write them to disk...
    DataOutputStream dos;
    GZIPOutputStream gzos;
    int p;

    try {
      FileObject fileObject =
        KettleVFS.createTempFile( meta.getPrefix(), ".tmp", environmentSubstitute( meta.getDirectory() ),
          getTransMeta() );

      data.files.add( fileObject ); // Remember the files!
      OutputStream outputStream = KettleVFS.getOutputStream( fileObject, false );
      if ( data.compressFiles ) {
        gzos = new GZIPOutputStream( new BufferedOutputStream( outputStream ) );
        dos = new DataOutputStream( gzos );
      } else {
        dos = new DataOutputStream( new BufferedOutputStream( outputStream, 500000 ) );
        gzos = null;
      }

      // Just write the data, nothing else
      // Collect indexes of key-duplicate rows on the sorted buffer (adjacent rows compare equal).
      List<Integer> duplicates = new ArrayList<Integer>();
      Object[] previousRow = null;
      if ( meta.isOnlyPassingUniqueRows() ) {
        int index = 0;
        while ( index < data.buffer.size() ) {
          Object[] row = data.buffer.get( index );
          if ( previousRow != null ) {
            int result = data.outputRowMeta.compare( row, previousRow, data.fieldnrs );
            if ( result == 0 ) {
              duplicates.add( index );
              if ( log.isRowLevel() ) {
                logRowlevel( "Duplicate row removed: " + data.outputRowMeta.getString( row ) );
              }
            }
          }
          index++;
          previousRow = row;
        }
      }

      // How many records do we have left?
      data.bufferSizes.add( data.buffer.size() - duplicates.size() );

      // Write every buffered row, skipping the recorded duplicate indexes in order.
      int duplicatesIndex = 0;
      for ( p = 0; p < data.buffer.size(); p++ ) {
        boolean skip = false;
        if ( duplicatesIndex < duplicates.size() ) {
          if ( p == duplicates.get( duplicatesIndex ) ) {
            skip = true;
            duplicatesIndex++;
          }
        }
        if ( !skip ) {
          data.outputRowMeta.writeData( dos, data.buffer.get( p ) );
        }
      }

      if ( data.sortSize < 0 ) {
        if ( data.buffer.size() > data.minSortSize ) {
          data.minSortSize = data.buffer.size(); // if we did it once, we can do it again.

          // Memory usage goes up over time, even with garbage collection
          // We need pointers, file handles, etc.
          // As such, we're going to lower the min sort size a bit
          //
          data.minSortSize = (int) Math.round( data.minSortSize * 0.90 );
        }
      }

      // Clear the list
      data.buffer.clear();

      // Close temp-file
      dos.close(); // close data stream
      if ( gzos != null ) {
        gzos.close(); // close gzip stream
      }
      outputStream.close(); // close file stream

      // How much memory do we have left?
      //
      data.freeMemoryPct = Const.getPercentageFreeMemory();
      data.freeCounter = 0;
      if ( data.sortSize <= 0 ) {
        if ( log.isDetailed() ) {
          logDetailed( "Available memory : " + data.freeMemoryPct + "%" );
        }
      }
    } catch ( Exception e ) {
      throw new KettleException( "Error processing temp-file!", e );
    }

    data.getBufferIndex = 0;
  }

  /**
   * Wraps a gzip stream in a DataInputStream and remembers the gzip stream so it can be closed
   * later (see {@link #getBuffer()}).
   */
  private DataInputStream getDataInputStream( GZIPInputStream gzipInputStream ) {
    DataInputStream result = new DataInputStream( gzipInputStream );
    data.gzis.add( gzipInputStream );
    return result;
  }

  /**
   * Returns the next row in sorted order, or {@code null} when exhausted.
   *
   * <p>Pure in-memory case (no spill files): walks {@code data.buffer} by index. External case:
   * on first call, opens all temp files and reads one row from each into a sorted candidate list
   * ({@code data.tempRows}); each call then removes the smallest candidate, refills from that
   * candidate's file (binary-searching the insert position), and on EOF closes/deletes the file
   * and renumbers the remaining candidates' file indexes.
   */
  private Object[] getBuffer() throws KettleValueException {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if ( data.files.size() > 0 && ( data.dis.size() == 0 || data.fis.size() == 0 ) ) {
      if ( log.isBasic() ) {
        logBasic( "Opening " + data.files.size() + " tmp-files..." );
      }

      try {
        for ( int f = 0; f < data.files.size() && !isStopped(); f++ ) {
          FileObject fileObject = data.files.get( f );
          String filename = KettleVFS.getFilename( fileObject );
          if ( log.isDetailed() ) {
            logDetailed( "Opening tmp-file: [" + filename + "]" );
          }
          InputStream fi = KettleVFS.getInputStream( fileObject );
          DataInputStream di;
          data.fis.add( fi );
          if ( data.compressFiles ) {
            di = getDataInputStream( new GZIPInputStream( new BufferedInputStream( fi ) ) );
          } else {
            di = new DataInputStream( new BufferedInputStream( fi, 50000 ) );
          }
          data.dis.add( di );

          // How long is the buffer?
          int buffersize = data.bufferSizes.get( f );

          if ( log.isDetailed() ) {
            logDetailed( "[" + filename + "] expecting " + buffersize + " rows..." );
          }

          if ( buffersize > 0 ) {
            Object[] row = data.outputRowMeta.readData( di );
            data.rowbuffer.add( row ); // new row from input stream
            data.tempRows.add( new RowTempFile( row, f ) );
          }
        }

        // Sort the data row buffer
        Collections.sort( data.tempRows, data.comparator );
      } catch ( Exception e ) {
        logError( "Error reading back tmp-files : " + e.toString() );
        logError( Const.getStackTracker( e ) );
      }
    }

    if ( data.files.size() == 0 ) {
      // Pure in-memory: serve the next buffered row by index.
      if ( data.getBufferIndex < data.buffer.size() ) {
        retval = data.buffer.get( data.getBufferIndex );
        data.getBufferIndex++;
      } else {
        retval = null;
      }
    } else {
      if ( data.rowbuffer.size() == 0 ) {
        retval = null;
      } else {
        // We now have "filenr" rows waiting: which one is the smallest?
        //
        if ( log.isRowLevel() ) {
          for ( int i = 0; i < data.rowbuffer.size() && !isStopped(); i++ ) {
            Object[] b = data.rowbuffer.get( i );
            logRowlevel( "--BR#" + i + ": " + data.outputRowMeta.getString( b ) );
          }
        }

        RowTempFile rowTempFile = data.tempRows.remove( 0 );
        retval = rowTempFile.row;
        int smallest = rowTempFile.fileNumber;

        // now get another Row for position smallest
        FileObject file = data.files.get( smallest );
        DataInputStream di = data.dis.get( smallest );
        InputStream fi = data.fis.get( smallest );

        try {
          Object[] row2 = data.outputRowMeta.readData( di );
          RowTempFile extra = new RowTempFile( row2, smallest );

          // Keep tempRows sorted: binarySearch yields (-insertion point - 1) when not found.
          int index = Collections.binarySearch( data.tempRows, extra, data.comparator );
          if ( index < 0 ) {
            data.tempRows.add( index * ( -1 ) - 1, extra );
          } else {
            data.tempRows.add( index, extra );
          }
        } catch ( KettleFileException fe ) { // empty file or EOF mostly
          GZIPInputStream gzfi = ( data.compressFiles ) ? data.gzis.get( smallest ) : null;
          try {
            di.close();
            fi.close();
            if ( gzfi != null ) {
              gzfi.close();
            }
            file.delete();
          } catch ( IOException e ) {
            logError( "Unable to close/delete file #" + smallest + " --> " + file.toString() );
            setErrors( 1 );
            stopAll();
            return null;
          }

          data.files.remove( smallest );
          data.dis.remove( smallest );
          data.fis.remove( smallest );
          if ( gzfi != null ) {
            data.gzis.remove( smallest );
          }

          // Also update all file numbers in in data.tempRows if they are larger
          // than smallest.
          //
          for ( RowTempFile rtf : data.tempRows ) {
            if ( rtf.fileNumber > smallest ) {
              rtf.fileNumber--;
            }
          }
        } catch ( SocketTimeoutException e ) {
          throw new KettleValueException( e ); // should never happen on local files
        }
      }
    }
    return retval;
  }

  /**
   * Step entry point. Without group sort, delegates every row straight to
   * {@link #processSortRow}. With group sort, detects group boundaries via
   * {@link #sameGroup}: within a group rows are collected; on a boundary the current batch is
   * flushed ({@code r == null} call), state is re-initialized, and a new batch starts.
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {

    // if Group Sort is not enabled then do the normal sort.
    if ( !meta.isGroupSortEnabled() ) {
      boolean retval = this.processSortRow( smi, sdi, getRow(), first );
      return retval;
    }

    Object[] r = getRow(); // get row!
    if ( first ) {
      if ( r == null ) {
        this.setOutputDone();
        return false;
      }

      // Resolve the configured group-field names to input row indexes.
      data.groupnrs = new int[meta.getGroupFields().size()];
      for ( int i = 0; i < meta.getGroupFields().size(); i++ ) {
        data.groupnrs[i] = getInputRowMeta().indexOfValue( meta.getGroupFields().get( i ) );
        if ( data.groupnrs[i] < 0 ) {
          logError( String.format( "Presorted Field %s cound not be found", meta.getGroupFields().get( i ) ) );
          setErrors( 1 );
          stopAll();
          return false;
        }
      }
    }

    boolean retval = true;
    if ( first || data.newBatch ) {
      first = false;
      data.newBatch = false;
      setPrevious( r );
      // If there is no more input let processSortRow to finish the sorting.
      boolean moreInput = ( r != null ) ? true : false;
      // this enables Sort stuff to initialize it's state.
      retval = this.processSortRow( smi, sdi, r, moreInput );
    } else {
      if ( this.sameGroup( data.previous, r ) ) {
        setPrevious( r );
        // this performs SortRows normal row collection functionality.
        retval = this.processSortRow( smi, sdi, r, false );
      } else {
        // this performs SortRows sort action.
        this.processSortRow( smi, sdi, null, false );
        setPrevious( r );
        data.newBatch = true;
        // this performs SortRows to initialize all it's state
        this.init( smi, sdi );
        retval = this.processSortRow( smi, sdi, r, true );
      }
    }
    if ( r == null ) {
      this.setOutputDone();
    }
    return retval;
  }

  /**
   * Core per-row sort processing: on the first row resolves sort-field indexes and output
   * metadata, then buffers the row via {@link #addBuffer}. A {@code null} row flushes via
   * {@link #passBuffer} (signalling output-done only when group sort is disabled).
   *
   * @param r the row, or {@code null} when no more input is expected
   * @param first whether this is the first row of the (batch's) input
   * @return {@code true} to keep processing, {@code false} when finished
   */
  public boolean processSortRow( StepMetaInterface smi, StepDataInterface sdi, Object[] r, boolean first )
    throws KettleException {
    boolean err = true;

    // initialize
    if ( first && r != null ) {
      first = false;
      data.convertKeysToNative = new boolean[meta.getFieldName().length];
      data.fieldnrs = new int[meta.getFieldName().length];
      for ( int i = 0; i < meta.getFieldName().length; i++ ) {
        data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] );
        if ( data.fieldnrs[i] < 0 ) {
          throw new KettleException( BaseMessages.getString(
            PKG, "SortRowsMeta.CheckResult.StepFieldNotInInputStream", meta.getFieldName()[i], getStepname() ) );
        }
        // Binary-stored keys must be converted to native types before comparing.
        data.convertKeysToNative[i] = getInputRowMeta().getValueMeta( data.fieldnrs[i] ).isStorageBinaryString();
      }

      // Metadata
      data.outputRowMeta = getInputRowMeta().clone();
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
    }

    err = addBuffer( getInputRowMeta(), r );
    if ( !err ) {
      setOutputDone(); // signal receiver we're finished.
      return false;
    }

    if ( r == null ) { // no more input to be expected...
      passBuffer( !meta.isGroupSortEnabled() );
      return false;
    }

    if ( checkFeedback( getLinesRead() ) ) {
      if ( log.isBasic() ) {
        logBasic( "Linenr " + getLinesRead() );
      }
    }
    return true;
  }

  /**
   * This method passes all rows in the buffer to the next steps.
   *
   * <p>Rows are drained in sorted order via {@link #getBuffer()}. With "only pass unique rows"
   * a second duplicate check is done across spill-file boundaries. Buffers are cleared
   * afterwards; {@code setOutputDone()} is called only when {@code signal} is true.
   */
  private void passBuffer( boolean signal ) throws KettleException {

    // Now we can start the output!
    //
    Object[] r = getBuffer();
    Object[] previousRow = null;
    while ( r != null && !isStopped() ) {
      if ( log.isRowLevel() ) {
        logRowlevel( "Read row: " + getInputRowMeta().getString( r ) );
      }

      // Do another verification pass for unique rows...
      //
      if ( meta.isOnlyPassingUniqueRows() ) {
        if ( previousRow != null ) {
          // See if this row is the same as the previous one as far as the keys
          // are concerned.
          // If so, we don't put forward this row.
          int result = data.outputRowMeta.compare( r, previousRow, data.fieldnrs );
          if ( result != 0 ) {
            putRow( data.outputRowMeta, r ); // copy row to possible alternate
            // rowset(s).
          }
        } else {
          putRow( data.outputRowMeta, r ); // copy row to next steps
        }
        previousRow = r;
      } else {
        putRow( data.outputRowMeta, r ); // copy row to possible alternate
        // rowset(s).
      }

      r = getBuffer();
    }

    // Clear out the buffer for the next batch
    //
    clearBuffers();

    // signal receiver that we are finished only if we are asked to do so. haric
    if ( signal ) {
      setOutputDone(); // signal receiver we're finished.
    }
  }

  /**
   * Initializes step state: sort-size / free-memory limits, buffers, compression flag, the
   * comparator used for merging spill files, and the adaptive minimum sort size.
   */
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (SortRowsMeta) smi;
    data = (SortRowsData) sdi;

    if ( super.init( smi, sdi ) ) {
      data.sortSize = Const.toInt( environmentSubstitute( meta.getSortSize() ), -1 );
      data.freeMemoryPctLimit = Const.toInt( meta.getFreeMemoryLimit(), -1 );
      if ( data.sortSize <= 0 && data.freeMemoryPctLimit <= 0 ) {
        // Prefer the memory limit as it should never fail
        //
        data.freeMemoryPctLimit = 25;
      }

      // In memory buffer
      //
      data.buffer = new ArrayList<Object[]>( 5000 );

      // Buffer for reading from disk
      //
      data.rowbuffer = new ArrayList<Object[]>( 5000 );

      data.compressFiles = getBooleanValueOfVariable( meta.getCompressFilesVariable(), meta.getCompressFiles() );

      // Comparator for the spill-file merge: compares rows on the configured sort fields.
      data.comparator = new Comparator<RowTempFile>() {
        public int compare( RowTempFile o1, RowTempFile o2 ) {
          try {
            return data.outputRowMeta.compare( o1.row, o2.row, data.fieldnrs );
          } catch ( KettleValueException e ) {
            logError( "Error comparing rows: " + e.toString() );
            return 0;
          }
        }
      };

      data.tempRows = new ArrayList<RowTempFile>();

      data.minSortSize = 5000;

      return true;
    }
    return false;
  }

  @Override
  public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
    clearBuffers();
    super.dispose( smi, sdi );
  }

  /**
   * Resets the in-memory buffers, closes any open temp-file streams, and deletes the temp files.
   */
  private void clearBuffers() {

    // Clean out the sort buffer
    //
    data.buffer = new ArrayList<Object[]>( 1 );
    data.getBufferIndex = 0;
    data.rowbuffer = new ArrayList<Object[]>( 1 );

    // close any open DataInputStream objects
    if ( ( data.dis != null ) && ( data.dis.size() > 0 ) ) {
      for ( DataInputStream dis : data.dis ) {
        BaseStep.closeQuietly( dis );
      }
    }
    // close any open InputStream objects
    if ( ( data.fis != null ) && ( data.fis.size() > 0 ) ) {
      for ( InputStream is : data.fis ) {
        BaseStep.closeQuietly( is );
      }
    }
    // remove temp files
    for ( int f = 0; f < data.files.size(); f++ ) {
      FileObject fileToDelete = data.files.get( f );
      try {
        if ( fileToDelete != null && fileToDelete.exists() ) {
          fileToDelete.delete();
        }
      } catch ( FileSystemException e ) {
        logError( e.getLocalizedMessage(), e );
      }
    }
  }

  /**
   * Sort the entire vector, if it is not empty.
   *
   * <p>Also reports (at detailed log level) the number of binary-string-to-native conversions
   * performed during the comparisons, and resets those counters.
   */
  public void quickSort( List<Object[]> elements ) throws KettleException {
    if ( log.isDetailed() ) {
      logDetailed( "Starting quickSort algorithm..." );
    }
    if ( elements.size() > 0 ) {
      Collections.sort( elements, new Comparator<Object[]>() {
        public int compare( Object[] o1, Object[] o2 ) {
          Object[] r1 = o1;
          Object[] r2 = o2;

          try {
            return data.outputRowMeta.compare( r1, r2, data.fieldnrs );
          } catch ( KettleValueException e ) {
            logError( "Error comparing rows: " + e.toString() );
            return 0;
          }
        }
      } );

      long nrConversions = 0L;
      for ( ValueMetaInterface valueMeta : data.outputRowMeta.getValueMetaList() ) {
        nrConversions += valueMeta.getNumberOfBinaryStringConversions();
        valueMeta.setNumberOfBinaryStringConversions( 0L );
      }
      if ( log.isDetailed() ) {
        logDetailed( "The number of binary string to data type conversions done in this sort block is "
          + nrConversions );
      }
    }
    if ( log.isDetailed() ) {
      logDetailed( "QuickSort algorithm has finished." );
    }
  }

  /**
   * Calling this method will alert the step that we finished passing records to the step. Specifically for steps like
   * "Sort Rows" it means that the buffered rows can be sorted and passed on.
   */
  public void batchComplete() throws KettleException {
    if ( data.files.size() > 0 ) {
      sortExternalRows();
    } else {
      quickSort( data.buffer );
    }
    passBuffer( !meta.isGroupSortEnabled() );
  }

  /*
   * Group Fields Implemenation haric
   */
  // Is the row r of the same group as previous?
  private boolean sameGroup( Object[] previous, Object[] r ) throws KettleValueException {
    if ( r == null ) {
      return false;
    }
    return getInputRowMeta().compare( previous, r, data.groupnrs ) == 0;
  }

  /** Remembers a defensive copy of {@code r} as the previous row for group-boundary checks. */
  private void setPrevious( Object[] r ) throws KettleException {
    if ( r != null ) {
      this.data.previous = getInputRowMeta().cloneRow( r );
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.giraffa.hbase; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY; import java.io.FileNotFoundException; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.URI; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.giraffa.FileField; import org.apache.giraffa.GiraffaConfiguration; import org.apache.giraffa.NamespaceService; import org.apache.giraffa.RowKey; import org.apache.giraffa.RowKeyFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.Options.Rename; import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks; import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DirectoryListing; 
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol; import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB; import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException; import org.apache.hadoop.hdfs.server.namenode.SafeModeException; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.Idempotent; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; /** * NamespaceAgent is the proxy used by DFSClient to communicate with HBase * as if it is a NameNode. * NamespaceAgent implements ClientProtocol and is a replacement of the * NameNode RPC proxy. 
*/
public class NamespaceAgent implements NamespaceService {
  /** Configuration key/default for the block-management coprocessor class. */
  public static final String GRFA_COPROCESSOR_KEY = "grfa.coprocessor.class";
  public static final String GRFA_COPROCESSOR_DEFAULT =
      BlockManagementAgent.class.getName();
  /** Configuration key/default for the namespace-processor coprocessor class. */
  public static final String GRFA_NAMESPACE_PROCESSOR_KEY =
      "grfa.namespace.processor.class";
  public static final String GRFA_NAMESPACE_PROCESSOR_DEFAULT =
      NamespaceProcessor.class.getName();

  /** Block operations understood by the block-management coprocessor. */
  public static enum BlockAction {
    CLOSE, ALLOCATE, DELETE
  }

  private HBaseAdmin hbAdmin;
  // The Giraffa namespace table; all namespace calls go through its coprocessors.
  private HTableInterface nsTable;
  // Server defaults assembled from configuration in initialize().
  private FsServerDefaults serverDefaults;

  private static final Log LOG =
      LogFactory.getLog(NamespaceAgent.class.getName());

  public NamespaceAgent() {}

  /**
   * Connects to HBase, builds the file-system server defaults from
   * configuration, and opens the namespace table.
   *
   * @param conf Giraffa configuration
   * @throws IOException if the configured checksum type is invalid or the
   *         namespace table does not exist (Giraffa not formatted)
   */
  @Override // NamespaceService
  public void initialize(GiraffaConfiguration conf) throws IOException {
    RowKeyFactory.registerRowKey(conf);
    this.hbAdmin = new HBaseAdmin(conf);
    String tableName = conf.get(GiraffaConfiguration.GRFA_TABLE_NAME_KEY,
        GiraffaConfiguration.GRFA_TABLE_NAME_DEFAULT);

    // Get the checksum type from config
    String checksumTypeStr = conf.get(DFS_CHECKSUM_TYPE_KEY,
        DFS_CHECKSUM_TYPE_DEFAULT);
    DataChecksum.Type checksumType;
    try {
      checksumType = DataChecksum.Type.valueOf(checksumTypeStr);
    } catch (IllegalArgumentException iae) {
      throw new IOException("Invalid checksum type in "
          + DFS_CHECKSUM_TYPE_KEY + ": " + checksumTypeStr);
    }

    this.serverDefaults = new FsServerDefaults(
        conf.getLongBytes(DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE_DEFAULT),
        conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY, DFS_BYTES_PER_CHECKSUM_DEFAULT),
        conf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
            DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT),
        (short) conf.getInt(DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT),
        conf.getInt(IO_FILE_BUFFER_SIZE_KEY, IO_FILE_BUFFER_SIZE_DEFAULT),
        conf.getBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY,
            DFS_ENCRYPT_DATA_TRANSFER_DEFAULT),
        conf.getLong(FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT),
        checksumType);

    try {
      this.nsTable = new HTable(hbAdmin.getConfiguration(), tableName);
    } catch(TableNotFoundException tnfe) {
      throw new IOException("Giraffa is not formatted.", tnfe);
    }
  }

  // Resolves src to its row key and returns a per-region ClientProtocol proxy.
  private ClientProtocol getRegionProxy(String src) throws IOException {
    return getRegionProxy(RowKeyFactory.newInstance(src));
  }

  /**
   * Builds a {@link ClientProtocol} proxy whose calls are forwarded over the
   * coprocessor RPC channel of the region that holds the given row key.
   */
  private ClientProtocol getRegionProxy(RowKey key) {
    // load blocking stub for protocol based on row key
    CoprocessorRpcChannel channel = nsTable.coprocessorService(key.getKey());
    final ClientNamenodeProtocol.BlockingInterface stub =
        ClientNamenodeProtocol.newBlockingStub(channel);

    // create a handler for forwarding proxy calls to blocking stub
    InvocationHandler handler = new InvocationHandler(){
      @Override
      public Object invoke(Object proxy, Method method, Object[] args)
          throws Throwable {
        return method.invoke(stub, args);
      }
    };

    // create a proxy implementation of ClientNamenodeProtocolPB
    Class<ClientNamenodeProtocolPB> classObj = ClientNamenodeProtocolPB.class;
    ClientNamenodeProtocolPB clientPB = (ClientNamenodeProtocolPB)
        Proxy.newProxyInstance(classObj.getClassLoader(),
            new Class[]{classObj}, handler);
    return new ClientNamenodeProtocolTranslatorPB(clientPB);
  }

  @Override // ClientProtocol
  public void abandonBlock(ExtendedBlock b, String src, String holder)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    proxy.abandonBlock(b, src, holder);
  }

  @Override // ClientProtocol
  public LocatedBlock addBlock(
      String src, String clientName, ExtendedBlock previous,
      DatanodeInfo[] excludeNodes)
      throws AccessControlException, FileNotFoundException,
      NotReplicatedYetException, SafeModeException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    LocatedBlock blk = proxy.addBlock(src, clientName, previous, excludeNodes);
    // a null block from the region means the file row was not found
    if(blk == null)
      throw new FileNotFoundException("File does not exist: " + src);
    LOG.info("Added block " + blk + " to file: " + src);
    return blk;
  }

  // append is not supported by Giraffa.
  @Override // ClientProtocol
  public LocatedBlock append(String src, String clientName)
      throws AccessControlException, DSQuotaExceededException,
      FileNotFoundException, SafeModeException,
      UnresolvedLinkException, IOException {
    throw new IOException("append not supported");
  }

  // Delegation tokens are not used; intentionally a no-op.
  @Override // ClientProtocol
  public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
      throws IOException {
  }

  @Override // ClientProtocol
  public boolean complete(String src, String clientName, ExtendedBlock last)
      throws AccessControlException, FileNotFoundException,
      SafeModeException, UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    boolean res = proxy.complete(src, clientName, last);
    // false from the region means the file row was not found
    if(!res)
      throw new FileNotFoundException("File does not exist: " + src);
    LOG.info("File: " + src + " is " + (res ? "completed" : "not completed"));
    return res;
  }

  @Override // ClientProtocol
  public void concat(String trg, String[] srcs)
      throws IOException, UnresolvedLinkException {
    throw new IOException("concat is not supported");
  }

  @Override // ClientProtocol
  public void create(
      String src, FsPermission masked, String clientName,
      EnumSetWritable<CreateFlag> createFlag, boolean createParent,
      short replication, long blockSize)
      throws AccessControlException, AlreadyBeingCreatedException,
      DSQuotaExceededException, NSQuotaExceededException,
      FileAlreadyExistsException, FileNotFoundException,
      ParentNotDirectoryException, SafeModeException,
      UnresolvedLinkException, IOException {
    // the root has no parent and therefore can never be created as a file
    if(new Path(src).getParent() == null)
      throw new IOException("Root cannot be a file.");
    ClientProtocol proxy = getRegionProxy(src);
    proxy.create(src, masked, clientName, createFlag, createParent,
        replication, blockSize);
  }

  @Override // ClientProtocol
  public void createSymlink(
      String target, String link, FsPermission dirPerm, boolean createParent)
      throws AccessControlException, FileAlreadyExistsException,
      FileNotFoundException, ParentNotDirectoryException,
      SafeModeException, UnresolvedLinkException, IOException {
    throw new IOException("symlinks are not supported");
  }

  // Non-recursive delete; kept for ClientProtocol compatibility.
  @Deprecated // ClientProtocol
  public boolean delete(String src) throws IOException {
    return delete(src, false);
  }

  /**
   * Guarantees to atomically delete the source file first, and any subsequent
   * files recursively if desired.
   */
  @Override // ClientProtocol
  public boolean delete(String src, boolean recursive) throws IOException {
    Path path = new Path(src);
    //check parent path first
    Path parentPath = path.getParent();
    if(parentPath == null) {
      throw new FileNotFoundException("Parent does not exist.");
    }
    ClientProtocol proxy = getRegionProxy(src);
    return proxy.delete(src, recursive);
  }

  @Override // ClientProtocol
  public void finalizeUpgrade() throws IOException {
    throw new IOException("upgrade is not supported");
  }

  /**
   * Must be called before FileSystem can be used!
   *
   * @param conf
   *
   * @throws IOException
   */
  @Override // NamespaceService
  public void format(GiraffaConfiguration conf) throws IOException {
    LOG.info("Format started...");
    String tableName = conf.get(GiraffaConfiguration.GRFA_TABLE_NAME_KEY,
        GiraffaConfiguration.GRFA_TABLE_NAME_DEFAULT);
    URI gURI = FileSystem.getDefaultUri(conf);
    if( ! GiraffaConfiguration.GRFA_URI_SCHEME.equals(gURI.getScheme()))
      throw new IOException("Cannot format. Non-Giraffa URI found: " + gURI);
    HBaseAdmin hbAdmin = new HBaseAdmin(HBaseConfiguration.create(conf));
    if(hbAdmin.tableExists(tableName)) {
      // remove existing table to renew it
      if(hbAdmin.isTableEnabled(tableName)) {
        hbAdmin.disableTable(tableName);
      }
      hbAdmin.deleteTable(tableName);
    }
    HTableDescriptor htd = buildGiraffaTable(conf);

    hbAdmin.createTable(htd);
    LOG.info("Created " + tableName);
    hbAdmin.close();
    LOG.info("Format ended... adding work directory.");
  }

  /**
   * Builds the namespace-table descriptor: one column family for file
   * attributes plus the block-management and namespace coprocessors.
   */
  private static HTableDescriptor buildGiraffaTable(GiraffaConfiguration conf)
      throws IOException {
    String tableName = conf.get(GiraffaConfiguration.GRFA_TABLE_NAME_KEY,
        GiraffaConfiguration.GRFA_TABLE_NAME_DEFAULT);
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
    htd.addFamily(new HColumnDescriptor(FileField.getFileAttributes()));
    String coprocClass = conf.get(GRFA_COPROCESSOR_KEY,
        GRFA_COPROCESSOR_DEFAULT);
    htd.addCoprocessor(coprocClass, null, Coprocessor.PRIORITY_SYSTEM, null);
    LOG.info("Block management processor is set to: " + coprocClass);
    String nsProcClass = conf.get(
        GRFA_NAMESPACE_PROCESSOR_KEY, GRFA_NAMESPACE_PROCESSOR_DEFAULT);
    htd.addCoprocessor(nsProcClass, null, Coprocessor.PRIORITY_SYSTEM, null);
    LOG.info("Namespace processor is set to: " + nsProcClass);
    return htd;
  }

  @Override // ClientProtocol
  public void fsync(String src, String client, long lastBlockLength)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    throw new IOException("fsync is not supported");
  }

  @Override // ClientProtocol
  public LocatedBlocks getBlockLocations(String src, long offset, long length)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    LocatedBlocks lbs = proxy.getBlockLocations(src, offset, length);
    if(lbs == null)
      throw new FileNotFoundException("File does not exist: " + src);
    return lbs;
  }

  @Override // ClientProtocol
  public ContentSummary getContentSummary(String path)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(path);
    ContentSummary summary = proxy.getContentSummary(path);
    return summary;
  }

  @Override // ClientProtocol
  public DatanodeInfo[] getDatanodeReport(DatanodeReportType type)
      throws IOException {
    throw new IOException("getDatanodeReport is not supported");
  }

  // Delegation tokens are not used; callers receive null.
  @Override // ClientProtocol
  public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
      throws IOException {
    return null;
  }

  @Override // ClientProtocol
  public HdfsFileStatus getFileInfo(String src)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    HdfsFileStatus fStatus = proxy.getFileInfo(src);
    return fStatus;
  }

  @Override // ClientProtocol
  public HdfsFileStatus getFileLinkInfo(String src)
      throws AccessControlException, UnresolvedLinkException, IOException {
    throw new IOException("symlinks are not supported");
  }

  @Override // ClientProtocol
  public String getLinkTarget(String path)
      throws AccessControlException, FileNotFoundException, IOException {
    throw new IOException("symlinks are not supported");
  }

  @Override // ClientProtocol
  public DirectoryListing getListing(
      String src, byte[] startAfter, boolean needLocation)
      throws IOException {
    // normalize a null cursor to "start from the beginning"
    if(startAfter == null)
      startAfter = new byte[0];
    ClientProtocol proxy = getRegionProxy(src);
    DirectoryListing files = proxy.getListing(src, startAfter, needLocation);
    return files;
  }

  @Override // ClientProtocol
  public long getPreferredBlockSize(String filename)
      throws IOException, UnresolvedLinkException {
    ClientProtocol proxy = getRegionProxy(filename);
    long blockSize = proxy.getPreferredBlockSize(filename);
    // a negative size from the region means the file row was not found
    if(blockSize < 0)
      throw new FileNotFoundException("File does not exist: " + filename);
    return blockSize;
  }

  @Override // ClientProtocol
  public FsServerDefaults getServerDefaults() throws IOException {
    return this.serverDefaults;
  }

  @Override // ClientProtocol
  public long[] getStats() throws IOException {
    throw new IOException("getStats is not supported");
  }

  @Override // ClientProtocol
  public void metaSave(String filename) throws IOException {
    throw new IOException("metaSave is not supported");
  }

  @Override // ClientProtocol
  public boolean mkdirs(String src, FsPermission masked, boolean createParent)
      throws AccessControlException, FileAlreadyExistsException,
      FileNotFoundException, NSQuotaExceededException,
      ParentNotDirectoryException, SafeModeException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    boolean created = proxy.mkdirs(src, masked, createParent);
    // without createParent, a false result means the parent was missing
    if(!createParent && !created)
      throw new FileNotFoundException("File does not exist: " + src);
    return created;
  }

  @Override // ClientProtocol
  public boolean recoverLease(String src, String clientName)
      throws IOException {
    return false;
  }

  @Override // ClientProtocol
  public void refreshNodes() throws IOException {
    throw new IOException("refreshNodes is not supported");
  }

  @Override // ClientProtocol
  public boolean rename(String src, String dst)
      throws UnresolvedLinkException, IOException {
    throw new IOException("rename is not supported");
  }

  @Override // ClientProtocol
  public void rename2(String src, String dst, Rename... options)
      throws AccessControlException, DSQuotaExceededException,
      FileAlreadyExistsException, FileNotFoundException,
      NSQuotaExceededException, ParentNotDirectoryException,
      SafeModeException, UnresolvedLinkException, IOException {
    throw new IOException("rename is not supported");
  }

  @Override // ClientProtocol
  public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
      throws IOException {
    return 0;
  }

  // Lease renewal is intentionally a no-op.
  @Override // ClientProtocol
  public void renewLease(String clientName)
      throws AccessControlException, IOException {
  }

  // Bad-block reporting is intentionally a no-op.
  @Override // ClientProtocol
  public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
  }

  @Override // ClientProtocol
  public boolean restoreFailedStorage(String arg)
      throws AccessControlException {
    return false;
  }

  @Override // ClientProtocol
  public void saveNamespace() throws AccessControlException, IOException {
    throw new IOException("saveNamespace is not supported");
  }

  @Override // ClientProtocol
  public void setOwner(String src, String username, String groupname)
      throws AccessControlException, FileNotFoundException,
      SafeModeException, UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    proxy.setOwner(src, username, groupname);
  }

  @Override // ClientProtocol
  public void setPermission(String src, FsPermission permission)
      throws AccessControlException, FileNotFoundException,
      SafeModeException, UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    proxy.setPermission(src, permission);
  }

  @Override // ClientProtocol
  public void setQuota(String src, long namespaceQuota, long diskspaceQuota)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    proxy.setQuota(src, namespaceQuota, diskspaceQuota);
  }

  @Override // ClientProtocol
  public boolean setReplication(String src, short replication)
      throws AccessControlException, DSQuotaExceededException,
      FileNotFoundException, SafeModeException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    return proxy.setReplication(src, replication);
  }

  // Safe mode is not implemented; always reports "not in safe mode".
  @Override // ClientProtocol
  public boolean setSafeMode(SafeModeAction action, boolean isChecked)
      throws IOException {
    return false;
  }

  @Override // ClientProtocol
  public void setTimes(String src, long mtime, long atime)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    proxy.setTimes(src, mtime, atime);
  }

  @Override // ClientProtocol
  public LocatedBlock updateBlockForPipeline(ExtendedBlock block,
      String clientName) throws IOException {
    return null;
  }

  // Pipeline updates are intentionally a no-op.
  @Override // ClientProtocol
  public void updatePipeline(
      String clientName, ExtendedBlock oldBlock, ExtendedBlock newBlock,
      DatanodeID[] newNodes) throws IOException {
  }

  /** Closes the namespace table and the HBase admin connection. */
  @Override // NamespaceService
  public void close() throws IOException {
    nsTable.close();
    hbAdmin.close();
  }

  @Override
  public LocatedBlock getAdditionalDatanode(String src, ExtendedBlock blk,
      DatanodeInfo[] existings, DatanodeInfo[] excludes,
      int numAdditionalNodes, String clientName)
      throws AccessControlException, FileNotFoundException,
      SafeModeException, UnresolvedLinkException, IOException {
    ClientProtocol proxy = getRegionProxy(src);
    return proxy.getAdditionalDatanode(src, blk, existings, excludes,
        numAdditionalNodes, clientName);
  }

  @Override
  public CorruptFileBlocks listCorruptFileBlocks(String arg0, String arg1)
      throws IOException {
    throw new IOException("corrupt file block listing is not supported");
  }

  @Override
  public void setBalancerBandwidth(long arg0) throws IOException {
    throw new IOException("bandwidth balancing is not supported");
  }

  @Override
  @Idempotent
  public long rollEdits() throws AccessControlException, IOException {
    throw new IOException("rollEdits is not supported");
  }

  @Override
  public DataEncryptionKey getDataEncryptionKey() throws IOException {
    throw new IOException("data encryption is not supported");
  }
}
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.cli.fs.command; import alluxio.AlluxioURI; import alluxio.annotation.PublicApi; import alluxio.cli.CommandUtils; import alluxio.client.file.FileSystemContext; import alluxio.client.file.URIStatus; import alluxio.conf.PropertyKey; import alluxio.exception.AlluxioException; import alluxio.exception.ExceptionMessage; import alluxio.exception.status.InvalidArgumentException; import alluxio.grpc.ListStatusPOptions; import alluxio.grpc.LoadMetadataPType; import alluxio.util.CommonUtils; import alluxio.util.FormatUtils; import alluxio.util.SecurityUtils; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import java.io.IOException; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.concurrent.ThreadSafe; /** * Displays information for the path specified in args. Depends on different options, this command * can also display the information for all directly children under the path, or recursively. 
*/
@ThreadSafe
@PublicApi
public final class LsCommand extends AbstractFileSystemCommand {
  public static final String IN_ALLUXIO_STATE_DIR = "DIR";
  public static final String IN_ALLUXIO_STATE_FILE_FORMAT = "%d%%";

  // Permission: drwxrwxrwx+
  public static final String LS_FORMAT_PERMISSION = "%-12s";
  public static final String LS_FORMAT_FILE_SIZE = "%15s";
  public static final String LS_FORMAT_TIMESTAMP = "%24s";
  public static final String LS_FORMAT_ALLUXIO_STATE = "%5s";
  public static final String LS_FORMAT_PERSISTENCE_STATE = "%16s";
  public static final String LS_FORMAT_USER_NAME = "%-15s";
  public static final String LS_FORMAT_GROUP_NAME = "%-15s";
  public static final String LS_FORMAT_FILE_PATH = "%-5s";
  // Row format when security is disabled: no permission/owner/group columns.
  public static final String LS_FORMAT_NO_ACL =
      LS_FORMAT_FILE_SIZE + LS_FORMAT_PERSISTENCE_STATE + LS_FORMAT_TIMESTAMP
          + LS_FORMAT_ALLUXIO_STATE + " " + LS_FORMAT_FILE_PATH + "%n";
  // Full row format used when security is enabled.
  public static final String LS_FORMAT =
      LS_FORMAT_PERMISSION + LS_FORMAT_USER_NAME + LS_FORMAT_GROUP_NAME
          + LS_FORMAT_FILE_SIZE + LS_FORMAT_PERSISTENCE_STATE + LS_FORMAT_TIMESTAMP
          + LS_FORMAT_ALLUXIO_STATE + " " + LS_FORMAT_FILE_PATH + "%n";

  // Maps each --sort field name to the comparator implementing it.
  private static final Map<String, Comparator<URIStatus>> SORT_FIELD_COMPARATORS =
      new HashMap<>();

  static {
    SORT_FIELD_COMPARATORS.put("creationTime",
        Comparator.comparingLong(URIStatus::getCreationTimeMs));
    SORT_FIELD_COMPARATORS.put("inMemoryPercentage",
        Comparator.comparingLong(URIStatus::getInMemoryPercentage));
    SORT_FIELD_COMPARATORS.put("lastAccessTime",
        Comparator.comparingLong(URIStatus::getLastAccessTimeMs));
    SORT_FIELD_COMPARATORS.put("lastModificationTime",
        Comparator.comparingLong(URIStatus::getLastModificationTimeMs));
    SORT_FIELD_COMPARATORS.put("name",
        Comparator.comparing(URIStatus::getName, String.CASE_INSENSITIVE_ORDER));
    SORT_FIELD_COMPARATORS.put("path",
        Comparator.comparing(URIStatus::getPath));
    SORT_FIELD_COMPARATORS.put("size",
        Comparator.comparingLong(URIStatus::getLength));
  }

  // Maps each --timestamp option value to the accessor returning that
  // timestamp (in epoch ms) from a status.
  private static final Map<String, Function<URIStatus, Long>> TIMESTAMP_FIELDS =
      new HashMap<>();

  static {
    TIMESTAMP_FIELDS.put("creationTime", URIStatus::getCreationTimeMs);
    TIMESTAMP_FIELDS.put("lastAccessTime", URIStatus::getLastAccessTimeMs);
    TIMESTAMP_FIELDS.put("lastModificationTime",
        URIStatus::getLastModificationTimeMs);
  }

  private static final Option FORCE_OPTION =
      Option.builder("f")
          .required(false)
          .hasArg(false)
          .desc("force to load metadata for immediate children in a directory")
          .build();

  private static final Option LIST_DIR_AS_FILE_OPTION =
      Option.builder("d")
          .required(false)
          .hasArg(false)
          .desc("list directories as plain files")
          .build();

  private static final Option LIST_HUMAN_READABLE_OPTION =
      Option.builder("h")
          .required(false)
          .hasArg(false)
          .desc("print human-readable format sizes")
          .build();

  private static final Option LIST_PINNED_FILES_OPTION =
      Option.builder("p")
          .required(false)
          .hasArg(false)
          .desc("list all pinned files")
          .build();

  private static final Option RECURSIVE_OPTION =
      Option.builder("R")
          .required(false)
          .hasArg(false)
          .desc("list subdirectories recursively")
          .build();

  private static final Option SORT_OPTION =
      Option.builder()
          .required(false)
          .longOpt("sort")
          .hasArg(true)
          .desc("sort statuses by the given field {"
              + String.join("|", SORT_FIELD_COMPARATORS.keySet()) + "}")
          .build();

  private static final Option REVERSE_SORT_OPTION =
      Option.builder("r")
          .required(false)
          .hasArg(false)
          .desc("reverse order while sorting")
          .build();

  private static final Option TIMESTAMP_OPTION =
      Option.builder()
          .required(false)
          .longOpt("timestamp")
          .hasArg(true)
          .desc("display specific timestamp(default is last modification time) {"
              + String.join("|", TIMESTAMP_FIELDS.keySet()) + "}")
          .build();

  /**
   * Formats the ls result string.
   *
   * @param hSize print human-readable format sizes
   * @param acl whether security is enabled
   * @param isFolder whether this path is a file or a folder
   * @param permission permission string
   * @param userName user name
   * @param groupName group name
   * @param size size of the file in bytes
   * @param timestamp the epoch time in ms
   * @param inAlluxioPercentage whether the file is in Alluxio
   * @param persistenceState the persistence state of the file
   * @param path path of the file or folder
   * @param dateFormatPattern the format to follow when printing dates
   * @return the formatted string according to acl and isFolder
   */
  public static String formatLsString(boolean hSize, boolean acl, boolean isFolder,
      String permission, String userName, String groupName, long size, long timestamp,
      int inAlluxioPercentage, String persistenceState, String path, String dateFormatPattern) {
    String inAlluxioState;
    String sizeStr;
    if (isFolder) {
      // directories show "DIR" and their raw child count as the size
      inAlluxioState = IN_ALLUXIO_STATE_DIR;
      sizeStr = String.valueOf(size);
    } else {
      inAlluxioState = String.format(IN_ALLUXIO_STATE_FILE_FORMAT, inAlluxioPercentage);
      sizeStr = hSize ? FormatUtils.getSizeFromBytes(size) : String.valueOf(size);
    }

    if (acl) {
      return String.format(LS_FORMAT, permission, userName, groupName,
          sizeStr, persistenceState,
          CommonUtils.convertMsToDate(timestamp, dateFormatPattern), inAlluxioState, path);
    } else {
      return String.format(LS_FORMAT_NO_ACL, sizeStr, persistenceState,
          CommonUtils.convertMsToDate(timestamp, dateFormatPattern), inAlluxioState, path);
    }
  }

  /**
   * Prints one formatted ls row for the given status to stdout, unless
   * pinned-only filtering excludes it.
   *
   * @param status the file/directory status to print
   * @param hSize print human-readable sizes
   * @param timestampFunction extracts the timestamp (ms) to display
   * @param pinnedOnly only print pinned entries
   * @param pinned whether this entry is pinned
   */
  private void printLsString(URIStatus status, boolean hSize,
      Function<URIStatus, Long> timestampFunction, boolean pinnedOnly, boolean pinned) {
    if (pinnedOnly && !pinned) {
      return;
    }
    // detect the extended acls
    boolean hasExtended = status.getAcl().hasExtended()
        || !status.getDefaultAcl().isEmpty();
    long timestamp = timestampFunction.apply(status);
    System.out.print(formatLsString(hSize,
        SecurityUtils.isSecurityEnabled(mFsContext.getClusterConf()),
        status.isFolder(),
        FormatUtils.formatMode((short) status.getMode(), status.isFolder(), hasExtended),
        status.getOwner(), status.getGroup(), status.getLength(),
        timestamp, status.getInAlluxioPercentage(),
        status.getPersistenceState(), status.getPath(),
        mFsContext.getPathConf(new AlluxioURI(status.getPath())).get(
            PropertyKey.USER_DATE_FORMAT_PATTERN)));
  }

  /**
   * Constructs a new instance to display information for all directories and files directly under
   * the path specified in args.
   *
   * @param fsContext the filesystem of Alluxio
   */
  public LsCommand(FileSystemContext fsContext) {
    super(fsContext);
  }

  @Override
  public String getCommandName() {
    return "ls";
  }

  @Override
  public Options getOptions() {
    return new Options()
        .addOption(FORCE_OPTION)
        .addOption(LIST_DIR_AS_FILE_OPTION)
        .addOption(LIST_HUMAN_READABLE_OPTION)
        .addOption(LIST_PINNED_FILES_OPTION)
        .addOption(RECURSIVE_OPTION)
        .addOption(REVERSE_SORT_OPTION)
        .addOption(SORT_OPTION)
        .addOption(TIMESTAMP_OPTION);
  }

  /**
   * Displays information for all directories and files directly under the path specified in args.
   *
   * @param path The {@link AlluxioURI} path as the input of the command
   * @param recursive Whether list the path recursively
   * @param forceLoadMetadata whether to force-load metadata for immediate children
   * @param dirAsFile list the directory status as a plain file
   * @param hSize print human-readable format sizes
   * @param pinnedOnly whether to restrict output to pinned entries
   * @param sortField sort the result by this field
   * @param reverse whether to reverse the sort order
   * @param timestampOption which timestamp field to display
   */
  private void ls(AlluxioURI path, boolean recursive, boolean forceLoadMetadata,
      boolean dirAsFile, boolean hSize, boolean pinnedOnly, String sortField,
      boolean reverse, String timestampOption)
      throws AlluxioException, IOException {
    Function<URIStatus, Long> timestampFunction = TIMESTAMP_FIELDS.get(timestampOption);
    if (dirAsFile) {
      // -d: print the directory itself, not its children
      URIStatus pathStatus = mFileSystem.getStatus(path);
      printLsString(pathStatus, hSize, timestampFunction, pinnedOnly,
          pathStatus.isPinned());
      return;
    }

    ListStatusPOptions.Builder optionsBuilder = ListStatusPOptions.newBuilder();
    if (forceLoadMetadata) {
      optionsBuilder.setLoadMetadataType(LoadMetadataPType.ALWAYS);
    }
    optionsBuilder.setRecursive(recursive);

    if (sortField == null) {
      // no sorting requested: stream statuses and print as they arrive
      mFileSystem.iterateStatus(path, optionsBuilder.build(),
          status -> printLsString(status, hSize, timestampFunction, pinnedOnly,
              status.isPinned()));
      return;
    }
    // sorting requires materializing the full listing first
    List<URIStatus> statusList = mFileSystem.listStatus(path, optionsBuilder.build());
    List<URIStatus> sorted = sortByFieldAndOrder(statusList, sortField, reverse);
    for (URIStatus status : sorted) {
      printLsString(status, hSize, timestampFunction, pinnedOnly, status.isPinned());
    }
  }

  /**
   * Returns the statuses sorted by the named field, optionally reversed.
   *
   * @param statuses statuses to sort
   * @param sortField one of the keys in {@code SORT_FIELD_COMPARATORS}
   * @param reverse whether to reverse the comparator
   * @throws IOException if {@code sortField} is not a recognized field
   *         (thrown as {@link InvalidArgumentException})
   */
  private List<URIStatus> sortByFieldAndOrder(
      List<URIStatus> statuses, String sortField, boolean reverse) throws IOException {
    Optional<Comparator<URIStatus>> sortToUse = Optional.ofNullable(
        SORT_FIELD_COMPARATORS.get(sortField));

    if (!sortToUse.isPresent()) {
      throw new InvalidArgumentException(ExceptionMessage.INVALID_ARGS_SORT_FIELD
          .getMessage(sortField));
    }

    Comparator<URIStatus> sortBy = sortToUse.get();
    if (reverse) {
      sortBy = sortBy.reversed();
    }

    return statuses.stream().sorted(sortBy).collect(Collectors.toList());
  }

  @Override
  protected void runPlainPath(AlluxioURI path, CommandLine cl)
      throws AlluxioException, IOException {
    ls(path, cl.hasOption("R"), cl.hasOption("f"), cl.hasOption("d"),
        cl.hasOption("h"), cl.hasOption("p"), cl.getOptionValue("sort", null),
        cl.hasOption("r"), cl.getOptionValue("timestamp", "lastModificationTime"));
  }

  @Override
  public int run(CommandLine cl) throws AlluxioException, IOException {
    String[] args = cl.getArgs();
    // each positional argument may contain wildcards; expand and list each
    for (String dirArg : args) {
      AlluxioURI path = new AlluxioURI(dirArg);
      runWildCardCmd(path, cl);
    }
    return 0;
  }

  @Override
  public String getUsage() {
    return "ls [-d|-f|-p|-R|-h|--sort=option|--timestamp=option|-r] <path> ...";
  }

  @Override
  public String getDescription() {
    return "Displays information for all files and directories directly under the specified paths, "
        + "including permission, owner, group, size (bytes for files or the number of children "
        + "for directories, persistence state, last modified time, the percentage of content"
        + " already in Alluxio and the path in order.";
  }

  @Override
  public void validateArgs(CommandLine cl) throws InvalidArgumentException {
    CommandUtils.checkNumOfArgsNoLessThan(this, cl, 1);
    // --sort is validated later in sortByFieldAndOrder; --timestamp here
    String timestampOption = cl.getOptionValue("timestamp");
    if (timestampOption != null && !TIMESTAMP_FIELDS.containsKey(timestampOption)) {
      throw new InvalidArgumentException(
          String.format("Unrecognized timestamp option %s", timestampOption));
    }
  }
}
/* * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. * However, the following notice accompanied the original version of this * file, and Oracle licenses the original version of this file under the BSD * license: */ /* Copyright 2009-2013 Attila Szegedi Licensed under both the Apache License, Version 2.0 (the "Apache License") and the BSD License (the "BSD License"), with licensee being free to choose either of the two at their discretion. You may not use this file except in compliance with either the Apache License or the BSD License. 
If you choose to use this file in compliance with the Apache License, the following notice applies to you: You may obtain a copy of the Apache License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. If you choose to use this file in compliance with the BSD License, the following notice applies to you: Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package jdk.internal.dynalink.support; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; /** * A wrapper around MethodHandles.Lookup that masks checked exceptions in those cases when you're looking up methods * within your own codebase (therefore it is an error if they are not present). * * @author Attila Szegedi */ public class Lookup { private final MethodHandles.Lookup lookup; /** * Creates a new instance, bound to an instance of {@link java.lang.invoke.MethodHandles.Lookup}. * * @param lookup the {@link java.lang.invoke.MethodHandles.Lookup} it delegates to. */ public Lookup(MethodHandles.Lookup lookup) { this.lookup = lookup; } /** * A canonical Lookup object that wraps {@link MethodHandles#publicLookup()}. */ public static final Lookup PUBLIC = new Lookup(MethodHandles.publicLookup()); /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#unreflect(Method)}, converting any encountered * {@link IllegalAccessException} into an {@link IllegalAccessError}. * * @param m the method to unreflect * @return the unreflected method handle. */ public MethodHandle unreflect(Method m) { return unreflect(lookup, m); } /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#unreflect(Method)}, converting any encountered * {@link IllegalAccessException} into an {@link IllegalAccessError}. 
* * @param lookup the lookup used to unreflect * @param m the method to unreflect * @return the unreflected method handle. */ public static MethodHandle unreflect(MethodHandles.Lookup lookup, Method m) { try { return lookup.unreflect(m); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to unreflect method " + m); ee.initCause(e); throw ee; } } /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#unreflectGetter(Field)}, converting any encountered * {@link IllegalAccessException} into an {@link IllegalAccessError}. * * @param f the field for which a getter is unreflected * @return the unreflected field getter handle. */ public MethodHandle unreflectGetter(Field f) { try { return lookup.unreflectGetter(f); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to unreflect getter for field " + f); ee.initCause(e); throw ee; } } /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#findGetter(Class, String, Class)}, converting any * encountered {@link IllegalAccessException} into an {@link IllegalAccessError} and {@link NoSuchFieldException} * into a {@link NoSuchFieldError}. * * @param refc the class declaring the field * @param name the name of the field * @param type the type of the field * @return the unreflected field getter handle. * @throws IllegalAccessError if the field is inaccessible. * @throws NoSuchFieldError if the field does not exist. */ public MethodHandle findGetter(Class<?>refc, String name, Class<?> type) { try { return lookup.findGetter(refc, name, type); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to access getter for field " + refc.getName() + "." + name + " of type " + type.getName()); ee.initCause(e); throw ee; } catch(NoSuchFieldException e) { final NoSuchFieldError ee = new NoSuchFieldError("Failed to find getter for field " + refc.getName() + "." 
+ name + " of type " + type.getName()); ee.initCause(e); throw ee; } } /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#unreflectSetter(Field)}, converting any encountered * {@link IllegalAccessException} into an {@link IllegalAccessError}. * * @param f the field for which a setter is unreflected * @return the unreflected field setter handle. */ public MethodHandle unreflectSetter(Field f) { try { return lookup.unreflectSetter(f); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to unreflect setter for field " + f); ee.initCause(e); throw ee; } } /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#unreflectConstructor(Constructor)}, converting any * encountered {@link IllegalAccessException} into an {@link IllegalAccessError}. * * @param c the constructor to unreflect * @return the unreflected constructor handle. */ public MethodHandle unreflectConstructor(Constructor<?> c) { return unreflectConstructor(lookup, c); } /** * Performs a {@link java.lang.invoke.MethodHandles.Lookup#unreflectConstructor(Constructor)}, converting any * encountered {@link IllegalAccessException} into an {@link IllegalAccessError}. * * @param lookup the lookup used to unreflect * @param c the constructor to unreflect * @return the unreflected constructor handle. */ public static MethodHandle unreflectConstructor(MethodHandles.Lookup lookup, Constructor<?> c) { try { return lookup.unreflectConstructor(c); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to unreflect constructor " + c); ee.initCause(e); throw ee; } } /** * Performs a findSpecial on the underlying lookup. Converts any encountered {@link IllegalAccessException} into an * {@link IllegalAccessError} and a {@link NoSuchMethodException} into a {@link NoSuchMethodError}. 
* * @param declaringClass class declaring the method * @param name the name of the method * @param type the type of the method * @return a method handle for the method * @throws IllegalAccessError if the method is inaccessible. * @throws NoSuchMethodError if the method does not exist. */ public MethodHandle findSpecial(Class<?> declaringClass, String name, MethodType type) { try { return lookup.findSpecial(declaringClass, name, type, declaringClass); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to access special method " + methodDescription( declaringClass, name, type)); ee.initCause(e); throw ee; } catch(NoSuchMethodException e) { final NoSuchMethodError ee = new NoSuchMethodError("Failed to find special method " + methodDescription( declaringClass, name, type)); ee.initCause(e); throw ee; } } private static String methodDescription(Class<?> declaringClass, String name, MethodType type) { return declaringClass.getName() + "#" + name + type; } /** * Performs a findStatic on the underlying lookup. Converts any encountered {@link IllegalAccessException} into an * {@link IllegalAccessError} and a {@link NoSuchMethodException} into a {@link NoSuchMethodError}. * * @param declaringClass class declaring the method * @param name the name of the method * @param type the type of the method * @return a method handle for the method * @throws IllegalAccessError if the method is inaccessible. * @throws NoSuchMethodError if the method does not exist. 
*/ public MethodHandle findStatic(Class<?> declaringClass, String name, MethodType type) { try { return lookup.findStatic(declaringClass, name, type); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to access static method " + methodDescription( declaringClass, name, type)); ee.initCause(e); throw ee; } catch(NoSuchMethodException e) { final NoSuchMethodError ee = new NoSuchMethodError("Failed to find static method " + methodDescription( declaringClass, name, type)); ee.initCause(e); throw ee; } } /** * Performs a findVirtual on the underlying lookup. Converts any encountered {@link IllegalAccessException} into an * {@link IllegalAccessError} and a {@link NoSuchMethodException} into a {@link NoSuchMethodError}. * * @param declaringClass class declaring the method * @param name the name of the method * @param type the type of the method * @return a method handle for the method * @throws IllegalAccessError if the method is inaccessible. * @throws NoSuchMethodError if the method does not exist. */ public MethodHandle findVirtual(Class<?> declaringClass, String name, MethodType type) { try { return lookup.findVirtual(declaringClass, name, type); } catch(IllegalAccessException e) { final IllegalAccessError ee = new IllegalAccessError("Failed to access virtual method " + methodDescription( declaringClass, name, type)); ee.initCause(e); throw ee; } catch(NoSuchMethodException e) { final NoSuchMethodError ee = new NoSuchMethodError("Failed to find virtual method " + methodDescription( declaringClass, name, type)); ee.initCause(e); throw ee; } } /** * Given a lookup, finds using {@link #findSpecial(Class, String, MethodType)} a method on that lookup's class. * Useful in classes' code for convenient linking to their own privates. 
* @param lookup the lookup for the class * @param name the name of the method * @param rtype the return type of the method * @param ptypes the parameter types of the method * @return the method handle for the method */ public static MethodHandle findOwnSpecial(MethodHandles.Lookup lookup, String name, Class<?> rtype, Class<?>... ptypes) { return new Lookup(lookup).findOwnSpecial(name, rtype, ptypes); } /** * Finds using {@link #findSpecial(Class, String, MethodType)} a method on that lookup's class. Useful in classes' * code for convenient linking to their own privates. It's easier to use than {@code findSpecial} in that you can * just list the parameter types, and don't have to specify lookup class. * @param name the name of the method * @param rtype the return type of the method * @param ptypes the parameter types of the method * @return the method handle for the method */ public MethodHandle findOwnSpecial(String name, Class<?> rtype, Class<?>... ptypes) { return findSpecial(lookup.lookupClass(), name, MethodType.methodType(rtype, ptypes)); } /** * Given a lookup, finds using {@link #findStatic(Class, String, MethodType)} a method on that lookup's class. * Useful in classes' code for convenient linking to their own privates. It's easier to use than {@code findStatic} * in that you can just list the parameter types, and don't have to specify lookup class. * @param lookup the lookup for the class * @param name the name of the method * @param rtype the return type of the method * @param ptypes the parameter types of the method * @return the method handle for the method */ public static MethodHandle findOwnStatic(MethodHandles.Lookup lookup, String name, Class<?> rtype, Class<?>... ptypes) { return new Lookup(lookup).findOwnStatic(name, rtype, ptypes); } /** * Finds using {@link #findStatic(Class, String, MethodType)} a method on that lookup's class. Useful in classes' * code for convenient linking to their own privates. 
It's easier to use than {@code findStatic} in that you can * just list the parameter types, and don't have to specify lookup class. * @param name the name of the method * @param rtype the return type of the method * @param ptypes the parameter types of the method * @return the method handle for the method */ public MethodHandle findOwnStatic(String name, Class<?> rtype, Class<?>... ptypes) { return findStatic(lookup.lookupClass(), name, MethodType.methodType(rtype, ptypes)); } }
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.core; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.communication.http.FireAlarmHTTPCommunicator; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.communication.mqtt.FireAlarmMQTTCommunicator; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.communication.xmpp.FireAlarmXMPPCommunicator; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.enrollment.EnrollmentManager; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.exception.AgentCoreOperationException; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.sidhdhi.SidhdhiQuery; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.transport.TransportHandler; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.transport.TransportHandlerException; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.transport.TransportUtils; import org.wso2.carbon.device.mgt.iot.virtualfirealarm.agent.advanced.virtual.VirtualHardwareManager; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import 
java.util.Map; public class AgentManager { private static final Log log = LogFactory.getLog(AgentManager.class); private static final Object lock = new Object(); private static AgentManager agentManager; private static Boolean policyUpdated = false; private String rootPath = ""; private boolean deviceReady = false; private boolean isAlarmOn = false; private String initialPolicy; private String deviceName, agentStatus; private int pushInterval; // seconds private String prevProtocol, protocol; private String networkInterface; private List<String> interfaceList, protocolList; private Map<String, TransportHandler> agentCommunicator; private AgentConfiguration agentConfigs; private String deviceIP; private String enrollmentEP; private String ipRegistrationEP; private String pushDataAPIEP; private AgentManager() { } public static synchronized AgentManager getInstance() { if (agentManager == null) { agentManager = new AgentManager(); } return agentManager; } public static void setUpdated(Boolean isUpdated) { synchronized (lock) { policyUpdated = isUpdated; } } public static Boolean isUpdated() { synchronized (lock) { Boolean temp = policyUpdated; policyUpdated = false; return temp; } } public void init() { agentCommunicator = new HashMap<>(); // Read IoT-Server specific configurations from the 'deviceConfig.properties' file this.agentConfigs = AgentUtilOperations.readIoTServerConfigs(); // Initialise IoT-Server URL endpoints from the configuration read from file AgentUtilOperations.initializeServerEndPoints(); String analyticsPageContext = String.format(AgentConstants.DEVICE_ANALYTICS_PAGE_URL, agentConfigs.getDeviceId(), agentConfigs.getDeviceName()); String controlPageContext = String.format(AgentConstants.DEVICE_DETAILS_PAGE_EP, AgentConstants.DEVICE_TYPE, agentConfigs.getDeviceId()); this.agentStatus = AgentConstants.NOT_REGISTERED; this.deviceName = this.agentConfigs.getDeviceName(); this.pushInterval = this.agentConfigs.getDataPushInterval(); this.networkInterface 
= AgentConstants.DEFAULT_NETWORK_INTERFACE; this.protocol = AgentConstants.DEFAULT_PROTOCOL; this.prevProtocol = protocol; Map<String, String> xmppIPPortMap; try { xmppIPPortMap = TransportUtils.getHostAndPort(agentConfigs.getXmppServerEndpoint()); String xmppServer = xmppIPPortMap.get("Host"); int xmppPort = Integer.parseInt(xmppIPPortMap.get("Port")); TransportHandler xmppCommunicator = new FireAlarmXMPPCommunicator(xmppServer, xmppPort); agentCommunicator.put(AgentConstants.XMPP_PROTOCOL, xmppCommunicator); } catch (TransportHandlerException e) { log.error("XMPP Endpoint String - " + agentConfigs.getXmppServerEndpoint() + ", provided in the configuration file is invalid."); } String mqttTopic = String.format(AgentConstants.MQTT_SUBSCRIBE_TOPIC, agentConfigs.getTenantDomain(), agentConfigs.getDeviceId()); // TransportHandler httpCommunicator = new FireAlarmHTTPCommunicator(); TransportHandler mqttCommunicator = new FireAlarmMQTTCommunicator(agentConfigs.getDeviceOwner(), agentConfigs.getDeviceId(), agentConfigs.getMqttBrokerEndpoint(), mqttTopic); // agentCommunicator.put(AgentConstants.HTTP_PROTOCOL, httpCommunicator); agentCommunicator.put(AgentConstants.MQTT_PROTOCOL, mqttCommunicator); try { interfaceList = new ArrayList<>(TransportUtils.getInterfaceIPMap().keySet()); protocolList = new ArrayList<>(agentCommunicator.keySet()); } catch (TransportHandlerException e) { log.error("An error occurred whilst retrieving all NetworkInterface-IP mappings"); } String siddhiQueryFilePath = rootPath + AgentConstants.CEP_FILE_NAME; (new Thread(new SidhdhiQuery())).start(); initialPolicy = SidhdhiQuery.readFile(siddhiQueryFilePath, StandardCharsets.UTF_8); //Initializing hardware at that point //AgentManger.setDeviceReady() method should invoked from hardware after initialization VirtualHardwareManager.getInstance().init(); //Wait till hardware get ready while (!deviceReady) { try { Thread.sleep(500); } catch (InterruptedException e) { log.info(AgentConstants.LOG_APPENDER + 
"Sleep error in 'device ready-flag' checking thread"); } } // try { // EnrollmentManager.getInstance().beginEnrollmentFlow(); // } catch (AgentCoreOperationException e) { // log.error("Device Enrollment Failed:\n"); // e.printStackTrace(); // System.exit(0); // } //Start agent communication agentCommunicator.get(protocol).connect(); } private void switchCommunicator(String stopProtocol, String startProtocol) { agentCommunicator.get(stopProtocol).disconnect(); while (agentCommunicator.get(stopProtocol).isConnected()) { try { Thread.sleep(250); } catch (InterruptedException e) { log.info(AgentConstants.LOG_APPENDER + "Sleep error in 'Switch-Communicator' Thread's shutdown wait."); } } agentCommunicator.get(startProtocol).connect(); } public void setInterface(int interfaceId) { if (interfaceId != -1) { String newInterface = interfaceList.get(interfaceId); if (!newInterface.equals(networkInterface)) { networkInterface = newInterface; if (protocol.equals(AgentConstants.HTTP_PROTOCOL) && !protocol.equals( prevProtocol)) { switchCommunicator(prevProtocol, protocol); } } } } public void setProtocol(int protocolId) { if (protocolId != -1) { String newProtocol = protocolList.get(protocolId); if (!protocol.equals(newProtocol)) { prevProtocol = protocol; protocol = newProtocol; switchCommunicator(prevProtocol, protocol); } } } public void changeAlarmStatus(boolean isOn) { VirtualHardwareManager.getInstance().changeAlarmStatus(isOn); isAlarmOn = isOn; } public void updateAgentStatus(String status) { this.agentStatus = status; } public void addToPolicyLog(String policy) { VirtualHardwareManager.getInstance().addToPolicyLog(policy); } public String getRootPath() { return rootPath; } /*------------------------------------------------------------------------------------------*/ /* Getter and Setter Methods for the private variables */ /*------------------------------------------------------------------------------------------*/ public void setRootPath(String rootPath) { 
this.rootPath = rootPath; } public void setDeviceReady(boolean deviceReady) { this.deviceReady = deviceReady; } public String getInitialPolicy() { return initialPolicy; } public AgentConfiguration getAgentConfigs() { return agentConfigs; } public String getDeviceIP() { return deviceIP; } public void setDeviceIP(String deviceIP) { this.deviceIP = deviceIP; } public String getEnrollmentEP() { return enrollmentEP; } public void setEnrollmentEP(String enrollmentEP) { this.enrollmentEP = enrollmentEP; } public String getIpRegistrationEP() { return ipRegistrationEP; } public void setIpRegistrationEP(String ipRegistrationEP) { this.ipRegistrationEP = ipRegistrationEP; } public String getPushDataAPIEP() { return pushDataAPIEP; } public void setPushDataAPIEP(String pushDataAPIEP) { this.pushDataAPIEP = pushDataAPIEP; } public String getDeviceName() { return deviceName; } public String getNetworkInterface() { return networkInterface; } public String getAgentStatus() { return agentStatus; } public int getPushInterval() { return pushInterval; } public void setPushInterval(int pushInterval) { this.pushInterval = pushInterval; TransportHandler transportHandler = agentCommunicator.get(protocol); switch (protocol) { case AgentConstants.HTTP_PROTOCOL: ((FireAlarmHTTPCommunicator) transportHandler).getDataPushServiceHandler() .cancel(true); break; case AgentConstants.MQTT_PROTOCOL: ((FireAlarmMQTTCommunicator) transportHandler).getDataPushServiceHandler() .cancel(true); break; case AgentConstants.XMPP_PROTOCOL: ((FireAlarmXMPPCommunicator) transportHandler).getDataPushServiceHandler() .cancel(true); break; default: log.warn("Unknown protocol " + protocol); } transportHandler.publishDeviceData(); if (log.isDebugEnabled()) { log.debug("The Data Publish Interval was changed to: " + pushInterval); } } public List<String> getInterfaceList() { return interfaceList; } public List<String> getProtocolList() { return protocolList; } /** * Get temperature reading from device * * @return 
Temperature */ public int getTemperature() { return VirtualHardwareManager.getInstance().getTemperature(); } /** * Get humidity reading from device * * @return Humidity */ public int getHumidity() { return VirtualHardwareManager.getInstance().getHumidity(); } public boolean isAlarmOn() { return isAlarmOn; } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cloudfront.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * A complex type that specifies whether you want CloudFront to forward cookies to the origin and, if so, which ones.
 * For more information about forwarding cookies to the origin, see <a
 * href="https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/header-caching.html"> Caching Content Based
 * on Request Headers</a> in the <i>Amazon CloudFront Developer Guide</i>.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2019-03-26/CookieNames" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CookieNames implements Serializable, Cloneable {

    /**
     * <p>
     * The number of different cookies that you want CloudFront to forward to the origin for this cache behavior.
     * </p>
     */
    private Integer quantity;

    /**
     * <p>
     * A complex type that contains one <code>Name</code> element for each cookie that you want CloudFront to forward to
     * the origin for this cache behavior. Lazily created on first read.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<String> items;

    /**
     * Sets the number of different cookies that CloudFront should forward to the origin for this cache behavior.
     *
     * @param quantity
     *        The number of different cookies that you want CloudFront to forward to the origin for this cache behavior.
     */
    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    /**
     * Returns the number of different cookies that CloudFront should forward to the origin for this cache behavior.
     *
     * @return The number of different cookies that you want CloudFront to forward to the origin for this cache
     *         behavior.
     */
    public Integer getQuantity() {
        return this.quantity;
    }

    /**
     * Fluent variant of {@link #setQuantity(Integer)}.
     *
     * @param quantity
     *        The number of different cookies that you want CloudFront to forward to the origin for this cache behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CookieNames withQuantity(Integer quantity) {
        setQuantity(quantity);
        return this;
    }

    /**
     * Returns the cookie name list, creating an empty list on first access so callers never see {@code null}.
     *
     * @return A complex type that contains one <code>Name</code> element for each cookie that you want CloudFront to
     *         forward to the origin for this cache behavior.
     */
    public java.util.List<String> getItems() {
        if (items == null) {
            items = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return items;
    }

    /**
     * Replaces the cookie name list with a copy of the given collection; {@code null} clears the list.
     *
     * @param items
     *        A complex type that contains one <code>Name</code> element for each cookie that you want CloudFront to
     *        forward to the origin for this cache behavior.
     */
    public void setItems(java.util.Collection<String> items) {
        if (items == null) {
            this.items = null;
        } else {
            this.items = new com.amazonaws.internal.SdkInternalList<String>(items);
        }
    }

    /**
     * Appends cookie names to the existing list (if any). Use {@link #setItems(java.util.Collection)} or
     * {@link #withItems(java.util.Collection)} if you want to override the existing values instead.
     *
     * @param items
     *        A complex type that contains one <code>Name</code> element for each cookie that you want CloudFront to
     *        forward to the origin for this cache behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CookieNames withItems(String... items) {
        if (this.items == null) {
            setItems(new com.amazonaws.internal.SdkInternalList<String>(items.length));
        }
        for (String ele : items) {
            this.items.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setItems(java.util.Collection)}.
     *
     * @param items
     *        A complex type that contains one <code>Name</code> element for each cookie that you want CloudFront to
     *        forward to the origin for this cache behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CookieNames withItems(java.util.Collection<String> items) {
        setItems(items);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getQuantity() != null) {
            sb.append("Quantity: ").append(getQuantity()).append(",");
        }
        if (getItems() != null) {
            sb.append("Items: ").append(getItems());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof CookieNames)) {
            // Covers the null case as well: null is never an instance of anything.
            return false;
        }

        CookieNames other = (CookieNames) obj;
        // Fields are equal when both are null, or both non-null and equal.
        if (getQuantity() == null ? other.getQuantity() != null : !getQuantity().equals(other.getQuantity())) {
            return false;
        }
        if (getItems() == null ? other.getItems() != null : !getItems().equals(other.getItems())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        // Same accumulation order as equals: quantity first, then items.
        int result = 1;
        result = 31 * result + (getQuantity() == null ? 0 : getQuantity().hashCode());
        result = 31 * result + (getItems() == null ? 0 : getItems().hashCode());
        return result;
    }

    @Override
    public CookieNames clone() {
        try {
            return (CookieNames) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.apex.malhar.flume.storage;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.commons.io.FileUtils;
import org.apache.flume.Context;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.datatorrent.netlet.util.Slice;

/**
 * Unit tests for {@code HDFSStorage}: store/flush/retrieve ordering, file roll-over,
 * clean-up of consumed data, and recovery after a simulated crash (a fresh storage
 * instance created with restore enabled).
 *
 * <p>Addresses handed out by {@code store} and accepted by {@code retrieve}/{@code clean}
 * are 8-byte values; retrieved blocks carry that 8-byte address as a prefix before the
 * stored payload (see {@link #match(byte[], String)}).
 */
public class HDFSStorageTest
{
  /**
   * Per-test fixture: creates a working directory under {@code target/<testclass>} with a
   * small seed input file ({@code testInput.txt}), and deletes the directory when the
   * test finishes.
   */
  public static class TestMeta extends TestWatcher
  {
    public String baseDir;
    public String testFile;
    // Arbitrary multi-line seed text written to testFile; tests read it back line by line.
    private String testData = "No and yes. There is also IdleTimeHandler that allows the operator to emit tuples. " +
        "There is overlap, why not have a single interface. \n" +
        "Also consider the possibility of an operator that does other processing and not consume nor emit tuples,";

    @Override
    protected void starting(org.junit.runner.Description description)
    {
      String className = description.getClassName();
      baseDir = "target/" + className;
      try {
        baseDir = (new File(baseDir)).getAbsolutePath();
        FileUtils.forceMkdir(new File(baseDir));
        testFile = baseDir + "/testInput.txt";
        FileOutputStream outputStream = FileUtils.openOutputStream(new File(testFile));
        outputStream.write(testData.getBytes());
        outputStream.close();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
    }

    @Override
    protected void finished(Description description)
    {
      try {
        FileUtils.deleteDirectory(new File(baseDir));
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
    }
  }

  @Rule
  public TestMeta testMeta = new TestMeta();

  // Captured base directory of the most recently created storage; used by tests that
  // inspect the on-disk layout directly.
  private String STORAGE_DIRECTORY;

  /**
   * Builds an {@code HDFSStorage} rooted at the test base directory with a small (256)
   * block size so that roll-over is easy to trigger.
   *
   * @param id      storage identifier (also the sub-directory name).
   * @param restore whether the instance should recover state left by a previous instance
   *                (used to simulate a crash/restart).
   */
  private HDFSStorage getStorage(String id, boolean restore)
  {
    Context ctx = new Context();
    STORAGE_DIRECTORY = testMeta.baseDir;
    ctx.put(HDFSStorage.BASE_DIR_KEY, testMeta.baseDir);
    ctx.put(HDFSStorage.RESTORE_KEY, Boolean.toString(restore));
    ctx.put(HDFSStorage.ID, id);
    ctx.put(HDFSStorage.BLOCKSIZE, "256");
    HDFSStorage lstorage = new HDFSStorage();
    lstorage.configure(ctx);
    lstorage.setup(null);
    return lstorage;
  }

  private HDFSStorage storage;

  @Before
  public void setup()
  {
    storage = getStorage("1", false);
  }

  @After
  public void teardown()
  {
    storage.teardown();
    try {
      // Brief pause between teardown and helper-file cleanup — presumably lets
      // asynchronous file activity settle before deletion; TODO confirm.
      Thread.sleep(100);
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    storage.cleanHelperFiles();
  }

  /**
   * This test covers following use case 1. Some data is stored 2. File is flush but the file is not close 3. Some more
   * data is stored but the file doesn't roll-overs 4. Retrieve is called for the last returned address and it return
   * nulls 5. Some more data is stored again but the address is returned null because of previous retrieve call
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlush() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = "ab".getBytes();
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    b = "cb".getBytes();
    byte[] addr = storage.store(new Slice(b, 0, b.length));
    match(storage.retrieve(new byte[8]), "ab");
    Assert.assertNull(storage.retrieve(addr));
    Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    storage.flush();
    match(storage.retrieve(address), "cb");
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
  }

  /**
   * This test covers following use case 1. Some data is stored to make sure that there is no roll over 2. File is
   * flushed but the file is not closed 3. Some more data is stored. The data stored is enough to make the file roll
   * over 4. Retrieve is called for the last returned address and it return nulls as the data is not flushed 5. Some
   * more data is stored again but the address is returned null because of previous retrieve call 6. The data is flushed
   * to make sure that the data is committed. 7. Now the data is retrieved from the starting and data returned matches
   * the data stored
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushRollOver() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    // 141-byte record; large enough relative to the 256-byte block size that a handful
    // of stores forces a file roll-over. b is mutated in place; b_org keeps the original.
    byte[] b = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] b_org = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    byte[] addr = null;
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      addr = storage.store(new Slice(b, 0, b.length));
    }
    Assert.assertNull(storage.retrieve(addr));
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    }
    storage.flush();
    match(storage.retrieve(new byte[8]), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieve(address), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
  }

  /**
   * This test covers following use case 1. Some data is stored to make sure that there is no roll over 2. File is
   * flushed but the file is not closed 3. Some more data is stored. The data stored is enough to make the file roll
   * over 4. The storage crashes and new storage is instantiated. 5. Retrieve is called for the last returned address
   * and it return nulls as the data is not flushed 6. Some more data is stored again but the address is returned null
   * because of previous retrieve call 7. The data is flushed to make sure that the data is committed. 8. Now the data
   * is retrieved from the starting and data returned matches the data stored
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushRollOverWithFailure() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] b_org = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    byte[] addr = null;
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      addr = storage.store(new Slice(b, 0, b.length));
    }
    // Simulate a crash: replace the storage with a restored instance.
    storage = getStorage("1", true);
    Assert.assertNull(storage.retrieve(addr));
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    }
    storage.flush();
    match(storage.retrieve(new byte[8]), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieve(address), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
  }

  /**
   * This tests clean when the file doesn't roll over
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushWithClean() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = "ab".getBytes();
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    storage.clean(address);
    b = "cb".getBytes();
    byte[] addr = storage.store(new Slice(b, 0, b.length));
    Assert.assertNull(storage.retrieve(addr));
    Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    storage.flush();
    match(storage.retrieve(new byte[8]), "cb");
    match(storage.retrieve(address), "cb");
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
  }

  /**
   * This tests clean when the file doesn't roll over
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushWithCleanAndFailure() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = "ab".getBytes();
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    storage.clean(address);
    b = "cb".getBytes();
    byte[] addr = storage.store(new Slice(b, 0, b.length));
    // Simulate a crash: replace the storage with a restored instance.
    storage = getStorage("1", true);
    Assert.assertNull(storage.retrieve(addr));
    Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    storage.flush();
    match(storage.retrieve(new byte[8]), "cb");
    match(storage.retrieve(address), "cb");
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
  }

  /**
   * This test covers following use case 1. Some data is stored to make sure that there is no roll over 2. File is
   * flushed but the file is not closed 3. The data is cleaned till the last returned address 4. Some more data is
   * stored. The data stored is enough to make the file roll over 5. Retrieve is called for the last returned address
   * and it return nulls as the data is not flushed 6. Some more data is stored again but the address is returned null
   * because of previous retrieve call 7. The data is flushed to make sure that the data is committed. 8. Now the data
   * is retrieved from the starting and data returned matches the data stored
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushWithCleanAndRollOver() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] b_org = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    storage.clean(address);
    byte[] addr = null;
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      addr = storage.store(new Slice(b, 0, b.length));
    }
    Assert.assertNull(storage.retrieve(addr));
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    }
    storage.flush();
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieve(new byte[8]), new String(b_org));
    match(storage.retrieve(address), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
  }

  /**
   * This tests the clean when the files are roll-over and the storage fails
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushWithCleanAndRollOverAndFailure() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] b_org = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98, 53,
      52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45, 49,
      49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49, 49,
      54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50, 49,
      53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46, 48,
      1, 48, 46, 48, 1, 48, 46, 48};
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    storage.clean(address);
    byte[] addr = null;
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      addr = storage.store(new Slice(b, 0, b.length));
    }
    // Simulate a crash: replace the storage with a restored instance.
    storage = getStorage("1", true);
    Assert.assertNull(storage.retrieve(addr));
    for (int i = 0; i < 5; i++) {
      b[0] = (byte)(b[0] + 1);
      Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    }
    storage.flush();
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieve(address), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
    b_org[0] = (byte)(b_org[0] + 1);
    match(storage.retrieveNext(), new String(b_org));
  }

  /**
   * This test covers following use case The file is flushed and then more data is written to the same file, but the new
   * data is not flushed and file is not roll over and storage fails The new storage comes up and client asks for data
   * at the last returned address from earlier storage instance. The new storage returns null. Client stores the data
   * again but the address returned this time is null and the retrieval of the earlier address now returns data
   *
   * @throws Exception
   */
  @Test
  public void testPartialFlushWithFailure() throws Exception
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = "ab".getBytes();
    byte[] address = storage.store(new Slice(b, 0, b.length));
    Assert.assertNotNull(address);
    storage.flush();
    b = "cb".getBytes();
    byte[] addr = storage.store(new Slice(b, 0, b.length));
    // Simulate a crash: replace the storage with a restored instance.
    storage = getStorage("1", true);
    Assert.assertNull(storage.retrieve(addr));
    Assert.assertNull(storage.store(new Slice(b, 0, b.length)));
    storage.flush();
    match(storage.retrieve(address), "cb");
  }

  /**
   * Asserts that the payload of a retrieved block equals {@code match}. The first 8 bytes
   * of {@code data} are the address prefix and are stripped before comparison.
   */
  private void match(byte[] data, String match)
  {
    byte[] tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    Assert.assertEquals("matched the stored value with retrieved value", match, new String(tempData));
  }

  @Test
  public void testStorage() throws IOException
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = new byte[200];
    byte[] identifier;
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
    Assert.assertNull(storage.retrieve(new byte[8]));
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
    storage.flush();
    byte[] data = storage.retrieve(new byte[8]);
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
    identifier = storage.store(new Slice(b, 0, b.length));
    byte[] tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    Assert.assertEquals("matched the stored value with retrieved value", new String(b), new String(tempData));
    Assert.assertNull(storage.retrieve(identifier));
  }

  @Test
  public void testStorageWithRestore() throws IOException
  {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = new byte[200];
    Assert.assertNotNull(storage.store(new Slice(b, 0, b.length)));
    storage.flush();
    storage.teardown();

    storage = getStorage("1", true);
    storage.store(new Slice(b, 0, b.length));
    storage.flush();
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    // After restore, a second data file ("1") should have been created under id "1".
    boolean exists = fs.exists(new Path(STORAGE_DIRECTORY + "/1/" + "1"));
    Assert.assertEquals("file should exist", true, exists);
  }

  @Test
  public void testCleanup() throws IOException
  {
    RandomAccessFile r = new RandomAccessFile(testMeta.testFile, "r");
    r.seek(0);
    byte[] b = r.readLine().getBytes();
    storage.store(new Slice(b, 0, b.length));
    byte[] val = storage.store(new Slice(b, 0, b.length));
    storage.flush();
    storage.clean(val);
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    // The fully-consumed data file "0" should have been deleted by clean().
    boolean exists = fs.exists(new Path(STORAGE_DIRECTORY + "/" + "0"));
    Assert.assertEquals("file should not exist", false, exists);
    r.close();
  }

  @Test
  public void testNext() throws IOException
  {
    RandomAccessFile r = new RandomAccessFile(testMeta.testFile, "r");
    r.seek(0);
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] b = r.readLine().getBytes();
    storage.store(new Slice(b, 0, b.length));
    byte[] b1 = r.readLine().getBytes();
    storage.store(new Slice(b1, 0, b1.length));
    storage.store(new Slice(b, 0, b.length));
    storage.flush();
    storage.store(new Slice(b1, 0, b1.length));
    storage.store(new Slice(b, 0, b.length));
    storage.flush();
    byte[] data = storage.retrieve(new byte[8]);
    byte[] tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    Assert.assertEquals("matched the stored value with retrieved value", new String(b), new String(tempData));
    data = storage.retrieveNext();
    tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    Assert.assertEquals("matched the stored value with retrieved value", new String(b1), new String(tempData));
    data = storage.retrieveNext();
    tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    Assert.assertEquals("matched the stored value with retrieved value", new String(b), new String(tempData));
    r.close();
  }

  @Test
  public void testFailure() throws IOException
  {
    byte[] address;
    byte[] b = new byte[200];
    storage.retrieve(new byte[8]);
    for (int i = 0; i < 5; i++) {
      storage.store(new Slice(b, 0, b.length));
      address = storage.store(new Slice(b, 0, b.length));
      storage.flush();
      storage.clean(address);
    }
    storage.teardown();

    byte[] identifier = new byte[8];
    storage = getStorage("1", true);

    storage.retrieve(identifier);

    storage.store(new Slice(b, 0, b.length));
    storage.store(new Slice(b, 0, b.length));
    storage.store(new Slice(b, 0, b.length));
    storage.flush();
    byte[] data = storage.retrieve(identifier);
    byte[] tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    Assert.assertEquals("matched the stored value with retrieved value", new String(b), new String(tempData));
  }

  /**
   * This test case tests the clean call before any flush is called.
   *
   * @throws IOException
   */
  @Test
  public void testCleanUnflushedData() throws IOException
  {
    for (int i = 0; i < 5; i++) {
      final byte[] bytes = (i + "").getBytes();
      storage.store(new Slice(bytes, 0, bytes.length));
    }
    storage.clean(new byte[8]);
    storage.flush();
    match(storage.retrieve(new byte[8]), "0");
    match(storage.retrieveNext(), "1");
  }

  @Test
  public void testCleanForUnflushedData() throws IOException
  {
    byte[] address = null;
    byte[] b = new byte[200];
    storage.retrieve(new byte[8]);
    for (int i = 0; i < 5; i++) {
      storage.store(new Slice(b, 0, b.length));
      address = storage.store(new Slice(b, 0, b.length));
      storage.flush();
      // storage.clean(address);
    }
    byte[] lastWrittenAddress = null;
    for (int i = 0; i < 5; i++) {
      storage.store(new Slice(b, 0, b.length));
      lastWrittenAddress = storage.store(new Slice(b, 0, b.length));
    }
    // Cleaning at an unflushed address must only advance the clean offset to the last
    // flushed address.
    storage.clean(lastWrittenAddress);
    byte[] cleanedOffset = storage.readData(new Path(STORAGE_DIRECTORY + "/1/cleanoffsetFile"));
    Assert.assertArrayEquals(address, cleanedOffset);
  }

  @Test
  public void testCleanForFlushedData() throws IOException
  {
    byte[] b = new byte[200];
    storage.retrieve(new byte[8]);
    for (int i = 0; i < 5; i++) {
      storage.store(new Slice(b, 0, b.length));
      storage.store(new Slice(b, 0, b.length));
      storage.flush();
      // storage.clean(address);
    }
    byte[] lastWrittenAddress = null;
    for (int i = 0; i < 5; i++) {
      storage.store(new Slice(b, 0, b.length));
      lastWrittenAddress = storage.store(new Slice(b, 0, b.length));
    }
    storage.flush();
    storage.clean(lastWrittenAddress);
    byte[] cleanedOffset = storage.readData(new Path(STORAGE_DIRECTORY + "/1/cleanoffsetFile"));
    Assert.assertArrayEquals(lastWrittenAddress, cleanedOffset);
  }

  @Test
  public void testCleanForPartialFlushedData() throws IOException
  {
    byte[] b = new byte[8];
    storage.retrieve(new byte[8]);

    storage.store(new Slice(b, 0, b.length));
    byte[] bytes = "1a".getBytes();
    byte[] address = storage.store(new Slice(bytes, 0, bytes.length));
    storage.flush();
    storage.clean(address);

    byte[] lastWrittenAddress = null;
    for (int i = 0; i < 5; i++) {
      final byte[] bytes1 = (i + "").getBytes();
      storage.store(new Slice(bytes1, 0, bytes1.length));
      lastWrittenAddress = storage.store(new Slice(b, 0, b.length));
    }
    Assert.assertNull(storage.retrieve(new byte[8]));
    Assert.assertNull(storage.retrieve(lastWrittenAddress));
    storage.store(new Slice(b, 0, b.length));
    storage.flush();
    Assert.assertNull(storage.retrieve(lastWrittenAddress));
  }

  // Regression-style scenario: a long interleaving of store/flush/clean/retrieve calls,
  // replaying a sequence that presumably reproduced a historical bug — TODO confirm origin.
  @Test
  public void testRandomSequence() throws IOException
  {
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    byte[] bytes = new byte[]{48, 48, 48, 51, 101, 100, 55, 56, 55, 49, 53, 99, 52, 101, 55, 50, 97, 52, 48, 49, 51,
      99, 97, 54, 102, 57, 55, 53, 57, 100, 49, 99, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45,
      49, 49, 45, 48, 55, 32, 48, 48, 58, 48, 48, 58, 52, 54, 1, 52, 50, 49, 50, 51, 1, 50, 1, 49, 53, 49, 49, 52,
      50, 54, 53, 1, 49, 53, 49, 49, 57, 51, 53, 49, 1, 49, 53, 49, 50, 57, 56, 50, 52, 1, 49, 53, 49, 50, 49, 55,
      48, 55, 1, 49, 48, 48, 55, 55, 51, 57, 51, 1, 49, 57, 49, 52, 55, 50, 53, 52, 54, 49, 1, 49, 1, 48, 1, 48, 46,
      48, 1, 48, 46, 48, 1, 48, 46, 48};
    storage.store(new Slice(bytes, 0, bytes.length));
    storage.flush();
    storage.clean(new byte[]{-109, 0, 0, 0, 0, 0, 0, 0});
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 2555; i++) {
      byte[] bytes1 = new byte[]{48, 48, 48, 55, 56, 51, 98, 101, 50, 54, 50, 98, 52, 102, 50, 54, 56, 97, 55, 56,
        102, 48, 54, 54, 50, 49, 49, 54, 99, 98, 101, 99, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49,
        51, 45, 49, 49, 45, 48, 55, 32, 48, 48, 58, 48, 48, 58, 53, 49, 1, 49, 49, 49, 49, 54, 51, 57, 1, 50, 1, 49,
        53, 49, 48, 57, 57, 56, 51, 1, 49, 53, 49, 49, 49, 55, 48, 52, 1, 49, 53, 49, 50, 49, 51, 55, 49, 1, 49, 53,
        49, 49, 52, 56, 51, 49, 1, 49, 48, 48, 55, 49, 57, 56, 49, 1, 49, 50, 48, 50, 55, 54, 49, 54, 56, 53, 1, 49,
        1, 48, 1, 48, 46, 48, 1, 48, 46, 48, 1, 48, 46, 48};
      storage.store(new Slice(bytes1, 0, bytes1.length));
      storage.flush();
    }
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 1297; i++) {
      storage.retrieveNext();
    }
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 1302; i++) {
      storage.retrieveNext();
    }
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 1317; i++) {
      storage.retrieveNext();
    }
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 2007; i++) {
      storage.retrieveNext();
    }
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 2556; i++) {
      storage.retrieveNext();
    }
    byte[] bytes1 = new byte[]{48, 48, 48, 48, 98, 48, 52, 54, 49, 57, 55, 51, 52, 97, 53, 101, 56, 56, 97, 55, 98,
      53, 52, 51, 98, 50, 102, 51, 49, 97, 97, 54, 1, 50, 48, 49, 51, 45, 49, 49, 45, 48, 55, 1, 50, 48, 49, 51, 45,
      49, 49, 45, 48, 55, 32, 48, 48, 58, 51, 49, 58, 52, 56, 1, 49, 48, 53, 53, 57, 52, 50, 1, 50, 1, 49, 53, 49,
      49, 54, 49, 56, 52, 1, 49, 53, 49, 49, 57, 50, 49, 49, 1, 49, 53, 49, 50, 57, 54, 54, 53, 1, 49, 53, 49, 50,
      49, 53, 52, 56, 1, 49, 48, 48, 56, 48, 51, 52, 50, 1, 55, 56, 56, 50, 54, 53, 52, 56, 1, 49, 1, 48, 1, 48, 46,
      48, 1, 48, 46, 48, 1, 48, 46, 48};
    storage.store(new Slice(bytes1, 0, bytes1.length));
    storage.flush();
    storage.retrieve(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
    for (int i = 0; i < 2062; i++) {
      storage.retrieveNext();
    }
  }

  @SuppressWarnings("unused")
  private static final Logger logger = LoggerFactory.getLogger(HDFSStorageTest.class);
}
/* * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Bob Jervis * Google Inc. * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. 
 * ***** END LICENSE BLOCK ***** */

package com.google.javascript.rhino.testing;

import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.FunctionBuilder;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;
import com.google.javascript.rhino.jstype.RecordTypeBuilder;
import junit.framework.TestCase;

/**
 * Base class for tests that exercise the Rhino JS type system. A fresh
 * {@link JSTypeRegistry} is created for every test, and each native type is
 * exposed as a protected field so subclasses can reference it directly.
 */
public abstract class BaseJSTypeTestCase extends TestCase {

  // Re-created for every test in setUp().
  protected JSTypeRegistry registry;
  protected TestErrorReporter errorReporter;

  // Handles on the registry's native types, populated by initTypes().
  protected JSType ALL_TYPE;
  protected ObjectType NO_OBJECT_TYPE;
  protected ObjectType NO_TYPE;
  protected JSType ARRAY_FUNCTION_TYPE;
  protected ObjectType ARRAY_TYPE;
  protected JSType BOOLEAN_OBJECT_FUNCTION_TYPE;
  protected ObjectType BOOLEAN_OBJECT_TYPE;
  protected JSType BOOLEAN_TYPE;
  protected JSType CHECKED_UNKNOWN_TYPE;
  protected JSType DATE_FUNCTION_TYPE;
  protected ObjectType DATE_TYPE;
  protected JSType ERROR_FUNCTION_TYPE;
  protected ObjectType ERROR_TYPE;
  protected JSType EVAL_ERROR_FUNCTION_TYPE;
  protected ObjectType EVAL_ERROR_TYPE;
  protected FunctionType FUNCTION_FUNCTION_TYPE;
  protected FunctionType FUNCTION_INSTANCE_TYPE;
  protected ObjectType FUNCTION_PROTOTYPE;
  protected JSType GREATEST_FUNCTION_TYPE;
  protected JSType LEAST_FUNCTION_TYPE;
  // NOTE(review): MATH_TYPE is never assigned in initTypes(); it stays null.
  // Confirm whether any subclass relies on it before removing.
  protected JSType MATH_TYPE;
  protected JSType NULL_TYPE;
  protected JSType NUMBER_OBJECT_FUNCTION_TYPE;
  protected ObjectType NUMBER_OBJECT_TYPE;
  protected JSType NUMBER_STRING_BOOLEAN;
  protected JSType NUMBER_TYPE;
  protected FunctionType OBJECT_FUNCTION_TYPE;
  protected JSType OBJECT_NUMBER_STRING;
  protected JSType OBJECT_NUMBER_STRING_BOOLEAN;
  protected JSType OBJECT_PROTOTYPE;
  protected ObjectType OBJECT_TYPE;
  protected JSType RANGE_ERROR_FUNCTION_TYPE;
  protected ObjectType RANGE_ERROR_TYPE;
  protected JSType REFERENCE_ERROR_FUNCTION_TYPE;
  protected ObjectType REFERENCE_ERROR_TYPE;
  protected JSType REGEXP_FUNCTION_TYPE;
  protected ObjectType REGEXP_TYPE;
  protected JSType STRING_OBJECT_FUNCTION_TYPE;
  protected ObjectType STRING_OBJECT_TYPE;
  protected JSType STRING_TYPE;
  protected JSType SYNTAX_ERROR_FUNCTION_TYPE;
  protected ObjectType SYNTAX_ERROR_TYPE;
  protected JSType TYPE_ERROR_FUNCTION_TYPE;
  protected ObjectType TYPE_ERROR_TYPE;
  protected FunctionType U2U_CONSTRUCTOR_TYPE;
  protected FunctionType U2U_FUNCTION_TYPE;
  protected ObjectType UNKNOWN_TYPE;
  protected JSType URI_ERROR_FUNCTION_TYPE;
  protected ObjectType URI_ERROR_TYPE;
  protected JSType VOID_TYPE;

  // Number of properties addNativeProperties() leaves on Object
  // (captured at the end of initTypes()).
  protected int NATIVE_PROPERTIES_COUNT;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Reporter configured to expect no errors and no warnings by default.
    errorReporter = new TestErrorReporter(null, null);
    registry = new JSTypeRegistry(errorReporter);
    initTypes();
  }

  /** Looks up each native type in the registry and stores it in its field. */
  protected void initTypes() {
    ALL_TYPE =
        registry.getNativeType(JSTypeNative.ALL_TYPE);
    NO_OBJECT_TYPE =
        registry.getNativeObjectType(JSTypeNative.NO_OBJECT_TYPE);
    NO_TYPE =
        registry.getNativeObjectType(JSTypeNative.NO_TYPE);
    ARRAY_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.ARRAY_FUNCTION_TYPE);
    ARRAY_TYPE =
        registry.getNativeObjectType(JSTypeNative.ARRAY_TYPE);
    BOOLEAN_OBJECT_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.BOOLEAN_OBJECT_FUNCTION_TYPE);
    BOOLEAN_OBJECT_TYPE =
        registry.getNativeObjectType(JSTypeNative.BOOLEAN_OBJECT_TYPE);
    BOOLEAN_TYPE =
        registry.getNativeType(JSTypeNative.BOOLEAN_TYPE);
    CHECKED_UNKNOWN_TYPE =
        registry.getNativeType(JSTypeNative.CHECKED_UNKNOWN_TYPE);
    DATE_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.DATE_FUNCTION_TYPE);
    DATE_TYPE =
        registry.getNativeObjectType(JSTypeNative.DATE_TYPE);
    ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.ERROR_FUNCTION_TYPE);
    ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.ERROR_TYPE);
    EVAL_ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.EVAL_ERROR_FUNCTION_TYPE);
    EVAL_ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.EVAL_ERROR_TYPE);
    FUNCTION_FUNCTION_TYPE =
        registry.getNativeFunctionType(JSTypeNative.FUNCTION_FUNCTION_TYPE);
    FUNCTION_INSTANCE_TYPE =
        registry.getNativeFunctionType(JSTypeNative.FUNCTION_INSTANCE_TYPE);
    FUNCTION_PROTOTYPE =
        registry.getNativeObjectType(JSTypeNative.FUNCTION_PROTOTYPE);
    GREATEST_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.GREATEST_FUNCTION_TYPE);
    LEAST_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.LEAST_FUNCTION_TYPE);
    NULL_TYPE =
        registry.getNativeType(JSTypeNative.NULL_TYPE);
    NUMBER_OBJECT_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.NUMBER_OBJECT_FUNCTION_TYPE);
    NUMBER_OBJECT_TYPE =
        registry.getNativeObjectType(JSTypeNative.NUMBER_OBJECT_TYPE);
    NUMBER_STRING_BOOLEAN =
        registry.getNativeType(JSTypeNative.NUMBER_STRING_BOOLEAN);
    NUMBER_TYPE =
        registry.getNativeType(JSTypeNative.NUMBER_TYPE);
    OBJECT_FUNCTION_TYPE =
        registry.getNativeFunctionType(JSTypeNative.OBJECT_FUNCTION_TYPE);
    OBJECT_NUMBER_STRING =
        registry.getNativeType(JSTypeNative.OBJECT_NUMBER_STRING);
    OBJECT_NUMBER_STRING_BOOLEAN =
        registry.getNativeType(JSTypeNative.OBJECT_NUMBER_STRING_BOOLEAN);
    OBJECT_PROTOTYPE =
        registry.getNativeType(JSTypeNative.OBJECT_PROTOTYPE);
    OBJECT_TYPE =
        registry.getNativeObjectType(JSTypeNative.OBJECT_TYPE);
    RANGE_ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.RANGE_ERROR_FUNCTION_TYPE);
    RANGE_ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.RANGE_ERROR_TYPE);
    REFERENCE_ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.REFERENCE_ERROR_FUNCTION_TYPE);
    REFERENCE_ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.REFERENCE_ERROR_TYPE);
    REGEXP_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.REGEXP_FUNCTION_TYPE);
    REGEXP_TYPE =
        registry.getNativeObjectType(JSTypeNative.REGEXP_TYPE);
    STRING_OBJECT_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.STRING_OBJECT_FUNCTION_TYPE);
    STRING_OBJECT_TYPE =
        registry.getNativeObjectType(JSTypeNative.STRING_OBJECT_TYPE);
    STRING_TYPE =
        registry.getNativeType(JSTypeNative.STRING_TYPE);
    SYNTAX_ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.SYNTAX_ERROR_FUNCTION_TYPE);
    SYNTAX_ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.SYNTAX_ERROR_TYPE);
    TYPE_ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.TYPE_ERROR_FUNCTION_TYPE);
    TYPE_ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.TYPE_ERROR_TYPE);
    U2U_CONSTRUCTOR_TYPE =
        registry.getNativeFunctionType(JSTypeNative.U2U_CONSTRUCTOR_TYPE);
    U2U_FUNCTION_TYPE =
        registry.getNativeFunctionType(JSTypeNative.U2U_FUNCTION_TYPE);
    UNKNOWN_TYPE =
        registry.getNativeObjectType(JSTypeNative.UNKNOWN_TYPE);
    URI_ERROR_FUNCTION_TYPE =
        registry.getNativeType(JSTypeNative.URI_ERROR_FUNCTION_TYPE);
    URI_ERROR_TYPE =
        registry.getNativeObjectType(JSTypeNative.URI_ERROR_TYPE);
    VOID_TYPE =
        registry.getNativeType(JSTypeNative.VOID_TYPE);

    // Install the es3-style builtin properties, then remember how many
    // properties Object ended up with so tests can detect accidental changes.
    addNativeProperties(registry);
    NATIVE_PROPERTIES_COUNT = OBJECT_TYPE.getPropertiesCount();
  }

  /** Adds a basic set of properties to the native types.
   */
  public static void addNativeProperties(JSTypeRegistry registry) {
    JSType booleanType = registry.getNativeType(JSTypeNative.BOOLEAN_TYPE);
    JSType numberType = registry.getNativeType(JSTypeNative.NUMBER_TYPE);
    JSType stringType = registry.getNativeType(JSTypeNative.STRING_TYPE);
    JSType unknownType = registry.getNativeType(JSTypeNative.UNKNOWN_TYPE);

    ObjectType objectType =
        registry.getNativeObjectType(JSTypeNative.OBJECT_TYPE);
    ObjectType arrayType =
        registry.getNativeObjectType(JSTypeNative.ARRAY_TYPE);
    ObjectType dateType =
        registry.getNativeObjectType(JSTypeNative.DATE_TYPE);
    ObjectType regexpType =
        registry.getNativeObjectType(JSTypeNative.REGEXP_TYPE);
    ObjectType booleanObjectType =
        registry.getNativeObjectType(JSTypeNative.BOOLEAN_OBJECT_TYPE);
    ObjectType numberObjectType =
        registry.getNativeObjectType(JSTypeNative.NUMBER_OBJECT_TYPE);
    ObjectType stringObjectType =
        registry.getNativeObjectType(JSTypeNative.STRING_OBJECT_TYPE);

    // Object.prototype
    ObjectType objectPrototype = registry
        .getNativeFunctionType(JSTypeNative.OBJECT_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, objectPrototype, "constructor", objectType);
    addMethod(registry, objectPrototype, "toString", stringType);
    addMethod(registry, objectPrototype, "toLocaleString", stringType);
    addMethod(registry, objectPrototype, "valueOf", unknownType);
    addMethod(registry, objectPrototype, "hasOwnProperty", booleanType);
    addMethod(registry, objectPrototype, "isPrototypeOf", booleanType);
    addMethod(registry, objectPrototype, "propertyIsEnumerable", booleanType);

    // Array.prototype
    ObjectType arrayPrototype = registry
        .getNativeFunctionType(JSTypeNative.ARRAY_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, arrayPrototype, "constructor", arrayType);
    addMethod(registry, arrayPrototype, "toString", stringType);
    addMethod(registry, arrayPrototype, "toLocaleString", stringType);
    addMethod(registry, arrayPrototype, "concat", arrayType);
    addMethod(registry, arrayPrototype, "join", stringType);
    addMethod(registry, arrayPrototype, "pop", unknownType);
    addMethod(registry, arrayPrototype, "push", numberType);
    addMethod(registry, arrayPrototype, "reverse", arrayType);
    addMethod(registry, arrayPrototype, "shift", unknownType);
    addMethod(registry, arrayPrototype, "slice", arrayType);
    addMethod(registry, arrayPrototype, "sort", arrayType);
    addMethod(registry, arrayPrototype, "splice", arrayType);
    addMethod(registry, arrayPrototype, "unshift", numberType);
    arrayType.defineDeclaredProperty("length", numberType, true, null);

    // Boolean.prototype
    ObjectType booleanPrototype = registry
        .getNativeFunctionType(JSTypeNative.BOOLEAN_OBJECT_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, booleanPrototype, "constructor", booleanObjectType);
    addMethod(registry, booleanPrototype, "toString", stringType);
    addMethod(registry, booleanPrototype, "valueOf", booleanType);

    // Date.prototype
    ObjectType datePrototype = registry
        .getNativeFunctionType(JSTypeNative.DATE_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, datePrototype, "constructor", dateType);
    addMethod(registry, datePrototype, "toString", stringType);
    addMethod(registry, datePrototype, "toDateString", stringType);
    addMethod(registry, datePrototype, "toTimeString", stringType);
    addMethod(registry, datePrototype, "toLocaleString", stringType);
    addMethod(registry, datePrototype, "toLocaleDateString", stringType);
    addMethod(registry, datePrototype, "toLocaleTimeString", stringType);
    addMethod(registry, datePrototype, "valueOf", numberType);
    addMethod(registry, datePrototype, "getTime", numberType);
    addMethod(registry, datePrototype, "getFullYear", numberType);
    addMethod(registry, datePrototype, "getUTCFullYear", numberType);
    addMethod(registry, datePrototype, "getMonth", numberType);
    addMethod(registry, datePrototype, "getUTCMonth", numberType);
    addMethod(registry, datePrototype, "getDate", numberType);
    addMethod(registry, datePrototype, "getUTCDate", numberType);
    addMethod(registry, datePrototype, "getDay", numberType);
    addMethod(registry, datePrototype, "getUTCDay", numberType);
    addMethod(registry, datePrototype, "getHours", numberType);
    addMethod(registry, datePrototype, "getUTCHours", numberType);
    addMethod(registry, datePrototype, "getMinutes", numberType);
    addMethod(registry, datePrototype, "getUTCMinutes", numberType);
    addMethod(registry, datePrototype, "getSeconds", numberType);
    addMethod(registry, datePrototype, "getUTCSeconds", numberType);
    addMethod(registry, datePrototype, "getMilliseconds", numberType);
    addMethod(registry, datePrototype, "getUTCMilliseconds", numberType);
    addMethod(registry, datePrototype, "getTimezoneOffset", numberType);
    addMethod(registry, datePrototype, "setTime", numberType);
    addMethod(registry, datePrototype, "setMilliseconds", numberType);
    addMethod(registry, datePrototype, "setUTCMilliseconds", numberType);
    addMethod(registry, datePrototype, "setSeconds", numberType);
    addMethod(registry, datePrototype, "setUTCSeconds", numberType);
    addMethod(registry, datePrototype, "setMinutes", numberType);
    addMethod(registry, datePrototype, "setUTCMinutes", numberType);
    addMethod(registry, datePrototype, "setHours", numberType);
    addMethod(registry, datePrototype, "setUTCHours", numberType);
    addMethod(registry, datePrototype, "setDate", numberType);
    addMethod(registry, datePrototype, "setUTCDate", numberType);
    addMethod(registry, datePrototype, "setMonth", numberType);
    addMethod(registry, datePrototype, "setUTCMonth", numberType);
    addMethod(registry, datePrototype, "setFullYear", numberType);
    addMethod(registry, datePrototype, "setUTCFullYear", numberType);
    addMethod(registry, datePrototype, "toUTCString", stringType);
    addMethod(registry, datePrototype, "toGMTString", stringType);

    // Number.prototype
    ObjectType numberPrototype = registry
        .getNativeFunctionType(JSTypeNative.NUMBER_OBJECT_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, numberPrototype, "constructor", numberObjectType);
    addMethod(registry, numberPrototype, "toString", stringType);
    addMethod(registry, numberPrototype, "toLocaleString", stringType);
    addMethod(registry, numberPrototype, "valueOf", numberType);
    addMethod(registry, numberPrototype, "toFixed", stringType);
    addMethod(registry, numberPrototype, "toExponential", stringType);
    addMethod(registry, numberPrototype, "toPrecision", stringType);

    // RegExp.prototype (exec/match return a nullable Array)
    ObjectType regexpPrototype = registry
        .getNativeFunctionType(JSTypeNative.REGEXP_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, regexpPrototype, "constructor", regexpType);
    addMethod(registry, regexpPrototype, "exec",
        registry.createNullableType(arrayType));
    addMethod(registry, regexpPrototype, "test", booleanType);
    addMethod(registry, regexpPrototype, "toString", stringType);
    regexpType.defineDeclaredProperty("source", stringType, true, null);
    regexpType.defineDeclaredProperty("global", booleanType, true, null);
    regexpType.defineDeclaredProperty("ignoreCase", booleanType, true, null);
    regexpType.defineDeclaredProperty("multiline", booleanType, true, null);
    regexpType.defineDeclaredProperty("lastIndex", numberType, true, null);

    // String.prototype
    ObjectType stringPrototype = registry
        .getNativeFunctionType(JSTypeNative.STRING_OBJECT_FUNCTION_TYPE)
        .getPrototype();
    addMethod(registry, stringPrototype, "constructor", stringObjectType);
    addMethod(registry, stringPrototype, "toString", stringType);
    addMethod(registry, stringPrototype, "valueOf", stringType);
    addMethod(registry, stringPrototype, "charAt", stringType);
    addMethod(registry, stringPrototype, "charCodeAt", numberType);
    addMethod(registry, stringPrototype, "concat", stringType);
    addMethod(registry, stringPrototype, "indexOf", numberType);
    addMethod(registry, stringPrototype, "lastIndexOf", numberType);
    addMethod(registry, stringPrototype, "localeCompare", numberType);
    addMethod(registry, stringPrototype, "match",
        registry.createNullableType(arrayType));
    addMethod(registry, stringPrototype, "replace", stringType);
    addMethod(registry, stringPrototype, "search", numberType);
    addMethod(registry, stringPrototype, "slice", stringType);
    addMethod(registry, stringPrototype, "split", arrayType);
    addMethod(registry, stringPrototype, "substring", stringType);
    addMethod(registry, stringPrototype, "toLowerCase", stringType);
    addMethod(registry, stringPrototype, "toLocaleLowerCase", stringType);
    addMethod(registry, stringPrototype, "toUpperCase", stringType);
    addMethod(registry, stringPrototype, "toLocaleUpperCase", stringType);
    stringObjectType.defineDeclaredProperty("length", numberType, true, null);
  }

  /**
   * Defines {@code methodName} on {@code receivingType} as a declared
   * function-valued property whose return type is {@code returnType}.
   */
  private static void addMethod(
      JSTypeRegistry registry, ObjectType receivingType, String methodName,
      JSType returnType) {
    receivingType.defineDeclaredProperty(methodName,
        new FunctionBuilder(registry).withReturnType(returnType).build(),
        true, null);
  }

  /** Creates a union of the given variant types. */
  protected JSType createUnionType(JSType... variants) {
    return registry.createUnionType(variants);
  }

  /** Creates a fresh record-type builder backed by this test's registry. */
  protected RecordTypeBuilder createRecordTypeBuilder() {
    return new RecordTypeBuilder(registry);
  }

  /** Returns {@code type} unioned with null. */
  protected JSType createNullableType(JSType type) {
    return registry.createNullableType(type);
  }

  /** Returns {@code type} unioned with undefined. */
  protected JSType createOptionalType(JSType type) {
    return registry.createOptionalType(type);
  }

  /**
   * Asserts that a Node representing a type expression resolves to the
   * correct {@code JSType}.
   */
  protected void assertTypeEquals(JSType expected, Node actual) {
    assertTypeEquals(expected, new JSTypeExpression(actual, ""));
  }

  /**
   * Asserts that a type expression resolves to the correct {@code JSType}.
   */
  protected void assertTypeEquals(JSType expected, JSTypeExpression actual) {
    assertEquals(expected, resolve(actual));
  }

  /**
   * Resolves a type expression, expecting the given warnings.
   */
  protected JSType resolve(JSTypeExpression n, String... warnings) {
    errorReporter.setWarnings(warnings);
    return n.evaluate(null, registry);
  }

  /**
   * A definition of all extern types. This should be kept in sync with
   * javascript/externs/es3.js. This is used to check that the builtin types
   * declared in {@link JSTypeRegistry} have the same type as that in the
   * externs.
   * It can also be used for any tests that want to use builtin types
   * in their externs.
   */
  public static final String ALL_NATIVE_EXTERN_TYPES =
      "/**\n"
      + " * @constructor\n"
      + " * @param {*} opt_value\n"
      + " */\n"
      + "function Object(opt_value) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Object}\n"
      + " * @param {*} var_args\n"
      + " */\n"
      + "\n"
      + "function Function(var_args) {}\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Object}\n"
      + " * @param {*} var_args\n"
      + " * @return {!Array}\n"
      + " */\n"
      + "function Array(var_args) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @param {*} opt_value\n"
      + " * @return {boolean}\n"
      + " */\n"
      + "function Boolean(opt_value) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @param {*} opt_value\n"
      + " * @return {number}\n"
      + " */\n"
      + "function Number(opt_value) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @return {string}\n"
      + " */\n"
      + "function Date(opt_yr_num, opt_mo_num, opt_day_num, opt_hr_num,"
      + " opt_min_num, opt_sec_num, opt_ms_num) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Object}\n"
      + " * @param {*} opt_str\n"
      + " * @return {string}\n"
      + " */\n"
      + "function String(opt_str) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @param {*} opt_pattern\n"
      + " * @param {*} opt_flags\n"
      + " * @return {!RegExp}\n"
      + " */\n"
      + "function RegExp(opt_pattern, opt_flags) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!Error}\n"
      + " */\n"
      + "function Error(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Error}\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!EvalError}\n"
      + " */\n"
      + "function EvalError(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Error}\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!RangeError}\n"
      + " */\n"
      + "function RangeError(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Error}\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!ReferenceError}\n"
      + " */\n"
      + "function ReferenceError(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Error}\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!SyntaxError}\n"
      + " */\n"
      + "function SyntaxError(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Error}\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!TypeError}\n"
      + " */\n"
      + "function TypeError(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @constructor\n"
      + " * @extends {Error}\n"
      + " * @param {*} opt_message\n"
      + " * @param {*} opt_file\n"
      + " * @param {*} opt_line\n"
      + " * @return {!URIError}\n"
      + " */\n"
      + "function URIError(opt_message, opt_file, opt_line) {}\n"
      + "\n"
      + "/**\n"
      + " * @param {string} progId\n"
      + " * @param {string} opt_location\n"
      + " * @constructor\n"
      + " */\n"
      + "function ActiveXObject(progId, opt_location) {}\n";
}
/* * This file is part of the Wayback archival access software * (http://archive-access.sourceforge.net/projects/wayback/). * * Licensed to the Internet Archive (IA) by one or more individual * contributors. * * The IA licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.archive.wayback.resourcestore.indexer; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.util.logging.Logger; import org.archive.util.iterator.CloseableIterator; import org.archive.wayback.Shutdownable; import org.archive.wayback.resourcestore.locationdb.ResourceFileLocationDB; import org.archive.wayback.util.ByteOp; import org.archive.wayback.util.DirMaker; /** * This class polls a ResourceFileLocationDB repeatedly, to notice new files * arriving in the DB. Whenever new files are noticed, they are added to the * Index Queue. * * It uses a local file to store the last known "mark" of the location DB. * * @author brad * @version $Date$, $Revision$ */ public class IndexQueueUpdater implements Shutdownable { private static final Logger LOGGER = Logger.getLogger(IndexQueueUpdater.class.getName()); private ResourceFileLocationDB db = null; private IndexQueue queue = null; private UpdateThread thread = null; private MarkMemoryFile lastMark = null; private long interval = 120000; /** * The init method for this class. 
*/ public void init() { if(interval > 0) { thread = new UpdateThread(this,interval); thread.start(); } } @Override public void shutdown() { if(thread != null) { thread.interrupt(); try { thread.join(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } /** * Add new names to the IndexQueue. * @return The number of items added to the queue. * @throws IOException */ public int updateQueue() throws IOException { int added = 0; long lastMarkPoint = lastMark.getLastMark(); long currentMarkPoint = db.getCurrentMark(); if(currentMarkPoint > lastMarkPoint) { // TODO: touchy touchy... need transactions here to not have // state sync problems if something goes badly in this block.. // for example, it would be possible to constantly enqueue the // same files forever.. CloseableIterator<String> newNames = db.getNamesBetweenMarks(lastMarkPoint, currentMarkPoint); while(newNames.hasNext()) { String newName = newNames.next(); LOGGER.info("Queued " + newName + " for indexing."); queue.enqueue(newName); added++; } newNames.close(); lastMark.setLastMark(currentMarkPoint); } return added; } private class MarkMemoryFile { private File file = null; public MarkMemoryFile(File file) { this.file = file; } public long getLastMark() throws IOException { long mark = 0; if(file.isFile() && file.length() > 0) { FileInputStream fis = new FileInputStream(file); InputStreamReader isr = new InputStreamReader(fis,ByteOp.UTF8); BufferedReader ir = new BufferedReader(isr); String line = ir.readLine(); if(line != null) { mark = Long.parseLong(line); } } return mark; } public void setLastMark(long mark) throws IOException { PrintWriter pw = new PrintWriter(file); pw.println(mark); pw.close(); } public String getAbsolutePath() { return file.getAbsolutePath(); } } private class UpdateThread extends Thread { private long runInterval = 120000; private IndexQueueUpdater updater = null; public UpdateThread(IndexQueueUpdater updater, long runInterval) { this.updater = updater; this.runInterval 
= runInterval; } @Override public void run() { LOGGER.info("alive"); long sleepInterval = runInterval; while (true) { try { int updated = updater.updateQueue(); if(updated > 0) { sleepInterval = runInterval; } else { sleepInterval += runInterval; } sleep(sleepInterval); } catch (InterruptedException e) { LOGGER.info("Shutting Down."); return; } catch (IOException e) { e.printStackTrace(); } } } } /** * @return the db */ public ResourceFileLocationDB getDb() { return db; } /** * @param db the db to set */ public void setDb(ResourceFileLocationDB db) { this.db = db; } /** * @return the queue */ public IndexQueue getQueue() { return queue; } /** * @param queue the queue to set */ public void setQueue(IndexQueue queue) { this.queue = queue; } /** * @return the stateFile */ public String getLastMark() { if(lastMark != null) { return lastMark.getAbsolutePath(); } return null; } /** * @param path The filepath to use as mark. * @throws IOException */ public void setLastMark(String path) throws IOException { File tmp = new File(path); DirMaker.ensureDir(tmp.getParentFile().getAbsolutePath()); lastMark = new MarkMemoryFile(tmp); } /** * @return the interval */ public long getInterval() { return interval; } /** * @param interval the interval to set */ public void setInterval(long interval) { this.interval = interval; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.core.client.mapreduce; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.accumulo.core.client.BatchWriter; import org.apache.accumulo.core.client.BatchWriterConfig; import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.client.Instance; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.mock.MockInstance; import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.iterators.user.RegExFilter; import org.apache.accumulo.core.iterators.user.WholeRowIterator; import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.core.util.CachedConfiguration; import 
org.apache.accumulo.core.util.Pair;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
import org.junit.Assert;
import org.junit.Test;

public class AccumuloInputFormatTest {

  // Used to build per-test mock instance and table names.
  private static final String PREFIX = AccumuloInputFormatTest.class.getSimpleName();

  /**
   * Check that the iterator configuration is getting stored in the Job conf correctly.
   */
  @Test
  public void testSetIterator() throws IOException {
    @SuppressWarnings("deprecation")
    Job job = new Job();

    IteratorSetting is = new IteratorSetting(1, "WholeRow", "org.apache.accumulo.core.iterators.WholeRowIterator");
    AccumuloInputFormat.addIterator(job, is);
    Configuration conf = job.getConfiguration();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    is.write(new DataOutputStream(baos));
    String iterators = conf.get("AccumuloInputFormat.ScanOpts.Iterators");
    // The conf stores the Base64-encoded serialized IteratorSetting.
    assertEquals(new String(Base64.encodeBase64(baos.toByteArray())), iterators);
  }

  @Test
  public void testAddIterator() throws IOException {
    @SuppressWarnings("deprecation")
    Job job = new Job();

    // Mix a Class-based setting with two class-name-String settings,
    // one of which carries options with separator-hostile characters.
    AccumuloInputFormat.addIterator(job, new IteratorSetting(1, "WholeRow", WholeRowIterator.class));
    AccumuloInputFormat.addIterator(job, new IteratorSetting(2, "Versions", "org.apache.accumulo.core.iterators.VersioningIterator"));
    IteratorSetting iter = new IteratorSetting(3, "Count", "org.apache.accumulo.core.iterators.CountingIterator");
    iter.addOption("v1", "1");
    iter.addOption("junk", "\0omg:!\\xyzzy");
    AccumuloInputFormat.addIterator(job, iter);

    List<IteratorSetting> list = AccumuloInputFormat.getIterators(job);

    // Check the list size
    assertTrue(list.size() == 3);

    // Walk the list and make sure our settings are correct
    IteratorSetting setting = list.get(0);
    assertEquals(1, setting.getPriority());
    assertEquals("org.apache.accumulo.core.iterators.user.WholeRowIterator", setting.getIteratorClass());
    assertEquals("WholeRow", setting.getName());
    assertEquals(0, setting.getOptions().size());

    setting = list.get(1);
    assertEquals(2, setting.getPriority());
    assertEquals("org.apache.accumulo.core.iterators.VersioningIterator", setting.getIteratorClass());
    assertEquals("Versions", setting.getName());
    assertEquals(0, setting.getOptions().size());

    setting = list.get(2);
    assertEquals(3, setting.getPriority());
    assertEquals("org.apache.accumulo.core.iterators.CountingIterator", setting.getIteratorClass());
    assertEquals("Count", setting.getName());
    assertEquals(2, setting.getOptions().size());
    assertEquals("1", setting.getOptions().get("v1"));
    assertEquals("\0omg:!\\xyzzy", setting.getOptions().get("junk"));
  }

  /**
   * Test adding iterator options where the keys and values contain both the
   * FIELD_SEPARATOR character (':') and ITERATOR_SEPARATOR (',') characters. There
   * should be no exceptions thrown when trying to parse these types of option entries.
   *
   * This test makes sure that the expected raw values, as appears in the Job,
   * are equal to what's expected.
   */
  @Test
  public void testIteratorOptionEncoding() throws Throwable {
    String key = "colon:delimited:key";
    String value = "comma,delimited,value";
    IteratorSetting someSetting = new IteratorSetting(1, "iterator", "Iterator.class");
    someSetting.addOption(key, value);
    @SuppressWarnings("deprecation")
    Job job = new Job();
    AccumuloInputFormat.addIterator(job, someSetting);

    List<IteratorSetting> list = AccumuloInputFormat.getIterators(job);
    assertEquals(1, list.size());
    assertEquals(1, list.get(0).getOptions().size());
    assertEquals(list.get(0).getOptions().get(key), value);

    // Add a second copy under a different name/priority; both must round-trip.
    someSetting.addOption(key + "2", value);
    someSetting.setPriority(2);
    someSetting.setName("it2");
    AccumuloInputFormat.addIterator(job, someSetting);
    list = AccumuloInputFormat.getIterators(job);
    assertEquals(2, list.size());
    assertEquals(1, list.get(0).getOptions().size());
    assertEquals(list.get(0).getOptions().get(key), value);
    assertEquals(2, list.get(1).getOptions().size());
    assertEquals(list.get(1).getOptions().get(key), value);
    assertEquals(list.get(1).getOptions().get(key + "2"), value);
  }

  /**
   * Test getting iterator settings for multiple iterators set
   */
  @Test
  public void testGetIteratorSettings() throws IOException {
    @SuppressWarnings("deprecation")
    Job job = new Job();

    AccumuloInputFormat.addIterator(job, new IteratorSetting(1, "WholeRow", "org.apache.accumulo.core.iterators.WholeRowIterator"));
    AccumuloInputFormat.addIterator(job, new IteratorSetting(2, "Versions", "org.apache.accumulo.core.iterators.VersioningIterator"));
    AccumuloInputFormat.addIterator(job, new IteratorSetting(3, "Count", "org.apache.accumulo.core.iterators.CountingIterator"));

    List<IteratorSetting> list = AccumuloInputFormat.getIterators(job);

    // Check the list size
    assertTrue(list.size() == 3);

    // Walk the list and make sure our settings are correct
    IteratorSetting setting = list.get(0);
    assertEquals(1, setting.getPriority());
    assertEquals("org.apache.accumulo.core.iterators.WholeRowIterator", setting.getIteratorClass());
    assertEquals("WholeRow", setting.getName());

    setting = list.get(1);
    assertEquals(2, setting.getPriority());
    assertEquals("org.apache.accumulo.core.iterators.VersioningIterator", setting.getIteratorClass());
    assertEquals("Versions", setting.getName());

    setting = list.get(2);
    assertEquals(3, setting.getPriority());
    assertEquals("org.apache.accumulo.core.iterators.CountingIterator", setting.getIteratorClass());
    assertEquals("Count", setting.getName());
  }

  @Test
  public void testSetRegex() throws IOException {
    @SuppressWarnings("deprecation")
    Job job = new Job();

    // Regex containing characters that must survive conf serialization.
    String regex = ">\"*%<>\'\\";

    IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class);
    RegExFilter.setRegexs(is, regex, null, null, null, false);
    AccumuloInputFormat.addIterator(job, is);

    assertTrue(regex.equals(AccumuloInputFormat.getIterators(job).get(0).getName()));
  }

  // Assertion failures raised on MR task threads are stashed here and
  // re-checked on the test thread, since JUnit cannot see them otherwise.
  private static AssertionError e1 = null;
  private static AssertionError e2 = null;

  private static class MRTester extends Configured implements Tool {
    private static class TestMapper extends Mapper<Key,Value,Key,Value> {
      Key key = null;
      int count = 0;

      @Override
      protected void map(Key k, Value v, Context context) throws IOException, InterruptedException {
        try {
          // Each value repeats the previous row; rows are sequential hex.
          if (key != null)
            assertEquals(key.getRow().toString(), new String(v.get()));
          assertEquals(k.getRow(), new Text(String.format("%09x", count + 1)));
          assertEquals(new String(v.get()), String.format("%09x", count));
        } catch (AssertionError e) {
          e1 = e;
        }
        key = new Key(k);
        count++;
      }

      @Override
      protected void cleanup(Context context) throws IOException, InterruptedException {
        try {
          assertEquals(100, count);
        } catch (AssertionError e) {
          e2 = e;
        }
      }
    }

    @Override
    public int run(String[] args) throws Exception {

      if (args.length != 5) {
        throw new IllegalArgumentException("Usage : " + MRTester.class.getName() + " <user> <pass> <table> <instanceName> <inputFormatClass>");
      }

      String user = args[0];
      String pass = args[1];
      String table = args[2];
      String instanceName = args[3];
      String
inputFormatClassName = args[4]; @SuppressWarnings("unchecked") Class<? extends InputFormat<?,?>> inputFormatClass = (Class<? extends InputFormat<?,?>>) Class.forName(inputFormatClassName); @SuppressWarnings("deprecation") Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); job.setJarByClass(this.getClass()); job.setInputFormatClass(inputFormatClass); AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(pass)); AccumuloInputFormat.setInputTableName(job, table); AccumuloInputFormat.setMockInstance(job, instanceName); job.setMapperClass(TestMapper.class); job.setMapOutputKeyClass(Key.class); job.setMapOutputValueClass(Value.class); job.setOutputFormatClass(NullOutputFormat.class); job.setNumReduceTasks(0); job.waitForCompletion(true); return job.isSuccessful() ? 0 : 1; } public static int main(String[] args) throws Exception { return ToolRunner.run(CachedConfiguration.getInstance(), new MRTester(), args); } } @Test public void testMap() throws Exception { final String INSTANCE_NAME = PREFIX + "_mapreduce_instance"; final String TEST_TABLE_1 = PREFIX + "_mapreduce_table_1"; MockInstance mockInstance = new MockInstance(INSTANCE_NAME); Connector c = mockInstance.getConnector("root", new PasswordToken("")); c.tableOperations().create(TEST_TABLE_1); BatchWriter bw = c.createBatchWriter(TEST_TABLE_1, new BatchWriterConfig()); for (int i = 0; i < 100; i++) { Mutation m = new Mutation(new Text(String.format("%09x", i + 1))); m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes())); bw.addMutation(m); } bw.close(); Assert.assertEquals(0, MRTester.main(new String[] {"root", "", TEST_TABLE_1, INSTANCE_NAME, AccumuloInputFormat.class.getCanonicalName()})); assertNull(e1); assertNull(e2); } @Test public void testCorrectRangeInputSplits() throws Exception { @SuppressWarnings("deprecation") Job job = new Job(new Configuration(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); String 
username = "user", table = "table", instance = "instance"; PasswordToken password = new PasswordToken("password"); Authorizations auths = new Authorizations("foo"); Collection<Pair<Text,Text>> fetchColumns = Collections.singleton(new Pair<Text,Text>(new Text("foo"), new Text("bar"))); boolean isolated = true, localIters = true; Level level = Level.WARN; Instance inst = new MockInstance(instance); Connector connector = inst.getConnector(username, password); connector.tableOperations().create(table); AccumuloInputFormat.setConnectorInfo(job, username, password); AccumuloInputFormat.setInputTableName(job, table); AccumuloInputFormat.setScanAuthorizations(job, auths); AccumuloInputFormat.setMockInstance(job, instance); AccumuloInputFormat.setScanIsolation(job, isolated); AccumuloInputFormat.setLocalIterators(job, localIters); AccumuloInputFormat.fetchColumns(job, fetchColumns); AccumuloInputFormat.setLogLevel(job, level); AccumuloInputFormat aif = new AccumuloInputFormat(); List<InputSplit> splits = aif.getSplits(job); Assert.assertEquals(1, splits.size()); InputSplit split = splits.get(0); Assert.assertEquals(RangeInputSplit.class, split.getClass()); RangeInputSplit risplit = (RangeInputSplit) split; Assert.assertEquals(username, risplit.getPrincipal()); Assert.assertEquals(table, risplit.getTableName()); Assert.assertEquals(password, risplit.getToken()); Assert.assertEquals(auths, risplit.getAuths()); Assert.assertEquals(instance, risplit.getInstanceName()); Assert.assertEquals(isolated, risplit.isIsolatedScan()); Assert.assertEquals(localIters, risplit.usesLocalIterators()); Assert.assertEquals(fetchColumns, risplit.getFetchedColumns()); Assert.assertEquals(level, risplit.getLogLevel()); } @Test public void testPartialInputSplitDelegationToConfiguration() throws Exception { String user = "testPartialInputSplitUser"; PasswordToken password = new PasswordToken(""); MockInstance mockInstance = new MockInstance("testPartialInputSplitDelegationToConfiguration"); 
Connector c = mockInstance.getConnector(user, password); c.tableOperations().create("testtable"); BatchWriter bw = c.createBatchWriter("testtable", new BatchWriterConfig()); for (int i = 0; i < 100; i++) { Mutation m = new Mutation(new Text(String.format("%09x", i + 1))); m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes())); bw.addMutation(m); } bw.close(); Assert.assertEquals( 0, MRTester.main(new String[] {user, "", "testtable", "testPartialInputSplitDelegationToConfiguration", EmptySplitsAccumuloInputFormat.class.getCanonicalName()})); assertNull(e1); assertNull(e2); } @Test public void testPartialFailedInputSplitDelegationToConfiguration() throws Exception { String user = "testPartialFailedInputSplit"; PasswordToken password = new PasswordToken(""); MockInstance mockInstance = new MockInstance("testPartialFailedInputSplitDelegationToConfiguration"); Connector c = mockInstance.getConnector(user, password); c.tableOperations().create("testtable"); BatchWriter bw = c.createBatchWriter("testtable", new BatchWriterConfig()); for (int i = 0; i < 100; i++) { Mutation m = new Mutation(new Text(String.format("%09x", i + 1))); m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes())); bw.addMutation(m); } bw.close(); // We should fail before we even get into the Mapper because we can't make the RecordReader Assert.assertEquals( 1, MRTester.main(new String[] {user, "", "testtable", "testPartialFailedInputSplitDelegationToConfiguration", BadPasswordSplitsAccumuloInputFormat.class.getCanonicalName()})); assertNull(e1); assertNull(e2); } @Test public void testEmptyColumnFamily() throws IOException { @SuppressWarnings("deprecation") Job job = new Job(); Set<Pair<Text,Text>> cols = new HashSet<Pair<Text,Text>>(); cols.add(new Pair<Text,Text>(new Text(""), null)); cols.add(new Pair<Text,Text>(new Text("foo"), new Text("bar"))); cols.add(new Pair<Text,Text>(new Text(""), new Text("bar"))); cols.add(new Pair<Text,Text>(new Text(""), 
new Text(""))); cols.add(new Pair<Text,Text>(new Text("foo"), new Text(""))); AccumuloInputFormat.fetchColumns(job, cols); Set<Pair<Text,Text>> setCols = AccumuloInputFormat.getFetchedColumns(job); assertEquals(cols, setCols); } }
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.location.suplclient.asn1.supl2.lpp; // Copyright 2008 Google Inc. All Rights Reserved. /* * This class is AUTOMATICALLY GENERATED. Do NOT EDIT. */ // // import com.google.location.suplclient.asn1.base.Asn1Enumerated; import com.google.location.suplclient.asn1.base.Asn1Integer; import com.google.location.suplclient.asn1.base.Asn1Object; import com.google.location.suplclient.asn1.base.Asn1Sequence; import com.google.location.suplclient.asn1.base.Asn1Tag; import com.google.location.suplclient.asn1.base.BitStream; import com.google.location.suplclient.asn1.base.BitStreamReader; import com.google.location.suplclient.asn1.base.SequenceComponent; import com.google.common.collect.ImmutableList; import java.util.Collection; import javax.annotation.Nullable; /** * */ public class EllipsoidPointWithAltitudeAndUncertaintyEllipsoid extends Asn1Sequence { // private static final Asn1Tag TAG_EllipsoidPointWithAltitudeAndUncertaintyEllipsoid = Asn1Tag.fromClassAndNumber(-1, -1); public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid() { super(); } @Override @Nullable protected Asn1Tag getTag() { return TAG_EllipsoidPointWithAltitudeAndUncertaintyEllipsoid; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_EllipsoidPointWithAltitudeAndUncertaintyEllipsoid != null) { return 
ImmutableList.of(TAG_EllipsoidPointWithAltitudeAndUncertaintyEllipsoid); } else { return Asn1Sequence.getPossibleFirstTags(); } } /** * Creates a new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid from encoded stream. */ public static EllipsoidPointWithAltitudeAndUncertaintyEllipsoid fromPerUnaligned(byte[] encodedBytes) { EllipsoidPointWithAltitudeAndUncertaintyEllipsoid result = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } /** * Creates a new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid from encoded stream. */ public static EllipsoidPointWithAltitudeAndUncertaintyEllipsoid fromPerAligned(byte[] encodedBytes) { EllipsoidPointWithAltitudeAndUncertaintyEllipsoid result = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; } @Override protected boolean isExtensible() { return false; } @Override public boolean containsExtensionValues() { for (SequenceComponent extensionComponent : getExtensionComponents()) { if (extensionComponent.isExplicitlySet()) return true; } return false; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType latitudeSign_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType getLatitudeSign() { return latitudeSign_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType */ public void setLatitudeSign(Asn1Object value) { this.latitudeSign_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType setLatitudeSignToNewInstance() { latitudeSign_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType(); return latitudeSign_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType degreesLatitude_; public 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType getDegreesLatitude() { return degreesLatitude_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType */ public void setDegreesLatitude(Asn1Object value) { this.degreesLatitude_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType setDegreesLatitudeToNewInstance() { degreesLatitude_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType(); return degreesLatitude_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType degreesLongitude_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType getDegreesLongitude() { return degreesLongitude_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType */ public void setDegreesLongitude(Asn1Object value) { this.degreesLongitude_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType setDegreesLongitudeToNewInstance() { degreesLongitude_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType(); return degreesLongitude_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType altitudeDirection_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType getAltitudeDirection() { return altitudeDirection_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType */ public void setAltitudeDirection(Asn1Object value) { this.altitudeDirection_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType 
setAltitudeDirectionToNewInstance() { altitudeDirection_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType(); return altitudeDirection_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType altitude_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType getAltitude() { return altitude_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType */ public void setAltitude(Asn1Object value) { this.altitude_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType setAltitudeToNewInstance() { altitude_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType(); return altitude_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType uncertaintySemiMajor_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType getUncertaintySemiMajor() { return uncertaintySemiMajor_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType */ public void setUncertaintySemiMajor(Asn1Object value) { this.uncertaintySemiMajor_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType setUncertaintySemiMajorToNewInstance() { uncertaintySemiMajor_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType(); return uncertaintySemiMajor_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType uncertaintySemiMinor_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType getUncertaintySemiMinor() { return uncertaintySemiMinor_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType */ 
public void setUncertaintySemiMinor(Asn1Object value) { this.uncertaintySemiMinor_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType setUncertaintySemiMinorToNewInstance() { uncertaintySemiMinor_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType(); return uncertaintySemiMinor_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType orientationMajorAxis_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType getOrientationMajorAxis() { return orientationMajorAxis_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType */ public void setOrientationMajorAxis(Asn1Object value) { this.orientationMajorAxis_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType setOrientationMajorAxisToNewInstance() { orientationMajorAxis_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType(); return orientationMajorAxis_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType uncertaintyAltitude_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType getUncertaintyAltitude() { return uncertaintyAltitude_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType */ public void setUncertaintyAltitude(Asn1Object value) { this.uncertaintyAltitude_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType setUncertaintyAltitudeToNewInstance() { uncertaintyAltitude_ = new 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType(); return uncertaintyAltitude_; } private EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType confidence_; public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType getConfidence() { return confidence_; } /** * @throws ClassCastException if value is not a EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType */ public void setConfidence(Asn1Object value) { this.confidence_ = (EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType) value; } public EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType setConfidenceToNewInstance() { confidence_ = new EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType(); return confidence_; } @Override public Iterable<? extends SequenceComponent> getComponents() { ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder(); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 0); @Override public boolean isExplicitlySet() { return getLatitudeSign() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getLatitudeSign(); } @Override public void setToNewInstance() { setLatitudeSignToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.latitudeSignType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "latitudeSign : " + getLatitudeSign().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 1); @Override public boolean isExplicitlySet() { return getDegreesLatitude() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getDegreesLatitude(); } @Override public void setToNewInstance() { setDegreesLatitudeToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLatitudeType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "degreesLatitude : " + getDegreesLatitude().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 2); @Override public boolean isExplicitlySet() { return getDegreesLongitude() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getDegreesLongitude(); } @Override public void setToNewInstance() { setDegreesLongitudeToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.degreesLongitudeType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "degreesLongitude : " + getDegreesLongitude().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 3); @Override public boolean isExplicitlySet() { return getAltitudeDirection() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getAltitudeDirection(); } @Override public void setToNewInstance() { setAltitudeDirectionToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeDirectionType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "altitudeDirection : " + getAltitudeDirection().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 4); @Override public boolean isExplicitlySet() { return getAltitude() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getAltitude(); } @Override public void setToNewInstance() { setAltitudeToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.altitudeType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "altitude : " + getAltitude().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 5); @Override public boolean isExplicitlySet() { return getUncertaintySemiMajor() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getUncertaintySemiMajor(); } @Override public void setToNewInstance() { setUncertaintySemiMajorToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMajorType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "uncertaintySemiMajor : " + getUncertaintySemiMajor().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 6); @Override public boolean isExplicitlySet() { return getUncertaintySemiMinor() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getUncertaintySemiMinor(); } @Override public void setToNewInstance() { setUncertaintySemiMinorToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintySemiMinorType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "uncertaintySemiMinor : " + getUncertaintySemiMinor().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 7); @Override public boolean isExplicitlySet() { return getOrientationMajorAxis() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getOrientationMajorAxis(); } @Override public void setToNewInstance() { setOrientationMajorAxisToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.orientationMajorAxisType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "orientationMajorAxis : " + getOrientationMajorAxis().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 8); @Override public boolean isExplicitlySet() { return getUncertaintyAltitude() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getUncertaintyAltitude(); } @Override public void setToNewInstance() { setUncertaintyAltitudeToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? 
EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.uncertaintyAltitudeType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "uncertaintyAltitude : " + getUncertaintyAltitude().toIndentedString(indent); } }); builder.add(new SequenceComponent() { Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 9); @Override public boolean isExplicitlySet() { return getConfidence() != null; } @Override public boolean hasDefaultValue() { return false; } @Override public boolean isOptional() { return false; } @Override public Asn1Object getComponentValue() { return getConfidence(); } @Override public void setToNewInstance() { setConfidenceToNewInstance(); } @Override public Collection<Asn1Tag> getPossibleFirstTags() { return tag == null ? EllipsoidPointWithAltitudeAndUncertaintyEllipsoid.confidenceType.getPossibleFirstTags() : ImmutableList.of(tag); } @Override public Asn1Tag getTag() { return tag; } @Override public boolean isImplicitTagging() { return true; } @Override public String toIndentedString(String indent) { return "confidence : " + getConfidence().toIndentedString(indent); } }); return builder.build(); } @Override public Iterable<? extends SequenceComponent> getExtensionComponents() { ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder(); return builder.build(); } // Copyright 2008 Google Inc. All Rights Reserved. /* * AUTOMATICALLY GENERATED. Do NOT EDIT. 
*/ // /** * */ public static class latitudeSignType extends Asn1Enumerated { public enum Value implements Asn1Enumerated.Value { north(0), south(1), ; Value(int i) { value = i; } private int value; public int getAssignedValue() { return value; } @Override public boolean isExtensionValue() { return false; } } @Override protected Value getDefaultValue() { return null ; } @SuppressWarnings("unchecked") public Value enumValue() { return (Value) getValue(); } public void setTo_north() { setValue(Value.north); } public void setTo_south() { setValue(Value.south); } public enum ExtensionValue implements Asn1Enumerated.Value { ; ExtensionValue(int i) { value = i; } private int value; @Override public int getAssignedValue() { return value; } @Override public boolean isExtensionValue() { return true; } } @SuppressWarnings("unchecked") public ExtensionValue extEnumValue() { return (ExtensionValue) getValue(); } private static final Asn1Tag TAG_latitudeSignType = Asn1Tag.fromClassAndNumber(-1, -1); public latitudeSignType() { super(); // use template substitution instead of calling getDefaultValue(), since // calling virtual methods from a ctor is frowned upon here. setValue(null ); } @Override @Nullable protected Asn1Tag getTag() { return TAG_latitudeSignType; } @Override protected boolean isTagImplicit() { return true; } public static Collection<Asn1Tag> getPossibleFirstTags() { if (TAG_latitudeSignType != null) { return ImmutableList.of(TAG_latitudeSignType); } else { return Asn1Enumerated.getPossibleFirstTags(); } } @Override protected boolean isExtensible() { return false; } @Override protected Asn1Enumerated.Value lookupValue(int ordinal) { return Value.values()[ordinal]; } @Override protected Asn1Enumerated.Value lookupExtensionValue(int ordinal) { return ExtensionValue.values()[ordinal]; } @Override protected int getValueCount() { return Value.values().length; } /** * Creates a new latitudeSignType from encoded stream. 
   */
  public static latitudeSignType fromPerUnaligned(byte[] encodedBytes) {
    latitudeSignType result = new latitudeSignType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new latitudeSignType from an aligned-PER encoded stream.
   */
  public static latitudeSignType fromPerAligned(byte[] encodedBytes) {
    latitudeSignType result = new latitudeSignType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "latitudeSignType = " + getValue() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the latitude degrees field; value range 0..8388607.
 */
public static class degreesLatitudeType extends Asn1Integer {
  private static final Asn1Tag TAG_degreesLatitudeType = Asn1Tag.fromClassAndNumber(-1, -1);

  public degreesLatitudeType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("8388607"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_degreesLatitudeType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_degreesLatitudeType != null) {
      return ImmutableList.of(TAG_degreesLatitudeType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new degreesLatitudeType from an unaligned-PER encoded stream.
   */
  public static degreesLatitudeType fromPerUnaligned(byte[] encodedBytes) {
    degreesLatitudeType result = new degreesLatitudeType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new degreesLatitudeType from an aligned-PER encoded stream.
   */
  public static degreesLatitudeType fromPerAligned(byte[] encodedBytes) {
    degreesLatitudeType result = new degreesLatitudeType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "degreesLatitudeType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the longitude degrees field; value range -8388608..8388607.
 */
public static class degreesLongitudeType extends Asn1Integer {
  private static final Asn1Tag TAG_degreesLongitudeType = Asn1Tag.fromClassAndNumber(-1, -1);

  public degreesLongitudeType() {
    super();
    setValueRange(new java.math.BigInteger("-8388608"), new java.math.BigInteger("8388607"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_degreesLongitudeType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_degreesLongitudeType != null) {
      return ImmutableList.of(TAG_degreesLongitudeType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new degreesLongitudeType from an unaligned-PER encoded stream.
   */
  public static degreesLongitudeType fromPerUnaligned(byte[] encodedBytes) {
    degreesLongitudeType result = new degreesLongitudeType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new degreesLongitudeType from an aligned-PER encoded stream.
   */
  public static degreesLongitudeType fromPerAligned(byte[] encodedBytes) {
    degreesLongitudeType result = new degreesLongitudeType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "degreesLongitudeType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 ENUMERATED wrapper for the altitude direction; values: height(0), depth(1).
 * Not extensible.
 */
public static class altitudeDirectionType extends Asn1Enumerated {
  public enum Value implements Asn1Enumerated.Value {
    height(0),
    depth(1),
    ;

    Value(int i) {
      value = i;
    }

    private int value;

    public int getAssignedValue() {
      return value;
    }

    @Override public boolean isExtensionValue() {
      return false;
    }
  }

  @Override protected Value getDefaultValue() {
    return null;
  }

  @SuppressWarnings("unchecked")
  public Value enumValue() {
    return (Value) getValue();
  }

  public void setTo_height() {
    setValue(Value.height);
  }

  public void setTo_depth() {
    setValue(Value.depth);
  }

  // This enumeration has no extension values; the enum is intentionally empty.
  public enum ExtensionValue implements Asn1Enumerated.Value {
    ;

    ExtensionValue(int i) {
      value = i;
    }

    private int value;

    @Override public int getAssignedValue() {
      return value;
    }

    @Override public boolean isExtensionValue() {
      return true;
    }
  }

  @SuppressWarnings("unchecked")
  public ExtensionValue extEnumValue() {
    return (ExtensionValue) getValue();
  }

  private static final Asn1Tag TAG_altitudeDirectionType = Asn1Tag.fromClassAndNumber(-1, -1);

  public altitudeDirectionType() {
    super();
    // use template substitution instead of calling getDefaultValue(), since
    // calling virtual methods from a ctor is frowned upon here.
    setValue(null);
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_altitudeDirectionType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_altitudeDirectionType != null) {
      return ImmutableList.of(TAG_altitudeDirectionType);
    } else {
      return Asn1Enumerated.getPossibleFirstTags();
    }
  }

  @Override protected boolean isExtensible() {
    return false;
  }

  @Override protected Asn1Enumerated.Value lookupValue(int ordinal) {
    return Value.values()[ordinal];
  }

  @Override protected Asn1Enumerated.Value lookupExtensionValue(int ordinal) {
    return ExtensionValue.values()[ordinal];
  }

  @Override protected int getValueCount() {
    return Value.values().length;
  }

  /**
   * Creates a new altitudeDirectionType from an unaligned-PER encoded stream.
   */
  public static altitudeDirectionType fromPerUnaligned(byte[] encodedBytes) {
    altitudeDirectionType result = new altitudeDirectionType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new altitudeDirectionType from an aligned-PER encoded stream.
   */
  public static altitudeDirectionType fromPerAligned(byte[] encodedBytes) {
    altitudeDirectionType result = new altitudeDirectionType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "altitudeDirectionType = " + getValue() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the altitude field; value range 0..32767.
 */
public static class altitudeType extends Asn1Integer {
  private static final Asn1Tag TAG_altitudeType = Asn1Tag.fromClassAndNumber(-1, -1);

  public altitudeType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("32767"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_altitudeType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_altitudeType != null) {
      return ImmutableList.of(TAG_altitudeType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new altitudeType from an unaligned-PER encoded stream.
   */
  public static altitudeType fromPerUnaligned(byte[] encodedBytes) {
    altitudeType result = new altitudeType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new altitudeType from an aligned-PER encoded stream.
   */
  public static altitudeType fromPerAligned(byte[] encodedBytes) {
    altitudeType result = new altitudeType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "altitudeType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the uncertainty semi-major axis; value range 0..127.
 */
public static class uncertaintySemiMajorType extends Asn1Integer {
  private static final Asn1Tag TAG_uncertaintySemiMajorType = Asn1Tag.fromClassAndNumber(-1, -1);

  public uncertaintySemiMajorType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("127"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_uncertaintySemiMajorType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_uncertaintySemiMajorType != null) {
      return ImmutableList.of(TAG_uncertaintySemiMajorType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new uncertaintySemiMajorType from an unaligned-PER encoded stream.
   */
  public static uncertaintySemiMajorType fromPerUnaligned(byte[] encodedBytes) {
    uncertaintySemiMajorType result = new uncertaintySemiMajorType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new uncertaintySemiMajorType from an aligned-PER encoded stream.
   */
  public static uncertaintySemiMajorType fromPerAligned(byte[] encodedBytes) {
    uncertaintySemiMajorType result = new uncertaintySemiMajorType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "uncertaintySemiMajorType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the uncertainty semi-minor axis; value range 0..127.
 */
public static class uncertaintySemiMinorType extends Asn1Integer {
  private static final Asn1Tag TAG_uncertaintySemiMinorType = Asn1Tag.fromClassAndNumber(-1, -1);

  public uncertaintySemiMinorType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("127"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_uncertaintySemiMinorType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_uncertaintySemiMinorType != null) {
      return ImmutableList.of(TAG_uncertaintySemiMinorType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new uncertaintySemiMinorType from an unaligned-PER encoded stream.
   */
  public static uncertaintySemiMinorType fromPerUnaligned(byte[] encodedBytes) {
    uncertaintySemiMinorType result = new uncertaintySemiMinorType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new uncertaintySemiMinorType from an aligned-PER encoded stream.
   */
  public static uncertaintySemiMinorType fromPerAligned(byte[] encodedBytes) {
    uncertaintySemiMinorType result = new uncertaintySemiMinorType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "uncertaintySemiMinorType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the major-axis orientation in degrees; value range 0..179.
 */
public static class orientationMajorAxisType extends Asn1Integer {
  private static final Asn1Tag TAG_orientationMajorAxisType = Asn1Tag.fromClassAndNumber(-1, -1);

  public orientationMajorAxisType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("179"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_orientationMajorAxisType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_orientationMajorAxisType != null) {
      return ImmutableList.of(TAG_orientationMajorAxisType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new orientationMajorAxisType from an unaligned-PER encoded stream.
   */
  public static orientationMajorAxisType fromPerUnaligned(byte[] encodedBytes) {
    orientationMajorAxisType result = new orientationMajorAxisType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new orientationMajorAxisType from an aligned-PER encoded stream.
   */
  public static orientationMajorAxisType fromPerAligned(byte[] encodedBytes) {
    orientationMajorAxisType result = new orientationMajorAxisType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "orientationMajorAxisType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the altitude uncertainty; value range 0..127.
 */
public static class uncertaintyAltitudeType extends Asn1Integer {
  private static final Asn1Tag TAG_uncertaintyAltitudeType = Asn1Tag.fromClassAndNumber(-1, -1);

  public uncertaintyAltitudeType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("127"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_uncertaintyAltitudeType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_uncertaintyAltitudeType != null) {
      return ImmutableList.of(TAG_uncertaintyAltitudeType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new uncertaintyAltitudeType from an unaligned-PER encoded stream.
   */
  public static uncertaintyAltitudeType fromPerUnaligned(byte[] encodedBytes) {
    uncertaintyAltitudeType result = new uncertaintyAltitudeType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new uncertaintyAltitudeType from an aligned-PER encoded stream.
   */
  public static uncertaintyAltitudeType fromPerAligned(byte[] encodedBytes) {
    uncertaintyAltitudeType result = new uncertaintyAltitudeType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "uncertaintyAltitudeType = " + getInteger() + ";\n";
  }
}

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

/**
 * ASN.1 INTEGER wrapper for the confidence percentage; value range 0..100.
 */
public static class confidenceType extends Asn1Integer {
  private static final Asn1Tag TAG_confidenceType = Asn1Tag.fromClassAndNumber(-1, -1);

  public confidenceType() {
    super();
    setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("100"));
  }

  @Override @Nullable protected Asn1Tag getTag() {
    return TAG_confidenceType;
  }

  @Override protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_confidenceType != null) {
      return ImmutableList.of(TAG_confidenceType);
    } else {
      return Asn1Integer.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new confidenceType from an unaligned-PER encoded stream.
   */
  public static confidenceType fromPerUnaligned(byte[] encodedBytes) {
    confidenceType result = new confidenceType();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new confidenceType from an aligned-PER encoded stream.
   */
  public static confidenceType fromPerAligned(byte[] encodedBytes) {
    confidenceType result = new confidenceType();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    return "confidenceType = " + getInteger() + ";\n";
  }
}

// Remaining members of the enclosing sequence type.
@Override public Iterable<BitStream> encodePerUnaligned() {
  return super.encodePerUnaligned();
}

@Override public Iterable<BitStream> encodePerAligned() {
  return super.encodePerAligned();
}

@Override public void decodePerUnaligned(BitStreamReader reader) {
  super.decodePerUnaligned(reader);
}

@Override public void decodePerAligned(BitStreamReader reader) {
  super.decodePerAligned(reader);
}

@Override public String toString() {
  return toIndentedString("");
}

// Renders each explicitly-set component (and extension components, when
// extensible) one level deeper than the given indent.
public String toIndentedString(String indent) {
  StringBuilder builder = new StringBuilder();
  builder.append("EllipsoidPointWithAltitudeAndUncertaintyEllipsoid = {\n");
  final String internalIndent = indent + " ";
  for (SequenceComponent component : getComponents()) {
    if (component.isExplicitlySet()) {
      builder.append(internalIndent)
          .append(component.toIndentedString(internalIndent));
    }
  }
  if (isExtensible()) {
    builder.append(internalIndent).append("...\n");
    for (SequenceComponent component : getExtensionComponents()) {
      if (component.isExplicitlySet()) {
        builder.append(internalIndent)
            .append(component.toIndentedString(internalIndent));
      }
    }
  }
  builder.append(indent).append("};\n");
  return builder.toString();
}
}
package com.google.ads.googleads.v10.services;

import static io.grpc.MethodDescriptor.generateFullMethodName;

/**
 * <pre>
 * Service to manage feed items.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/ads/googleads/v10/services/feed_item_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class FeedItemServiceGrpc {

  // Utility holder for generated stubs; never instantiated.
  private FeedItemServiceGrpc() {}

  public static final String SERVICE_NAME = "google.ads.googleads.v10.services.FeedItemService";

  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v10.services.MutateFeedItemsRequest,
      com.google.ads.googleads.v10.services.MutateFeedItemsResponse> getMutateFeedItemsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "MutateFeedItems",
      requestType = com.google.ads.googleads.v10.services.MutateFeedItemsRequest.class,
      responseType = com.google.ads.googleads.v10.services.MutateFeedItemsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v10.services.MutateFeedItemsRequest,
      com.google.ads.googleads.v10.services.MutateFeedItemsResponse> getMutateFeedItemsMethod() {
    // Lazily built on first use with double-checked locking; the backing
    // field is volatile, so the published descriptor is safely visible.
    io.grpc.MethodDescriptor<com.google.ads.googleads.v10.services.MutateFeedItemsRequest, com.google.ads.googleads.v10.services.MutateFeedItemsResponse> getMutateFeedItemsMethod;
    if ((getMutateFeedItemsMethod = FeedItemServiceGrpc.getMutateFeedItemsMethod) == null) {
      synchronized (FeedItemServiceGrpc.class) {
        if ((getMutateFeedItemsMethod = FeedItemServiceGrpc.getMutateFeedItemsMethod) == null) {
          FeedItemServiceGrpc.getMutateFeedItemsMethod = getMutateFeedItemsMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v10.services.MutateFeedItemsRequest, com.google.ads.googleads.v10.services.MutateFeedItemsResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateFeedItems"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v10.services.MutateFeedItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v10.services.MutateFeedItemsResponse.getDefaultInstance()))
              .setSchemaDescriptor(new FeedItemServiceMethodDescriptorSupplier("MutateFeedItems"))
              .build();
        }
      }
    }
    return getMutateFeedItemsMethod;
  }

  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static FeedItemServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<FeedItemServiceStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<FeedItemServiceStub>() {
        @java.lang.Override
        public FeedItemServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new FeedItemServiceStub(channel, callOptions);
        }
      };
    return FeedItemServiceStub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static FeedItemServiceBlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<FeedItemServiceBlockingStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<FeedItemServiceBlockingStub>() {
        @java.lang.Override
        public FeedItemServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new FeedItemServiceBlockingStub(channel, callOptions);
        }
      };
    return FeedItemServiceBlockingStub.newStub(factory, channel);
  }

  /**
   * Creates a new ListenableFuture-style stub that supports unary calls on the service
   */
  public static FeedItemServiceFutureStub newFutureStub(
      io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<FeedItemServiceFutureStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<FeedItemServiceFutureStub>() {
        @java.lang.Override
        public FeedItemServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new FeedItemServiceFutureStub(channel, callOptions);
        }
      };
    return FeedItemServiceFutureStub.newStub(factory, channel);
  }

  /**
   * <pre>
   * Service to manage feed items.
   * </pre>
   */
  public static abstract class FeedItemServiceImplBase implements io.grpc.BindableService {

    /**
     * <pre>
     * Creates, updates, or removes feed items. Operation statuses are
     * returned.
     * List of thrown errors: AuthenticationError, AuthorizationError,
     * CollectionSizeError, CriterionError, DatabaseError, DateError,
     * DistinctError, FeedItemError, FieldError, FieldMaskError, HeaderError,
     * IdError, InternalError, ListOperationError, MutateError, NotEmptyError,
     * NullError, OperatorError, QuotaError, RangeError, RequestError,
     * SizeLimitError, StringFormatError, StringLengthError, UrlFieldError
     * </pre>
     */
    public void mutateFeedItems(com.google.ads.googleads.v10.services.MutateFeedItemsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v10.services.MutateFeedItemsResponse> responseObserver) {
      // Default implementation answers UNIMPLEMENTED until overridden.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateFeedItemsMethod(), responseObserver);
    }

    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            getMutateFeedItemsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                com.google.ads.googleads.v10.services.MutateFeedItemsRequest,
                com.google.ads.googleads.v10.services.MutateFeedItemsResponse>(
                  this, METHODID_MUTATE_FEED_ITEMS)))
          .build();
    }
  }

  /**
   * <pre>
   * Service to manage feed items.
   * </pre>
   */
  public static final class FeedItemServiceStub extends io.grpc.stub.AbstractAsyncStub<FeedItemServiceStub> {
    private FeedItemServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected FeedItemServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeedItemServiceStub(channel, callOptions);
    }

    /**
     * <pre>
     * Creates, updates, or removes feed items. Operation statuses are
     * returned.
     * List of thrown errors: AuthenticationError, AuthorizationError,
     * CollectionSizeError, CriterionError, DatabaseError, DateError,
     * DistinctError, FeedItemError, FieldError, FieldMaskError, HeaderError,
     * IdError, InternalError, ListOperationError, MutateError, NotEmptyError,
     * NullError, OperatorError, QuotaError, RangeError, RequestError,
     * SizeLimitError, StringFormatError, StringLengthError, UrlFieldError
     * </pre>
     */
    public void mutateFeedItems(com.google.ads.googleads.v10.services.MutateFeedItemsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v10.services.MutateFeedItemsResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getMutateFeedItemsMethod(), getCallOptions()), request, responseObserver);
    }
  }

  /**
   * <pre>
   * Service to manage feed items.
   * </pre>
   */
  public static final class FeedItemServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<FeedItemServiceBlockingStub> {
    private FeedItemServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected FeedItemServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeedItemServiceBlockingStub(channel, callOptions);
    }

    /**
     * <pre>
     * Creates, updates, or removes feed items. Operation statuses are
     * returned.
     * List of thrown errors: AuthenticationError, AuthorizationError,
     * CollectionSizeError, CriterionError, DatabaseError, DateError,
     * DistinctError, FeedItemError, FieldError, FieldMaskError, HeaderError,
     * IdError, InternalError, ListOperationError, MutateError, NotEmptyError,
     * NullError, OperatorError, QuotaError, RangeError, RequestError,
     * SizeLimitError, StringFormatError, StringLengthError, UrlFieldError
     * </pre>
     */
    public com.google.ads.googleads.v10.services.MutateFeedItemsResponse mutateFeedItems(com.google.ads.googleads.v10.services.MutateFeedItemsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getMutateFeedItemsMethod(), getCallOptions(), request);
    }
  }

  /**
   * <pre>
   * Service to manage feed items.
   * </pre>
   */
  public static final class FeedItemServiceFutureStub extends io.grpc.stub.AbstractFutureStub<FeedItemServiceFutureStub> {
    private FeedItemServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected FeedItemServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeedItemServiceFutureStub(channel, callOptions);
    }

    /**
     * <pre>
     * Creates, updates, or removes feed items. Operation statuses are
     * returned.
     * List of thrown errors: AuthenticationError, AuthorizationError,
     * CollectionSizeError, CriterionError, DatabaseError, DateError,
     * DistinctError, FeedItemError, FieldError, FieldMaskError, HeaderError,
     * IdError, InternalError, ListOperationError, MutateError, NotEmptyError,
     * NullError, OperatorError, QuotaError, RangeError, RequestError,
     * SizeLimitError, StringFormatError, StringLengthError, UrlFieldError
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v10.services.MutateFeedItemsResponse> mutateFeedItems(
        com.google.ads.googleads.v10.services.MutateFeedItemsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getMutateFeedItemsMethod(), getCallOptions()), request);
    }
  }

  private static final int METHODID_MUTATE_FEED_ITEMS = 0;

  // Dispatches incoming server calls to the service implementation by method id.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final FeedItemServiceImplBase serviceImpl;
    private final int methodId;

    MethodHandlers(FeedItemServiceImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_MUTATE_FEED_ITEMS:
          serviceImpl.mutateFeedItems((com.google.ads.googleads.v10.services.MutateFeedItemsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v10.services.MutateFeedItemsResponse>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // No client-streaming or bidi methods on this service.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  private static abstract class FeedItemServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    FeedItemServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.ads.googleads.v10.services.FeedItemServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("FeedItemService");
    }
  }

  private static final class FeedItemServiceFileDescriptorSupplier
      extends FeedItemServiceBaseDescriptorSupplier {
    FeedItemServiceFileDescriptorSupplier() {}
  }

  private static final class FeedItemServiceMethodDescriptorSupplier
      extends FeedItemServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final String methodName;

    FeedItemServiceMethodDescriptorSupplier(String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Lazily built with double-checked locking on the volatile field.
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (FeedItemServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new FeedItemServiceFileDescriptorSupplier())
              .addMethod(getMutateFeedItemsMethod())
              .build();
        }
      }
    }
    return result;
  }
}
/* * Copyright 2013 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.juddi.adminconsole.hub; import java.io.StringReader; import java.io.StringWriter; import java.util.GregorianCalendar; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.XMLConstants; import javax.xml.bind.JAXB; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import org.apache.commons.configuration.ConfigurationException; import org.apache.juddi.api_v3.AdminSaveBusiness; import org.apache.juddi.api_v3.AdminSaveBusinessWrapper; import org.apache.juddi.api_v3.AdminSaveSubscriptionRequest; import org.apache.juddi.api_v3.AdminSaveTModel; import org.apache.juddi.api_v3.AdminSaveTModelWrapper; import org.apache.juddi.api_v3.Clerk; import org.apache.juddi.api_v3.ClientSubscriptionInfo; import org.apache.juddi.api_v3.SaveClientSubscriptionInfo; import org.apache.juddi.api_v3.SyncSubscription; import org.apache.juddi.config.AppConfig; import org.apache.juddi.config.Property; import org.apache.juddi.v3.client.UDDIConstants; import org.apache.juddi.v3.client.cryptor.XmlUtils; import org.uddi.api_v3.BusinessEntity; import org.uddi.api_v3.Contact; import 
org.uddi.api_v3.FindBusiness; import org.uddi.api_v3.FindQualifiers; import org.uddi.api_v3.Name; import org.uddi.api_v3.PersonName; import org.uddi.api_v3.TModel; import org.uddi.repl_v3.CommunicationGraph; import org.uddi.repl_v3.Operator; import org.uddi.repl_v3.OperatorStatusType; import org.uddi.repl_v3.ReplicationConfiguration; import org.uddi.sub_v3.CoveragePeriod; import org.uddi.sub_v3.GetSubscriptionResults; import org.uddi.sub_v3.Subscription; import org.uddi.sub_v3.SubscriptionFilter; /** * This class generates XML as String objects for UDDI requests. This is used * from the "advanced" web pages * * @author <a href="mailto:alexoree@apache.org">Alex O'Ree</a> */ public class JUDDIRequestsAsXML { private static String PrettyPrintXML(String input) { if (input == null || input.length() == 0) { return ""; } try { TransformerFactory transFactory = TransformerFactory.newInstance(); transFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, ""); transFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, ""); Transformer transformer = transFactory.newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); //initialize StreamResult with File object to save to file StreamResult result = new StreamResult(new StringWriter()); StreamSource source = new StreamSource(new StringReader(input.trim())); transformer.transform(source, result); String xmlString = result.getWriter().toString(); return (xmlString); } catch (Exception ex) { } return null; } public static String getSampleXML(String method) { StringWriter sw = new StringWriter(); if (method.equalsIgnoreCase("save_ClientSubscriptionInfo")) { SaveClientSubscriptionInfo obj = new SaveClientSubscriptionInfo(); obj.getClientSubscriptionInfo().add(new ClientSubscriptionInfo()); obj.getClientSubscriptionInfo().get(0).setFromClerk(new Clerk()); obj.getClientSubscriptionInfo().get(0).setToClerk(new Clerk()); 
obj.getClientSubscriptionInfo().get(0).setSubscriptionKey("key"); JAXB.marshal(obj, sw); } if (method.equalsIgnoreCase("invoke_SyncSubscription")) { SyncSubscription obj = new SyncSubscription(); obj.getGetSubscriptionResultsList().add(new GetSubscriptionResults()); obj.getGetSubscriptionResultsList().get(0).setSubscriptionKey("key"); obj.getGetSubscriptionResultsList().get(0).setCoveragePeriod(new CoveragePeriod()); DatatypeFactory newInstance; try { newInstance = DatatypeFactory.newInstance(); obj.getGetSubscriptionResultsList().get(0).getCoveragePeriod().setEndPoint(newInstance.newXMLGregorianCalendar(new GregorianCalendar())); obj.getGetSubscriptionResultsList().get(0).getCoveragePeriod().setStartPoint(newInstance.newXMLGregorianCalendar(new GregorianCalendar())); } catch (DatatypeConfigurationException ex) { Logger.getLogger(JUDDIRequestsAsXML.class.getName()).log(Level.SEVERE, null, ex); } JAXB.marshal(obj, sw); } if (method.equalsIgnoreCase("admin_SaveBusiness")) { AdminSaveBusiness obj = new AdminSaveBusiness(); obj.getValues().add(new AdminSaveBusinessWrapper()); obj.getValues().get(0).setPublisherID("username"); obj.getValues().get(0).getBusinessEntity().add(new BusinessEntity()); obj.getValues().get(0).getBusinessEntity().get(0).getName().add(new Name("Business Name", "en")); JAXB.marshal(obj, sw); } if (method.equalsIgnoreCase("admin_SaveTModel")) { AdminSaveTModel obj = new AdminSaveTModel(); obj.getValues().add(new AdminSaveTModelWrapper()); obj.getValues().get(0).setPublisherID("username"); obj.getValues().get(0).getTModel().add(new TModel()); obj.getValues().get(0).getTModel().get(0).setName(new Name("TModel Name", "en")); JAXB.marshal(obj, sw); } if (method.equalsIgnoreCase("admin_SaveSubscription")) { AdminSaveSubscriptionRequest obj = new AdminSaveSubscriptionRequest(); obj.setPublisherOrUsername("username"); obj.getSubscriptions().add(new Subscription()); obj.getSubscriptions().get(0).setSubscriptionFilter(new SubscriptionFilter()); 
obj.getSubscriptions().get(0).setBrief(Boolean.TRUE); obj.getSubscriptions().get(0).getSubscriptionFilter().setFindBusiness(new FindBusiness()); obj.getSubscriptions().get(0).getSubscriptionFilter().getFindBusiness().getName().add(new Name(UDDIConstants.WILDCARD, null)); obj.getSubscriptions().get(0).getSubscriptionFilter().getFindBusiness().setFindQualifiers(new FindQualifiers()); obj.getSubscriptions().get(0).getSubscriptionFilter().getFindBusiness().getFindQualifiers().getFindQualifier().add(UDDIConstants.APPROXIMATE_MATCH); obj.getSubscriptions().get(0).getSubscriptionFilter().getFindBusiness().getFindQualifiers().getFindQualifier().add(UDDIConstants.CASE_INSENSITIVE_MATCH); //obj.getSubscriptions().get(0).getSubscriptionFilter().getFindBusiness() JAXB.marshal(obj, sw); } if (method.equalsIgnoreCase("set_ReplicationNodes")) { ReplicationConfiguration replicationConfiguration = new ReplicationConfiguration(); replicationConfiguration.setCommunicationGraph(new CommunicationGraph()); String thisnode = "NODEID"; try { thisnode = AppConfig.getConfiguration().getString(Property.JUDDI_NODE_ID); } catch (ConfigurationException ex) { Logger.getLogger(JUDDIRequestsAsXML.class.getName()).log(Level.SEVERE, null, ex); } replicationConfiguration.getCommunicationGraph().getNode().add(thisnode); Operator op = new Operator(); op.setOperatorNodeID(thisnode); op.setOperatorStatus(OperatorStatusType.NORMAL); String url = "https://localhost:8443/juddiv3replication/services/replication"; try { url = AppConfig.getConfiguration().getString(Property.JUDDI_BASE_URL_SECURE) + "replication/services/replication"; } catch (ConfigurationException ex) { Logger.getLogger(JUDDIRequestsAsXML.class.getName()).log(Level.SEVERE, null, ex); } op.setSoapReplicationURL(url); replicationConfiguration.getOperator().add(op); replicationConfiguration.setRegistryContact(new ReplicationConfiguration.RegistryContact()); replicationConfiguration.getRegistryContact().setContact(new Contact()); 
replicationConfiguration.getRegistryContact().getContact().getPersonName().add(new PersonName("UNKNOWN", "en")); JAXB.marshal(replicationConfiguration, sw); } return PrettyPrintXML(sw.toString()); } public static Object getObjectJuddi(String method, String content) { StringReader sr = new StringReader(content); if (method.equalsIgnoreCase("save_ClientSubscriptionInfo")) { return XmlUtils.unmarshal(sr, SaveClientSubscriptionInfo.class); } if (method.equalsIgnoreCase("invoke_SyncSubscription")) { return XmlUtils.unmarshal(sr, SyncSubscription.class); } if (method.equalsIgnoreCase("admin_SaveBusiness")) { return XmlUtils.unmarshal(sr, AdminSaveBusiness.class); } if (method.equalsIgnoreCase("admin_SaveTModel")) { //System.out.println(content); return XmlUtils.unmarshal(sr, AdminSaveTModel.class); } if (method.equalsIgnoreCase("admin_SaveSubscription")) { return XmlUtils.unmarshal(sr, AdminSaveSubscriptionRequest.class); } if (method.equalsIgnoreCase("set_ReplicationNodes")) { return XmlUtils.unmarshal(sr, ReplicationConfiguration.class); } return null; } }
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.collect.timeseries; import static com.opengamma.strata.collect.timeseries.DenseLocalDateDoubleTimeSeries.DenseTimeSeriesCalculation.INCLUDE_WEEKENDS; import static com.opengamma.strata.collect.timeseries.DenseLocalDateDoubleTimeSeries.DenseTimeSeriesCalculation.SKIP_WEEKENDS; import java.time.LocalDate; import java.time.temporal.ChronoField; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.OptionalDouble; import java.util.SortedMap; import java.util.TreeMap; import java.util.function.DoubleBinaryOperator; import java.util.stream.Stream; import com.opengamma.strata.collect.ArgChecker; /** * Builder to create the immutable {@code LocalDateDoubleTimeSeries}. * <p> * This builder allows a time-series to be created. * Entries can be added to the builder in any order. * If a date is duplicated it will overwrite an earlier entry. * <p> * Use {@link LocalDateDoubleTimeSeries#builder()} to create an instance. */ public final class LocalDateDoubleTimeSeriesBuilder { /** * Threshold for deciding whether we use the dense or sparse time-series implementation. */ private static final double DENSITY_THRESHOLD = 0.7; /** * The entries for the time-series. */ private final SortedMap<LocalDate, Double> entries = new TreeMap<>(); /** * Keep track of whether we have weekends in the data. */ private boolean containsWeekends; //------------------------------------------------------------------------- /** * Creates an instance. * <p> * Use {@link LocalDateDoubleTimeSeries#builder()}. */ LocalDateDoubleTimeSeriesBuilder() { } /** * Creates an instance. * <p> * Use {@link LocalDateDoubleTimeSeries#toBuilder()}. 
* * @param dates the dates to initialize with * @param values the values to initialize with */ LocalDateDoubleTimeSeriesBuilder(LocalDate[] dates, double[] values) { for (int i = 0; i < dates.length; i++) { put(dates[i], values[i]); } } /** * Creates an instance. * <p> * Use {@link DenseLocalDateDoubleTimeSeries#toBuilder()}. * * @param points the stream of points to initialize with */ LocalDateDoubleTimeSeriesBuilder(Stream<LocalDateDoublePoint> points) { points.forEach(pt -> put(pt.getDate(), pt.getValue())); } //------------------------------------------------------------------------- /** * Gets the value associated with the specified date. * <p> * The result is an {@link OptionalDouble} which avoids the need to handle null * or exceptions. Use {@code isPresent()} to check whether the value is present. * Use {@code orElse(double)} to default a missing value. * * @param date the date to get the value for * @return the value associated with the date, optional empty if the date is not present */ public OptionalDouble get(LocalDate date) { Double value = entries.get(date); return (value != null ? OptionalDouble.of(value) : OptionalDouble.empty()); } //------------------------------------------------------------------------- /** * Puts the specified date/value point into this builder. * * @param date the date to be added * @param value the value associated with the date * @return this builder */ public LocalDateDoubleTimeSeriesBuilder put(LocalDate date, double value) { ArgChecker.notNull(date, "date"); ArgChecker.isFalse(Double.isNaN(value), "NaN is not allowed as a value"); entries.put(date, value); if (!containsWeekends && date.get(ChronoField.DAY_OF_WEEK) > 5) { containsWeekends = true; } return this; } /** * Puts the specified date/value point into this builder. 
* * @param point the point to be added * @return this builder */ public LocalDateDoubleTimeSeriesBuilder put(LocalDateDoublePoint point) { ArgChecker.notNull(point, "point"); put(point.getDate(), point.getValue()); return this; } //------------------------------------------------------------------------- /** * Merges the specified date/value point into this builder. * <p> * The operator is invoked if the date already exists. * * @param date the date to be added * @param value the value associated with the date * @param operator the operator to use for merging * @return this builder */ public LocalDateDoubleTimeSeriesBuilder merge(LocalDate date, double value, DoubleBinaryOperator operator) { ArgChecker.notNull(date, "date"); ArgChecker.notNull(operator, "operator"); entries.merge(date, value, (a, b) -> operator.applyAsDouble(a, b)); return this; } /** * Merges the specified date/value point into this builder. * <p> * The operator is invoked if the date already exists. * * @param point the point to be added * @param operator the operator to use for merging * @return this builder */ public LocalDateDoubleTimeSeriesBuilder merge(LocalDateDoublePoint point, DoubleBinaryOperator operator) { ArgChecker.notNull(point, "point"); entries.merge(point.getDate(), point.getValue(), (a, b) -> operator.applyAsDouble(a, b)); return this; } //------------------------------------------------------------------------- /** * Puts all the specified dates and values into this builder. * <p> * The date and value collections must be the same size. * <p> * The date-value pairs are added one by one. * If a date is duplicated it will overwrite an earlier entry. 
* * @param dates the dates to be added * @param values the values to be added * @return this builder */ public LocalDateDoubleTimeSeriesBuilder putAll(Collection<LocalDate> dates, Collection<Double> values) { ArgChecker.noNulls(dates, "dates"); ArgChecker.noNulls(values, "values"); ArgChecker.isTrue(dates.size() == values.size(), "Arrays are of different sizes - dates: {}, values: {}", dates.size(), values.size()); Iterator<LocalDate> itDate = dates.iterator(); Iterator<Double> itValue = values.iterator(); for (int i = 0; i < dates.size(); i++) { put(itDate.next(), itValue.next()); } return this; } /** * Puts all the specified dates and values into this builder. * <p> * The date collection and value array must be the same size. * <p> * The date-value pairs are added one by one. * If a date is duplicated it will overwrite an earlier entry. * * @param dates the dates to be added * @param values the values to be added * @return this builder */ public LocalDateDoubleTimeSeriesBuilder putAll(Collection<LocalDate> dates, double[] values) { ArgChecker.noNulls(dates, "dates"); ArgChecker.notNull(values, "values"); ArgChecker.isTrue(dates.size() == values.length, "Arrays are of different sizes - dates: {}, values: {}", dates.size(), values.length); Iterator<LocalDate> itDate = dates.iterator(); for (int i = 0; i < dates.size(); i++) { put(itDate.next(), values[i]); } return this; } /** * Puts all the specified points into this builder. * <p> * The points are added one by one. * If a date is duplicated it will overwrite an earlier entry. * * @param points the points to be added * @return this builder */ public LocalDateDoubleTimeSeriesBuilder putAll(Stream<LocalDateDoublePoint> points) { ArgChecker.notNull(points, "points"); points.forEach(this::put); return this; } /** * Puts all the specified points into this builder. * <p> * The points are added one by one. * If a date is duplicated it will overwrite an earlier entry. 
* * @param points the points to be added * @return this builder */ public LocalDateDoubleTimeSeriesBuilder putAll(List<LocalDateDoublePoint> points) { ArgChecker.notNull(points, "points"); return putAll(points.stream()); } /** * Puts the contents of the specified builder into this builder. * <p> * The points are added one by one. * If a date is duplicated it will overwrite an earlier entry. * * @param other the other builder * @return this builder */ public LocalDateDoubleTimeSeriesBuilder putAll(LocalDateDoubleTimeSeriesBuilder other) { ArgChecker.notNull(other, "other"); entries.putAll(other.entries); containsWeekends = containsWeekends || other.containsWeekends; return this; } /** * Puts all the entries from the supplied map into this builder. * <p> * If a date is duplicated it will overwrite an earlier entry. * * @param map the map of points to be added * @return this builder */ public LocalDateDoubleTimeSeriesBuilder putAll(Map<LocalDate, Double> map) { ArgChecker.noNulls(map, "map"); map.entrySet().forEach(e -> put(e.getKey(), e.getValue())); return this; } //------------------------------------------------------------------------- /** * Build the time-series from the builder. * * @return a time-series containing the entries from the builder */ public LocalDateDoubleTimeSeries build() { if (entries.isEmpty()) { return LocalDateDoubleTimeSeries.empty(); } // Depending on how dense the data is, judge which type of time series // is the best fit return density() > DENSITY_THRESHOLD ? 
createDenseSeries() : createSparseSeries(); } private LocalDateDoubleTimeSeries createDenseSeries() { return DenseLocalDateDoubleTimeSeries.of( entries.firstKey(), entries.lastKey(), streamEntries(), determineCalculation()); } private SparseLocalDateDoubleTimeSeries createSparseSeries() { return SparseLocalDateDoubleTimeSeries.of(entries.keySet(), entries.values()); } private Stream<LocalDateDoublePoint> streamEntries() { return entries.entrySet() .stream() .map(e -> LocalDateDoublePoint.of(e.getKey(), e.getValue())); } private DenseLocalDateDoubleTimeSeries.DenseTimeSeriesCalculation determineCalculation() { return containsWeekends ? INCLUDE_WEEKENDS : SKIP_WEEKENDS; } private double density() { // We can use the calculators to work out range size double rangeSize = determineCalculation().calculatePosition(entries.firstKey(), entries.lastKey()) + 1; return entries.size() / rangeSize; } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver14; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import org.jboss.netty.buffer.ChannelBuffer; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFGetConfigRequestVer14 implements OFGetConfigRequest { private static final Logger logger = LoggerFactory.getLogger(OFGetConfigRequestVer14.class); // version: 1.4 final static byte WIRE_VERSION = 5; final static int LENGTH = 8; private final static long DEFAULT_XID = 0x0L; // OF message fields private final long xid; // // Immutable default instance final static OFGetConfigRequestVer14 DEFAULT = new OFGetConfigRequestVer14( DEFAULT_XID ); // package private constructor - used by readers, builders, and factory 
OFGetConfigRequestVer14(long xid) { this.xid = xid; } // Accessors for OF message fields @Override public OFVersion getVersion() { return OFVersion.OF_14; } @Override public OFType getType() { return OFType.GET_CONFIG_REQUEST; } @Override public long getXid() { return xid; } public OFGetConfigRequest.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFGetConfigRequest.Builder { final OFGetConfigRequestVer14 parentMessage; // OF message fields private boolean xidSet; private long xid; BuilderWithParent(OFGetConfigRequestVer14 parentMessage) { this.parentMessage = parentMessage; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } @Override public OFType getType() { return OFType.GET_CONFIG_REQUEST; } @Override public long getXid() { return xid; } @Override public OFGetConfigRequest.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } @Override public OFGetConfigRequest build() { long xid = this.xidSet ? this.xid : parentMessage.xid; // return new OFGetConfigRequestVer14( xid ); } } static class Builder implements OFGetConfigRequest.Builder { // OF message fields private boolean xidSet; private long xid; @Override public OFVersion getVersion() { return OFVersion.OF_14; } @Override public OFType getType() { return OFType.GET_CONFIG_REQUEST; } @Override public long getXid() { return xid; } @Override public OFGetConfigRequest.Builder setXid(long xid) { this.xid = xid; this.xidSet = true; return this; } // @Override public OFGetConfigRequest build() { long xid = this.xidSet ? 
this.xid : DEFAULT_XID; return new OFGetConfigRequestVer14( xid ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFGetConfigRequest> { @Override public OFGetConfigRequest readFrom(ChannelBuffer bb) throws OFParseError { int start = bb.readerIndex(); // fixed value property version == 5 byte version = bb.readByte(); if(version != (byte) 0x5) throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version); // fixed value property type == 7 byte type = bb.readByte(); if(type != (byte) 0x7) throw new OFParseError("Wrong type: Expected=OFType.GET_CONFIG_REQUEST(7), got="+type); int length = U16.f(bb.readShort()); if(length != 8) throw new OFParseError("Wrong length: Expected=8(8), got="+length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); long xid = U32.f(bb.readInt()); OFGetConfigRequestVer14 getConfigRequestVer14 = new OFGetConfigRequestVer14( xid ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", getConfigRequestVer14); return getConfigRequestVer14; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFGetConfigRequestVer14Funnel FUNNEL = new OFGetConfigRequestVer14Funnel(); static class OFGetConfigRequestVer14Funnel implements Funnel<OFGetConfigRequestVer14> { private static final long serialVersionUID = 1L; @Override public void funnel(OFGetConfigRequestVer14 message, PrimitiveSink sink) { // fixed value property version = 5 sink.putByte((byte) 0x5); // fixed value property type = 7 sink.putByte((byte) 0x7); // fixed value property length = 8 sink.putShort((short) 0x8); sink.putLong(message.xid); } } public void writeTo(ChannelBuffer bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFGetConfigRequestVer14> { 
@Override public void write(ChannelBuffer bb, OFGetConfigRequestVer14 message) { // fixed value property version = 5 bb.writeByte((byte) 0x5); // fixed value property type = 7 bb.writeByte((byte) 0x7); // fixed value property length = 8 bb.writeShort((short) 0x8); bb.writeInt(U32.t(message.xid)); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFGetConfigRequestVer14("); b.append("xid=").append(xid); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFGetConfigRequestVer14 other = (OFGetConfigRequestVer14) obj; if( xid != other.xid) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * (int) (xid ^ (xid >>> 32)); return result; } }
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package dom.todo;

import dom.todo.ToDoItem.Category;
import dom.todo.ToDoItem.Subcategory;

import java.math.BigDecimal;
import java.util.List;

import com.google.common.base.Predicates;

import org.joda.time.LocalDate;

import org.apache.isis.applib.DomainObjectContainer;
import org.apache.isis.applib.annotation.Action;
import org.apache.isis.applib.annotation.ActionLayout;
import org.apache.isis.applib.annotation.BookmarkPolicy;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.DomainServiceLayout;
import org.apache.isis.applib.annotation.RestrictTo;
import org.apache.isis.applib.annotation.MemberOrder;
import org.apache.isis.applib.annotation.Optionality;
import org.apache.isis.applib.annotation.Parameter;
import org.apache.isis.applib.annotation.ParameterLayout;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.annotation.SemanticsOf;
import org.apache.isis.applib.query.QueryDefault;
import org.apache.isis.applib.services.clock.ClockService;

/**
 * Domain service (menu and repository) for ToDoItem instances.
 * All queries are scoped to the currently logged-in user via currentUserName().
 * NOTE(review): this file is a Maven archetype Velocity template; the
 * placeholder sequences inside annotation strings are substituted at
 * archetype-generation time and must not be altered.
 */
@DomainServiceLayout(named="ToDos", menuOrder = "10")
@DomainService(repositoryFor = ToDoItem.class)
public class ToDoItems {

    //region > notYetComplete (action)
    // Bookmarkable listing of the current user's outstanding items;
    // informs the user rather than returning an empty page.
    @Action(semantics = SemanticsOf.SAFE)
    @ActionLayout(
        cssClassFa = "fa fa-thumbs-down",
        bookmarking = BookmarkPolicy.AS_ROOT
    )
    @MemberOrder(sequence = "10")
    public List<ToDoItem> notYetComplete() {
        final List<ToDoItem> items = notYetCompleteNoUi();
        if(items.isEmpty()) {
            container.informUser("All to-do items have been completed :-)");
        }
        return items;
    }

    // Programmatic variant without any UI feedback; used by the action above
    // and available to fixtures/other services.
    @Programmatic
    public List<ToDoItem> notYetCompleteNoUi() {
        return container.allMatches(
                new QueryDefault<>(ToDoItem.class,
                        "findByOwnedByAndCompleteIsFalse",
                        "ownedBy", currentUserName()));
    }
    //endregion

    //region > complete (action)
    // Listing of the current user's completed items.
    @ActionLayout(
        cssClassFa = "fa fa-thumbs-up"
    )
    @Action(semantics = SemanticsOf.SAFE)
    @MemberOrder(sequence = "20")
    public List<ToDoItem> complete() {
        final List<ToDoItem> items = completeNoUi();
        if(items.isEmpty()) {
            container.informUser("No to-do items have yet been completed :-(");
        }
        return items;
    }

    // Programmatic variant without any UI feedback.
    @Programmatic
    public List<ToDoItem> completeNoUi() {
        return container.allMatches(
            new QueryDefault<>(ToDoItem.class,
                    "findByOwnedByAndCompleteIsTrue",
                    "ownedBy", currentUserName()));
    }
    //endregion

    //region > categorized (action)
    // Filters the current user's items by category/subcategory/completion.
    @SuppressWarnings("unchecked")
    @Action(semantics = SemanticsOf.SAFE)
    @ActionLayout(
        cssClassFa = "fa fa-question",
        bookmarking = BookmarkPolicy.AS_ROOT
    )
    @MemberOrder(sequence = "40")
    public List<ToDoItem> categorized(
            @ParameterLayout(named="Category")
            final Category category,
            @ParameterLayout(named="Subcategory")
            final Subcategory subcategory,
            @ParameterLayout(named="Completed?")
            final boolean completed) {
        // an example "naive" implementation (filtered in Java code, not DBMS)
        return container.allMatches(ToDoItem.class,
                Predicates.and(
                    ToDoItem.Predicates.thoseOwnedBy(currentUserName()),
                    ToDoItem.Predicates.thoseCompleted(completed),
                    ToDoItem.Predicates.thoseCategorised(category, subcategory)));
    }
    // default for the first (category) parameter
    public Category default0Categorized() {
        return Category.Professional;
    }
    // default subcategory derives from the default category
    public Subcategory default1Categorized() {
        return default0Categorized().subcategories().get(0);
    }
    public boolean default2Categorized() {
        return false;
    }
    // subcategory choices depend on the selected category
    public List<Subcategory> choices1Categorized(
            final Category category) {
        return Subcategory.listFor(category);
    }
    // cross-parameter validation: subcategory must belong to category
    public String validateCategorized(
            final Category category,
            final Subcategory subcategory,
            final boolean completed) {
        return Subcategory.validate(category, subcategory);
    }
    //endregion

    //region > newToDo (action)
    // UI action; delegates to the programmatic overload further below,
    // filling in the current user as the owner.
    @ActionLayout(cssClassFa = "fa fa-plus")
    @MemberOrder(sequence = "5")
    public ToDoItem newToDo(
            @Parameter(regexPattern = "${symbol_escape}${symbol_escape}w[@&:${symbol_escape}${symbol_escape}-${symbol_escape}${symbol_escape},${symbol_escape}${symbol_escape}.${symbol_escape}${symbol_escape}+ ${symbol_escape}${symbol_escape}w]*")
            @ParameterLayout(named="Description")
            final String description,
            @ParameterLayout(named="Category")
            final Category category,
            @Parameter(optional = Optionality.TRUE)
            @ParameterLayout(named="Subcategory")
            final Subcategory subcategory,
            @Parameter(optional = Optionality.TRUE)
            @ParameterLayout(named="Due by")
            final LocalDate dueBy,
            @Parameter(optional = Optionality.TRUE)
            @ParameterLayout(named="Cost")
            final BigDecimal cost) {
        return newToDo(description, category, subcategory, currentUserName(), dueBy, cost);
    }
    public Category default1NewToDo() {
        return Category.Professional;
    }
    // mirrors default1NewToDo: first subcategory of the default category
    public Subcategory default2NewToDo() {
        return Category.Professional.subcategories().get(0);
    }
    // default due date: two weeks from the injected clock's "now"
    public LocalDate default3NewToDo() {
        return clockService.now().plusDays(14);
    }
    public List<Subcategory> choices2NewToDo(
            final String description, final Category category) {
        return Subcategory.listFor(category);
    }
    public String validateNewToDo(
            final String description,
            final Category category, final Subcategory subcategory,
            final LocalDate dueBy, final BigDecimal cost) {
        return Subcategory.validate(category, subcategory);
    }
    //endregion

    //region > allToDos (action)
    // Prototype-only listing of every item owned by the current user.
    @ActionLayout(
        cssClassFa = "fa fa-globe"
    )
    @Action(semantics = SemanticsOf.SAFE, restrictTo = RestrictTo.PROTOTYPING)
    @MemberOrder(sequence = "50")
    public List<ToDoItem> allToDos() {
        final List<ToDoItem> items = container.allMatches(
                new QueryDefault<>(ToDoItem.class,
                        "findByOwnedBy",
                        "ownedBy", currentUserName()));
        if(items.isEmpty()) {
            container.warnUser("No to-do items found.");
        }
        return items;
    }
    //endregion

    //region > autoComplete (programmatic)
    // Substring search over the current user's item descriptions,
    // used by the framework for parameter auto-completion.
    @Programmatic // not part of metamodel
    public List<ToDoItem> autoComplete(final String description) {
        return container.allMatches(
                new QueryDefault<>(ToDoItem.class,
                        "findByOwnedByAndDescriptionContains",
                        "ownedBy", currentUserName(),
                        "description", description));
    }
    //endregion

    //region > helpers
    // Factory method: creates, populates, persists and flushes a new item.
    @Programmatic // for use by fixtures
    public ToDoItem newToDo(
            final String description,
            final Category category,
            final Subcategory subcategory,
            final String userName,
            final LocalDate dueBy,
            final BigDecimal cost) {
        final ToDoItem toDoItem = container.newTransientInstance(ToDoItem.class);
        toDoItem.setDescription(description);
        toDoItem.setCategory(category);
        toDoItem.setSubcategory(subcategory);
        toDoItem.setOwnedBy(userName);
        toDoItem.setDueBy(dueBy);
        toDoItem.setCost(cost);

        container.persist(toDoItem);
        // flush immediately so the new item is queryable by callers
        container.flush();

        return toDoItem;
    }

    private String currentUserName() {
        return container.getUser().getName();
    }
    //endregion

    //region > common validation
    // 7 days expressed in milliseconds; used by isMoreThanOneWeekInPast
    private static final long ONE_WEEK_IN_MILLIS = 7 * 24 * 60 * 60 * 1000L;

    // Shared validation rule for due-by dates, reused by ToDoItem.
    @Programmatic
    public String validateDueBy(final LocalDate dueBy) {
        return isMoreThanOneWeekInPast(dueBy) ? "Due by date cannot be more than one week old" : null;
    }
    // Compares the start-of-day of dueBy against clock "now" minus one week.
    @Programmatic
    boolean isMoreThanOneWeekInPast(final LocalDate dueBy) {
        return dueBy.toDateTimeAtStartOfDay().getMillis() < clockService.nowAsMillis() - ONE_WEEK_IN_MILLIS;
    }
    //endregion

    //region > injected services
    @javax.inject.Inject
    private DomainObjectContainer container;

    @javax.inject.Inject
    private ClockService clockService;
    //endregion
}
/*
 * Copyright (c) 2014. Real Time Genomics Limited.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * 1. Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the
 * distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.rtg.reader;

import java.io.File;
import java.io.IOException;

import com.rtg.mode.SequenceType;

/**
 * Made for a special purpose to behave like two sequence readers are interleaved:
 * even global indices map to the first reader, odd global indices map to the second,
 * and global index {@code i} corresponds to local index {@code i / 2} in the
 * selected reader. If you want more functionality you'll have to implement it.
 */
public class AlternatingSequencesReader implements SequencesReader {

  /** Reader supplying sequences at even global indices. */
  private final SequencesReader mFirst;
  /** Reader supplying sequences at odd global indices. */
  private final SequencesReader mSecond;

  /**
   * Constructs a sequence reader which alternates between two given sequence readers.
   * @param first the first sequence reader
   * @param second the second sequence reader
   */
  public AlternatingSequencesReader(final SequencesReader first, final SequencesReader second) {
    mFirst = first;
    mSecond = second;
  }

  /**
   * Picks the underlying reader for a global sequence index (even -> first, odd -> second).
   * Callers must pass {@code sequenceIndex / 2} as the local index to the returned reader.
   */
  private SequencesReader select(long sequenceIndex) {
    return (sequenceIndex & 1L) == 0 ? mFirst : mSecond;
  }

  @Override
  public long dataChecksum() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long qualityChecksum() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long nameChecksum() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public void close() throws IOException {
    // try/finally guarantees the second reader is closed even if the first close throws.
    try {
      mFirst.close();
    } finally {
      mSecond.close();
    }
  }

  @Override
  public SequencesReader copy() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public File path() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public PrereadArm getArm() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public PrereadType getPrereadType() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public double globalQualityAverage() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public SdfId getSdfId() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean hasHistogram() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean hasQualityData() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean hasNames() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public SequencesReaderReferenceSource referenceSource() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long[] histogram() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long lengthBetween(long start, long end) {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long longestNBlock() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long maxLength() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long minLength() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long nBlockCount() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public Names names() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long numberSequences() {
    return mFirst.numberSequences() + mSecond.numberSequences();
  }

  @Override
  public long[] posHistogram() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public double[] positionQualityAverage() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public int length(long sequenceIndex) throws IOException {
    return select(sequenceIndex).length(sequenceIndex / 2);
  }

  @Override
  public byte sequenceDataChecksum(long sequenceIndex) throws IOException {
    return select(sequenceIndex).sequenceDataChecksum(sequenceIndex / 2);
  }

  @Override
  public String name(long sequenceIndex) throws IOException {
    return select(sequenceIndex).name(sequenceIndex / 2);
  }

  @Override
  public String fullName(long sequenceIndex) throws IOException {
    return name(sequenceIndex);
  }

  @Override
  public byte[] read(long sequenceIndex) throws IOException {
    final byte[] result = new byte[length(sequenceIndex)];
    read(sequenceIndex, result);
    return result;
  }

  @Override
  public int read(long sequenceIndex, byte[] dataOut) throws IOException {
    return read(sequenceIndex, dataOut, 0, length(sequenceIndex));
  }

  @Override
  public int read(long sequenceIndex, byte[] dataOut, int start, int length) throws IOException {
    // BUG FIX: delegate with the local (halved) index, consistent with length()/name()/
    // sequenceDataChecksum(). Each underlying reader holds only half of the interleaved
    // sequences, so passing the raw global index read the wrong sequence (or an
    // out-of-range one) for all but the first pair.
    return select(sequenceIndex).read(sequenceIndex / 2, dataOut, start, length);
  }

  @Override
  public byte[] readQuality(long sequenceIndex) throws IOException {
    final byte[] result = new byte[length(sequenceIndex)];
    readQuality(sequenceIndex, result);
    return result;
  }

  @Override
  public int readQuality(long sequenceIndex, byte[] dest) throws IOException {
    return readQuality(sequenceIndex, dest, 0, length(sequenceIndex));
  }

  @Override
  public int readQuality(long sequenceIndex, byte[] dest, int start, int length) throws IOException {
    // BUG FIX: same global->local index translation as read() above.
    return select(sequenceIndex).readQuality(sequenceIndex / 2, dest, start, length);
  }

  @Override
  public long[] residueCounts() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long sdfVersion() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public int[] sequenceLengths(long start, long end) {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long totalLength() {
    return mFirst.totalLength() + mSecond.totalLength();
  }

  @Override
  public SequenceType type() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean compressed() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public String nameSuffix(long sequenceIndex) {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long suffixChecksum() {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public String getReadMe() {
    return null;
  }

  @Override
  public SequencesIterator iterator() {
    return new DefaultSequencesIterator(this);
  }

  @Override
  public IndexFile index() {
    return null;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.logging;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

import org.apache.logging.log4j.Logger;

import org.apache.geode.SystemFailure;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.Assert;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.internal.logging.log4j.LocalizedMessage;
import org.apache.geode.i18n.StringId;

/**
 * A <code>ThreadGroup</code> that logs all {@linkplain #uncaughtException uncaught exceptions} to a
 * GemFire <code>LogWriterI18n</code>. It also keeps track of the uncaught exceptions that were
 * thrown by its threads. This comes in handy when a thread fails to initialize properly (see bug
 * 32550).
 *
 * @see LoggingThreadGroup#createThreadGroup
 *
 * @since GemFire 4.0
 */
public class LoggingThreadGroup extends ThreadGroup {

  /** A "local" log writer that logs exceptions to standard error */
  private static final StandardErrorPrinter stderr =
      new StandardErrorPrinter(InternalLogWriter.ALL_LEVEL);

  /**
   * A set of all created LoggingThreadGroups. Acts as an interning cache: groups are
   * looked up by name so repeated create calls with the same name reuse one group.
   * All access must be synchronized on this collection.
   */
  private static final Collection<LoggingThreadGroup> loggingThreadGroups =
      new ArrayList<LoggingThreadGroup>();

  /**
   * Returns a <code>ThreadGroup</code> whose {@link ThreadGroup#uncaughtException} method logs to
   * both {@link System#err} and the given <code>InternalLogWriter</code>.
   *
   * @param name The name of the <code>ThreadGroup</code>
   */
  public static LoggingThreadGroup createThreadGroup(final String name) {
    // Delegate with a null logger; the cast disambiguates the overload.
    return createThreadGroup(name, (Logger) null);
  }

  /**
   * Returns a <code>ThreadGroup</code> whose {@link ThreadGroup#uncaughtException} method logs to
   * both {@link System#err} and the given <code>InternalLogWriter</code>.
   *
   * <p>An existing non-destroyed group with the same name is reused (its log writer is swapped to
   * the one supplied); destroyed groups encountered during the scan are pruned from the cache.
   *
   * @param name The name of the <code>ThreadGroup</code>
   * @param logWriter A <code>InternalLogWriter</code> to log uncaught exceptions to. It is okay for
   *        this argument to be <code>null</code>.
   *
   *        author David Whitlock
   * @since GemFire 3.0
   */
  public static LoggingThreadGroup createThreadGroup(final String name,
      final InternalLogWriter logWriter) {
    // Cache the LoggingThreadGroups so that we don't create a
    // gazillion of them.
    LoggingThreadGroup group = null;
    synchronized (loggingThreadGroups) {
      for (Iterator<LoggingThreadGroup> iter = loggingThreadGroups.iterator(); iter.hasNext();) {

        LoggingThreadGroup group2 = (LoggingThreadGroup) iter.next();
        if (group2.isDestroyed()) {
          // Clean this destroyed group out of the cache
          iter.remove();
          continue;
        }
        if (name.equals(group2.getName())) {
          // We already have one!
          // Change the underlying logger to point to new one (creating new
          // thread groups for different loggers leaks groups for repeated
          // connect/disconnects as in dunits for example)
          if (logWriter != group2.logWriter) {
            group2.logWriter = logWriter;
          }
          group = group2;
          break;
        }
      }
      if (group == null) {
        group = new LoggingThreadGroup(name, logWriter);
        // force autoclean to false and not inherit from parent group
        group.setDaemon(false);
        loggingThreadGroups.add(group);
      }
    }

    Assert.assertTrue(!group.isDestroyed());
    return group;
  }

  /**
   * Returns a <code>ThreadGroup</code> whose {@link ThreadGroup#uncaughtException} method logs to
   * both {@link System#err} and the given <code>Logger</code>.
   *
   * <p>Same caching/reuse behavior as the <code>InternalLogWriter</code> overload above, but keyed
   * on the log4j <code>Logger</code> field instead.
   *
   * @param name The name of the <code>ThreadGroup</code>
   * @param logger A <code>Logger</code> to log uncaught exceptions to. It is okay for
   *        this argument to be <code>null</code>.
   *
   *        author David Whitlock
   * @since GemFire 3.0
   */
  public static LoggingThreadGroup createThreadGroup(final String name, final Logger logger) {
    // Cache the LoggingThreadGroups so that we don't create a
    // gazillion of them.
    LoggingThreadGroup group = null;
    synchronized (loggingThreadGroups) {
      for (Iterator<LoggingThreadGroup> iter = loggingThreadGroups.iterator(); iter.hasNext();) {

        LoggingThreadGroup group2 = (LoggingThreadGroup) iter.next();
        if (group2.isDestroyed()) {
          // Clean this destroyed group out of the cache
          iter.remove();
          continue;
        }
        if (name.equals(group2.getName())) {
          // We already have one!
          // Change the underlying logger to point to new one (creating new
          // thread groups for different loggers leaks groups for repeated
          // connect/disconnects as in dunits for example)
          if (logger != group2.logger) {
            group2.logger = logger;
          }
          group = group2;
          break;
        }
      }
      if (group == null) {
        group = new LoggingThreadGroup(name, logger);
        // force autoclean to false and not inherit from parent group
        group.setDaemon(false);
        loggingThreadGroups.add(group);
      }
    }

    Assert.assertTrue(!group.isDestroyed());
    return group;
  }

  // /**
  // * @deprecated Only for use by hydra for backwards compatability reasons.
  // * Returns a <code>ThreadGroup</code> whose {@link
  // * ThreadGroup#uncaughtException} method logs to both {#link
  // * System#err} and the given <code>LogWriterI18n</code>.
  // *
  // * @param name
  // * The name of the <code>ThreadGroup</code>
  // * @param logger
  // * A <code>LogWriter</code> to log uncaught exceptions to. It
  // * is okay for this argument to be <code>null</code>.
  // *
  // * author kbanks
  // * @since GemFire 6.0
  // */
  // @Deprecated public static LoggingThreadGroup createThreadGroup(final String name,
  // final LogWriter logger) {
  // return createThreadGroup(name,
  // logger != null ? logger.convertToLogWriterI18n() : null);
  // }

  /**
   * Releases the logger/logWriter references of every cached group except the shutdown-hook
   * groups, which are skipped by name (see {@link #cleanup}).
   */
  public static void cleanUpThreadGroups() {
    synchronized (loggingThreadGroups) {
      LoggingThreadGroup group;
      Iterator<?> itr = loggingThreadGroups.iterator();
      while (itr.hasNext()) {
        group = (LoggingThreadGroup) itr.next();
        // Shutdown-hook groups must keep their loggers so hooks can still report.
        if (!group.getName().equals(InternalDistributedSystem.SHUTDOWN_HOOK_NAME)
            && !group.getName().equals("GemFireConnectionFactory Shutdown Hook")) {
          group.cleanup();
        }
      }
    }
  }

  /**
   * Note: Must be used for test purposes ONLY.
   *
   * @param threadGroupName name of the group to look up in the cache
   * @return thread group with given name, or <code>null</code> if none is cached.
   */
  public static ThreadGroup getThreadGroup(final String threadGroupName) {
    synchronized (loggingThreadGroups) {
      for (Object object : loggingThreadGroups) {
        LoggingThreadGroup threadGroup = (LoggingThreadGroup) object;
        if (threadGroup.getName().equals(threadGroupName)) {
          return threadGroup;
        }
      }
      return null;
    }
  }

  /**
   * A log writer that the user has specified for logging uncaught exceptions.
   */
  protected volatile InternalLogWriter logWriter;

  /**
   * A logger that the user has specified for logging uncaught exceptions.
   */
  protected volatile Logger logger;

  /**
   * The count of uncaught exceptions that were thrown by threads in this thread group.
   * Guarded by {@link #dispatchLock}.
   */
  private long uncaughtExceptionsCount;

  /**
   * Creates a new <code>LoggingThreadGroup</code> that logs uncaught exceptions to the given log
   * writer.
   *
   * @param name The name of the thread group
   * @param logWriter A logWriter to which uncaught exceptions are logged. May be <code>null</code>.
   */
  LoggingThreadGroup(final String name, final InternalLogWriter logWriter) {
    super(name);
    this.logWriter = logWriter;
  }

  /**
   * Creates a new <code>LoggingThreadGroup</code> that logs uncaught exceptions to the given
   * logger.
   *
   * @param name The name of the thread group
   * @param logger A logger to which uncaught exceptions are logged. May be <code>null</code>.
   */
  LoggingThreadGroup(final String name, final Logger logger) {
    super(name);
    this.logger = logger;
  }

  // Serializes uncaughtException dispatch and guards uncaughtExceptionsCount.
  private Object dispatchLock = new Object();

  /**
   * Logs an uncaught exception to a log writer. VirtualMachineErrors are first recorded via
   * {@link SystemFailure#setFailure}; a NoClassDefFoundError from the shutdown-hook thread is
   * downgraded to info level (expected during app-server shutdown), everything else is logged
   * severe/fatal. Every call increments the uncaught-exception counter.
   */
  @Override
  public void uncaughtException(final Thread t, final Throwable ex) {
    synchronized (this.dispatchLock) {
      if (ex instanceof VirtualMachineError) {
        SystemFailure.setFailure((VirtualMachineError) ex); // don't throw
      }
      // Solution to treat the shutdown hook error as a special case.
      // Do not change the hook's thread name without also changing it here.
      String threadName = t.getName();
      if ((ex instanceof NoClassDefFoundError)
          && (threadName.equals(InternalDistributedSystem.SHUTDOWN_HOOK_NAME))) {
        final StringId msg =
            LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0_THIS_MESSAGE_CAN_BE_DISREGARDED_IF_IT_OCCURRED_DURING_AN_APPLICATION_SERVER_SHUTDOWN_THE_EXCEPTION_MESSAGE_WAS_1;
        final Object[] msgArgs = new Object[] {t, ex.getLocalizedMessage()};
        stderr.info(msg, msgArgs);
        if (this.logger != null) {
          this.logger.info(LocalizedMessage.create(msg, msgArgs));
        }
        if (this.logWriter != null) {
          this.logWriter.info(msg, msgArgs);
        }
      } else {
        stderr.severe(LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0, t, ex);
        if (this.logger != null) {
          this.logger.fatal(
              LocalizedMessage.create(LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0, t), ex);
        }
        if (this.logWriter != null) {
          this.logWriter.severe(LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0, t, ex);
        }
      }
      // if (!(ex instanceof RuntimeException) && (ex instanceof Exception)) {
      // something's fishy - checked exceptions shouldn't get here
      // this.logger.severe("stack trace showing origin of uncaught checked exception", new
      // Exception("stack trace"));
      // }
      this.uncaughtExceptionsCount++;
    }
  }

  /**
   * clear number of uncaught exceptions
   */
  public void clearUncaughtExceptionsCount() {
    synchronized (this.dispatchLock) {
      this.uncaughtExceptionsCount = 0;
    }
  }

  /**
   * Returns the number of uncaught exceptions that occurred in threads in this thread group.
   */
  public long getUncaughtExceptionsCount() {
    synchronized (this.dispatchLock) {
      return uncaughtExceptionsCount;
    }
  }

  /**
   * clean up the threadgroup, releasing resources that could be problematic (bug 35388)
   *
   * @since GemFire 4.2.3
   */
  public synchronized void cleanup() {
    // the logwriter holds onto a distribution config, which holds onto
    // the InternalDistributedSystem, which holds onto the
    // DistributionManager, which holds onto ... you get the idea
    this.logger = null;
    this.logWriter = null;
  }
}
/*
 *
 * Derby - Class ShutDownDBWhenNSShutsDownTest
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.derbyTesting.functionTests.tests.derbynet;

import java.io.File;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derby.drda.NetworkServerControl;
import org.apache.derbyTesting.functionTests.util.PrivilegedFileOpsForTests;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.NetworkServerTestSetup;
import org.apache.derbyTesting.junit.TestConfiguration;

/**
 * Derby-1274 - Network Server should shutdown the databases it has booted when
 * started from the command line.
 *
 * Tests that the network server will shutdown the databases it has booted when
 * started from the command line and that it will not shut down the databases
 * when started from the API.
 */
public class ShutDownDBWhenNSShutsDownTest extends BaseJDBCTestCase {
    /**
     * Only run the fixtures in network server mode as that's what they are testing.
     */
    public static Test suite() {
        TestSuite suite = new TestSuite("ShutDownDBWhenNSShutsDownTest");

        suite.addTest(TestConfiguration.clientServerDecorator(
                new ShutDownDBWhenNSShutsDownTest(
                "testEngineShutdownDoesNotTakeDownNSManualReload")));
        // The auto-reload variant is disabled pending DERBY-2066.
        /* DERBY-2066
        suite.addTest(TestConfiguration.clientServerDecorator(
                new ShutDownDBWhenNSShutsDownTest(
                "testEngineShutdownDoesNotTakeDownNSAutoReload")));
        */
        suite.addTest(TestConfiguration.clientServerDecorator(
                new ShutDownDBWhenNSShutsDownTest(
                "testDatabasesShutDownWhenNSShutdownAPI")));

        return suite;
    }

    /**
     * Creates a new instance of ShutDownDBWhenNSShutsDownTest
     *
     * @param name fixture (test method) name to run
     */
    public ShutDownDBWhenNSShutsDownTest(String name) {
        super(name);
    }

    /**
     * Test the scenario from scenarioEngineShutdownDoesNotTakeDownNS
     * reloading the embedded driver after the network server has shutdown.
     * @throws Exception
     */
    public void testEngineShutdownDoesNotTakeDownNSManualReload()
            throws Exception {
        scenarioEngineShutdownDoesNotTakeDownNS(true);
    }

    /**
     * Test the scenario from scenarioEngineShutdownDoesNotTakeDownNS
     * relying on the network server to reloading the embedded driver
     * after the network server has shutdown.
     * @throws Exception
     */
    public void testEngineShutdownDoesNotTakeDownNSAutoReload()
            throws Exception {
        scenarioEngineShutdownDoesNotTakeDownNS(false);
    }

    /**
     * Test that a shutdown of the engine does not take down the network
     * server. Before DERBY-1326 was fixed, shutting down the engine would
     * leave the network server in an inconsistent state which could make
     * clients hang infinitely.
     *
     * @param loadEmbeddedDriver when true, the test reloads the embedded
     *        driver itself after shutting down the engine; when false it
     *        relies on the server to do so.
     */
    private void scenarioEngineShutdownDoesNotTakeDownNS(
            boolean loadEmbeddedDriver) throws Exception {
        Connection[] conns = new Connection[20];

        // first make sure there are 20 active worker threads on the server
        for (int i = 0; i < conns.length; i++) {
            conns[i] = openDefaultConnection();
        }

        // then close them, leaving 20 free worker threads ready to pick up new
        // sessions
        for (int i = 0; i < conns.length; i++) {
            conns[i].close();
            conns[i] = null;
        }

        // Give the free threads a little time to close their sessions. This is
        // done to ensure that there are free threads waiting for new sessions,
        // which makes the DERBY-1326 hang more reliably reproducible.
        Thread.sleep(500);

        // shut down the engine
        TestConfiguration.getCurrent().shutdownEngine();

        if (loadEmbeddedDriver)
            Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance();

        // see if it is still possible to connect to the server (before
        // DERBY-1326, this would hang)
        for (int i = 0; i < 20; i++) {
            openDefaultConnection().close();
        }
    }

    /**
     * Test that the NetworkServer does not shut down the
     * databases it has booted when started from the API.
     * This fixture must be run with a clientServerDecorator().
     */
    public void testDatabasesShutDownWhenNSShutdownAPI()
            throws Exception {
        // Check that the databases will not be shutdown when the server is
        // shut down.
        shutdownServerCheckDBShutDown(false);
    }

    /**
     * Test that the NetworkServer shuts down the databases it has booted when
     * started from the command line, and that it does not shut down the
     * databases it has booted when started from the API.
     *
     * NOTE(review): the XX prefix keeps JUnit from picking this fixture up;
     * presumably disabled on purpose — confirm before re-enabling.
     */
    public void XXtestDatabasesShutDownWhenNSShutdown()
            throws Exception {
        // The server was started from the command line when the test was
        // started. Check that the database will be shut down when the server
        // is shut down.
        shutdownServerCheckDBShutDown(true);
    }

    /**
     * Checks whether the server shuts down causes the databases it has booted
     * to be shut down.
     *
     * Creates a database and shuts down the server. If the server was started
     * from the command line the database should be shut down. If the server
     * was started from the api the database should not be shut down.
     *
     * If the database has been shut down the db.lck file should not exist.
     *
     * @param dbShutDown Indicates whether the database should have been shut
     * down.
     */
    private void shutdownServerCheckDBShutDown(boolean dbShutDown)
            throws Exception {
        // connect to database
        createDatabase();

        NetworkServerControl server =
                NetworkServerTestSetup.getNetworkServerControl();

        // shut down the server
        server.shutdown();

        // check if db.lck exists
        String fileName = getSystemProperty("derby.system.home") +
                java.io.File.separator +
                TestConfiguration.getCurrent().getDefaultDatabaseName() +
                java.io.File.separator + "db.lck";

        File lockFile = new File(fileName);
        boolean fileNotFound = false;
        int i = 0;
        // Poll (up to 60s: 120 * 500ms) for the lock file to reach the
        // expected state; server shutdown is asynchronous.
        do {
            Thread.sleep(500);
            fileNotFound = !PrivilegedFileOpsForTests.exists(lockFile);
            i ++;
        } while (fileNotFound != dbShutDown && i < 120);

        assertEquals("Database is shut down",
                dbShutDown, fileNotFound);
    }

    // Creates (by connecting to) the default database and exercises it with a
    // create/insert/drop cycle inside one committed transaction.
    private void createDatabase() throws SQLException {
        Connection conn = getConnection();
        conn.setAutoCommit(false);
        Statement st = conn.createStatement();
        st.execute("CREATE TABLE T1 (a int)");
        st.execute("INSERT INTO T1 VALUES (1), (2), (3), (4), (5)");
        st.execute("DROP TABLE T1");
        conn.commit();
        conn.close();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.service.extensions.xa.recovery;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.spi.core.remoting.ClientProtocolManagerFactory;

/**
 * This represents the configuration of a single connection factory.
 *
 * A wrapper around info needed for the xa recovery resource. A config is
 * backed either by a discovery-group configuration or by a static list of
 * transport configurations — never both (the unused one is {@code null}).
 */
public class XARecoveryConfig {

   public static final String JNDI_NAME_PROPERTY_KEY = "JNDI_NAME";

   private final boolean ha;
   private final TransportConfiguration[] transportConfiguration;
   private final DiscoveryGroupConfiguration discoveryConfiguration;
   private final String username;
   private final String password;
   private final Map<String, String> properties;
   private final ClientProtocolManagerFactory clientProtocolManager;

   /**
    * Builds a config from a connection factory, preferring the factory's
    * discovery-group configuration when one is set and falling back to its
    * static transport configurations otherwise.
    */
   public static XARecoveryConfig newConfig(ActiveMQConnectionFactory factory,
                                            String userName,
                                            String password,
                                            Map<String, String> properties) {
      final ServerLocator locator = factory.getServerLocator();
      if (locator.getDiscoveryGroupConfiguration() == null) {
         return new XARecoveryConfig(locator.isHA(), locator.getStaticTransportConfigurations(),
                                     userName, password, properties, locator.getProtocolManagerFactory());
      }
      return new XARecoveryConfig(locator.isHA(), locator.getDiscoveryGroupConfiguration(),
                                  userName, password, properties, locator.getProtocolManagerFactory());
   }

   /**
    * Static-transport variant. Each transport configuration is re-created with
    * an empty name and, when a protocol manager factory is supplied, adapted by it.
    */
   public XARecoveryConfig(final boolean ha,
                           final TransportConfiguration[] transportConfiguration,
                           final String username,
                           final String password,
                           final Map<String, String> properties,
                           final ClientProtocolManagerFactory clientProtocolManager) {
      final TransportConfiguration[] adapted =
         new TransportConfiguration[transportConfiguration.length];
      for (int idx = 0; idx < adapted.length; idx++) {
         final TransportConfiguration renamed = transportConfiguration[idx].newTransportConfig("");
         adapted[idx] = clientProtocolManager == null
            ? renamed
            : clientProtocolManager.adaptTransportConfiguration(renamed);
      }
      this.transportConfiguration = adapted;
      this.discoveryConfiguration = null;
      this.username = username;
      this.password = password;
      this.ha = ha;
      // Never expose a mutable (or null) properties map.
      this.properties = Collections.unmodifiableMap(
         properties == null ? new HashMap<String, String>() : properties);
      this.clientProtocolManager = clientProtocolManager;
   }

   public XARecoveryConfig(final boolean ha,
                           final TransportConfiguration[] transportConfiguration,
                           final String username,
                           final String password,
                           final Map<String, String> properties) {
      this(ha, transportConfiguration, username, password, properties, null);
   }

   /**
    * Discovery-group variant; the static transport list is left {@code null}.
    */
   public XARecoveryConfig(final boolean ha,
                           final DiscoveryGroupConfiguration discoveryConfiguration,
                           final String username,
                           final String password,
                           final Map<String, String> properties,
                           final ClientProtocolManagerFactory clientProtocolManager) {
      this.discoveryConfiguration = discoveryConfiguration;
      this.transportConfiguration = null;
      this.username = username;
      this.password = password;
      this.ha = ha;
      this.clientProtocolManager = clientProtocolManager;
      // Never expose a mutable (or null) properties map.
      this.properties = Collections.unmodifiableMap(
         properties == null ? new HashMap<String, String>() : properties);
   }

   public XARecoveryConfig(final boolean ha,
                           final DiscoveryGroupConfiguration discoveryConfiguration,
                           final String username,
                           final String password,
                           final Map<String, String> properties) {
      this(ha, discoveryConfiguration, username, password, properties, null);
   }

   public boolean isHA() {
      return ha;
   }

   public DiscoveryGroupConfiguration getDiscoveryConfiguration() {
      return discoveryConfiguration;
   }

   public TransportConfiguration[] getTransportConfig() {
      return transportConfiguration;
   }

   public String getUsername() {
      return username;
   }

   public String getPassword() {
      return password;
   }

   public Map<String, String> getProperties() {
      return properties;
   }

   public ClientProtocolManagerFactory getClientProtocolManager() {
      return clientProtocolManager;
   }

   /**
    * Create a serverLocator using the configuration
    *
    * @return locator
    */
   public ServerLocator createServerLocator() {
      final ServerLocator locator = getDiscoveryConfiguration() != null
         ? ActiveMQClient.createServerLocator(isHA(), getDiscoveryConfiguration())
         : ActiveMQClient.createServerLocator(isHA(), getTransportConfig());
      return locator.setProtocolManagerFactory(clientProtocolManager);
   }

   @Override
   public int hashCode() {
      // Same scheme as before: seed 31 folded with the discovery config,
      // then with the transport array.
      int result = 31 + (discoveryConfiguration == null ? 0 : discoveryConfiguration.hashCode());
      result = 31 * result + Arrays.hashCode(transportConfiguration);
      return result;
   }

   /*
    * We don't use username and password on purpose.
    * Just having the connector is enough, as we don't want to duplicate resources just because of usernames
    */
   @Override
   public boolean equals(Object obj) {
      if (this == obj) {
         return true;
      }
      if (obj == null || getClass() != obj.getClass()) {
         return false;
      }
      final XARecoveryConfig that = (XARecoveryConfig) obj;
      final boolean discoveryDiffers = discoveryConfiguration == null
         ? that.discoveryConfiguration != null
         : !discoveryConfiguration.equals(that.discoveryConfiguration);
      if (discoveryDiffers) {
         return false;
      }
      return Arrays.equals(transportConfiguration, that.transportConfiguration);
   }

   /**
    * Human-readable dump of the config; the password is masked.
    */
   @Override
   public String toString() {
      final StringBuilder text = new StringBuilder("XARecoveryConfig [transportConfiguration=")
         .append(Arrays.toString(transportConfiguration))
         .append(", discoveryConfiguration=").append(discoveryConfiguration)
         .append(", username=").append(username)
         .append(", password=****");
      for (Map.Entry<String, String> entry : properties.entrySet()) {
         text.append(", ").append(entry.getKey()).append("=").append(entry.getValue());
      }
      return text.append("]").toString();
   }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.exec.mr;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.ReportStats;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * ExecReducer is the generic Reducer class for Hive. Together with ExecMapper it is
 * the bridge between the map-reduce framework and the Hive operator pipeline at
 * execution time. It's main responsibilities are:
 *
 * - Load and setup the operator pipeline from XML
 * - Run the pipeline by transforming key, value pairs to records and forwarding them to the operators
 * - Sending start and end group messages to separate records with same key from one another
 * - Catch and handle errors during execution of the operators.
 *
 */
public class ExecReducer extends MapReduceBase implements Reducer {

  private static final Logger LOG = LoggerFactory.getLogger("ExecReducer");
  // NOTE(review): PLAN_KEY appears unused within this class — possibly kept for
  // external/historical reasons; confirm before removing.
  private static final String PLAN_KEY = "__REDUCE_PLAN__";

  // Input value serde needs to be an array to support different SerDe
  // for different tags
  private final Deserializer[] inputValueDeserializer = new Deserializer[Byte.MAX_VALUE];
  private final Object[] valueObject = new Object[Byte.MAX_VALUE];
  // Reusable (key, value) row forwarded to the operator tree; cleared per value.
  private final List<Object> row = new ArrayList<Object>(Utilities.reduceFieldNameList.size());

  private AbstractSerDe inputKeySerDe;
  private JobConf jc;
  private OutputCollector<?, ?> oc;
  // Root of the reduce-side operator pipeline.
  private Operator<?> reducer;
  private Reporter rp;
  // Set once any operator failure occurs; checked in close() to avoid double-reporting.
  private boolean abort = false;
  // When true, the last byte of each key is a tag selecting the value SerDe.
  private boolean isTagged = false;
  private TableDesc keyTableDesc;
  private TableDesc[] valueTableDesc;
  // One struct inspector per tag: [key inspector, value inspector for that tag].
  private ObjectInspector[] rowObjectInspector;

  // runtime objects
  private transient Object keyObject;
  // Copy of the current group's key bytes; null until the first key is seen.
  private transient BytesWritable groupKey;

  /**
   * Loads the reduce plan from the job conf, initializes the key SerDe plus one
   * value SerDe per tag, builds the per-tag row inspectors, and initializes the
   * operator tree. Any failure here aborts the task.
   */
  @Override
  public void configure(JobConf job) {
    rowObjectInspector = new ObjectInspector[Byte.MAX_VALUE];
    ObjectInspector[] valueObjectInspector = new ObjectInspector[Byte.MAX_VALUE];
    ObjectInspector keyObjectInspector;

    Utilities.tryLoggingClassPaths(job, LOG);

    jc = job;

    ReduceWork gWork = Utilities.getReduceWork(job);

    reducer = gWork.getReducer();
    reducer.setParentOperators(null); // clear out any parents as reducer is the
    // root
    isTagged = gWork.getNeedsTagging();
    try {
      keyTableDesc = gWork.getKeyDesc();
      inputKeySerDe = ReflectionUtils.newInstance(keyTableDesc
          .getSerDeClass(), null);
      inputKeySerDe.initialize(null, keyTableDesc.getProperties(), null);
      keyObjectInspector = inputKeySerDe.getObjectInspector();
      valueTableDesc = new TableDesc[gWork.getTagToValueDesc().size()];
      for (int tag = 0; tag < gWork.getTagToValueDesc().size(); tag++) {
        // We should initialize the SerDe with the TypeInfo when available.
        valueTableDesc[tag] = gWork.getTagToValueDesc().get(tag);
        AbstractSerDe valueObjectSerDe = ReflectionUtils.newInstance(valueTableDesc[tag].getSerDeClass(), null);
        valueObjectSerDe.initialize(null, valueTableDesc[tag].getProperties(), null);
        inputValueDeserializer[tag] = valueObjectSerDe;
        valueObjectInspector[tag] = inputValueDeserializer[tag].getObjectInspector();
        ArrayList<ObjectInspector> ois = new ArrayList<ObjectInspector>();
        ois.add(keyObjectInspector);
        ois.add(valueObjectInspector[tag]);
        // Row shape per tag: a struct of (key, value) named by reduceFieldNameList.
        rowObjectInspector[tag] = ObjectInspectorFactory
            .getStandardStructObjectInspector(Utilities.reduceFieldNameList, ois);
      }
    } catch (Exception e) {
      throw new RuntimeException(e);
    }

    MapredContext.init(false, new JobConf(jc));

    // initialize reduce operator tree
    try {
      LOG.info(reducer.dump(0));
      reducer.initialize(jc, rowObjectInspector);
    } catch (Throwable e) {
      abort = true;
      if (e instanceof OutOfMemoryError) {
        // Don't create a new object if we are already out of memory
        throw (OutOfMemoryError) e;
      } else {
        throw new RuntimeException("Reduce operator initialization failed", e);
      }
    }
  }

  /**
   * Processes one framework-level key group: strips the tag byte (when tagged),
   * emits endGroup/startGroup to the operator tree on key-boundary changes,
   * deserializes the key once per group and each value per record, and forwards
   * (key, value) rows to the reducer operator. OutOfMemoryError is rethrown
   * as-is; everything else aborts the task.
   */
  @Override
  public void reduce(Object key, Iterator values, OutputCollector output,
      Reporter reporter) throws IOException {
    if (reducer.getDone()) {
      return;
    }
    if (oc == null) {
      // propagate reporter and output collector to all operators
      oc = output;
      rp = reporter;
      reducer.setReporter(rp);
      MapredContext.get().setReporter(reporter);
    }
    try {
      BytesWritable keyWritable = (BytesWritable) key;
      byte tag = 0;
      if (isTagged) {
        // remove the tag from key coming out of reducer
        // and store it in separate variable.
        int size = keyWritable.getSize() - 1;
        tag = keyWritable.get()[size];
        keyWritable.setSize(size);
      }

      if (!keyWritable.equals(groupKey)) {
        // If a operator wants to do some work at the beginning of a group
        if (groupKey == null) {
          // the first group
          groupKey = new BytesWritable();
        } else {
          // If a operator wants to do some work at the end of a group
          LOG.trace("End Group");
          reducer.endGroup();
        }

        try {
          keyObject = inputKeySerDe.deserialize(keyWritable);
        } catch (Exception e) {
          throw new HiveException(
              "Hive Runtime Error: Unable to deserialize reduce input key from "
              + Utilities.formatBinaryString(keyWritable.get(), 0,
              keyWritable.getSize()) + " with properties "
              + keyTableDesc.getProperties(), e);
        }

        groupKey.set(keyWritable.get(), 0, keyWritable.getSize());
        LOG.trace("Start Group");
        reducer.startGroup();
        reducer.setGroupKeyObject(keyObject);
      }
      // System.err.print(keyObject.toString());
      while (values.hasNext()) {
        BytesWritable valueWritable = (BytesWritable) values.next();
        // System.err.print(who.getHo().toString());
        try {
          valueObject[tag] = inputValueDeserializer[tag].deserialize(valueWritable);
        } catch (SerDeException e) {
          throw new HiveException(
              "Hive Runtime Error: Unable to deserialize reduce input value (tag="
              + tag + ") from "
              + Utilities.formatBinaryString(valueWritable.get(), 0,
              valueWritable.getSize()) + " with properties "
              + valueTableDesc[tag].getProperties(), e);
        }
        row.clear();
        row.add(keyObject);
        row.add(valueObject[tag]);
        try {
          reducer.process(row, tag);
        } catch (Exception e) {
          String rowString = null;
          try {
            rowString = SerDeUtils.getJSONString(row, rowObjectInspector[tag]);
          } catch (Exception e2) {
            rowString = "[Error getting row data with exception "
                + StringUtils.stringifyException(e2) + " ]";
          }

          // Log the contents of the row that caused exception so that it's available for debugging. But
          // when exposed through an error message it can leak sensitive information, even to the
          // client application.
          LOG.trace("Hive Runtime Error while processing row (tag=" + tag + ") " + rowString);
          throw new HiveException("Hive Runtime Error while processing row", e);
        }
      }
    } catch (Throwable e) {
      abort = true;
      if (e instanceof OutOfMemoryError) {
        // Don't create a new object if we are already out of memory
        throw (OutOfMemoryError) e;
      } else {
        LOG.error("Reduce failed", e);
        throw new RuntimeException(e);
      }
    }
  }

  /**
   * Flushes the last group (endGroup), closes the operator tree, and reports
   * operator stats. Errors during close are only escalated when no earlier
   * abort was recorded, so the original failure is not masked.
   */
  @Override
  public void close() {

    // No row was processed
    if (oc == null) {
      LOG.trace("Close called without any rows processed");
    }

    try {
      if (groupKey != null) {
        // If a operator wants to do some work at the end of a group
        LOG.trace("End Group");
        reducer.endGroup();
      }

      reducer.close(abort);
      ReportStats rps = new ReportStats(rp, jc);
      reducer.preorderMap(rps);

    } catch (Exception e) {
      if (!abort) {
        // signal new failure to map-reduce
        LOG.error("Hit error while closing operators - failing tree");
        throw new RuntimeException("Hive Runtime Error while closing operators: "
            + e.getMessage(), e);
      }
    } finally {
      MapredContext.close();
      Utilities.clearWorkMap(jc);
    }
  }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.trans.steps.databaselookup; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.Database; 
import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.steps.databaselookup.DatabaseLookupMeta; import org.pentaho.di.ui.core.database.dialog.DatabaseExplorerDialog; import org.pentaho.di.ui.core.dialog.EnterSelectionDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.di.ui.trans.step.TableItemInsertListener; public class DatabaseLookupDialog extends BaseStepDialog implements StepDialogInterface { private static Class<?> PKG = DatabaseLookupMeta.class; // for i18n purposes, needed by Translator2!! 
private CCombo wConnection; private Label wlCache; private Button wCache; private FormData fdlCache, fdCache; private Label wlCacheLoadAll; private Button wCacheLoadAll; private FormData fdlCacheLoadAll, fdCacheLoadAll; private Label wlCachesize; private Text wCachesize; private FormData fdlCachesize, fdCachesize; private Label wlKey; private TableView wKey; private FormData fdlKey, fdKey; private Label wlSchema; private TextVar wSchema; private FormData fdlSchema, fdSchema; private Button wbSchema; private FormData fdbSchema; private Label wlTable; private Button wbTable; private TextVar wTable; private FormData fdlTable, fdbTable, fdTable; private Label wlReturn; private TableView wReturn; private FormData fdlReturn, fdReturn; private Label wlOrderBy; private Text wOrderBy; private FormData fdlOrderBy, fdOrderBy; private Label wlFailMultiple; private Button wFailMultiple; private FormData fdlFailMultiple, fdFailMultiple; private Label wlEatRows; private Button wEatRows; private FormData fdlEatRows, fdEatRows; private Button wGet, wGetLU; private Listener lsGet, lsGetLU; private DatabaseLookupMeta input; /** * List of ColumnInfo that should have the field names of the selected database table */ private List<ColumnInfo> tableFieldColumns = new ArrayList<ColumnInfo>(); /** * List of ColumnInfo that should have the previous fields combo box */ private List<ColumnInfo> fieldColumns = new ArrayList<ColumnInfo>(); /** * all fields from the previous steps */ private RowMetaInterface prevFields = null; public DatabaseLookupDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); input = (DatabaseLookupMeta) in; } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN ); props.setLook( shell ); setShellImage( shell, input ); ModifyListener lsMod = new ModifyListener() { public void modifyText( 
ModifyEvent e ) { input.setChanged(); } }; ModifyListener lsConnectionMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { input.setChanged(); setTableFieldCombo(); } }; ModifyListener lsTableMod = new ModifyListener() { public void modifyText( ModifyEvent arg0 ) { input.setChanged(); setTableFieldCombo(); } }; SelectionListener lsSelection = new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); setTableFieldCombo(); } }; backupChanged = input.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.shell.Title" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Stepname.Label" ) ); props.setLook( wlStepname ); fdlStepname = new FormData(); fdlStepname.left = new FormAttachment( 0, 0 ); fdlStepname.right = new FormAttachment( middle, -margin ); fdlStepname.top = new FormAttachment( 0, margin ); wlStepname.setLayoutData( fdlStepname ); wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); fdStepname = new FormData(); fdStepname.left = new FormAttachment( middle, 0 ); fdStepname.top = new FormAttachment( 0, margin ); fdStepname.right = new FormAttachment( 100, 0 ); wStepname.setLayoutData( fdStepname ); // Connection line wConnection = addConnectionLine( shell, wStepname, middle, margin ); if ( input.getDatabaseMeta() == null && transMeta.nrDatabases() == 1 ) { wConnection.select( 0 ); } wConnection.addModifyListener( lsConnectionMod ); wConnection.addSelectionListener( lsSelection ); // Schema line... 
wlSchema = new Label( shell, SWT.RIGHT ); wlSchema.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.TargetSchema.Label" ) ); props.setLook( wlSchema ); fdlSchema = new FormData(); fdlSchema.left = new FormAttachment( 0, 0 ); fdlSchema.right = new FormAttachment( middle, -margin ); fdlSchema.top = new FormAttachment( wConnection, margin * 2 ); wlSchema.setLayoutData( fdlSchema ); wbSchema = new Button( shell, SWT.PUSH | SWT.CENTER ); props.setLook( wbSchema ); wbSchema.setText( BaseMessages.getString( PKG, "System.Button.Browse" ) ); fdbSchema = new FormData(); fdbSchema.top = new FormAttachment( wConnection, 2 * margin ); fdbSchema.right = new FormAttachment( 100, 0 ); wbSchema.setLayoutData( fdbSchema ); wSchema = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wSchema ); wSchema.addModifyListener( lsTableMod ); fdSchema = new FormData(); fdSchema.left = new FormAttachment( middle, 0 ); fdSchema.top = new FormAttachment( wConnection, margin * 2 ); fdSchema.right = new FormAttachment( wbSchema, -margin ); wSchema.setLayoutData( fdSchema ); // Table line... 
wlTable = new Label( shell, SWT.RIGHT ); wlTable.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Lookuptable.Label" ) ); props.setLook( wlTable ); fdlTable = new FormData(); fdlTable.left = new FormAttachment( 0, 0 ); fdlTable.right = new FormAttachment( middle, -margin ); fdlTable.top = new FormAttachment( wbSchema, margin ); wlTable.setLayoutData( fdlTable ); wbTable = new Button( shell, SWT.PUSH | SWT.CENTER ); props.setLook( wbTable ); wbTable.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Browse.Button" ) ); fdbTable = new FormData(); fdbTable.right = new FormAttachment( 100, 0 ); fdbTable.top = new FormAttachment( wbSchema, margin ); wbTable.setLayoutData( fdbTable ); wTable = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wTable ); wTable.addModifyListener( lsTableMod ); fdTable = new FormData(); fdTable.left = new FormAttachment( middle, 0 ); fdTable.top = new FormAttachment( wbSchema, margin ); fdTable.right = new FormAttachment( wbTable, -margin ); wTable.setLayoutData( fdTable ); // Cache? 
wlCache = new Label( shell, SWT.RIGHT ); wlCache.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Cache.Label" ) ); props.setLook( wlCache ); fdlCache = new FormData(); fdlCache.left = new FormAttachment( 0, 0 ); fdlCache.right = new FormAttachment( middle, -margin ); fdlCache.top = new FormAttachment( wTable, margin ); wlCache.setLayoutData( fdlCache ); wCache = new Button( shell, SWT.CHECK ); props.setLook( wCache ); fdCache = new FormData(); fdCache.left = new FormAttachment( middle, 0 ); fdCache.top = new FormAttachment( wTable, margin ); wCache.setLayoutData( fdCache ); wCache.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); enableFields(); } } ); // Cache size line wlCachesize = new Label( shell, SWT.RIGHT ); wlCachesize.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Cachesize.Label" ) ); props.setLook( wlCachesize ); wlCachesize.setEnabled( input.isCached() ); fdlCachesize = new FormData(); fdlCachesize.left = new FormAttachment( 0, 0 ); fdlCachesize.right = new FormAttachment( middle, -margin ); fdlCachesize.top = new FormAttachment( wCache, margin ); wlCachesize.setLayoutData( fdlCachesize ); wCachesize = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wCachesize ); wCachesize.setEnabled( input.isCached() ); wCachesize.addModifyListener( lsMod ); fdCachesize = new FormData(); fdCachesize.left = new FormAttachment( middle, 0 ); fdCachesize.right = new FormAttachment( 100, 0 ); fdCachesize.top = new FormAttachment( wCache, margin ); wCachesize.setLayoutData( fdCachesize ); // Cache : Load all? 
wlCacheLoadAll = new Label( shell, SWT.RIGHT ); wlCacheLoadAll.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.CacheLoadAll.Label" ) ); props.setLook( wlCacheLoadAll ); fdlCacheLoadAll = new FormData(); fdlCacheLoadAll.left = new FormAttachment( 0, 0 ); fdlCacheLoadAll.right = new FormAttachment( middle, -margin ); fdlCacheLoadAll.top = new FormAttachment( wCachesize, margin ); wlCacheLoadAll.setLayoutData( fdlCacheLoadAll ); wCacheLoadAll = new Button( shell, SWT.CHECK ); props.setLook( wCacheLoadAll ); fdCacheLoadAll = new FormData(); fdCacheLoadAll.left = new FormAttachment( middle, 0 ); fdCacheLoadAll.top = new FormAttachment( wCachesize, margin ); wCacheLoadAll.setLayoutData( fdCacheLoadAll ); wCacheLoadAll.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); enableFields(); } } ); wlKey = new Label( shell, SWT.NONE ); wlKey.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Keys.Label" ) ); props.setLook( wlKey ); fdlKey = new FormData(); fdlKey.left = new FormAttachment( 0, 0 ); fdlKey.top = new FormAttachment( wCacheLoadAll, margin ); wlKey.setLayoutData( fdlKey ); int nrKeyCols = 4; int nrKeyRows = ( input.getStreamKeyField1() != null ? 
input.getStreamKeyField1().length : 1 ); ColumnInfo[] ciKey = new ColumnInfo[nrKeyCols]; ciKey[0] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Tablefield" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false ); ciKey[1] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Comparator" ), ColumnInfo.COLUMN_TYPE_CCOMBO, DatabaseLookupMeta.conditionStrings ); ciKey[2] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Field1" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false ); ciKey[3] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Field2" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false ); tableFieldColumns.add( ciKey[0] ); fieldColumns.add( ciKey[2] ); fieldColumns.add( ciKey[3] ); wKey = new TableView( transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciKey, nrKeyRows, lsMod, props ); fdKey = new FormData(); fdKey.left = new FormAttachment( 0, 0 ); fdKey.top = new FormAttachment( wlKey, margin ); fdKey.right = new FormAttachment( 100, 0 ); fdKey.bottom = new FormAttachment( wlKey, 250 ); wKey.setLayoutData( fdKey ); // THE BUTTONS wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) ); wGet = new Button( shell, SWT.PUSH ); wGet.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.GetFields.Button" ) ); wGetLU = new Button( shell, SWT.PUSH ); wGetLU.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.GetLookupFields.Button" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel, wGet, wGetLU }, margin, null ); // OderBy line wlOrderBy = new Label( shell, SWT.RIGHT ); wlOrderBy.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Orderby.Label" ) ); props.setLook( wlOrderBy ); fdlOrderBy = new 
FormData(); fdlOrderBy.left = new FormAttachment( 0, 0 ); fdlOrderBy.right = new FormAttachment( middle, -margin ); fdlOrderBy.bottom = new FormAttachment( wOK, -2 * margin ); wlOrderBy.setLayoutData( fdlOrderBy ); wOrderBy = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wOrderBy ); fdOrderBy = new FormData(); fdOrderBy.left = new FormAttachment( middle, 0 ); fdOrderBy.bottom = new FormAttachment( wOK, -2 * margin ); fdOrderBy.right = new FormAttachment( 100, 0 ); wOrderBy.setLayoutData( fdOrderBy ); wOrderBy.addModifyListener( lsMod ); // FailMultiple? wlFailMultiple = new Label( shell, SWT.RIGHT ); wlFailMultiple.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.FailMultiple.Label" ) ); props.setLook( wlFailMultiple ); fdlFailMultiple = new FormData(); fdlFailMultiple.left = new FormAttachment( 0, 0 ); fdlFailMultiple.right = new FormAttachment( middle, -margin ); fdlFailMultiple.bottom = new FormAttachment( wOrderBy, -margin ); wlFailMultiple.setLayoutData( fdlFailMultiple ); wFailMultiple = new Button( shell, SWT.CHECK ); props.setLook( wFailMultiple ); fdFailMultiple = new FormData(); fdFailMultiple.left = new FormAttachment( middle, 0 ); fdFailMultiple.bottom = new FormAttachment( wOrderBy, -margin ); wFailMultiple.setLayoutData( fdFailMultiple ); wFailMultiple.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); enableFields(); } } ); // EatRows? 
wlEatRows = new Label( shell, SWT.RIGHT ); wlEatRows.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.EatRows.Label" ) ); props.setLook( wlEatRows ); fdlEatRows = new FormData(); fdlEatRows.left = new FormAttachment( 0, 0 ); fdlEatRows.right = new FormAttachment( middle, -margin ); fdlEatRows.bottom = new FormAttachment( wFailMultiple, -margin ); wlEatRows.setLayoutData( fdlEatRows ); wEatRows = new Button( shell, SWT.CHECK ); props.setLook( wEatRows ); fdEatRows = new FormData(); fdEatRows.left = new FormAttachment( middle, 0 ); fdEatRows.bottom = new FormAttachment( wFailMultiple, -margin ); wEatRows.setLayoutData( fdEatRows ); wEatRows.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); enableFields(); } } ); // THE UPDATE/INSERT TABLE wlReturn = new Label( shell, SWT.NONE ); wlReturn.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.Return.Label" ) ); props.setLook( wlReturn ); fdlReturn = new FormData(); fdlReturn.left = new FormAttachment( 0, 0 ); fdlReturn.top = new FormAttachment( wKey, margin ); wlReturn.setLayoutData( fdlReturn ); int UpInsCols = 4; int UpInsRows = ( input.getReturnValueField() != null ? 
input.getReturnValueField().length : 1 ); ColumnInfo[] ciReturn = new ColumnInfo[UpInsCols]; ciReturn[0] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Field" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] {}, false ); ciReturn[1] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Newname" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); ciReturn[2] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Default" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); ciReturn[3] = new ColumnInfo( BaseMessages.getString( PKG, "DatabaseLookupDialog.ColumnInfo.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMeta.getTypes() ); tableFieldColumns.add( ciReturn[0] ); wReturn = new TableView( transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciReturn, UpInsRows, lsMod, props ); fdReturn = new FormData(); fdReturn.left = new FormAttachment( 0, 0 ); fdReturn.top = new FormAttachment( wlReturn, margin ); fdReturn.right = new FormAttachment( 100, 0 ); fdReturn.bottom = new FormAttachment( wEatRows, -margin ); wReturn.setLayoutData( fdReturn ); // Add listeners lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; lsGet = new Listener() { public void handleEvent( Event e ) { get(); } }; lsGetLU = new Listener() { public void handleEvent( Event e ) { getlookup(); } }; lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; wOK.addListener( SWT.Selection, lsOK ); wGet.addListener( SWT.Selection, lsGet ); wGetLU.addListener( SWT.Selection, lsGetLU ); wCancel.addListener( SWT.Selection, lsCancel ); lsDef = new SelectionAdapter() { public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; wStepname.addSelectionListener( lsDef ); wTable.addSelectionListener( lsDef ); wOrderBy.addSelectionListener( lsDef ); wCachesize.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... 
shell.addShellListener( new ShellAdapter() { public void shellClosed( ShellEvent e ) { cancel(); } } ); wbSchema.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { getSchemaNames(); } } ); wbTable.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { getTableName(); } } ); shell.pack(); shell.setMinimumSize( shell.getSize() ); getData(); input.setChanged( backupChanged ); setComboValues(); setTableFieldCombo(); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } private void setComboValues() { Runnable fieldLoader = new Runnable() { public void run() { try { prevFields = transMeta.getPrevStepFields( stepname ); } catch ( KettleException e ) { prevFields = new RowMeta(); String msg = BaseMessages.getString( PKG, "DatabaseLookupDialog.DoMapping.UnableToFindInput" ); logError( msg ); } String[] prevStepFieldNames = prevFields.getFieldNames(); Arrays.sort( prevStepFieldNames ); for ( ColumnInfo colInfo : fieldColumns ) { colInfo.setComboValues( prevStepFieldNames ); } } }; new Thread( fieldLoader ).start(); } private void setTableFieldCombo() { Runnable fieldLoader = new Runnable() { public void run() { if ( !wTable.isDisposed() && !wConnection.isDisposed() && !wSchema.isDisposed() ) { final String tableName = wTable.getText(), connectionName = wConnection.getText(), schemaName = wSchema.getText(); if ( !Const.isEmpty( tableName ) ) { DatabaseMeta ci = transMeta.findDatabase( connectionName ); if ( ci != null ) { Database db = new Database( loggingObject, ci ); db.shareVariablesWith( transMeta ); try { db.connect(); String schemaTable = ci.getQuotedSchemaTableCombination( schemaName, tableName ); RowMetaInterface r = db.getTableFields( schemaTable ); if ( null != r ) { String[] fieldNames = r.getFieldNames(); if ( null != fieldNames ) { for ( ColumnInfo colInfo : tableFieldColumns ) { colInfo.setComboValues( 
fieldNames ); } } } } catch ( Exception e ) { for ( ColumnInfo colInfo : tableFieldColumns ) { colInfo.setComboValues( new String[] {} ); } // ignore any errors here. drop downs will not be // filled, but no problem for the user } } } } } }; shell.getDisplay().asyncExec( fieldLoader ); } private void enableFields() { wlOrderBy.setEnabled( !wFailMultiple.getSelection() ); wOrderBy.setEnabled( !wFailMultiple.getSelection() ); wCachesize.setEnabled( wCache.getSelection() && !wCacheLoadAll.getSelection() ); wlCachesize.setEnabled( wCache.getSelection() && !wCacheLoadAll.getSelection() ); wCacheLoadAll.setEnabled( wCache.getSelection() ); wlCacheLoadAll.setEnabled( wCache.getSelection() ); wFailMultiple.setEnabled( !wCache.getSelection() ); wlFailMultiple.setEnabled( !wCache.getSelection() ); } /** * Copy information from the meta-data input to the dialog fields. */ public void getData() { logDebug( BaseMessages.getString( PKG, "DatabaseLookupDialog.Log.GettingKeyInfo" ) ); wCache.setSelection( input.isCached() ); wCachesize.setText( "" + input.getCacheSize() ); wCacheLoadAll.setSelection( input.isLoadingAllDataInCache() ); if ( input.getStreamKeyField1() != null ) { for ( int i = 0; i < input.getStreamKeyField1().length; i++ ) { TableItem item = wKey.table.getItem( i ); if ( input.getTableKeyField()[i] != null ) { item.setText( 1, input.getTableKeyField()[i] ); } if ( input.getKeyCondition()[i] != null ) { item.setText( 2, input.getKeyCondition()[i] ); } if ( input.getStreamKeyField1()[i] != null ) { item.setText( 3, input.getStreamKeyField1()[i] ); } if ( input.getStreamKeyField2()[i] != null ) { item.setText( 4, input.getStreamKeyField2()[i] ); } } } if ( input.getReturnValueField() != null ) { for ( int i = 0; i < input.getReturnValueField().length; i++ ) { TableItem item = wReturn.table.getItem( i ); if ( input.getReturnValueField()[i] != null ) { item.setText( 1, input.getReturnValueField()[i] ); } if ( input.getReturnValueNewName()[i] != null && 
!input.getReturnValueNewName()[i].equals( input.getReturnValueField()[i] ) ) { item.setText( 2, input.getReturnValueNewName()[i] ); } if ( input.getReturnValueDefault()[i] != null ) { item.setText( 3, input.getReturnValueDefault()[i] ); } item.setText( 4, ValueMeta.getTypeDesc( input.getReturnValueDefaultType()[i] ) ); } } if ( input.getSchemaName() != null ) { wSchema.setText( input.getSchemaName() ); } if ( input.getTablename() != null ) { wTable.setText( input.getTablename() ); } if ( input.getDatabaseMeta() != null ) { wConnection.setText( input.getDatabaseMeta().getName() ); } else if ( transMeta.nrDatabases() == 1 ) { wConnection.setText( transMeta.getDatabase( 0 ).getName() ); } if ( input.getOrderByClause() != null ) { wOrderBy.setText( input.getOrderByClause() ); } wFailMultiple.setSelection( input.isFailingOnMultipleResults() ); wEatRows.setSelection( input.isEatingRowOnLookupFailure() ); wKey.setRowNums(); wKey.optWidth( true ); wReturn.setRowNums(); wReturn.optWidth( true ); enableFields(); wStepname.selectAll(); wStepname.setFocus(); } private void cancel() { stepname = null; input.setChanged( backupChanged ); dispose(); } private void ok() { if ( Const.isEmpty( wStepname.getText() ) ) { return; } int nrkeys = wKey.nrNonEmpty(); int nrfields = wReturn.nrNonEmpty(); input.allocate( nrkeys, nrfields ); input.setCached( wCache.getSelection() ); input.setCacheSize( Const.toInt( wCachesize.getText(), 0 ) ); input.setLoadingAllDataInCache( wCacheLoadAll.getSelection() ); logDebug( BaseMessages.getString( PKG, "DatabaseLookupDialog.Log.FoundKeys", String.valueOf( nrkeys ) ) ); //CHECKSTYLE:Indentation:OFF for ( int i = 0; i < nrkeys; i++ ) { TableItem item = wKey.getNonEmpty( i ); input.getTableKeyField()[i] = item.getText( 1 ); input.getKeyCondition()[i] = item.getText( 2 ); input.getStreamKeyField1()[i] = item.getText( 3 ); input.getStreamKeyField2()[i] = item.getText( 4 ); } logDebug( BaseMessages.getString( PKG, "DatabaseLookupDialog.Log.FoundFields", 
String.valueOf( nrfields ) ) ); //CHECKSTYLE:Indentation:OFF for ( int i = 0; i < nrfields; i++ ) { TableItem item = wReturn.getNonEmpty( i ); input.getReturnValueField()[i] = item.getText( 1 ); input.getReturnValueNewName()[i] = item.getText( 2 ); if ( input.getReturnValueNewName()[i] == null || input.getReturnValueNewName()[i].length() == 0 ) { input.getReturnValueNewName()[i] = input.getReturnValueField()[i]; } input.getReturnValueDefault()[i] = item.getText( 3 ); input.getReturnValueDefaultType()[i] = ValueMeta.getType( item.getText( 4 ) ); if ( input.getReturnValueDefaultType()[i] < 0 ) { input.getReturnValueDefaultType()[i] = ValueMetaInterface.TYPE_STRING; } } input.setSchemaName( wSchema.getText() ); input.setTablename( wTable.getText() ); input.setDatabaseMeta( transMeta.findDatabase( wConnection.getText() ) ); input.setOrderByClause( wOrderBy.getText() ); input.setFailingOnMultipleResults( wFailMultiple.getSelection() ); input.setEatingRowOnLookupFailure( wEatRows.getSelection() ); stepname = wStepname.getText(); // return value if ( transMeta.findDatabase( wConnection.getText() ) == null ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( PKG, "DatabaseLookupDialog.InvalidConnection.DialogMessage" ) ); mb.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.InvalidConnection.DialogTitle" ) ); mb.open(); } dispose(); } private void getTableName() { DatabaseMeta inf = null; // New class: SelectTableDialog int connr = wConnection.getSelectionIndex(); if ( connr >= 0 ) { inf = transMeta.getDatabase( connr ); } if ( inf != null ) { if ( log.isDebug() ) { logDebug( BaseMessages.getString( PKG, "DatabaseLookupDialog.Log.LookingAtConnection" ) + inf.toString() ); } DatabaseExplorerDialog std = new DatabaseExplorerDialog( shell, SWT.NONE, inf, transMeta.getDatabases() ); std.setSelectedSchemaAndTable( wSchema.getText(), wTable.getText() ); if ( std.open() ) { wSchema.setText( Const.NVL( 
std.getSchemaName(), "" ) ); wTable.setText( Const.NVL( std.getTableName(), "" ) ); setTableFieldCombo(); } } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( PKG, "DatabaseLookupDialog.InvalidConnection.DialogMessage" ) ); mb.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.InvalidConnection.DialogTitle" ) ); mb.open(); } } private void get() { try { RowMetaInterface r = transMeta.getPrevStepFields( stepname ); if ( r != null && !r.isEmpty() ) { TableItemInsertListener listener = new TableItemInsertListener() { public boolean tableItemInserted( TableItem tableItem, ValueMetaInterface v ) { tableItem.setText( 2, "=" ); return true; } }; BaseStepDialog.getFieldsFromPrevious( r, wKey, 1, new int[] { 1, 3 }, new int[] {}, -1, -1, listener ); } } catch ( KettleException ke ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "DatabaseLookupDialog.GetFieldsFailed.DialogTitle" ), BaseMessages .getString( PKG, "DatabaseLookupDialog.GetFieldsFailed.DialogMessage" ), ke ); } } private void getlookup() { DatabaseMeta ci = transMeta.findDatabase( wConnection.getText() ); if ( ci != null ) { Database db = new Database( loggingObject, ci ); db.shareVariablesWith( transMeta ); try { db.connect(); if ( !Const.isEmpty( wTable.getText() ) ) { String schemaTable = ci.getQuotedSchemaTableCombination( db.environmentSubstitute( wSchema.getText() ), db .environmentSubstitute( wTable.getText() ) ); RowMetaInterface r = db.getTableFields( schemaTable ); if ( r != null && !r.isEmpty() ) { logDebug( BaseMessages.getString( PKG, "DatabaseLookupDialog.Log.FoundTableFields" ) + schemaTable + " --> " + r.toStringMeta() ); BaseStepDialog .getFieldsFromPrevious( r, wReturn, 1, new int[] { 1, 2 }, new int[] { 4 }, -1, -1, null ); } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages .getString( PKG, "DatabaseLookupDialog.CouldNotReadTableInfo.DialogMessage" ) ); 
mb.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.CouldNotReadTableInfo.DialogTitle" ) ); mb.open(); } } } catch ( KettleException e ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( PKG, "DatabaseLookupDialog.ErrorOccurred.DialogMessage" ) + Const.CR + e.getMessage() ); mb.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.ErrorOccurred.DialogTitle" ) ); mb.open(); } } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( PKG, "DatabaseLookupDialog.InvalidConnectionName.DialogMessage" ) ); mb.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.InvalidConnectionName.DialogTitle" ) ); mb.open(); } } private void getSchemaNames() { DatabaseMeta databaseMeta = transMeta.findDatabase( wConnection.getText() ); if ( databaseMeta != null ) { Database database = new Database( loggingObject, databaseMeta ); try { database.connect(); String[] schemas = database.getSchemas(); if ( null != schemas && schemas.length > 0 ) { schemas = Const.sortStrings( schemas ); EnterSelectionDialog dialog = new EnterSelectionDialog( shell, schemas, BaseMessages.getString( PKG, "DatabaseLookupDialog.AvailableSchemas.Title", wConnection.getText() ), BaseMessages .getString( PKG, "DatabaseLookupDialog.AvailableSchemas.Message", wConnection.getText() ) ); String d = dialog.open(); if ( d != null ) { wSchema.setText( Const.NVL( d, "" ) ); setTableFieldCombo(); } } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( PKG, "DatabaseLookupDialog.NoSchema.Error" ) ); mb.setText( BaseMessages.getString( PKG, "DatabaseLookupDialog.GetSchemas.Error" ) ); mb.open(); } } catch ( Exception e ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ), BaseMessages .getString( PKG, "DatabaseLookupDialog.ErrorGettingSchemas" ), e ); } finally { database.disconnect(); } } } }
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.compute.v1;

import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;

/**
 * Aggregated-list response for node templates: a page of scoped node-template lists keyed by
 * scope name, plus paging metadata.
 *
 * <p>Instances are immutable; use {@link #newBuilder()} / {@link #toBuilder()} to construct or
 * modify copies.
 */
@Generated("by GAPIC")
@BetaApi
public final class NodeTemplateAggregatedList implements ApiMessage {
  private final String id;
  private final Map<String, NodeTemplatesScopedList> items;
  private final String kind;
  private final String nextPageToken;
  private final String selfLink;
  private final Warning warning;

  /** All-null instance backing {@link #getDefaultInstance()}. */
  private NodeTemplateAggregatedList() {
    this.id = null;
    this.items = null;
    this.kind = null;
    this.nextPageToken = null;
    this.selfLink = null;
    this.warning = null;
  }

  private NodeTemplateAggregatedList(
      String id,
      Map<String, NodeTemplatesScopedList> items,
      String kind,
      String nextPageToken,
      String selfLink,
      Warning warning) {
    this.id = id;
    this.items = items;
    this.kind = kind;
    this.nextPageToken = nextPageToken;
    this.selfLink = selfLink;
    this.warning = warning;
  }

  /**
   * Returns the value of the named field, or {@code null} when the name is unknown or {@code
   * null}.
   */
  @Override
  public Object getFieldValue(String fieldName) {
    if (fieldName == null) {
      return null;
    }
    switch (fieldName) {
      case "id":
        return id;
      case "items":
        return items;
      case "kind":
        return kind;
      case "nextPageToken":
        return nextPageToken;
      case "selfLink":
        return selfLink;
      case "warning":
        return warning;
      default:
        return null;
    }
  }

  /** This message type carries no request body. */
  @Nullable
  @Override
  public ApiMessage getApiMessageRequestBody() {
    return null;
  }

  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  @Nullable
  @Override
  public List<String> getFieldMask() {
    return null;
  }

  /** [Output Only] Unique identifier for the resource; defined by the server. */
  public String getId() {
    return id;
  }

  /**
   * A list of NodeTemplatesScopedList resources. The key for the map is: [Output Only] Name of the
   * scope containing this set of node templates.
   */
  public Map<String, NodeTemplatesScopedList> getItemsMap() {
    return items;
  }

  /**
   * [Output Only] Type of resource.Always compute#nodeTemplateAggregatedList for aggregated lists
   * of node templates.
   */
  public String getKind() {
    return kind;
  }

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   */
  public String getNextPageToken() {
    return nextPageToken;
  }

  /** [Output Only] Server-defined URL for this resource. */
  public String getSelfLink() {
    return selfLink;
  }

  /** [Output Only] Informational warning message. */
  public Warning getWarning() {
    return warning;
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(NodeTemplateAggregatedList prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  public Builder toBuilder() {
    // The default instance has nothing to merge, so hand back a fresh builder for it.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  public static NodeTemplateAggregatedList getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final NodeTemplateAggregatedList DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new NodeTemplateAggregatedList();
  }

  /** Mutable companion used to assemble {@link NodeTemplateAggregatedList} instances. */
  public static class Builder {
    private String id;
    private Map<String, NodeTemplatesScopedList> items;
    private String kind;
    private String nextPageToken;
    private String selfLink;
    private Warning warning;

    Builder() {}

    /** Copies every non-null field of {@code other} into this builder. */
    public Builder mergeFrom(NodeTemplateAggregatedList other) {
      if (other == NodeTemplateAggregatedList.getDefaultInstance()) {
        return this;
      }
      if (other.getId() != null) {
        this.id = other.id;
      }
      if (other.getItemsMap() != null) {
        this.items = other.items;
      }
      if (other.getKind() != null) {
        this.kind = other.kind;
      }
      if (other.getNextPageToken() != null) {
        this.nextPageToken = other.nextPageToken;
      }
      if (other.getSelfLink() != null) {
        this.selfLink = other.selfLink;
      }
      if (other.getWarning() != null) {
        this.warning = other.warning;
      }
      return this;
    }

    Builder(NodeTemplateAggregatedList source) {
      this.id = source.id;
      this.items = source.items;
      this.kind = source.kind;
      this.nextPageToken = source.nextPageToken;
      this.selfLink = source.selfLink;
      this.warning = source.warning;
    }

    /** [Output Only] Unique identifier for the resource; defined by the server. */
    public String getId() {
      return id;
    }

    /** [Output Only] Unique identifier for the resource; defined by the server. */
    public Builder setId(String id) {
      this.id = id;
      return this;
    }

    /**
     * A list of NodeTemplatesScopedList resources. The key for the map is: [Output Only] Name of
     * the scope containing this set of node templates.
     */
    public Map<String, NodeTemplatesScopedList> getItemsMap() {
      return items;
    }

    /**
     * A list of NodeTemplatesScopedList resources. The key for the map is: [Output Only] Name of
     * the scope containing this set of node templates.
     */
    public Builder putAllItems(Map<String, NodeTemplatesScopedList> items) {
      this.items = items;
      return this;
    }

    /**
     * [Output Only] Type of resource.Always compute#nodeTemplateAggregatedList for aggregated
     * lists of node templates.
     */
    public String getKind() {
      return kind;
    }

    /**
     * [Output Only] Type of resource.Always compute#nodeTemplateAggregatedList for aggregated
     * lists of node templates.
     */
    public Builder setKind(String kind) {
      this.kind = kind;
      return this;
    }

    /**
     * [Output Only] This token allows you to get the next page of results for list requests. If
     * the number of results is larger than maxResults, use the nextPageToken as a value for the
     * query parameter pageToken in the next list request. Subsequent list requests will have their
     * own nextPageToken to continue paging through the results.
     */
    public String getNextPageToken() {
      return nextPageToken;
    }

    /**
     * [Output Only] This token allows you to get the next page of results for list requests. If
     * the number of results is larger than maxResults, use the nextPageToken as a value for the
     * query parameter pageToken in the next list request. Subsequent list requests will have their
     * own nextPageToken to continue paging through the results.
     */
    public Builder setNextPageToken(String nextPageToken) {
      this.nextPageToken = nextPageToken;
      return this;
    }

    /** [Output Only] Server-defined URL for this resource. */
    public String getSelfLink() {
      return selfLink;
    }

    /** [Output Only] Server-defined URL for this resource. */
    public Builder setSelfLink(String selfLink) {
      this.selfLink = selfLink;
      return this;
    }

    /** [Output Only] Informational warning message. */
    public Warning getWarning() {
      return warning;
    }

    /** [Output Only] Informational warning message. */
    public Builder setWarning(Warning warning) {
      this.warning = warning;
      return this;
    }

    public NodeTemplateAggregatedList build() {
      return new NodeTemplateAggregatedList(id, items, kind, nextPageToken, selfLink, warning);
    }

    /** Returns an independent builder carrying the same field values. */
    public Builder clone() {
      Builder copy = new Builder();
      copy.id = this.id;
      copy.items = this.items;
      copy.kind = this.kind;
      copy.nextPageToken = this.nextPageToken;
      copy.selfLink = this.selfLink;
      copy.warning = this.warning;
      return copy;
    }
  }

  @Override
  public String toString() {
    // Assembled with a StringBuilder; output is identical to the concatenated form.
    StringBuilder sb = new StringBuilder("NodeTemplateAggregatedList{");
    sb.append("id=").append(id).append(", ");
    sb.append("items=").append(items).append(", ");
    sb.append("kind=").append(kind).append(", ");
    sb.append("nextPageToken=").append(nextPageToken).append(", ");
    sb.append("selfLink=").append(selfLink).append(", ");
    sb.append("warning=").append(warning);
    return sb.append("}").toString();
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof NodeTemplateAggregatedList)) {
      return false;
    }
    NodeTemplateAggregatedList other = (NodeTemplateAggregatedList) o;
    return Objects.equals(this.id, other.getId())
        && Objects.equals(this.items, other.getItemsMap())
        && Objects.equals(this.kind, other.getKind())
        && Objects.equals(this.nextPageToken, other.getNextPageToken())
        && Objects.equals(this.selfLink, other.getSelfLink())
        && Objects.equals(this.warning, other.getWarning());
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, items, kind, nextPageToken, selfLink, warning);
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.StringUtils;

/** Store the summary of a content (a directory or a file). */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class ContentSummary implements Writable{
  private long length;
  private long fileCount;
  private long directoryCount;
  private long quota;
  private long spaceConsumed;
  private long spaceQuota;
  private long typeConsumed[];
  private long typeQuota[];

  /** Fluent builder for {@link ContentSummary}; quotas default to -1 (unset). */
  public static class Builder{
    public Builder() {
      this.quota = -1;
      this.spaceQuota = -1;

      typeConsumed = new long[StorageType.values().length];
      typeQuota = new long[StorageType.values().length];
      // -1 marks "no quota set" for every storage type.
      Arrays.fill(typeQuota, -1);
    }

    public Builder length(long length) {
      this.length = length;
      return this;
    }

    public Builder fileCount(long fileCount) {
      this.fileCount = fileCount;
      return this;
    }

    public Builder directoryCount(long directoryCount) {
      this.directoryCount = directoryCount;
      return this;
    }

    public Builder quota(long quota){
      this.quota = quota;
      return this;
    }

    public Builder spaceConsumed(long spaceConsumed) {
      this.spaceConsumed = spaceConsumed;
      return this;
    }

    public Builder spaceQuota(long spaceQuota) {
      this.spaceQuota = spaceQuota;
      return this;
    }

    /** Copies the per-type consumption values, indexed by {@code StorageType} ordinal. */
    public Builder typeConsumed(long typeConsumed[]) {
      System.arraycopy(typeConsumed, 0, this.typeConsumed, 0, typeConsumed.length);
      return this;
    }

    /** Sets the quota for a single storage type. */
    public Builder typeQuota(StorageType type, long quota) {
      this.typeQuota[type.ordinal()] = quota;
      return this;
    }

    /** Sets the consumption for a single storage type. */
    public Builder typeConsumed(StorageType type, long consumed) {
      this.typeConsumed[type.ordinal()] = consumed;
      return this;
    }

    /** Copies the per-type quota values, indexed by {@code StorageType} ordinal. */
    public Builder typeQuota(long typeQuota[]) {
      System.arraycopy(typeQuota, 0, this.typeQuota, 0, typeQuota.length);
      return this;
    }

    public ContentSummary build() {
      return new ContentSummary(length, fileCount, directoryCount, quota,
          spaceConsumed, spaceQuota, typeConsumed, typeQuota);
    }

    private long length;
    private long fileCount;
    private long directoryCount;
    private long quota;
    private long spaceConsumed;
    private long spaceQuota;
    private long typeConsumed[];
    private long typeQuota[];
  }

  /** Constructor deprecated by ContentSummary.Builder*/
  @Deprecated
  public ContentSummary() {}

  /** Constructor, deprecated by ContentSummary.Builder
   *  This constructor implicitly set spaceConsumed the same as length.
   *  spaceConsumed and length must be set explicitly with
   *  ContentSummary.Builder
   * */
  @Deprecated
  public ContentSummary(long length, long fileCount, long directoryCount) {
    this(length, fileCount, directoryCount, -1L, length, -1L);
  }

  /** Constructor, deprecated by ContentSummary.Builder */
  @Deprecated
  public ContentSummary(
      long length, long fileCount, long directoryCount, long quota,
      long spaceConsumed, long spaceQuota) {
    this.length = length;
    this.fileCount = fileCount;
    this.directoryCount = directoryCount;
    this.quota = quota;
    this.spaceConsumed = spaceConsumed;
    this.spaceQuota = spaceQuota;
  }

  /** Constructor for ContentSummary.Builder*/
  private ContentSummary(
      long length, long fileCount, long directoryCount, long quota,
      long spaceConsumed, long spaceQuota, long typeConsumed[],
      long typeQuota[]) {
    this.length = length;
    this.fileCount = fileCount;
    this.directoryCount = directoryCount;
    this.quota = quota;
    this.spaceConsumed = spaceConsumed;
    this.spaceQuota = spaceQuota;
    this.typeConsumed = typeConsumed;
    this.typeQuota = typeQuota;
  }

  /** @return the length */
  public long getLength() {return length;}

  /** @return the directory count */
  public long getDirectoryCount() {return directoryCount;}

  /** @return the file count */
  public long getFileCount() {return fileCount;}

  /** @return the directory quota, or -1 if no quota is set */
  public long getQuota() {return quota;}

  /** @return storage space consumed */
  public long getSpaceConsumed() {return spaceConsumed;}

  /** @return storage space quota, or -1 if no quota is set */
  public long getSpaceQuota() {return spaceQuota;}

  /** @return the quota for the given storage type, or -1 if unknown/unset */
  public long getTypeQuota(StorageType type) {
    return (typeQuota != null) ? typeQuota[type.ordinal()] : -1;
  }

  /** @return the consumption for the given storage type, or 0 if unknown */
  public long getTypeConsumed(StorageType type) {
    return (typeConsumed != null) ? typeConsumed[type.ordinal()] : 0;
  }

  /** Returns true if any storage type quota has been set*/
  public boolean isTypeQuotaSet() {
    if (typeQuota == null) {
      return false;
    }
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      if (typeQuota[t.ordinal()] > 0) {
        return true;
      }
    }
    return false;
  }

  /** Returns true if any storage type consumption information is available*/
  public boolean isTypeConsumedAvailable() {
    if (typeConsumed == null) {
      return false;
    }
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      if (typeConsumed[t.ordinal()] > 0) {
        return true;
      }
    }
    return false;
  }

  // NOTE(review): per-type arrays are intentionally not serialized here; only the
  // original six fields travel over the wire, matching readFields below.
  @Override
  @InterfaceAudience.Private
  public void write(DataOutput out) throws IOException {
    out.writeLong(length);
    out.writeLong(fileCount);
    out.writeLong(directoryCount);
    out.writeLong(quota);
    out.writeLong(spaceConsumed);
    out.writeLong(spaceQuota);
  }

  @Override
  @InterfaceAudience.Private
  public void readFields(DataInput in) throws IOException {
    this.length = in.readLong();
    this.fileCount = in.readLong();
    this.directoryCount = in.readLong();
    this.quota = in.readLong();
    this.spaceConsumed = in.readLong();
    this.spaceQuota = in.readLong();
  }

  /**
   * Output format:
   * <----12----> <----12----> <-------18------->
   *    DIR_COUNT   FILE_COUNT       CONTENT_SIZE
   */
  private static final String SUMMARY_FORMAT = "%12s %12s %18s ";

  /**
   * Output format:
   * <----12----> <------15-----> <------15-----> <------15----->
   *        QUOTA       REM_QUOTA     SPACE_QUOTA REM_SPACE_QUOTA
   * <----12----> <----12----> <-------18------->
   *    DIR_COUNT   FILE_COUNT       CONTENT_SIZE
   */
  private static final String QUOTA_SUMMARY_FORMAT = "%12s %15s ";
  private static final String SPACE_QUOTA_SUMMARY_FORMAT = "%15s %15s ";

  private static final String STORAGE_TYPE_SUMMARY_FORMAT = "%13s %17s ";

  private static final String[] HEADER_FIELDS = new String[] { "DIR_COUNT",
      "FILE_COUNT", "CONTENT_SIZE"};
  private static final String[] QUOTA_HEADER_FIELDS = new String[] { "QUOTA",
      "REM_QUOTA", "SPACE_QUOTA", "REM_SPACE_QUOTA" };

  /** The header string */
  private static final String HEADER = String.format(
      SUMMARY_FORMAT, (Object[]) HEADER_FIELDS);

  private static final String QUOTA_HEADER = String.format(
      QUOTA_SUMMARY_FORMAT + SPACE_QUOTA_SUMMARY_FORMAT,
      (Object[]) QUOTA_HEADER_FIELDS) + HEADER;

  /** default quota display string */
  private static final String QUOTA_NONE = "none";
  private static final String QUOTA_INF = "inf";

  /** Return the header of the output.
   * if qOption is false, output directory count, file count, and content size;
   * if qOption is true, output quota and remaining quota as well.
   *
   * @param qOption a flag indicating if quota needs to be printed or not
   * @return the header of the output
   */
  public static String getHeader(boolean qOption) {
    return qOption ? QUOTA_HEADER : HEADER;
  }

  /**
   * return the header of with the StorageTypes
   *
   * @param storageTypes storage types whose quota columns should appear
   * @return storage header string
   */
  public static String getStorageTypeHeader(List<StorageType> storageTypes) {
    // StringBuilder: no synchronization needed for this method-local buffer.
    StringBuilder header = new StringBuilder();

    for (StorageType st : storageTypes) {
      /* the field length is 13/17 for quota and remain quota
       * as the max length for quota name is ARCHIVE_QUOTA
       * and remain quota name REM_ARCHIVE_QUOTA */
      String storageName = st.toString();
      header.append(String.format(STORAGE_TYPE_SUMMARY_FORMAT,
          storageName + "_QUOTA", "REM_" + storageName + "_QUOTA"));
    }
    return header.toString();
  }

  /**
   * Returns the names of the fields from the summary header.
   *
   * @return names of fields as displayed in the header
   */
  public static String[] getHeaderFields() {
    return HEADER_FIELDS;
  }

  /**
   * Returns the names of the fields used in the quota summary.
   *
   * @return names of quota fields as displayed in the header
   */
  public static String[] getQuotaHeaderFields() {
    return QUOTA_HEADER_FIELDS;
  }

  @Override
  public String toString() {
    return toString(true);
  }

  /** Return the string representation of the object in the output format.
   * if qOption is false, output directory count, file count, and content size;
   * if qOption is true, output quota and remaining quota as well.
   *
   * @param qOption a flag indicating if quota needs to be printed or not
   * @return the string representation of the object
   */
  public String toString(boolean qOption) {
    return toString(qOption, false);
  }

  /** Return the string representation of the object in the output format.
   * if qOption is false, output directory count, file count, and content size;
   * if qOption is true, output quota and remaining quota as well.
   * if hOption is false file sizes are returned in bytes
   * if hOption is true file sizes are returned in human readable
   *
   * @param qOption a flag indicating if quota needs to be printed or not
   * @param hOption a flag indicating if human readable output if to be used
   * @return the string representation of the object
   */
  public String toString(boolean qOption, boolean hOption) {
    return toString(qOption, hOption, false, null);
  }

  /**
   * Return the string representation of the object in the output format.
   * if tOption is true, display the quota by storage types,
   * Otherwise, same logic with #toString(boolean,boolean)
   *
   * @param qOption a flag indicating if quota needs to be printed or not
   * @param hOption a flag indicating if human readable output if to be used
   * @param tOption a flag indicating if display quota by storage types
   * @param types Storage types to display
   * @return the string representation of the object
   */
  public String toString(boolean qOption, boolean hOption,
                         boolean tOption, List<StorageType> types) {
    String prefix = "";

    if (tOption) {
      // Per-type view replaces the regular summary entirely.
      StringBuilder content = new StringBuilder();
      for (StorageType st : types) {
        long typeQuota = getTypeQuota(st);
        long typeConsumed = getTypeConsumed(st);
        String quotaStr = QUOTA_NONE;
        String quotaRem = QUOTA_INF;

        if (typeQuota > 0) {
          quotaStr = formatSize(typeQuota, hOption);
          quotaRem = formatSize(typeQuota - typeConsumed, hOption);
        }

        content.append(String.format(STORAGE_TYPE_SUMMARY_FORMAT,
            quotaStr, quotaRem));
      }
      return content.toString();
    }

    if (qOption) {
      String quotaStr = QUOTA_NONE;
      String quotaRem = QUOTA_INF;
      String spaceQuotaStr = QUOTA_NONE;
      String spaceQuotaRem = QUOTA_INF;

      if (quota>0) {
        quotaStr = formatSize(quota, hOption);
        // Name quota counts both files and directories.
        quotaRem = formatSize(quota-(directoryCount+fileCount), hOption);
      }
      if (spaceQuota>0) {
        spaceQuotaStr = formatSize(spaceQuota, hOption);
        spaceQuotaRem = formatSize(spaceQuota - spaceConsumed, hOption);
      }

      prefix = String.format(QUOTA_SUMMARY_FORMAT + SPACE_QUOTA_SUMMARY_FORMAT,
                             quotaStr, quotaRem, spaceQuotaStr, spaceQuotaRem);
    }

    return prefix + String.format(SUMMARY_FORMAT,
     formatSize(directoryCount, hOption),
     formatSize(fileCount, hOption),
     formatSize(length, hOption));
  }

  /**
   * Formats a size to be human readable or in bytes
   * @param size value to be formatted
   * @param humanReadable flag indicating human readable or not
   * @return String representation of the size
   */
  private static String formatSize(long size, boolean humanReadable) {
    return humanReadable
      ? StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1)
      : String.valueOf(size);
  }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4beta1/histogram.proto package com.google.cloud.talent.v4beta1; /** * * * <pre> * The histogram request. * </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.HistogramQuery} */ public final class HistogramQuery extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4beta1.HistogramQuery) HistogramQueryOrBuilder { private static final long serialVersionUID = 0L; // Use HistogramQuery.newBuilder() to construct. 
private HistogramQuery(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private HistogramQuery() { histogramQuery_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new HistogramQuery(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private HistogramQuery( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); histogramQuery_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.HistogramProto .internal_static_google_cloud_talent_v4beta1_HistogramQuery_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.HistogramProto .internal_static_google_cloud_talent_v4beta1_HistogramQuery_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.HistogramQuery.class, 
com.google.cloud.talent.v4beta1.HistogramQuery.Builder.class); } public static final int HISTOGRAM_QUERY_FIELD_NUMBER = 1; private volatile java.lang.Object histogramQuery_; /** * * * <pre> * An expression specifies a histogram request against matching resources * (for example, jobs, profiles) for searches. * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax. * </pre> * * <code>string histogram_query = 1;</code> * * @return The histogramQuery. */ @java.lang.Override public java.lang.String getHistogramQuery() { java.lang.Object ref = histogramQuery_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); histogramQuery_ = s; return s; } } /** * * * <pre> * An expression specifies a histogram request against matching resources * (for example, jobs, profiles) for searches. * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax. * </pre> * * <code>string histogram_query = 1;</code> * * @return The bytes for histogramQuery. 
*/ @java.lang.Override public com.google.protobuf.ByteString getHistogramQueryBytes() { java.lang.Object ref = histogramQuery_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); histogramQuery_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(histogramQuery_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, histogramQuery_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(histogramQuery_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, histogramQuery_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.talent.v4beta1.HistogramQuery)) { return super.equals(obj); } com.google.cloud.talent.v4beta1.HistogramQuery other = (com.google.cloud.talent.v4beta1.HistogramQuery) obj; if (!getHistogramQuery().equals(other.getHistogramQuery())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + HISTOGRAM_QUERY_FIELD_NUMBER; hash = (53 * hash) + 
getHistogramQuery().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, 
extensionRegistry); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.HistogramQuery parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.talent.v4beta1.HistogramQuery prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The histogram request. 
 * </pre>
 *
 * Protobuf type {@code google.cloud.talent.v4beta1.HistogramQuery}
 */
// NOTE(review): protoc-generated code (GeneratedMessageV3 builder pattern).
// Do not edit by hand; regenerate from the .proto definition instead.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4beta1.HistogramQuery)
    com.google.cloud.talent.v4beta1.HistogramQueryOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.talent.v4beta1.HistogramProto
        .internal_static_google_cloud_talent_v4beta1_HistogramQuery_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.talent.v4beta1.HistogramProto
        .internal_static_google_cloud_talent_v4beta1_HistogramQuery_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.talent.v4beta1.HistogramQuery.class,
            com.google.cloud.talent.v4beta1.HistogramQuery.Builder.class);
  }

  // Construct using com.google.cloud.talent.v4beta1.HistogramQuery.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // No nested-message field builders to eagerly initialize for this message.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    histogramQuery_ = "";

    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.talent.v4beta1.HistogramProto
        .internal_static_google_cloud_talent_v4beta1_HistogramQuery_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.talent.v4beta1.HistogramQuery getDefaultInstanceForType() {
    return com.google.cloud.talent.v4beta1.HistogramQuery.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.talent.v4beta1.HistogramQuery build() {
    com.google.cloud.talent.v4beta1.HistogramQuery result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.talent.v4beta1.HistogramQuery buildPartial() {
    com.google.cloud.talent.v4beta1.HistogramQuery result =
        new com.google.cloud.talent.v4beta1.HistogramQuery(this);
    result.histogramQuery_ = histogramQuery_;
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.talent.v4beta1.HistogramQuery) {
      return mergeFrom((com.google.cloud.talent.v4beta1.HistogramQuery) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.cloud.talent.v4beta1.HistogramQuery other) {
    if (other == com.google.cloud.talent.v4beta1.HistogramQuery.getDefaultInstance()) return this;
    // Only non-default (non-empty) fields from `other` overwrite this builder's state.
    if (!other.getHistogramQuery().isEmpty()) {
      histogramQuery_ = other.histogramQuery_;
      onChanged();
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this message, so it is always initialized.
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.talent.v4beta1.HistogramQuery parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.cloud.talent.v4beta1.HistogramQuery) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      // Merge whatever was successfully parsed, even when parsing failed part-way.
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  private java.lang.Object histogramQuery_ = "";
  /**
   *
   *
   * <pre>
   * An expression specifies a histogram request against matching resources
   * (for example, jobs, profiles) for searches.
   * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and
   * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @return The histogramQuery.
   */
  public java.lang.String getHistogramQuery() {
    java.lang.Object ref = histogramQuery_;
    if (!(ref instanceof java.lang.String)) {
      // Field is stored as ByteString until first String access; cache the decoded value.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      histogramQuery_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * An expression specifies a histogram request against matching resources
   * (for example, jobs, profiles) for searches.
   * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and
   * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @return The bytes for histogramQuery.
   */
  public com.google.protobuf.ByteString getHistogramQueryBytes() {
    java.lang.Object ref = histogramQuery_;
    if (ref instanceof String) {
      // Mirror of getHistogramQuery(): cache the UTF-8 encoded form on first byte access.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      histogramQuery_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * An expression specifies a histogram request against matching resources
   * (for example, jobs, profiles) for searches.
   * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and
   * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @param value The histogramQuery to set.
   * @return This builder for chaining.
   */
  public Builder setHistogramQuery(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }

    histogramQuery_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * An expression specifies a histogram request against matching resources
   * (for example, jobs, profiles) for searches.
   * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and
   * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearHistogramQuery() {

    histogramQuery_ = getDefaultInstance().getHistogramQuery();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * An expression specifies a histogram request against matching resources
   * (for example, jobs, profiles) for searches.
   * See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and
   * [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @param value The bytes for histogramQuery to set.
   * @return This builder for chaining.
   */
  public Builder setHistogramQueryBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    // Rejects bytes that are not valid UTF-8, since the proto field type is `string`.
    checkByteStringIsUtf8(value);

    histogramQuery_ = value;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4beta1.HistogramQuery)
}

// @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.HistogramQuery)
private static final com.google.cloud.talent.v4beta1.HistogramQuery DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.talent.v4beta1.HistogramQuery();
}

public static com.google.cloud.talent.v4beta1.HistogramQuery getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

private static final com.google.protobuf.Parser<HistogramQuery> PARSER =
    new com.google.protobuf.AbstractParser<HistogramQuery>() {
      @java.lang.Override
      public HistogramQuery parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new HistogramQuery(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<HistogramQuery> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<HistogramQuery> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.talent.v4beta1.HistogramQuery getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/* * Copyright (c) 2006-2017 DMDirc Developers * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 */

package com.dmdirc.parser.irc.processors;

import com.dmdirc.parser.common.ChannelListModeItem;
import com.dmdirc.parser.common.ParserError;
import com.dmdirc.parser.events.ChannelModeChangeEvent;
import com.dmdirc.parser.events.ChannelNonUserModeChangeEvent;
import com.dmdirc.parser.events.ChannelSingleModeChangeEvent;
import com.dmdirc.parser.events.ChannelUserModeChangeEvent;
import com.dmdirc.parser.events.UserModeChangeEvent;
import com.dmdirc.parser.events.UserModeDiscoveryEvent;
import com.dmdirc.parser.interfaces.ChannelClientInfo;
import com.dmdirc.parser.interfaces.ChannelInfo;
import com.dmdirc.parser.interfaces.ClientInfo;
import com.dmdirc.parser.irc.IRCChannelClientInfo;
import com.dmdirc.parser.irc.IRCChannelInfo;
import com.dmdirc.parser.irc.IRCClientInfo;
import com.dmdirc.parser.irc.IRCParser;
import com.dmdirc.parser.irc.ModeManager;
import com.dmdirc.parser.irc.PrefixModeManager;

import java.time.LocalDateTime;
import java.util.Calendar;

import javax.inject.Inject;
import javax.inject.Named;

/**
 * Process a Mode line.
 *
 * <p>Handles the "MODE" command as well as numerics 324 (channel mode reply)
 * and 221 (user mode reply), dispatching parsed changes to the appropriate
 * channel/user mode change events.
 */
public class ProcessMode extends IRCProcessor {

    /** The manager to use to access prefix modes. */
    private final PrefixModeManager prefixModeManager;

    /** Mode manager to use for user modes. */
    private final ModeManager userModeManager;

    /** Mode manager to use for channel modes. */
    private final ModeManager chanModeManager;

    /**
     * Create a new instance of the IRCProcessor Object.
     *
     * @param parser IRCParser That owns this IRCProcessor
     * @param prefixModeManager The manager to use to access prefix modes.
     * @param userModeManager Mode manager to use for user modes.
     * @param chanModeManager Mode manager to use for channel modes.
     */
    @Inject
    public ProcessMode(final IRCParser parser, final PrefixModeManager prefixModeManager,
            @Named("user") final ModeManager userModeManager,
            @Named("channel") final ModeManager chanModeManager) {
        // Registers this processor for "MODE" lines plus numerics 324 and 221.
        super(parser, "MODE", "324", "221");
        this.prefixModeManager = prefixModeManager;
        this.userModeManager = userModeManager;
        this.chanModeManager = chanModeManager;
    }

    /**
     * Process a Mode Line.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param sParam Type of line to process ("MODE", "324")
     * @param token IRCTokenised line to process
     */
    @Override
    public void process(final LocalDateTime date, final String sParam, final String... token) {
        final String[] sModestr;
        final String sChannelName;
        switch (sParam) {
            case "324":
                // Channel mode reply: channel name is token[3], modes start at token[4].
                sChannelName = token[3];
                sModestr = new String[token.length - 4];
                System.arraycopy(token, 4, sModestr, 0, token.length - 4);
                break;
            case "221":
                // User mode reply: the final token is the full mode string for ourselves.
                processUserMode(date, sParam, token, new String[]{token[token.length - 1]}, true);
                return;
            default:
                // Plain "MODE": target is token[2], modes start at token[3].
                sChannelName = token[2];
                sModestr = new String[token.length - 3];
                System.arraycopy(token, 3, sModestr, 0, token.length - 3);
                break;
        }

        // A MODE line may target either a channel or a user; dispatch accordingly.
        if (isValidChannelName(sChannelName)) {
            processChanMode(date, sParam, token, sModestr, sChannelName);
        } else {
            processUserMode(date, sParam, token, sModestr, false);
        }
    }

    /**
     * Method to trim spaces from strings.
     *
     * @param str String to trim
     * @return String without spaces on the ends
     */
    private String trim(final String str) {
        return str.trim();
    }

    /**
     * Process Chan modes.
     *
     * <p>Walks the mode string character by character, consuming parameters from
     * {@code sModestr} as required by each mode's type (prefix mode, list mode,
     * parameterised mode, or boolean mode), updating channel/client state and
     * publishing the corresponding events.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param sParam String representation of parameter to parse
     * @param token IRCTokenised Array of the incomming line
     * @param sModestr The modes and params
     * @param sChannelName Channel these modes are for
     */
    public void processChanMode(final LocalDateTime date, final String sParam, final String[] token, final String[] sModestr, final String sChannelName) {
        final StringBuilder sFullModeStr = new StringBuilder();

        final IRCChannelInfo iChannel = getChannel(sChannelName);
        if (iChannel == null) {
            return;
        }

        // Get the current channel modes
        // (for 324 we start from scratch, as the server is telling us the full mode set).
        String nCurrent = "";
        if (!"324".equals(sParam)) {
            nCurrent = iChannel.getMode();
        }

        final IRCChannelClientInfo setterCCI = iChannel.getChannelClient(token[0], true);
        // Facilitate dmdirc formatter: back-fill the setter's host details if unknown.
        if (IRCParser.ALWAYS_UPDATECLIENT && setterCCI != null && setterCCI.getClient().getHostname().isEmpty()) {
            setterCCI.getClient().setUserBits(token[0], false);
        }

        // Loop through the mode string, and add/remove modes/params where they are needed
        char cPositive = '+';
        boolean bPositive = true;       // current +/- state as we scan the mode string
        long nValue = 0;                // mode-type bitmask from parser.chanModesOther
        int nParam = 1;                 // cursor into sModestr for the next mode parameter
        final StringBuilder sNonUserModeStrParams = new StringBuilder();
        final StringBuilder sNonUserModeStr = new StringBuilder();
        for (int i = 0; i < sModestr[0].length(); ++i) {
            final Character cMode = sModestr[0].charAt(i);
            // Leading ':' is just an IRC protocol artefact; skip it.
            if (cMode.equals(":".charAt(0))) {
                continue;
            }

            sNonUserModeStr.append(cMode);
            if (cMode.equals("+".charAt(0))) {
                cPositive = '+';
                bPositive = true;
            } else if (cMode.equals("-".charAt(0))) {
                cPositive = '-';
                bPositive = false;
            } else {
                final boolean bBooleanMode;
                final String sModeParam;
                if (chanModeManager.isMode(cMode)) {
                    // Simple on/off channel mode (no parameter).
                    bBooleanMode = true;
                } else if (parser.chanModesOther.containsKey(cMode)) {
                    // Mode with special handling; nValue flags whether it is a
                    // list mode and/or requires a parameter when unset.
                    nValue = parser.chanModesOther.get(cMode);
                    bBooleanMode = false;
                } else if (prefixModeManager.isPrefixMode(cMode)) {
                    // (de) OP/Voice someone
                    if (sModestr.length <= nParam) {
                        parser.callErrorInfo(new ParserError(ParserError.ERROR_FATAL + ParserError.ERROR_USER, "Broken Modes. Parameter required but not given.", parser.getLastLine()));
                        return;
                    }
                    sModeParam = sModestr[nParam++];
                    callDebugInfo(IRCParser.DEBUG_INFO, "User Mode: %c / %s {Positive: %b}",
                            cMode, sModeParam, bPositive);
                    final IRCChannelClientInfo iChannelClientInfo = iChannel.getChannelClient(sModeParam);
                    if (iChannelClientInfo == null) {
                        // Client not known?
                        callDebugInfo(IRCParser.DEBUG_INFO, "User Mode for client not on channel." +
                                " Ignoring (%s)", sModeParam);
                        continue;
                    }
                    callDebugInfo(IRCParser.DEBUG_INFO, "\tOld Mode Value: %s",
                            iChannelClientInfo.getAllModes());
                    if (bPositive) {
                        iChannelClientInfo.addMode(cMode);
                    } else {
                        iChannelClientInfo.removeMode(cMode);
                    }
                    callChannelUserModeChanged(date, iChannel, iChannelClientInfo, setterCCI, token[0],
                            (bPositive ? "+" : "-") + cMode);
                    continue;
                } else {
                    // unknown mode - add as boolean
                    chanModeManager.add(cMode);
                    bBooleanMode = true;
                }

                if (bBooleanMode) {
                    callDebugInfo(IRCParser.DEBUG_INFO, "Boolean Mode: %c {Positive: %b}", cMode,
                            bPositive);

                    if (bPositive) {
                        nCurrent = chanModeManager.insertMode(nCurrent, cMode);
                    } else {
                        nCurrent = chanModeManager.removeMode(nCurrent, cMode);
                    }
                } else {
                    // A parameter is required when setting, when it is a list mode,
                    // or when the mode needs its parameter echoed on unset.
                    if ((bPositive || nValue == IRCParser.MODE_LIST ||
                            (nValue & IRCParser.MODE_UNSET) == IRCParser.MODE_UNSET) &&
                            sModestr.length <= nParam) {
                        parser.callErrorInfo(new ParserError(ParserError.ERROR_FATAL + ParserError.ERROR_USER, "Broken Modes. Parameter required but not given.", parser.getLastLine()));
                        continue;
                    }

                    if (nValue == IRCParser.MODE_LIST) {
                        // List Mode
                        sModeParam = sModestr[nParam++];
                        sNonUserModeStrParams.append(' ').append(sModeParam);
                        // List entries are timestamped in seconds since the epoch.
                        final long nTemp = Calendar.getInstance().getTimeInMillis() / 1000;
                        iChannel.setListModeParam(cMode, new ChannelListModeItem(sModeParam, token[0], nTemp), bPositive);
                        callDebugInfo(IRCParser.DEBUG_INFO, "List Mode: %c [%s] {Positive: %b}",
                                cMode, sModeParam, bPositive);
                        if (!"324".equals(sParam)) {
                            getCallbackManager().publish(
                                    new ChannelSingleModeChangeEvent(
                                            parser, date, iChannel, setterCCI, token[0],
                                            cPositive + cMode + " " + sModeParam));
                        }
                    } else {
                        // Mode with a parameter
                        if (bPositive) {
                            // +Mode - always needs a parameter to set
                            sModeParam = sModestr[nParam++];
                            sNonUserModeStrParams.append(' ').append(sModeParam);
                            callDebugInfo(IRCParser.DEBUG_INFO, "Set Mode: %c [%s] {Positive: %b}",
                                    cMode, sModeParam, bPositive);
                            iChannel.setModeParam(cMode, sModeParam);
                            if (!"324".equals(sParam)) {
                                getCallbackManager().publish(
                                        new ChannelSingleModeChangeEvent(
                                                parser, date, iChannel, setterCCI, token[0],
                                                cPositive + cMode + " " + sModeParam));
                            }
                        } else {
                            // -Mode - parameter isn't always needed, we need to check
                            if ((nValue & IRCParser.MODE_UNSET) == IRCParser.MODE_UNSET) {
                                sModeParam = sModestr[nParam++];
                                sNonUserModeStrParams.append(' ').append(sModeParam);
                            } else {
                                sModeParam = "";
                            }
                            callDebugInfo(IRCParser.DEBUG_INFO,
                                    "Unset Mode: %c [%s] {Positive: %b}",
                                    cMode, sModeParam, bPositive);
                            iChannel.setModeParam(cMode, "");
                            if (!"324".equals(sParam)) {
                                getCallbackManager().publish(
                                        new ChannelSingleModeChangeEvent(
                                                parser, date, iChannel, setterCCI, token[0],
                                                trim(cPositive + cMode + " " + sModeParam)));
                            }
                        }
                    }
                }
            }
        }

        // Call Callbacks
        for (String aSModestr : sModestr) {
            sFullModeStr.append(aSModestr).append(' ');
        }

        iChannel.setMode(nCurrent);
        if ("324".equals(sParam)) {
            // 324 is a mode discovery, not a change made by a client: no setter host.
            callChannelModeChanged(date, iChannel, setterCCI, "", sFullModeStr.toString().trim());
        } else {
            callChannelModeChanged(date, iChannel, setterCCI, token[0], sFullModeStr.toString().trim());
            getCallbackManager().publish(
                    new ChannelNonUserModeChangeEvent(parser, date, iChannel, setterCCI,
                            token[0], trim(sNonUserModeStr.toString() + sNonUserModeStrParams)));
        }
    }

    /**
     * Process user modes.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param sParam String representation of parameter to parse
     * @param token IRCTokenised Array of the incomming line
     * @param clearOldModes Clear old modes before applying these modes (used by 221)
     */
    private void processUserMode(final LocalDateTime date, final String sParam, final String[] token, final String[] sModestr, final boolean clearOldModes) {
        final IRCClientInfo iClient = getClientInfo(token[2]);

        if (iClient == null) {
            return;
        }

        String nCurrent;
        if (clearOldModes) {
            // 221 reports our full mode set, so discard anything previously known.
            nCurrent = "";
        } else {
            nCurrent = iClient.getUserMode();
        }

        boolean bPositive = true;
        for (int i = 0; i < sModestr[0].length(); ++i) {
            final Character cMode = sModestr[0].charAt(i);
            if (cMode.equals("+".charAt(0))) {
                bPositive = true;
            } else if (cMode.equals("-".charAt(0))) {
                bPositive = false;
            } else if (!cMode.equals(":".charAt(0))) {
                if (!userModeManager.isMode(cMode)) {
                    // Unknown mode
                    callErrorInfo(new ParserError(ParserError.ERROR_WARNING,
                            "Got unknown user mode " + cMode + " - Added", parser.getLastLine()));
                    userModeManager.add(cMode);
                }
                // Usermodes are always boolean
                callDebugInfo(IRCParser.DEBUG_INFO, "User Mode: %c {Positive: %b}", cMode, bPositive);
                if (bPositive) {
                    nCurrent = userModeManager.insertMode(nCurrent, cMode);
                } else {
                    nCurrent = userModeManager.removeMode(nCurrent, cMode);
                }
            }
        }

        iClient.setUserMode(nCurrent);
        if ("221".equals(sParam)) {
            callUserModeDiscovered(date, iClient, sModestr[0]);
        } else {
            callUserModeChanged(date, iClient, token[0], sModestr[0]);
        }
    }

    /**
     * Callback to all objects implementing the ChannelModeChanged Callback.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param cChannel Channel where modes were changed
     * @param cChannelClient Client chaning the modes (null if server)
     * @param sHost Host doing the mode changing (User host or server name)
     * @param sModes Exact String parsed
     */
    protected void callChannelModeChanged(final LocalDateTime date, final ChannelInfo cChannel,
            final ChannelClientInfo cChannelClient, final String sHost, final String sModes) {
        getCallbackManager().publish(
                new ChannelModeChangeEvent(parser, date, cChannel, cChannelClient, sHost,
                        sModes));
    }

    /**
     * Callback to all objects implementing the ChannelUserModeChanged Callback.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param cChannel Channel where modes were changed
     * @param cChangedClient Client being changed
     * @param cSetByClient Client chaning the modes (null if server)
     * @param sHost Host doing the mode changing (User host or server name)
     * @param sMode String representing mode change (ie +o)
     */
    protected void callChannelUserModeChanged(final LocalDateTime date, final ChannelInfo cChannel,
            final ChannelClientInfo cChangedClient, final ChannelClientInfo cSetByClient,
            final String sHost, final String sMode) {
        getCallbackManager().publish(
                new ChannelUserModeChangeEvent(parser, date, cChannel, cChangedClient,
                        cSetByClient, sHost, sMode));
    }

    /**
     * Callback to all objects implementing the UserModeChanged Callback.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param cClient Client that had the mode changed (almost always us)
     * @param sSetby Host that set the mode (us or servername)
     * @param sModes The modes set.
     */
    protected void callUserModeChanged(final LocalDateTime date, final ClientInfo cClient,
            final String sSetby, final String sModes) {
        getCallbackManager().publish(
                new UserModeChangeEvent(parser, date, cClient, sSetby, sModes));
    }

    /**
     * Callback to all objects implementing the UserModeDiscovered Callback.
     *
     * @param date The LocalDateTime that this event occurred at.
     * @param cClient Client that had the mode changed (almost always us)
     * @param sModes The modes set.
     */
    protected void callUserModeDiscovered(final LocalDateTime date, final ClientInfo cClient,
            final String sModes) {
        getCallbackManager().publish(
                new UserModeDiscoveryEvent(parser, date, cClient, sModes));
    }

}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.httpserver; import com.facebook.buck.artifact_cache.CacheResultType; import com.facebook.buck.core.build.event.BuildEvent; import com.facebook.buck.core.build.event.BuildRuleEvent; import com.facebook.buck.core.test.event.IndividualTestEvent; import com.facebook.buck.core.test.event.TestRunEvent; import com.facebook.buck.event.BuckEventListener; import com.facebook.buck.event.CompilerErrorEvent; import com.facebook.buck.event.ConsoleEvent; import com.facebook.buck.event.InstallEvent; import com.facebook.buck.event.ProgressEvent; import com.facebook.buck.event.ProjectGenerationEvent; import com.facebook.buck.event.external.events.BuckEventExternalInterface; import com.facebook.buck.event.listener.stats.cache.CacheRateStatsKeeper; import com.facebook.buck.parser.ParseEvent; import com.facebook.buck.parser.events.ParseBuckFileEvent; import com.facebook.buck.util.timing.Clock; import com.google.common.eventbus.Subscribe; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import javax.annotation.concurrent.GuardedBy; /** * {@link BuckEventListener} that is responsible for reporting events of interest to the {@link * StreamingWebSocketServlet}. 
This class passes high-level objects to the servlet, and the servlet * takes responsibility for serializing the objects as JSON down to the client. */ public class WebServerBuckEventListener implements BuckEventListener { private final StreamingWebSocketServlet streamingWebSocketServlet; private final Clock clock; private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); @GuardedBy("this") private ScheduledFuture<?> buildStatusFuture; @GuardedBy("this") BuildState buildState = null; @GuardedBy("this") private int numberOfRules = 0; @GuardedBy("this") private int numberOfFinishedRules = 0; @GuardedBy("this") private int numberOfUpdatedRules = 0; @GuardedBy("this") private int numberOfParsedRules = 0; @GuardedBy("this") private int numberOfParsedFiles = 0; WebServerBuckEventListener(StreamingWebSocketServlet streamingWebSocketServlet, Clock clock) { this.streamingWebSocketServlet = streamingWebSocketServlet; this.clock = clock; } // Stateless pass-throughs: these events are passed through to WebSocket listeners unchanged. // (Some of them update internal state for the stateful BuildStatusEvent.) 
@Subscribe public void parseStarted(ParseEvent.Started started) { streamingWebSocketServlet.tellClients(started); synchronized (this) { buildState = BuildState.PARSING; scheduleBuildStatusEvent(); } } @Subscribe public void parseFinished(ParseEvent.Finished finished) { streamingWebSocketServlet.tellClients(finished); synchronized (this) { buildState = BuildState.BUILDING_ACTION_GRAPH; scheduleBuildStatusEvent(); } } @Subscribe public void buildStarted(BuildEvent.Started started) { streamingWebSocketServlet.tellClients(started); resetBuildState(BuildState.STARTING); scheduleBuildStatusEvent(); } @Subscribe public void cacheRateStatsUpdate( CacheRateStatsKeeper.CacheRateStatsUpdateEvent cacheRateStatsUpdate) { streamingWebSocketServlet.tellClients(cacheRateStatsUpdate); } @Subscribe public void buildFinished(BuildEvent.Finished finished) { synchronized (this) { if (buildStatusFuture != null) { buildStatusFuture.cancel(false); buildStatusFuture = null; } } resetBuildState(null); streamingWebSocketServlet.tellClients(finished); } @Subscribe public void testRunStarted(TestRunEvent.Started event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void testRunCompleted(TestRunEvent.Finished event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void testAwaitingResults(IndividualTestEvent.Started event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void testResultsAvailable(IndividualTestEvent.Finished event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void installEventFinished(InstallEvent.Finished event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void compilerErrorEvent(CompilerErrorEvent event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void consoleEvent(ConsoleEvent event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void buildProgressUpdated(ProgressEvent.BuildProgressUpdated event) { 
streamingWebSocketServlet.tellClients(event); } @Subscribe public void parsingProgressUpdated(ProgressEvent.ParsingProgressUpdated event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void projectGenerationProgressUpdated( ProgressEvent.ProjectGenerationProgressUpdated event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void projectGenerationStarted(ProjectGenerationEvent.Started event) { streamingWebSocketServlet.tellClients(event); } @Subscribe public void projectGenerationFinished(ProjectGenerationEvent.Finished event) { streamingWebSocketServlet.tellClients(event); } // Stateful data tracking for BuildStatusEvent. These events are not passed through; // instead, their data is aggregated and used to generate BuildStatusEvent. @Subscribe private synchronized void ruleParseFinished(ParseBuckFileEvent.Finished ruleParseFinished) { numberOfParsedFiles++; numberOfParsedRules += ruleParseFinished.getNumRules(); scheduleBuildStatusEvent(); } @Subscribe public synchronized void ruleCountCalculated(BuildEvent.RuleCountCalculated calculated) { numberOfRules = calculated.getNumRules(); scheduleBuildStatusEvent(); } @Subscribe public synchronized void ruleCountUpdated(BuildEvent.UnskippedRuleCountUpdated updated) { numberOfRules = updated.getNumRules(); scheduleBuildStatusEvent(); } @Subscribe public synchronized void buildRuleFinished(BuildRuleEvent.Finished finished) { if (buildState == BuildState.BUILDING_ACTION_GRAPH) { buildState = BuildState.BUILDING; } numberOfFinishedRules++; if (finished.getCacheResult().getType() != CacheResultType.LOCAL_KEY_UNCHANGED_HIT) { numberOfUpdatedRules++; } scheduleBuildStatusEvent(); } private synchronized void resetBuildState(BuildState newBuildState) { buildState = newBuildState; numberOfRules = 0; numberOfFinishedRules = 0; numberOfUpdatedRules = 0; numberOfParsedRules = 0; numberOfParsedFiles = 0; } /** Should be called whenever any build status state changes. 
*/ private synchronized void scheduleBuildStatusEvent() { if (buildStatusFuture != null) { return; // already scheduled } buildStatusFuture = executorService.schedule(this::sendBuildStatusEventInternal, 500, TimeUnit.MILLISECONDS); } /** Internal implementation detail of scheduleBuildStatusEvent. */ private void sendBuildStatusEventInternal() { BuildStatusEvent event; synchronized (this) { if (buildState == null) { return; // No build in progress. } // Clear the buildStatusFuture to indicate that we've sent // a BuildStatusEvent with the current state. If the state // subsequently changes, we'll schedule another future. buildStatusFuture = null; event = new BuildStatusEvent( clock.currentTimeMillis(), buildState, numberOfRules, numberOfFinishedRules, numberOfUpdatedRules, numberOfParsedRules, numberOfParsedFiles); } // avoid holding lock while calling tellClients() streamingWebSocketServlet.tellClients(event); } @Override public void close() { executorService.shutdown(); try { executorService.awaitTermination(1000, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { // shrug } } /** NOT posted to the Buck event bus; only sent to WebSocket clients. 
*/ @SuppressWarnings("unused") // Jackson JSON introspection uses the fields private static class BuildStatusEvent implements BuckEventExternalInterface { private final long timestamp; public final BuildState state; public final int totalRulesCount; public final int finishedRulesCount; public final int updatedRulesCount; public final int parsedRulesCount; public final int parsedFilesCount; public BuildStatusEvent( long timestamp, BuildState state, int totalRulesCount, int finishedRulesCount, int updatedRulesCount, int parsedRulesCount, int parsedFilesCount) { this.timestamp = timestamp; this.state = state; this.totalRulesCount = totalRulesCount; this.finishedRulesCount = finishedRulesCount; this.updatedRulesCount = updatedRulesCount; this.parsedRulesCount = parsedRulesCount; this.parsedFilesCount = parsedFilesCount; } @Override public long getTimestampMillis() { return timestamp; } @Override public String getEventName() { return BuckEventExternalInterface.BUILD_STATUS_EVENT; } @Override public boolean storeLastInstanceAndReplayForNewClients() { // Because this event represents a snapshot of the build state, we want new clients to // immediately receive the latest snapshot when they connect. This ensures that new // clients immediately get the current build status even during periods where there are // no relevant changes to build state (e.g. action graph computation, during which no // events may arrive for a minute or two). return true; } } private enum BuildState { STARTING, PARSING, BUILDING_ACTION_GRAPH, BUILDING; } }
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.psi.types;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.intellij.codeInsight.completion.CompletionUtil;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiInvalidElementAccessException;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.QualifiedName;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ProcessingContext;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.PyCustomMember;
import com.jetbrains.python.codeInsight.PyCustomMemberUtils;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.impl.PyResolveResultRater;
import com.jetbrains.python.psi.impl.ResolveResultList;
import com.jetbrains.python.psi.impl.references.PyReferenceImpl;
import com.jetbrains.python.psi.resolve.CompletionVariantsProcessor;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.PyResolveProcessor;
import com.jetbrains.python.psi.resolve.RatedResolveResult;
import com.jetbrains.python.toolbox.Maybe;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.stream.Collectors;

import static com.jetbrains.python.psi.PyUtil.as;
import static com.jetbrains.python.psi.resolve.PyResolveImportUtil.fromFoothold;
import static com.jetbrains.python.psi.resolve.PyResolveImportUtil.resolveTopLevelMember;

/**
 * Class-based Python type: either the class object itself ({@code myIsDefinition == true})
 * or an instance of it ({@code myIsDefinition == false}). Implements member resolution
 * (own members, ancestors, metaclass, extension-point member providers) and code-completion
 * variant collection for such types.
 *
 * @author yole
 */
public class PyClassTypeImpl extends UserDataHolderBase implements PyClassType {

  @NotNull protected final PyClass myClass;
  protected final boolean myIsDefinition;

  // Re-entrancy guard for resolveMember(): records the (class, member-name) pairs currently
  // being resolved on this thread so that a cyclic resolve returns an empty result instead
  // of recursing forever. Thread-local because resolution can run on multiple threads.
  private static ThreadLocal<Set<Pair<PyClass, String>>> ourResolveMemberStack =
    new ThreadLocal<Set<Pair<PyClass, String>>>() {
      @Override
      protected Set<Pair<PyClass, String>> initialValue() {
        return new HashSet<>();
      }
    };

  /**
   * Describes a class-based type. Since everything in Python is an instance of some class, this type pretty much completes
   * the type system :)
   * Note that classes' and instances' member list can change during execution, so it is important to construct an instance of PyClassType
   * right in the place of reference, so that such changes could possibly be accounted for.
   *
   * @param source       PyClass which defines this type. For builtin or external classes, skeleton files contain the definitions.
   *                     If {@code source} is a completion copy, the original element is stored instead.
   * @param isDefinition whether this type describes an instance or a definition of the class.
   */
  public PyClassTypeImpl(@NotNull PyClass source, boolean isDefinition) {
    PyClass originalElement = CompletionUtil.getOriginalElement(source);
    myClass = originalElement != null ? originalElement : source;
    myIsDefinition = isDefinition;
  }

  /** Attaches a user-data entry and returns {@code this} for call chaining. */
  public <T> PyClassTypeImpl withUserData(Key<T> key, T value) {
    putUserData(key, value);
    return this;
  }

  /**
   * @return a PyClass which defined this type.
   */
  @Override
  @NotNull
  public PyClass getPyClass() {
    return myClass;
  }

  /**
   * @return whether this type refers to an instance or a definition of the class.
   */
  @Override
  public boolean isDefinition() {
    return myIsDefinition;
  }

  @NotNull
  @Override
  public PyClassType toInstance() {
    // User data is copied onto the instance type; the class→instance direction preserves it,
    // while toClass() below does not.
    return myIsDefinition ? withUserDataCopy(new PyClassTypeImpl(myClass, false)) : this;
  }

  @NotNull
  @Override
  public PyClassLikeType toClass() {
    return myIsDefinition ? this : new PyClassTypeImpl(myClass, true);
  }

  /**
   * Wrap new instance to copy user data to it
   */
  @NotNull
  final <T extends PyClassTypeImpl> T withUserDataCopy(@NotNull final T newInstance) {
    newInstance.setUserMap(getUserMap());
    return newInstance;
  }

  @Override
  @Nullable
  public String getClassQName() {
    return myClass.getQualifiedName();
  }

  @NotNull
  @Override
  public List<PyClassLikeType> getSuperClassTypes(@NotNull TypeEvalContext context) {
    return myClass.getSuperClassTypes(context);
  }

  @Nullable
  @Override
  public List<? extends RatedResolveResult> resolveMember(@NotNull final String name,
                                                          @Nullable PyExpression location,
                                                          @NotNull AccessDirection direction,
                                                          @NotNull PyResolveContext resolveContext) {
    // Convenience overload: resolve including inherited members.
    return resolveMember(name, location, direction, resolveContext, true);
  }

  @Nullable
  @Override
  public List<? extends RatedResolveResult> resolveMember(@NotNull final String name,
                                                          @Nullable PyExpression location,
                                                          @NotNull AccessDirection direction,
                                                          @NotNull PyResolveContext resolveContext,
                                                          boolean inherited) {
    // Guard against cyclic resolution: if this (class, name) pair is already on the
    // thread-local resolve stack, bail out with an empty result.
    final Set<Pair<PyClass, String>> resolving = ourResolveMemberStack.get();
    final Pair<PyClass, String> key = Pair.create(myClass, name);
    if (resolving.contains(key)) {
      return Collections.emptyList();
    }
    resolving.add(key);
    try {
      return doResolveMember(name, location, direction, resolveContext, inherited);
    }
    finally {
      resolving.remove(key);
    }
  }

  /**
   * Core member-resolution sequence. Order matters; each stage returns immediately on a hit:
   * overriding providers → properties → builtin super() special case → own members
   * (with {@code __class__} / {@code __doc__} special cases) → overriding-ancestors providers
   * → ancestor classes → metaclass → plain providers → providers applied to ancestors.
   */
  @Nullable
  private List<? extends RatedResolveResult> doResolveMember(@NotNull String name,
                                                             @Nullable PyExpression location,
                                                             @NotNull AccessDirection direction,
                                                             @NotNull PyResolveContext resolveContext,
                                                             boolean inherited) {
    final TypeEvalContext context = resolveContext.getTypeEvalContext();
    PsiElement classMember =
      resolveByOverridingMembersProviders(this, name, location, context); //overriding members provers have priority to normal resolve
    if (classMember != null) {
      return ResolveResultList.to(classMember);
    }

    if (resolveContext.allowProperties()) {
      // A non-null Ref means "a property with this name exists"; the Ref may still hold an
      // empty/absent list when the accessor for the requested direction is missing.
      final Ref<ResolveResultList> resultRef = findProperty(name, direction, true, resolveContext.getTypeEvalContext());
      if (resultRef != null) {
        return resultRef.get();
      }
    }

    if ("super".equals(getClassQName()) && isBuiltin() && location instanceof PyCallExpression) {
      // methods of super() call are not of class super!
      PyExpression first_arg = ((PyCallExpression)location).getArgument(0, PyExpression.class);
      if (first_arg != null) { // the usual case: first arg is the derived class that super() is proxying for
        PyType first_arg_type = context.getType(first_arg);
        if (first_arg_type instanceof PyClassType) {
          PyClass derived_class = ((PyClassType)first_arg_type).getPyClass();
          final Iterator<PyClass> base_it = derived_class.getAncestorClasses(context).iterator();
          if (base_it.hasNext()) {
            // Delegate resolution to the first ancestor in MRO order.
            return new PyClassTypeImpl(base_it.next(), true).resolveMember(name, location, direction, resolveContext);
          }
          else {
            return null; // no base classes = super() cannot proxy anything meaningful from a base class
          }
        }
      }
    }

    final List<? extends RatedResolveResult> classMembers = resolveInner(myClass, myIsDefinition, name, location, context);
    if (PyNames.__CLASS__.equals(name)) {
      return resolveDunderClass(context, classMembers);
    }

    if (!classMembers.isEmpty()) {
      return classMembers;
    }

    if (PyNames.DOC.equals(name)) {
      // __doc__ ultimately comes from the builtin object type when not defined on the class.
      return Optional
        .ofNullable(PyBuiltinCache.getInstance(myClass).getObjectType())
        .map(type -> type.resolveMember(name, location, direction, resolveContext))
        .orElse(Collections.emptyList());
    }

    classMember = resolveByOverridingAncestorsMembersProviders(this, name, location);
    if (classMember != null) {
      // Let registered raters adjust the base rate before returning the provider's result.
      final ResolveResultList list = new ResolveResultList();
      int rate = RatedResolveResult.RATE_NORMAL;
      for (PyResolveResultRater rater : Extensions.getExtensions(PyResolveResultRater.EP_NAME)) {
        rate += rater.getMemberRate(classMember, this, context);
      }
      list.poke(classMember, rate);
      return list;
    }

    if (inherited) {
      for (PyClassLikeType type : myClass.getAncestorTypes(context)) {
        if (type instanceof PyClassType) {
          if (!myIsDefinition) {
            type = type.toInstance();
          }
          final List<? extends RatedResolveResult> superMembers =
            resolveInner(((PyClassType)type).getPyClass(), myIsDefinition, name, location, context);
          if (!superMembers.isEmpty()) {
            return superMembers;
          }
        }
        if (type != null) {
          // Non-recursive (inherited = false): getAncestorTypes already returns the full MRO.
          final List<? extends RatedResolveResult> results = type.resolveMember(name, location, direction, resolveContext, false);
          if (results != null && !results.isEmpty()) {
            return results;
          }
        }
      }
    }

    // __init__/__new__ are looked up on the class itself, never on the metaclass.
    if (inherited && !PyNames.INIT.equals(name) && !PyNames.NEW.equals(name)) {
      final List<? extends RatedResolveResult> typeMembers = resolveMetaClassMember(name, location, direction, resolveContext);
      if (typeMembers != null) {
        return typeMembers;
      }
    }

    if (inherited) {
      classMember = resolveByMembersProviders(this, name, location, context);  //ask providers after real class introspection as providers have less priority
    }

    if (classMember != null) {
      return ResolveResultList.to(classMember);
    }

    if (inherited) {
      // Last resort: ask the plain member providers about each ancestor class.
      for (PyClassLikeType type : myClass.getAncestorTypes(context)) {
        if (type instanceof PyClassType) {
          final PyClass pyClass = ((PyClassType)type).getPyClass();
          PsiElement superMember =
            resolveByMembersProviders(new PyClassTypeImpl(pyClass, isDefinition()), name, location, resolveContext.getTypeEvalContext());
          if (superMember != null) {
            return ResolveResultList.to(superMember);
          }
        }
      }
    }

    return Collections.emptyList();
  }

  /**
   * Resolves {@code name} against this class's metaclass (new-style classes only).
   * For a class definition, members of the metaclass type and its instance side are tried;
   * for an instance, only the metaclass's instance attributes are considered.
   *
   * @return resolve results from the metaclass, or {@code null} when nothing applies.
   */
  @Nullable
  private List<? extends RatedResolveResult> resolveMetaClassMember(@NotNull String name,
                                                                    @Nullable PyExpression location,
                                                                    @NotNull AccessDirection direction,
                                                                    @NotNull PyResolveContext resolveContext) {
    final TypeEvalContext context = resolveContext.getTypeEvalContext();

    if (!myClass.isNewStyleClass(context)) {
      return null;
    }

    final PyClassLikeType typeType = getMetaClassType(context, true);
    if (typeType == null) {
      return null;
    }

    if (isDefinition()) {
      final List<? extends RatedResolveResult> typeMembers = typeType.resolveMember(name, location, direction, resolveContext);
      if (!ContainerUtil.isEmpty(typeMembers)) {
        return typeMembers;
      }

      final List<? extends RatedResolveResult> typeInstanceMembers =
        typeType.toInstance().resolveMember(name, location, direction, resolveContext);
      if (!ContainerUtil.isEmpty(typeInstanceMembers)) {
        return typeInstanceMembers;
      }
    }
    else if (typeType instanceof PyClassType) {
      final List<PyTargetExpression> typeInstanceAttributes = ((PyClassType)typeType).getPyClass().getInstanceAttributes();

      if (!ContainerUtil.isEmpty(typeInstanceAttributes)) {
        final List<RatedResolveResult> typeInstanceAttributesWithSpecifiedName = typeInstanceAttributes
          .stream()
          .filter(member -> name.equals(member.getName()))
          .map(member -> new RatedResolveResult(PyReferenceImpl.getRate(member, context), member))
          .collect(Collectors.toList());

        if (!typeInstanceAttributesWithSpecifiedName.isEmpty()) {
          return typeInstanceAttributesWithSpecifiedName;
        }
      }
    }

    return null;
  }

  /**
   * Looks for a property named {@code name}.
   *
   * @return {@code null} when no such property exists; otherwise a Ref whose payload lists
   *         the accessor (and definition site), or an empty Ref when the property exists but
   *         lacks the accessor for the requested access direction.
   */
  private Ref<ResolveResultList> findProperty(String name,
                                              AccessDirection direction,
                                              boolean inherited,
                                              @Nullable TypeEvalContext context) {
    Ref<ResolveResultList> resultRef = null;
    Property property = myClass.findProperty(name, inherited, context);
    if (property != null) {
      Maybe<PyCallable> accessor = property.getByDirection(direction);
      if (accessor.isDefined()) {
        PyCallable accessor_code = accessor.value();
        ResolveResultList ret = new ResolveResultList();
        if (accessor_code != null) ret.poke(accessor_code, RatedResolveResult.RATE_NORMAL);
        PyTargetExpression site = property.getDefinitionSite();
        if (site != null) ret.poke(site, RatedResolveResult.RATE_LOW);
        if (ret.size() > 0) {
          resultRef = Ref.create(ret);
        }
        else {
          resultRef = Ref.create();
        } // property is found, but the required accessor is explicitly absent
      }
    }
    return resultRef;
  }

  /**
   * Handles resolution of the {@code __class__} attribute, which differs between
   * instances (resolves to the class, or an explicit ancestor override) and class
   * objects (resolves to the builtin {@code type} for new-style classes).
   */
  @Nullable
  private List<? extends RatedResolveResult> resolveDunderClass(@NotNull TypeEvalContext context,
                                                                @NotNull List<? extends RatedResolveResult> classMembers) {
    final boolean newStyleClass = myClass.isNewStyleClass(context);

    if (!myIsDefinition) {
      if (newStyleClass && !classMembers.isEmpty()) {
        return classMembers;
      }

      // Prefer an explicitly assigned __class__ found on a non-object ancestor;
      // otherwise fall back to the class itself.
      return ResolveResultList.to(
        myClass.getAncestorClasses(context)
          .stream()
          .filter(cls -> !PyUtil.isObjectClass(cls))
          .<PsiElement>map(cls -> cls.findClassAttribute(PyNames.__CLASS__, true, context))
          .filter(target -> target != null)
          .findFirst()
          .orElse(myClass)
      );
    }

    // Python 2 old-style classes keep whatever was resolved on the class itself.
    if (LanguageLevel.forElement(myClass).isOlderThan(LanguageLevel.PYTHON30) && !newStyleClass) {
      return classMembers;
    }

    return Optional
      .ofNullable(PyBuiltinCache.getInstance(myClass).getTypeType())
      .map(typeType -> ResolveResultList.to(typeType.getPyClass()))
      .orElse(null);
  }

  @Nullable
  @Override
  public PyClassLikeType getMetaClassType(@NotNull final TypeEvalContext context, boolean inherited) {
    if (!inherited) {
      return as(myClass.getMetaClassType(context), PyClassLikeType.class);
    }

    // With inheritance: the effective metaclass is the most derived of all explicit
    // metaclasses in the hierarchy, defaulting to builtin "type".
    final List<PyClassLikeType> metaClassTypes = getAllExplicitMetaClassTypes(context);
    final PyClassLikeType mostDerivedMeta = getMostDerivedClassType(metaClassTypes, context);

    return mostDerivedMeta != null ? mostDerivedMeta : PyBuiltinCache.getInstance(myClass).getObjectType("type");
  }

  /**
   * Picks the most derived type from {@code classTypes} via ancestor-set comparison.
   * Returns {@code null} for an empty list or when the types are not on a single
   * derivation chain (signalled internally by {@link NotDerivedClassTypeException}).
   */
  @Nullable
  private static PyClassLikeType getMostDerivedClassType(@NotNull List<PyClassLikeType> classTypes,
                                                         @NotNull final TypeEvalContext context) {
    if (classTypes.isEmpty()) {
      return null;
    }

    try {
      return Collections.max(classTypes, (t1, t2) -> {
        if (t1 == t2 || t1 != null && t1.equals(t2)) {
          return 0;
        }
        else if (t2 == null || t1 != null && Sets.newHashSet(t1.getAncestorTypes(context)).contains(t2)) {
          return 1;
        }
        else if (t1 == null || Sets.newHashSet(t2.getAncestorTypes(context)).contains(t1)) {
          return -1;
        }
        else {
          throw new NotDerivedClassTypeException();
        }
      });
    }
    catch (NotDerivedClassTypeException ignored) {
      return null;
    }
  }

  // Control-flow exception used only by getMostDerivedClassType's comparator to abort
  // Collections.max when two metaclasses are unrelated.
  private static final class NotDerivedClassTypeException extends RuntimeException {
  }

  /** Collects explicit metaclass types declared on this class and on every ancestor. */
  private List<PyClassLikeType> getAllExplicitMetaClassTypes(@NotNull TypeEvalContext context) {
    final List<PyClassLikeType> results = Lists.newArrayList();

    final PyClassLikeType ownMeta = getMetaClassType(context, false);
    if (ownMeta != null) {
      results.add(ownMeta);
    }

    for (PyClassLikeType ancestor : myClass.getAncestorTypes(context)) {
      if (ancestor != null) {
        final PyClassLikeType ancestorMeta = ancestor.getMetaClassType(context, false);
        if (ancestorMeta != null) {
          results.add(ancestorMeta);
        }
      }
    }
    return results;
  }

  @Override
  public boolean isCallable() {
    // Class objects are always callable; instances are callable when they are
    // classmethod/staticmethod wrappers or subclass collections.abc.Callable.
    if (isDefinition()) {
      return true;
    }
    if (isMethodType(this)) {
      return true;
    }
    final PyClass cls = getPyClass();
    if (PyABCUtil.isSubclass(cls, PyNames.CALLABLE, null)) {
      return true;
    }
    return false;
  }

  private static boolean isMethodType(@NotNull PyClassType type) {
    final PyBuiltinCache builtinCache = PyBuiltinCache.getInstance(type.getPyClass());
    return type.equals(builtinCache.getClassMethodType()) || type.equals(builtinCache.getStaticMethodType());
  }

  @Nullable
  @Override
  public PyType getReturnType(@NotNull TypeEvalContext context) {
    return getPossibleCallType(context, null);
  }

  @Nullable
  @Override
  public PyType getCallType(@NotNull TypeEvalContext context, @NotNull PyCallSiteExpression callSite) {
    return getPossibleCallType(context, callSite);
  }

  @Nullable
  private PyType getPossibleCallType(@NotNull TypeEvalContext context, @Nullable PyCallSiteExpression callSite) {
    if (!isDefinition()) {
      // Calling an instance invokes its __call__.
      return PyUtil.getReturnTypeOfMember(this, PyNames.CALL, callSite, context);
    }
    else {
      // Calling a class produces an instance of it.
      return withUserDataCopy(new PyClassTypeImpl(getPyClass(), false));
    }
  }

  @Nullable
  @Override
  public List<PyCallableParameter> getParameters(@NotNull TypeEvalContext context) {
    return null;
  }

  @NotNull
  @Override
  public final List<PyClassLikeType> getAncestorTypes(@NotNull final TypeEvalContext context) {
    return myClass.getAncestorTypes(context);
  }

  /** First non-null result from any registered {@link PyClassMembersProvider}. */
  @Nullable
  private static PsiElement resolveByMembersProviders(PyClassType aClass,
                                                      String name,
                                                      @Nullable PsiElement location,
                                                      TypeEvalContext context) {
    for (PyClassMembersProvider provider : Extensions.getExtensions(PyClassMembersProvider.EP_NAME)) {
      final PsiElement resolveResult = provider.resolveMember(aClass, name, location, context);
      if (resolveResult != null) return resolveResult;
    }
    return null;
  }

  /** Same as above, restricted to providers that may override normal resolution. */
  @Nullable
  private static PsiElement resolveByOverridingMembersProviders(PyClassType aClass,
                                                                String name,
                                                                @Nullable PsiElement location,
                                                                @NotNull final TypeEvalContext context) {
    for (PyClassMembersProvider provider : Extensions.getExtensions(PyClassMembersProvider.EP_NAME)) {
      if (provider instanceof PyOverridingClassMembersProvider) {
        final PsiElement resolveResult = provider.resolveMember(aClass, name, location, context);
        if (resolveResult != null) return resolveResult;
      }
    }
    return null;
  }

  /** Providers that override ancestor resolution; note the null TypeEvalContext passed through. */
  @Nullable
  private static PsiElement resolveByOverridingAncestorsMembersProviders(PyClassType type,
                                                                         String name,
                                                                         @Nullable PyExpression location) {
    for (PyClassMembersProvider provider : Extensions.getExtensions(PyClassMembersProvider.EP_NAME)) {
      if (provider instanceof PyOverridingAncestorsClassMembersProvider) {
        final PsiElement resolveResult = provider.resolveMember(type, name, location, null);
        if (resolveResult != null) return resolveResult;
      }
    }
    return null;
  }

  /**
   * Resolves {@code name} among declarations of {@code cls} itself: instance-level
   * declarations first for instance types, else class-level declarations. Results are
   * rated via {@link PyReferenceImpl#getRate}.
   */
  @NotNull
  private static List<? extends RatedResolveResult> resolveInner(@NotNull PyClass cls,
                                                                 boolean isDefinition,
                                                                 @NotNull String name,
                                                                 @Nullable PyExpression location,
                                                                 @NotNull TypeEvalContext context) {
    final PyResolveProcessor processor = new PyResolveProcessor(name);
    final Collection<PsiElement> result;
    // processInstanceLevelDeclarations returns false when the processor found a match and
    // stopped; only then are the instance-level results used.
    if (!isDefinition && !cls.processInstanceLevelDeclarations(processor, location)) {
      result = processor.getElements();
    }
    else {
      cls.processClassLevelDeclarations(processor);
      result = processor.getElements();
    }
    return ContainerUtil.map(result, element -> new RatedResolveResult(PyReferenceImpl.getRate(element, context), element));
  }

  // Tracks types already expanded during one completion pass to avoid cycles.
  private static Key<Set<PyClassType>> CTX_VISITED = Key.create("PyClassType.Visited");
  public static Key<Boolean> CTX_SUPPRESS_PARENTHESES = Key.create("PyFunction.SuppressParentheses");

  @Override
  public Object[] getCompletionVariants(String prefix, PsiElement location, ProcessingContext context) {
    Set<PyClassType> visited = context.get(CTX_VISITED);
    if (visited == null) {
      visited = new HashSet<>();
      context.put(CTX_VISITED, visited);
    }
    if (visited.contains(this)) {
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }
    visited.add(this);
    Set<String> namesAlready = context.get(CTX_NAMES);
    if (namesAlready == null) {
      namesAlready = new HashSet<>();
    }
    List<Object> ret = new ArrayList<>();

    boolean suppressParentheses = context.get(CTX_SUPPRESS_PARENTHESES) != null;
    addOwnClassMembers(location, namesAlready, suppressParentheses, ret, prefix);

    PsiFile origin = (location != null) ? CompletionUtil.getOriginalOrSelf(location).getContainingFile() : null;
    final TypeEvalContext typeEvalContext = TypeEvalContext.codeCompletion(myClass.getProject(), origin);
    addInheritedMembers(prefix, location, namesAlready, context, ret, typeEvalContext);

    // from providers
    for (final PyClassMembersProvider provider : Extensions.getExtensions(PyClassMembersProvider.EP_NAME)) {
      for (final PyCustomMember member : provider.getMembers(this, location, typeEvalContext)) {
        final String name = member.getName();
        if (!namesAlready.contains(name)) {
          ret.add(PyCustomMemberUtils.toLookUpElement(member, getName()));
        }
      }
    }

    if (!myClass.isNewStyleClass(typeEvalContext)) {
      // Old-style classes also expose members of types.InstanceType.
      final PyClass instanceClass =
        as(resolveTopLevelMember(QualifiedName.fromDottedString(PyNames.TYPES_INSTANCE_TYPE), fromFoothold(myClass)), PyClass.class);
      if (instanceClass != null) {
        final PyClassTypeImpl instanceType = new PyClassTypeImpl(instanceClass, false);
        ret.addAll(Arrays.asList(instanceType.getCompletionVariants(prefix, location, context)));
      }
    }

    Collections.addAll(ret, getMetaClassCompletionVariants(prefix, location, context, typeEvalContext));

    return ret.toArray();
  }

  /** Completion variants contributed by the metaclass (new-style classes only). */
  @NotNull
  private Object[] getMetaClassCompletionVariants(@Nullable String prefix,
                                                  @Nullable PsiElement location,
                                                  @NotNull ProcessingContext processingContext,
                                                  @NotNull TypeEvalContext typeEvalContext) {
    if (!myClass.isNewStyleClass(typeEvalContext)) {
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }

    final PyClassLikeType typeType = getMetaClassType(typeEvalContext, true);
    if (typeType == null) {
      return ArrayUtil.EMPTY_OBJECT_ARRAY;
    }

    if (isDefinition()) {
      return typeType.getCompletionVariants(prefix, location, processingContext);
    }
    else if (typeType instanceof PyClassType) {
      final List<PyTargetExpression> typeInstanceAttributes = ((PyClassType)typeType).getPyClass().getInstanceAttributes();
      return ContainerUtil.map2Array(typeInstanceAttributes, LookupElementBuilder::create);
    }

    return ArrayUtil.EMPTY_OBJECT_ARRAY;
  }

  @Override
  public void visitMembers(@NotNull final Processor<PsiElement> processor,
                           final boolean inherited,
                           @NotNull final TypeEvalContext context) {
    myClass.visitMethods(new MyProcessorWrapper<>(processor), false, context);
    myClass.visitClassAttributes(new MyProcessorWrapper<>(processor), false, context);

    for (PyTargetExpression expression : myClass.getInstanceAttributes()) {
      processor.process(expression);
    }

    if (!inherited) {
      return;
    }

    for (final PyClassLikeType type : getAncestorTypes(context)) {
      if (type != null) {
        // "false" because getAncestorTypes returns ALL ancestors, not only direct parents
        type.visitMembers(processor, false, context);
      }
    }
  }

  @NotNull
  @Override
  public Set<String> getMemberNames(boolean inherited, @NotNull TypeEvalContext context) {
    final Set<String> result = new LinkedHashSet<>();

    for (PyFunction function : myClass.getMethods()) {
      result.add(function.getName());
    }

    for (PyTargetExpression expression : myClass.getClassAttributes()) {
      result.add(expression.getName());
    }

    for (PyTargetExpression expression : myClass.getInstanceAttributes()) {
      result.add(expression.getName());
    }

    result.addAll(ObjectUtils.notNull(myClass.getSlots(context), Collections.emptyList()));

    for (PyClassMembersProvider provider : Extensions.getExtensions(PyClassMembersProvider.EP_NAME)) {
      for (PyCustomMember member : provider.getMembers(this, null, context)) {
        result.add(member.getName());
      }
    }

    if (inherited) {
      for (PyClassLikeType type : getAncestorTypes(context)) {
        if (type != null) {
          // Match the definition/instance side of this type before asking the ancestor.
          final PyClassLikeType ancestorType = isDefinition() ? type : type.toInstance();
          result.addAll(ancestorType.getMemberNames(false, context));
        }
      }
    }

    return result;
  }

  /**
   * Adds this class's own members to {@code ret}, skipping names already taken and filtering
   * private/protected names when completing from outside the class. When __slots__ is
   * declared (new-style), instance names are restricted to the slots.
   */
  private void addOwnClassMembers(PsiElement expressionHook,
                                  Set<String> namesAlready,
                                  boolean suppressParentheses,
                                  List<Object> ret,
                                  @Nullable final String prefix) {
    PyClass containingClass = PsiTreeUtil.getParentOfType(expressionHook, PyClass.class);
    if (containingClass != null) {
      containingClass = CompletionUtil.getOriginalElement(containingClass);
    }
    boolean withinOurClass = containingClass == getPyClass() || isInSuperCall(expressionHook);

    final CompletionVariantsProcessor processor = new CompletionVariantsProcessor(
      expressionHook, new FilterNotInstance(myClass), null);
    if (suppressParentheses) {
      processor.suppressParentheses();
    }
    myClass.processClassLevelDeclarations(processor);

    // We are here because of completion (see call stack), so we use code complete here
    final TypeEvalContext context =
      (expressionHook != null ? TypeEvalContext.codeCompletion(myClass.getProject(), myClass.getContainingFile()) : null);

    List<String> slots = myClass.isNewStyleClass(context) ? myClass.getSlots(context) : null;
    if (slots != null) {
      processor.setAllowedNames(slots);
    }
    myClass.processInstanceLevelDeclarations(processor, expressionHook);

    for (LookupElement le : processor.getResultList()) {
      String name = le.getLookupString();
      if (namesAlready.contains(name)) continue;
      if (!withinOurClass && isClassPrivate(name)) continue;
      if (!withinOurClass && isClassProtected(name) && prefix == null) continue;
      namesAlready.add(name);
      ret.add(le);
    }
    if (slots != null) {
      for (String name : slots) {
        if (!namesAlready.contains(name)) {
          ret.add(LookupElementBuilder.create(name));
        }
      }
    }
  }

  /** True when {@code hook} is a qualified reference whose qualifier is a super(...) call. */
  private static boolean isInSuperCall(PsiElement hook) {
    if (hook instanceof PyReferenceExpression) {
      final PyExpression qualifier = ((PyReferenceExpression)hook).getQualifier();
      return qualifier instanceof PyCallExpression && ((PyCallExpression)qualifier).isCalleeText(PyNames.SUPER);
    }
    return false;
  }

  /**
   * Adds completion variants from super-class types.
   * NOTE(review): each ancestry element is added twice — once by name inside the loop and
   * once more by the unconditional {@code ContainerUtil.addAll(ret, ancestry)}; looks like a
   * duplication bug, but preserved here as-is — confirm before changing.
   */
  private void addInheritedMembers(String name,
                                   PsiElement expressionHook,
                                   Set<String> namesAlready,
                                   ProcessingContext context,
                                   List<Object> ret,
                                   @NotNull TypeEvalContext typeEvalContext) {
    for (PyType type : myClass.getSuperClassTypes(typeEvalContext)) {
      if (!(type instanceof PyClassLikeType)) {
        continue;
      }

      final PyClassLikeType classLikeType = (PyClassLikeType)type;
      if (classLikeType.isDefinition() && !myIsDefinition) {
        type = classLikeType.toInstance();
      }

      Object[] ancestry = type.getCompletionVariants(name, expressionHook, context);
      for (Object ob : ancestry) {
        String inheritedName = ob.toString();
        if (!namesAlready.contains(inheritedName) && !isClassPrivate(inheritedName)) {
          ret.add(ob);
          namesAlready.add(inheritedName);
        }
      }
      ContainerUtil.addAll(ret, ancestry);
    }
  }

  // Name-mangled private: double leading underscore, no double trailing underscore.
  private static boolean isClassPrivate(String lookup_string) {
    return lookup_string.startsWith("__") && !lookup_string.endsWith("__");
  }

  // Conventionally protected: single leading underscore.
  private static boolean isClassProtected(@NotNull final String lookupString) {
    return lookupString.startsWith("_") && !lookupString.startsWith("__");
  }

  @Override
  @Nullable
  public String getName() {
    return getPyClass().getName();
  }

  @Override
  public boolean isBuiltin() {
    return PyBuiltinCache.getInstance(myClass).isBuiltin(myClass);
  }

  @Override
  public void assertValid(String message) {
    if (!myClass.isValid()) {
      throw new PsiInvalidElementAccessException(myClass, myClass.getClass().toString() + ": " + message);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    PyClassTypeImpl classType = (PyClassTypeImpl)o;

    if (myIsDefinition != classType.myIsDefinition) return false;
    if (!myClass.equals(classType.myClass)) return false;

    return true;
  }

  @Override
  public int hashCode() {
    int result = myClass.hashCode();
    result = 31 * result + (myIsDefinition ? 1 : 0);
    return result;
  }

  /** Checks whether {@code type} is a class type with the given qualified name. */
  public static boolean is(@NotNull String qName, PyType type) {
    if (type instanceof PyClassType) {
      return qName.equals(((PyClassType)type).getClassQName());
    }
    return false;
  }

  @Override
  public String toString() {
    return (isValid() ? "" : "[INVALID] ") + "PyClassType: " + getClassQName();
  }

  @Override
  public boolean isValid() {
    return myClass.isValid();
  }

  /** Builds a class type from a qualified class name resolved near {@code anchor}, or null. */
  @Nullable
  public static PyClassTypeImpl createTypeByQName(@NotNull final PsiElement anchor,
                                                  @NotNull final String classQualifiedName,
                                                  final boolean isDefinition) {
    final PyClass pyClass = PyPsiFacade.getInstance(anchor.getProject()).createClassByQName(classQualifiedName, anchor);
    if (pyClass == null) {
      return null;
    }
    return new PyClassTypeImpl(pyClass, isDefinition);
  }

  // Adapts a Processor<PsiElement> to the narrower element type expected by
  // visitMethods/visitClassAttributes; always continues processing.
  private static final class MyProcessorWrapper<T extends PsiElement> implements Processor<T> {
    private final Processor<PsiElement> myProcessor;

    private MyProcessorWrapper(@NotNull final Processor<PsiElement> processor) {
      myProcessor = processor;
    }

    @Override
    public boolean process(final T t) {
      myProcessor.process(t);
      return true;
    }
  }

  /**
   * Accepts only targets that are not the given object.
   */
  public static class FilterNotInstance implements Condition<PsiElement> {
    Object instance;

    public FilterNotInstance(Object instance) {
      this.instance = instance;
    }

    @Override
    public boolean value(final PsiElement target) {
      return (instance != target); // identity comparison on purpose: filter out the exact object
    }
  }
}
package com.vaadin.tests.components.select;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

import com.vaadin.data.Container;
import com.vaadin.data.Item;
import com.vaadin.data.util.IndexedContainer;
import com.vaadin.event.Action;
import com.vaadin.event.ItemClickEvent;
import com.vaadin.event.ItemClickEvent.ItemClickListener;
import com.vaadin.event.ItemClickEvent.ItemClickNotifier;
import com.vaadin.server.Resource;
import com.vaadin.tests.components.abstractfield.AbstractFieldTest;
import com.vaadin.ui.AbstractSelect;

/**
 * Base class for manual UI tests of {@link AbstractSelect} subclasses. Adds
 * configuration actions (null selection, multi-select, container sizing,
 * item-click logging) on top of {@code AbstractFieldTest} and logs received
 * item clicks.
 *
 * @param <T> the concrete select component under test
 */
public abstract class AbstractSelectTestCase<T extends AbstractSelect>
        extends AbstractFieldTest<T> implements ItemClickListener {

    public static final String CATEGORY_DATA_SOURCE = "Data source";

    // Current container dimensions; updateContainer() rebuilds the data
    // source from these whenever either value changes.
    private int items = 0;
    private int properties = 0;

    /** Simple holder for context-menu actions used by subclass tests. */
    protected static class ContextMenu {

        private List<Action> items = new ArrayList<Action>();

        public ContextMenu(String caption, Resource icon) {
            addItem(caption, icon);
        }

        public ContextMenu() {
        }

        public void addItem(String caption, Resource icon) {
            items.add(new Action(caption, icon));
        }

        /**
         * Returns all registered actions; {@code target} and {@code sender}
         * are ignored — the same actions apply everywhere.
         */
        public Action[] getActions(Object target, Object sender) {
            Action[] actions = new Action[items.size()];
            for (int i = 0; i < items.size(); i++) {
                actions[i] = items.get(i);
            }

            return actions;
        }
    }

    @Override
    protected void createActions() {
        super.createActions();
        // CATEGORY_SELECTION is inherited from the superclass hierarchy.
        createNullSelectAllowedCheckbox(CATEGORY_SELECTION);
        createMultiSelectCheckbox(CATEGORY_SELECTION);
        createPropertiesInContainerSelect(CATEGORY_DATA_SOURCE);
        createItemsInContainerSelect(CATEGORY_DATA_SOURCE);
    }

    protected void createNullSelectAllowedCheckbox(String category) {
        createBooleanAction("Null Selection Allowed", category, false,
                nullSelectionAllowedCommand);
    }

    protected void createMultiSelectCheckbox(String category) {
        createBooleanAction("Multi select", category, false,
                multiselectCommand);
    }

    /**
     * Creates a select action listing "- None -" plus every container property
     * id as a candidate null-selection item id.
     */
    protected void createNullSelectItemId(String category) {
        LinkedHashMap<String, Object> options = new LinkedHashMap<String, Object>();
        options.put("- None -", null);
        for (Object id : (getComponent()).getContainerDataSource()
                .getContainerPropertyIds()) {
            options.put(id.toString(), id);
        }
        createSelectAction("Null Selection Item Id", category, options,
                "- None -", nullSelectItemIdCommand);
    }

    protected Container createContainer(int properties, int items) {
        return createIndexedContainer(properties, items);
    }

    private Container createIndexedContainer(int properties, int items) {
        IndexedContainer c = new IndexedContainer();
        populateContainer(c, properties, items);

        return c;
    }

    /**
     * Clears {@code c} and refills it with "Property 1..properties" String
     * columns and "Item 1..items" rows, each cell set to "Item i,j".
     * Note: both ids and cell values are 1-based.
     */
    protected void populateContainer(Container c, int properties, int items) {
        c.removeAllItems();

        for (int i = 1; i <= properties; i++) {
            c.addContainerProperty("Property " + i, String.class, "");
        }
        for (int i = 1; i <= items; i++) {
            Item item = c.addItem("Item " + i);
            for (int j = 1; j <= properties; j++) {
                item.getItemProperty("Property " + j).setValue(
                        "Item " + i + "," + j);
            }
        }
    }

    /** Action offering a range of item counts (0–10, then larger steps). */
    protected void createItemsInContainerSelect(String category) {
        LinkedHashMap<String, Integer> options = new LinkedHashMap<String, Integer>();
        for (int i = 0; i <= 10; i++) {
            options.put(String.valueOf(i), i);
        }
        options.put("20", 20);
        options.put("100", 100);
        options.put("1000", 1000);
        options.put("10000", 10000);
        options.put("100000", 100000);

        createSelectAction("Items in container", category, options, "20",
                itemsInContainerCommand);
    }

    /** Action offering a range of property (column) counts. */
    protected void createPropertiesInContainerSelect(String category) {
        LinkedHashMap<String, Integer> options = new LinkedHashMap<String, Integer>();
        options.put("0", 0);
        for (int i = 0; i <= 10; i++) {
            options.put(String.valueOf(i), i);
        }
        options.put("50", 50);
        options.put("100", 100);
        options.put("1000", 1000);

        createSelectAction("Properties in container", category, options, "10",
                propertiesInContainerCommand);
    }

    protected void createItemClickListener(String category) {
        createBooleanAction("Item click listener", category, false,
                itemClickListenerCommand);
    }

    /* COMMANDS */

    protected Command<T, Boolean> nullSelectionAllowedCommand = new Command<T, Boolean>() {

        @Override
        public void execute(T c, Boolean value, Object data) {
            (c).setNullSelectionAllowed(value);
        }
    };

    protected Command<T, Boolean> multiselectCommand = new Command<T, Boolean>() {

        @Override
        public void execute(T c, Boolean value, Object data) {
            c.setMultiSelect(value);
        }
    };

    protected Command<T, Object> nullSelectItemIdCommand = new Command<T, Object>() {

        @Override
        public void execute(T c, Object value, Object data) {
            c.setNullSelectionItemId(value);
        }
    };

    // Changing either dimension rebuilds (and replaces) the whole container.
    protected Command<T, Integer> itemsInContainerCommand = new Command<T, Integer>() {

        @Override
        public void execute(T t, Integer value, Object data) {
            items = value;
            updateContainer();
        }
    };

    protected Command<T, Integer> propertiesInContainerCommand = new Command<T, Integer>() {

        @Override
        public void execute(T t, Integer value, Object data) {
            properties = value;
            updateContainer();
        }
    };

    // Registers/unregisters this test case itself as the item-click listener.
    protected Command<T, Boolean> itemClickListenerCommand = new Command<T, Boolean>() {

        @Override
        public void execute(T c, Boolean value, Object data) {
            if (value) {
                ((ItemClickNotifier) c)
                        .addItemClickListener(AbstractSelectTestCase.this);
            } else {
                ((ItemClickNotifier) c)
                        .removeItemClickListener(AbstractSelectTestCase.this);
            }
        }
    };

    protected void setContainer(Container newContainer) {
        getComponent().setContainerDataSource(newContainer);
    }

    protected void updateContainer() {
        setContainer(createContainer(properties, items));
    }

    /* COMMANDS END */

    /**
     * Logs every item click with button, click kind, coordinates, item id,
     * property id and any held modifier keys.
     */
    @Override
    public void itemClick(ItemClickEvent event) {
        String type = event.getButtonName();
        if (event.isDoubleClick()) {
            type += " double-click";
        } else {
            type += " click";
        }

        String target = "source: " + event.getSource();
        target += ", client: [" + event.getClientX() + ","
                + event.getClientY() + "];";
        target += ", relative: [" + event.getRelativeX() + ","
                + event.getRelativeY() + "]";
        target += ", itemId: " + event.getItemId();
        target += ", propertyId: " + event.getPropertyId();

        String modifierKeys = "";
        if (event.isCtrlKey()) {
            modifierKeys += "CTRL ";
        }
        if (event.isAltKey()) {
            modifierKeys += "ALT ";
        }
        if (event.isMetaKey()) {
            modifierKeys += "META ";
        }
        if (event.isShiftKey()) {
            modifierKeys += "SHIFT ";
        }

        log(modifierKeys + type + " on " + target);
    }
}
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.android_webview.test; import android.test.suitebuilder.annotation.LargeTest; import android.util.Pair; import android.webkit.ValueCallback; import org.chromium.android_webview.AwContents; import org.chromium.android_webview.AwQuotaManagerBridge; import org.chromium.base.test.util.Feature; import org.chromium.content.browser.ContentSettings; import org.chromium.content.browser.test.util.CallbackHelper; import org.chromium.content.browser.test.util.Criteria; import org.chromium.content.browser.test.util.CriteriaHelper; import org.chromium.net.test.util.TestWebServer; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.List; public class AwQuotaManagerBridgeTest extends AwTestBase { private TestAwContentsClient mContentsClient; private AwTestContainerView mTestView; private AwContents mAwContents; private TestWebServer mWebServer; private String mOrigin; @Override public void setUp() throws Exception { super.setUp(); mContentsClient = new TestAwContentsClient(); mTestView = createAwTestContainerViewOnMainSync(mContentsClient); mAwContents = mTestView.getAwContents(); mWebServer = new TestWebServer(false); mOrigin = mWebServer.getBaseUrl(); ContentSettings settings = getContentSettingsOnUiThread(mAwContents); settings.setJavaScriptEnabled(true); settings.setDomStorageEnabled(true); settings.setAppCacheEnabled(true); settings.setAppCachePath("whatever"); // Enables AppCache. 
} @Override public void tearDown() throws Exception { deleteAllData(); if (mWebServer != null) { mWebServer.shutdown(); } super.tearDown(); } private AwQuotaManagerBridge getQuotaManagerBridge() throws Exception { return runTestOnUiThreadAndGetResult(new Callable<AwQuotaManagerBridge>() { @Override public AwQuotaManagerBridge call() throws Exception { return AwQuotaManagerBridge.getInstance(); } }); } private void deleteAllData() throws Exception { final AwQuotaManagerBridge bridge = getQuotaManagerBridge(); getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { bridge.deleteAllData(); } }); } private void deleteOrigin(final String origin) throws Exception { final AwQuotaManagerBridge bridge = getQuotaManagerBridge(); getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { bridge.deleteOrigin(origin); } }); } private static class GetOriginsCallbackHelper extends CallbackHelper { private AwQuotaManagerBridge.Origins mOrigins; public void notifyCalled(AwQuotaManagerBridge.Origins origins) { mOrigins = origins; notifyCalled(); } public AwQuotaManagerBridge.Origins getOrigins() { assert getCallCount() > 0; return mOrigins; } } private AwQuotaManagerBridge.Origins getOrigins() throws Exception { final GetOriginsCallbackHelper callbackHelper = new GetOriginsCallbackHelper(); final AwQuotaManagerBridge bridge = getQuotaManagerBridge(); int callCount = callbackHelper.getCallCount(); getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { bridge.getOrigins( new ValueCallback<AwQuotaManagerBridge.Origins>() { @Override public void onReceiveValue(AwQuotaManagerBridge.Origins origins) { callbackHelper.notifyCalled(origins); } } ); } }); callbackHelper.waitForCallback(callCount); return callbackHelper.getOrigins(); } private static class LongValueCallbackHelper extends CallbackHelper { private long mValue; public void notifyCalled(long value) { mValue = value; notifyCalled(); } public long getValue() 
{ assert getCallCount() > 0; return mValue; } } private long getQuotaForOrigin(final String origin) throws Exception { final LongValueCallbackHelper callbackHelper = new LongValueCallbackHelper(); final AwQuotaManagerBridge bridge = getQuotaManagerBridge(); int callCount = callbackHelper.getCallCount(); getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { bridge.getQuotaForOrigin("foo.com", new ValueCallback<Long>() { @Override public void onReceiveValue(Long quota) { callbackHelper.notifyCalled(quota); } } ); } }); callbackHelper.waitForCallback(callCount); return callbackHelper.getValue(); } private long getUsageForOrigin(final String origin) throws Exception { final LongValueCallbackHelper callbackHelper = new LongValueCallbackHelper(); final AwQuotaManagerBridge bridge = getQuotaManagerBridge(); int callCount = callbackHelper.getCallCount(); getInstrumentation().runOnMainSync(new Runnable() { @Override public void run() { bridge.getUsageForOrigin(origin, new ValueCallback<Long>() { @Override public void onReceiveValue(Long usage) { callbackHelper.notifyCalled(usage); } } ); } }); callbackHelper.waitForCallback(callCount); return callbackHelper.getValue(); } private void useAppCache() throws Exception { final String CACHED_FILE_PATH = "/foo.js"; final String CACHED_FILE_CONTENTS = "1 + 1;"; mWebServer.setResponse(CACHED_FILE_PATH, CACHED_FILE_CONTENTS, null); final String MANIFEST_PATH = "/foo.manifest"; final String MANIFEST_CONTENTS = "CACHE MANIFEST\nCACHE:\n" + CACHED_FILE_PATH; List<Pair<String, String>> manifestHeaders = new ArrayList<Pair<String, String>>(); manifestHeaders.add(Pair.create("Content-Disposition", "text/cache-manifest")); mWebServer.setResponse(MANIFEST_PATH, MANIFEST_CONTENTS, manifestHeaders); final String PAGE_PATH = "/appcache.html"; final String PAGE_CONTENTS = "<html manifest=\"" + MANIFEST_PATH + "\">" + "<head><script src=\"" + CACHED_FILE_PATH + "\"></script></head></html>"; String url = 
mWebServer.setResponse(PAGE_PATH, PAGE_CONTENTS, null); loadUrlSync(mAwContents, mContentsClient.getOnPageFinishedHelper(), url); executeJavaScriptAndWaitForResult(mAwContents, mContentsClient, "window.applicationCache.update();"); } @LargeTest @Feature({"AndroidWebView", "WebStore"}) public void testDeleteAllWithAppCache() throws Exception { long currentUsage = getUsageForOrigin(mOrigin); assertEquals(0, currentUsage); useAppCache(); assertTrue(CriteriaHelper.pollForCriteria(new Criteria() { @Override public boolean isSatisfied() { try { return getUsageForOrigin(mOrigin) > 0; } catch (Exception e) { return false; } } })); deleteAllData(); assertTrue(CriteriaHelper.pollForCriteria(new Criteria() { @Override public boolean isSatisfied() { try { return getUsageForOrigin(mOrigin) == 0; } catch (Exception e) { return false; } } })); } @LargeTest @Feature({"AndroidWebView", "WebStore"}) public void testDeleteOriginWithAppCache() throws Exception { long currentUsage = getUsageForOrigin(mOrigin); assertEquals(0, currentUsage); useAppCache(); assertTrue(CriteriaHelper.pollForCriteria(new Criteria() { @Override public boolean isSatisfied() { try { return getUsageForOrigin(mOrigin) > 0; } catch (Exception e) { return false; } } })); deleteOrigin(mOrigin); assertTrue(CriteriaHelper.pollForCriteria(new Criteria() { @Override public boolean isSatisfied() { try { return getUsageForOrigin(mOrigin) == 0; } catch (Exception e) { return false; } } })); } @LargeTest @Feature({"AndroidWebView", "WebStore"}) public void testGetResultsMatch() throws Exception { useAppCache(); CriteriaHelper.pollForCriteria(new Criteria() { @Override public boolean isSatisfied() { try { return getOrigins().mOrigins.length > 0; } catch (Exception e) { return false; } } }); AwQuotaManagerBridge.Origins origins = getOrigins(); assertEquals(origins.mOrigins.length, origins.mUsages.length); assertEquals(origins.mOrigins.length, origins.mQuotas.length); for (int i = 0; i < origins.mOrigins.length; ++i) { 
assertEquals(origins.mUsages[i], getUsageForOrigin(origins.mOrigins[i])); assertEquals(origins.mQuotas[i], getQuotaForOrigin(origins.mOrigins[i])); } } }
package com.loadtestgo.script.editor.swing; import com.loadtestgo.util.Path; import org.fife.rsta.ac.LanguageSupportFactory; import org.fife.ui.rsyntaxtextarea.RSyntaxDocument; import org.fife.ui.rsyntaxtextarea.parser.AbstractParser; import org.fife.ui.rsyntaxtextarea.parser.DefaultParseResult; import org.fife.ui.rsyntaxtextarea.parser.DefaultParserNotice; import org.fife.ui.rsyntaxtextarea.parser.ParseResult; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.text.BadLocationException; import javax.swing.text.Document; import javax.swing.text.Element; import java.awt.*; public class FilePanel extends PanelWithHeader { private MainWindow mainWindow; private SourceFile sourceFile; private FileTextArea textArea; private FileLineNumbers fileLineNumbers; private JScrollPane pane; private int currentPos; private boolean showDebugFrame; private JLabel tabLabel; private JPanel tabHeader; public FilePanel(MainWindow mainWindow, CodeModel codeModel, SourceFile sourceFile) { this.mainWindow = mainWindow; this.sourceFile = sourceFile; this.currentPos = -1; this.textArea = new FileTextArea(sourceFile.getSource()); this.textArea.setFocusTraversalKeysEnabled(false); this.textArea.setColumns(80); this.pane = new JScrollPane(); LanguageSupportFactory.get().register(textArea); // TODO: Add auto completion for our pizzascript.js // // RhinoJavaScriptLanguageSupport support1 = new RhinoJavaScriptLanguageSupport(); // support1.install(textArea); this.fileLineNumbers = new FileLineNumbers(this); this.pane.setViewportView(this.textArea); this.pane.setRowHeaderView(this.fileLineNumbers); Document document = this.textArea.getDocument(); document.addDocumentListener(new DocumentListener() { @Override public void insertUpdate(DocumentEvent e) { updateBreakpoints(e); updateLineNumbers(); syncSourceFile(); updateUndo(); } @Override public void removeUpdate(DocumentEvent e) { updateBreakpoints(e); updateLineNumbers(); 
syncSourceFile(); updateUndo(); } @Override public void changedUpdate(DocumentEvent e) { updateBreakpoints(e); updateLineNumbers(); syncSourceFile(); updateUndo(); } }); setLayout(new GridLayout(1, 1)); this.textArea.addParser(new SyntaxParser(sourceFile, codeModel)); add(this.pane); } private void updateBreakpoints(DocumentEvent e) { Document doc = textArea.getDocument(); Element lineMap = doc.getDefaultRootElement(); DocumentEvent.ElementChange change = e.getChange(lineMap); if (change != null ){ Element[] added = change.getChildrenAdded(); Element[] removed = change.getChildrenRemoved(); int lineStart = lineMap.getElementIndex(e.getOffset()); int numAdded = 0; int numRemove = 0; if (added != null) { numAdded = added.length; } if (removed != null) { numRemove = removed.length; } int changed = numAdded - numRemove; sourceFile.moveBreakpoints(lineStart + 1, changed); } } private void updateUndo() { mainWindow.updateUndoState(); } public void toggleBreakPoint(int line) { if (!sourceFile.isBreakpoint(line)) { setBreakpoint(line); } else { clearBreakpoint(line); } } public void toggleBreakPoint() { try { int caretPos = textArea.getCaretPosition(); int lineNum = textArea.getLineOfOffset(caretPos); toggleBreakPoint(lineNum + 1); } catch (BadLocationException e) { // Do nothing } } public void setBreakpoint(int line) { boolean changed = sourceFile.setBreakpoint(line, true); if (changed) { fileLineNumbers.repaint(); } } public void clearBreakpoint(int line) { boolean changed = sourceFile.setBreakpoint(line, false); if (changed) { fileLineNumbers.repaint(); } } public String getFilePath() { return sourceFile.getFilePath(); } public synchronized void setPosition(int pos) { textArea.selectLine(pos); currentPos = pos; fileLineNumbers.repaint(); } public FileTextArea getTextArea() { return textArea; } public synchronized int getCurrentPos() { return currentPos; } public void undo() { textArea.undoLastAction(); } public void redo() { textArea.redoLastAction(); } public boolean 
canUndo() { return textArea.canUndo(); } public boolean canRedo() { return textArea.canRedo(); } private void updateLineNumbers() { fileLineNumbers.update(); fileLineNumbers.repaint(); } public SourceFile getSourceFile() { return sourceFile; } public void syncSourceFile() { sourceFile.update(textArea.getText()); if (!sourceFile.isModified()) { sourceFile.setIsModified(true); updateUIFromFileName(); } } public boolean showDebugFrame() { return showDebugFrame; } public void setShowDebugFrame(boolean showDebugFrame) { this.showDebugFrame = showDebugFrame; } public void setTabHeader(JLabel tabLabel, JPanel tabHeader) { this.tabLabel = tabLabel; this.tabHeader = tabHeader; updateUIFromFileName(); } public void updateUIFromFileName() { String fileLabel = Path.getFileName(sourceFile.getFilePath()); if (sourceFile.isModified()) { fileLabel += " *"; } if (tabLabel != null) { tabLabel.setText(fileLabel); } if (tabHeader != null) { tabHeader.setToolTipText(sourceFile.getFilePath()); } } public void selectLine(int lineNumber) { if (lineNumber <= 0) { return; } try { int start = textArea.getLineStartOffset(lineNumber - 1); int end = textArea.getLineEndOffset(lineNumber - 1); textArea.setCaretPosition(end); textArea.moveCaretPosition(start); } catch (javax.swing.text.BadLocationException ignore) { } } public void setDefaultFocus() { textArea.requestFocusInWindow(); } /** * This reports errors and updates the internal code model that we use to * allow breakpoints to be set on lines that have code on them. 
*/ private class SyntaxParser extends AbstractParser { private SourceFile sourceFile; private CodeModel codeModel; SyntaxParser(SourceFile sourceFile, CodeModel codeModel) { this.sourceFile = sourceFile; this.codeModel = codeModel; } @Override public ParseResult parse(RSyntaxDocument doc, String style) { DefaultParseResult result = new DefaultParseResult(this); CodeModel.ErrorMessage errorMessage = codeModel.compileScript(sourceFile); if (errorMessage != null) { int line = errorMessage.line - 1; try { DefaultParserNotice notice = new DefaultParserNotice(this, errorMessage.message, line, textArea.getLineStartOffset(line), textArea.getLineEndOffset(line)); result.addNotice(notice); } catch (BadLocationException e) { // Do nothing } } return result; } } }
// Copyright (c) 2003 Compaq Corporation. All rights reserved. // Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved. // Last modified on Mon 30 Apr 2007 at 13:33:37 PST by lamport // modified on Mon Nov 26 15:46:11 PST 2001 by yuanyu package tlc2.tool.liveness; import java.io.IOException; import java.util.HashSet; import java.util.Set; import tlc2.util.BitVector; import tlc2.util.BufferedRandomAccessFile; public class GraphNode extends AbstractGraphNode { /** * The record size indicates the number of integers used by each transition * in the array of nnodes (2x32bit to keep the fp and 32bit to keep the tableau * idx). */ private static final int NNODE_RECORD_SIZE = 3; /** * GraphNode is a node in the behaviour graph. We're going to only store * fingerprints of states, rather than actual states. So, as we encounter * each state, we need to calculate all the <>[] and []<>'s listed in the * order of solution. For each outgoing edge, we record the fingerprint of * the target node and the checkActions along it. * * The field tidx is the unique index for the tableau graph node. If tindex * = -1, then there is no tableau. So, the maximum size of tableau is 2^31. 
*/ private final static int[] emptyIntArr = new int[0]; final long stateFP; // fingerprint of the state /** * Next nodes are the successor {@link GraphNode}s of the current * {@link GraphNode} */ private int[] nnodes; // outgoing links final int tindex; public GraphNode(long fp, int tindex) { this(fp, tindex, emptyIntArr, new BitVector(0)); } private GraphNode(long fp, int tindex, int[] nnodes, BitVector checks) { super(checks); this.stateFP = fp; this.tindex = tindex; this.nnodes = nnodes; } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (int) (stateFP ^ (stateFP >>> 32)); result = prime * result + tindex; return result; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } GraphNode other = (GraphNode) obj; if (stateFP != other.stateFP) { return false; } if (tindex != other.tindex) { return false; } return true; } public final long getStateFP(int i) { long high = this.nnodes[NNODE_RECORD_SIZE * i]; long low = this.nnodes[NNODE_RECORD_SIZE * i + 1]; return (high << 32) | (low & 0xFFFFFFFFL); } public final int getTidx(int i) { return this.nnodes[NNODE_RECORD_SIZE * i + 2]; } public final int succSize() { // offset being != NO_FREE_SLOTS indicates that the nnodes array has been // overallocated in preparation to batch-insert transitions but the // transitions have not been added yet. In this case the nnodes.length / // NNODE_RECORD_SIZE is *not* the actual number of transitions, offset / NNODE_RECORD_SIZE is! if (this.offset != NO_FREE_SLOTS) { return this.offset / NNODE_RECORD_SIZE; } return this.nnodes.length / NNODE_RECORD_SIZE; } /** * Points to the first available slot in {@link GraphNode#nnodes} iff free * slots are available. "NO_FREE_SLOTS" indicates no free slots are available. 
* * @see GraphNode#allocate(int) */ private int offset = NO_FREE_SLOTS; private static final int NO_FREE_SLOTS = -1; /** * Allocates memory for subsequent * {@link GraphNode#addTransition(long, int, int, int, boolean[])} calls. * This is useful if * {@link GraphNode#addTransition(long, int, int, int, boolean[])} gets * invoked from within a loop when the approximate number of invocations is * known in advance. In this case {@link GraphNode} can reserve the memory * for the number of transitions in advance which greatly improves the * insertion time of * {@link GraphNode#addTransition(long, int, int, int, boolean[])}. Once all * transitions have been added to via * {@link GraphNode#addTransition(long, int, int, int, boolean[])}, * optionally call the {@link GraphNode#realign()} method to discard of * unused memory. * <p> * Technically this essentially grows GraphNode's internal data structure. * <p> * Do note that you can call addTransition <em>without</em> calling allocate * first. It then automatically allocates a memory for a <em>single</em> * transition. * * @param transitions * The approximate number of transitions that will be added * subsequently. * * @see GraphNode#addTransition(long, int, int, int, boolean[]) * @see GraphNode#realign() */ private final void allocate(final int transitions) { final int len = this.nnodes.length; int[] newNodes = new int[len + (NNODE_RECORD_SIZE * transitions)]; System.arraycopy(this.nnodes, 0, newNodes, 0, len); this.nnodes = newNodes; this.offset = len; } /** * Add a new transition to the node target. * * @param fp * fingerprint to add * @param tidx * tableau index to add * @param slen * number of solutions * @param alen * number of actions * @param acts * A {@link BitVector} of action results. Each bit in the vector * represents the result of the corresponding action (true or * false) returned by * tlc2.tool.liveness.OrderOfSolution.checkAction(TLCState, * TLCState, BitVector, int). 
<code>null</code> if no action * constraints to check. * @param actsOffset * The offset into the {@link BitVector} acts. acts may hold * action results for more than just the currently added * transition. In this case, provide an zero-based offset for * where the action results in BitVector start. 0 if the given * {@link BitVector} is exclusively used for the current * transition. * @param allocationHint * A (Naturals \ {0}) hint telling the method's implementation * how many memory to allocate for subsequent transition * additions (used when called from within for loop). Zero or * negative hints are ignored (negative hints are the result of * nested for loop where the 1. iteration produces a bad average * of how many additions are made across all iterations). * @see GraphNode#allocate(int) */ public final void addTransition(long fp, int tidx, int slen, int alen, final BitVector acts, final int actsOffset, final int allocationHint) { // Grows BitVector "checks" and sets the corresponding field to true if // acts is true (false is default and thus can be ignored). if (acts != null) { int pos = slen + alen * this.succSize(); for (int i = 0; i < alen; i++) { if (acts.get(actsOffset + i)) { this.checks.set(pos + i); } } } if (this.offset == NO_FREE_SLOTS) { // Have to create a new slot regardless of 0 or negative hint, thus // Math.max... this.allocate(Math.max(allocationHint, 1)); } this.nnodes[this.offset] = (int) (fp >>> 32); this.nnodes[this.offset + 1] = (int) (fp & 0xFFFFFFFFL); this.nnodes[this.offset + 2] = tidx; this.offset = this.offset + NNODE_RECORD_SIZE; if (this.offset == this.nnodes.length) { this.offset = NO_FREE_SLOTS; } } /** * Trims {@link GraphNode}'s internal data structure to its current real * memory requirement. * * @return The number of over allocated memory or zero if memory allocated * by corresponding allocate call has been used up completely. 
* * @see GraphNode#allocate(int) */ public int realign() { int result = 0; // It is a noop iff offset == NO_FREE_SLOTS if (this.offset != NO_FREE_SLOTS) { result = (this.nnodes.length - this.offset) / NNODE_RECORD_SIZE; // shrink newNodes to correct size int[] newNodes = new int[this.offset]; System.arraycopy(this.nnodes, 0, newNodes, 0, newNodes.length); this.nnodes = newNodes; this.offset = NO_FREE_SLOTS; } return result; } /* Return true iff there is an outgoing edge to target. */ public final boolean transExists(long fp, int tidx) { // TODO Switch to a more efficient transExists implementation to handle // large numbers of transitions. The current implementation below uses a // linear search over all transitions. // The fact that the given fp is used as an index for hash-based lookup // methods in various places of TLC, makes it the obvious candidate as a // improved strategy. One behavioral difference a hash has, is that the // sequential iteration of all nnodes produces a different (yet stable) // order. int len = this.nnodes.length; // Stop linear search on internal nnodes buffer when a free slot has // been // reached. The free slot detection work with the allocation offset that // points to the end of the filled slots (slots are filled in ascending // order). If offset is marked invalid ("NO_FREE_SLOTS"), the nnodes buffer is // completely occupied and has to be searched to the end. 
if (this.offset != NO_FREE_SLOTS) { len = offset; } int high = (int) (fp >>> 32); int low = (int) (fp & 0xFFFFFFFFL); for (int i = 0; i < len; i += NNODE_RECORD_SIZE) { if (this.nnodes[i] == high && this.nnodes[i + 1] == low && this.nnodes[i + 2] == tidx) { return true; } } return false; } public boolean checkInvariants(final int slen, final int alen) { final Set<Transition> transitions = new HashSet<Transition>(); for (int i = 0; i < succSize(); i++) { final Transition t = new Transition(getStateFP(i), getTidx(i), getCheckAction(slen, alen, i)); transitions.add(t); } return transitions.size() == succSize(); } public Set<Transition> getTransition() { return getTransition(0, 0); } public Set<Transition> getTransition(final int slen, final int alen) { final Set<Transition> transitions = new HashSet<Transition>(); for (int i = 0; i < succSize(); i++) { final BitVector bv = new BitVector(alen); for (int j = 0; j < alen; j++) { if (getCheckAction(slen, alen, i, j)) { bv.set(j); } } transitions.add(new Transition(getStateFP(i), getTidx(i), bv)); } return transitions; } public static class Transition { private final long fp; private final int tidx; private final BitVector bv; public Transition(long fp, int tidx, BitVector bv) { this.fp = fp; this.tidx = tidx; this.bv = bv; } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((bv == null) ? 
0 : bv.hashCode()); result = prime * result + (int) (fp ^ (fp >>> 32)); result = prime * result + tidx; return result; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Transition other = (Transition) obj; if (bv == null) { if (other.bv != null) return false; } else if (!bv.equals(other.bv)) return false; if (fp != other.fp) return false; if (tidx != other.tidx) return false; return true; } public BitVector getChecks() { return bv; } public long getFP() { return fp; } public int getTidx() { return tidx; } } /* Return the tableau graph node used by this. */ public final TBGraphNode getTNode(TBGraph tableau) { return tableau.getNode(this.tindex); } /** * Writes this {@link GraphNode} into the given * {@link BufferedRandomAccessFile} * * @param nodeRAF * @throws IOException */ void write(final BufferedRandomAccessFile nodeRAF) throws IOException { assert offset == NO_FREE_SLOTS; // assert that nnodes hasn't been overallocated. // Write nnodes final int cnt = nnodes.length; nodeRAF.writeNat(cnt); for (int i = 0; i < cnt; i++) { nodeRAF.writeInt(nnodes[i]); } // Write checks checks.write(nodeRAF); } void read(final BufferedRandomAccessFile nodeRAF) throws IOException { // Read nnodes final int cnt = nodeRAF.readNat(); nnodes = new int[cnt]; for (int i = 0; i < cnt; i++) { nnodes[i] = nodeRAF.readInt(); } // Read checks checks = new BitVector(); checks.read(nodeRAF); assert offset == NO_FREE_SLOTS; } public final String toString() { // A GraphNode does not know the action length. This is kept elsewhere in the code. 
return toString(0).replace("[] ", ""); } public final String toString(final int alen) { StringBuffer buf = new StringBuffer(); buf.append("<" + this.stateFP + "," + this.tindex + "> --> "); for (int i = 0; i < succSize(); i++) { // action checks buf.append("["); for (int j = 0; j < alen; j++) { if (getCheckAction(0, 2, i, j)) { buf.append("t"); } else { buf.append("f"); } } buf.append("] "); // fingerprint/tableau id buf.append("<" + getStateFP(i) + "," + getTidx(i) + ">"); buf.append(", "); } return buf.substring(0, buf.length() - ", ".length()); // chop off dangling ", " } public String toDotViz(final boolean isInitState, final boolean hasTableau, final int slen, final int alen) { return toDotViz(isInitState, hasTableau, slen, alen, null); } public String toDotViz(final boolean isInitState, final boolean hasTableau, final int slen, final int alen, TableauNodePtrTable filter) { // The node's id including its tidx if any. It uses the complete // fingerprint. String id = Long.toString(this.stateFP); if (hasTableau) { id += "." + this.tindex; } // Nodes label and a marker if it is an init state. The label is // shortened to 8 chars max to avoid screen clutter. It's possible // that the resulting graph will have multiple nodes with an identical // label iff the first 6 (+2) chars of their fingerprint match. However // the graph will still contain all nodes regardless of the label // collision due to id. String label = Long.toString(this.stateFP).substring(0, 6) + (hasTableau ? "." 
+ this.tindex : ""); if (slen > 0) { label += "\n"; for (int i = 0; i < slen; i++) { if (getCheckState(i)) { label += "t"; } else { label += "f"; } } } final StringBuffer buf = new StringBuffer(); if (isInitState) { buf.append("\"" + id + "\" [style = filled][label = \"" + label + "\"]\n"); // node's label } else { buf.append("\"" + id + "\" [label = \"" + label + "\"]\n"); } // Each outgoing transition for (int i = 0; i < succSize(); i++) { final long stateFP = getStateFP(i); final int tidx = getTidx(i); // If a filter is given, check if this node is in filter if (filter != null && filter.get(stateFP, tidx) == -1) { continue; } String fp = Long.toString(stateFP); // if (fp == this.stateFP) { // // skip self loops if edge count to large for dotViz to handle. // continue; // } buf.append("\"" + id + "\" -> "); if (hasTableau) { buf.append(("\"" + fp) + "." + tidx + "\""); } else { //Omit tableau index when it's -1 (indicating no tableau) buf.append(("\"" + fp) + "\""); } buf.append(" [label=\""); for (int j = 0; j < alen; j++) { if (getCheckAction(slen, alen, i, j)) { buf.append("t"); } else { buf.append("f"); } } buf.append("\"];"); buf.append("\n"); } return buf.toString(); } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.PositionConstants; import org.eclipse.draw2d.Shape; import org.eclipse.draw2d.StackLayout; import org.eclipse.emf.ecore.EObject; import org.eclipse.gef.EditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.commands.Command; import org.eclipse.gef.editpolicies.LayoutEditPolicy; import org.eclipse.gef.editpolicies.NonResizableEditPolicy; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderedShapeEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.BorderItemSelectionEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CreationEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles; import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator; import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout; import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel; import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import org.wso2.developerstudio.eclipse.gmf.esb.ComplexEndpoints; import org.wso2.developerstudio.eclipse.gmf.esb.FailoverEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.DefaultEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.LoadBalanceEndPoint; import org.wso2.developerstudio.eclipse.gmf.esb.SendMediator; import org.wso2.developerstudio.eclipse.gmf.esb.Sequences; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractEndpoint;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractSequencesEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShapeWithLabel;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.ShowPropertyViewEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.EndpointUtils;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.DefaultEndPointCanonicalEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.DefaultEndPointItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;

/**
 * GMF edit part for the "Default Endpoint" node of the ESB diagram. Wires up
 * the edit policies, creates the node figure ({@link DefaultEndPointFigure})
 * and pins the input/output connector children to fixed border positions.
 *
 * @generated NOT
 */
public class DefaultEndPointEditPart extends AbstractEndpoint {

	/**
	 * Visual id of this node type as registered in EsbVisualIDRegistry.
	 *
	 * @generated
	 */
	public static final int VISUAL_ID = 3609;

	/**
	 * Pane that non-border children are added to (see {@link #setupContentPane}).
	 *
	 * @generated
	 */
	protected IFigure contentPane;

	/**
	 * @generated
	 */
	public DefaultEndPointEditPart(View view) {
		super(view);
	}

	/**
	 * Installs creation, semantic, drag&amp;drop, canonical, layout and open
	 * (double-click) edit policies for this node.
	 *
	 * @generated NOT
	 */
	protected void createDefaultEditPolicies() {
		installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicy());
		super.createDefaultEditPolicies();
		installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new DefaultEndPointItemSemanticEditPolicy());
		installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new DragDropEditPolicy());
		installEditPolicy(EditPolicyRoles.CANONICAL_ROLE, new DefaultEndPointCanonicalEditPolicy());
		installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
		// Handles the double-click event (opens the property view).
		installEditPolicy(EditPolicyRoles.OPEN_ROLE, new ShowPropertyViewEditPolicy());
		// XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies
		// removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
	}

	/**
	 * Builds the layout policy: the two connector children get a border-item
	 * selection policy; all other children fall back to their own primary drag
	 * policy (or a non-resizable one). Moving/creating children through this
	 * policy is not supported (both commands are null).
	 *
	 * @generated
	 */
	protected LayoutEditPolicy createLayoutEditPolicy() {
		org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {

			protected EditPolicy createChildEditPolicy(EditPart child) {
				View childView = (View) child.getModel();
				switch (EsbVisualIDRegistry.getVisualID(childView)) {
				case DefaultEndPointInputConnectorEditPart.VISUAL_ID:
				case DefaultEndPointOutputConnectorEditPart.VISUAL_ID:
					return new BorderItemSelectionEditPolicy();
				}
				EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
				if (result == null) {
					result = new NonResizableEditPolicy();
				}
				return result;
			}

			protected Command getMoveChildrenCommand(Request request) {
				return null; // moving children is not supported
			}

			protected Command getCreateCommand(CreateRequest request) {
				return null; // creating children via layout is not supported
			}
		};
		return lep;
	}

	/**
	 * Creates the primary shape. The anonymous subclass re-routes connections
	 * whenever the figure is placed at a real (non-zero) location.
	 *
	 * @generated NOT
	 */
	protected IFigure createNodeShape() {
		return primaryShape = new DefaultEndPointFigure() {
			public void setBounds(org.eclipse.draw2d.geometry.Rectangle rect) {
				super.setBounds(rect);
				// A (0,0) location means the figure has not been laid out yet.
				if (this.getBounds().getLocation().x != 0 && this.getBounds().getLocation().y != 0) {
					getMostSuitableElementToConnect();
					reAllocate(rect);
				}
			};
		};
	}

	/**
	 * @generated
	 */
	public DefaultEndPointFigure getPrimaryShape() {
		return (DefaultEndPointFigure) primaryShape;
	}

	/**
	 * Adds the well-known children (name label, description label and the two
	 * connectors) at their fixed locations. Returns false for any other child
	 * so the default behaviour applies.
	 *
	 * @generated NOT
	 */
	protected boolean addFixedChild(EditPart childEditPart) {
		if (childEditPart instanceof DefaultEndPointEndPointNameEditPart) {
			((DefaultEndPointEndPointNameEditPart) childEditPart).setLabel(getPrimaryShape()
					.getFigureDefaultEndPointNamePropertyLabel());
			return true;
		}
		if (childEditPart instanceof DefaultEndPointInputConnectorEditPart) {
			double position;
			EObject parentEndpoint = ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) (childEditPart.getParent())
					.getModel()).getElement();
			if (((DefaultEndPoint) parentEndpoint).getInputConnector().getIncomingLinks().size() != 0) {
				EObject source = ((DefaultEndPoint) parentEndpoint).getInputConnector().getIncomingLinks().get(0)
						.getSource().eContainer();
				/*
				 * Position of input connector of the endpoint should be 0.5 inside ComplexEndpoints and Sequences.
				 */
				/* position = ((source instanceof ComplexEndpoints) || (source
						.eContainer().eContainer() instanceof Sequences)) ? 0.5 : 0.25;*/
				position = 0.5;
			} else {
				/* position = ((this.getParent().getParent().getParent() instanceof ComplexEndpointsEditPart) || (this
						.getParent().getParent().getParent() instanceof AbstractSequencesEditPart)) ? 0.5 : 0.25;*/
				position = 0.5;
			}
			// NOTE(review): both branches currently assign 0.5, so the branch
			// (and the 'source' lookup above) only matters if the commented-out
			// position logic is ever restored.
			IFigure borderItemFigure = ((DefaultEndPointInputConnectorEditPart) childEditPart).getFigure();
			// Input connector sits on the west edge, halfway down.
			BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure,
					PositionConstants.WEST, position);
			getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
			return true;
		}
		if (childEditPart instanceof DefaultEndPointOutputConnectorEditPart) {
			IFigure borderItemFigure = ((DefaultEndPointOutputConnectorEditPart) childEditPart).getFigure();
			// Output connector sits on the west edge at 3/4 height.
			BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure,
					PositionConstants.WEST, 0.75);
			getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
			return true;
		}
		if (childEditPart instanceof DefaultEndPointDescriptionEditPart) {
			((DefaultEndPointDescriptionEditPart) childEditPart).setLabel(getPrimaryShape()
					.getEndpointDescriptionLabel());
			return true;
		}
		return false;
	}

	/**
	 * Mirror of {@link #addFixedChild}: detaches the fixed children's figures.
	 *
	 * @generated
	 */
	protected boolean removeFixedChild(EditPart childEditPart) {
		if (childEditPart instanceof DefaultEndPointEndPointNameEditPart) {
			return true;
		}
		if (childEditPart instanceof DefaultEndPointInputConnectorEditPart) {
			getBorderedFigure().getBorderItemContainer().remove(
					((DefaultEndPointInputConnectorEditPart) childEditPart).getFigure());
			return true;
		}
		if (childEditPart instanceof DefaultEndPointOutputConnectorEditPart) {
			getBorderedFigure().getBorderItemContainer().remove(
					((DefaultEndPointOutputConnectorEditPart) childEditPart).getFigure());
			return true;
		}
		if (childEditPart instanceof DefaultEndPointDescriptionEditPart) {
			return true;
		}
		return false;
	}

	/**
	 * @generated
	 */
	protected void addChildVisual(EditPart childEditPart, int index) {
		if (addFixedChild(childEditPart)) {
			return;
		}
		super.addChildVisual(childEditPart, -1);
	}

	/**
	 * @generated
	 */
	protected void removeChildVisual(EditPart childEditPart) {
		if (removeFixedChild(childEditPart)) {
			return;
		}
		super.removeChildVisual(childEditPart);
	}

	/**
	 * Border items go to the border-item container; everything else goes to
	 * the content pane.
	 *
	 * @generated
	 */
	protected IFigure getContentPaneFor(IGraphicalEditPart editPart) {
		if (editPart instanceof IBorderItemEditPart) {
			return getBorderedFigure().getBorderItemContainer();
		}
		return getContentPane();
	}

	/**
	 * Node plate with a 40x40 default size.
	 *
	 * @generated
	 */
	protected NodeFigure createNodePlate() {
		DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(40, 40);
		return result;
	}

	/**
	 * Creates figure for this edit part.
	 *
	 * Body of this method does not depend on settings in generation model
	 * so you may safely remove <i>generated</i> tag and modify it.
	 *
	 * @generated
	 */
	protected NodeFigure createMainFigure() {
		NodeFigure figure = createNodePlate();
		figure.setLayoutManager(new StackLayout());
		IFigure shape = createNodeShape();
		figure.add(shape);
		contentPane = setupContentPane(shape);
		return figure;
	}

	/**
	 * Default implementation treats passed figure as content pane.
	 * Respects layout one may have set for generated figure.
	 *
	 * @param nodeShape instance of generated figure class
	 * @generated
	 */
	protected IFigure setupContentPane(IFigure nodeShape) {
		if (nodeShape.getLayoutManager() == null) {
			ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout();
			layout.setSpacing(5);
			nodeShape.setLayoutManager(layout);
		}
		return nodeShape; // use nodeShape itself as contentPane
	}

	/**
	 * @generated
	 */
	public IFigure getContentPane() {
		if (contentPane != null) {
			return contentPane;
		}
		return super.getContentPane();
	}

	/**
	 * @generated
	 */
	protected void setForegroundColor(Color color) {
		if (primaryShape != null) {
			primaryShape.setForegroundColor(color);
		}
	}

	/**
	 * @generated
	 */
	protected void setBackgroundColor(Color color) {
		if (primaryShape != null) {
			primaryShape.setBackgroundColor(color);
		}
	}

	/**
	 * @generated
	 */
	protected void setLineWidth(int width) {
		if (primaryShape instanceof Shape) {
			((Shape) primaryShape).setLineWidth(width);
		}
	}

	/**
	 * @generated
	 */
	protected void setLineType(int style) {
		if (primaryShape instanceof Shape) {
			((Shape) primaryShape).setLineStyle(style);
		}
	}

	/**
	 * @generated
	 */
	public EditPart getPrimaryChildEditPart() {
		return getChildBySemanticHint(EsbVisualIDRegistry.getType(DefaultEndPointEndPointNameEditPart.VISUAL_ID));
	}

	public void activate() {
		//EndpointUtils.addOutputConnectorsInitially(this, getEditingDomain());
		super.activate();
	}

	/**
	 * Figure for the Default Endpoint node: a labelled graphical shape carrying
	 * a name label and a description label.
	 *
	 * @generated
	 */
	public class DefaultEndPointFigure extends EsbGraphicalShapeWithLabel {

		/**
		 * @generated
		 */
		private WrappingLabel fFigureDefaultEndPointNamePropertyLabel;
		// Label showing the endpoint description (reuses the parent shape's
		// property-name label, see createContents()).
		private WrappingLabel endpointDescriptionLabel;

		/**
		 * @generated
		 */
		public DefaultEndPointFigure() {
			this.setBackgroundColor(THIS_BACK);
			createContents();
		}

		/**
		 * @generated NOT
		 */
		private void createContents() {
			fFigureDefaultEndPointNamePropertyLabel = new WrappingLabel();
			endpointDescriptionLabel = getPropertyNameLabel();
		}

		/**
		 * @generated
		 */
		public WrappingLabel getFigureDefaultEndPointNamePropertyLabel() {
			return fFigureDefaultEndPointNamePropertyLabel;
		}

		public WrappingLabel getEndpointDescriptionLabel() {
			return endpointDescriptionLabel;
		}

		public String getIconPath() {
			// NOTE(review): "defalut" is misspelled - presumably it matches the
			// actual resource file name; verify before correcting.
			return "icons/ico20/defalut-endpoint.gif";
		}

		public String getNodeName() {
			return "Default-EP";
		}

		public Color getBackgroundColor() {
			return THIS_BACK;
		}

		public Color getLabelBackColor() {
			return THIS_LABEL_BACK;
		}
	}

	/**
	 * Node fill colour (white) and label-strip colour (light green).
	 *
	 * @generated NOT
	 */
	static final Color THIS_BACK = new Color(null, 255, 255, 255);

	static final Color THIS_LABEL_BACK = new Color(null, 113, 198, 113);
}
/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react; import javax.annotation.Nullable; import android.content.Context; import android.graphics.Rect; import android.os.Bundle; import android.os.SystemClock; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import com.facebook.common.logging.FLog; import com.facebook.infer.annotation.Assertions; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.UiThreadUtil; import com.facebook.react.bridge.WritableMap; import com.facebook.react.common.ReactConstants; import com.facebook.react.common.annotations.VisibleForTesting; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.react.uimanager.DisplayMetricsHolder; import com.facebook.react.uimanager.PixelUtil; import com.facebook.react.uimanager.RootView; import com.facebook.react.uimanager.SizeMonitoringFrameLayout; import com.facebook.react.uimanager.TouchTargetHelper; import com.facebook.react.uimanager.UIManagerModule; import com.facebook.react.uimanager.events.EventDispatcher; import com.facebook.react.uimanager.events.TouchEvent; import com.facebook.react.uimanager.events.TouchEventType; /** * Default root view for catalyst apps. Provides the ability to listen for size changes so that a UI * manager can re-layout its elements. * It is also responsible for handling touch events passed to any of it's child view's and sending * those events to JS via RCTEventEmitter module. 
This view is overriding
 * {@link ViewGroup#onInterceptTouchEvent} method in order to be notified about the events for all
 * of it's children and it's also overriding {@link ViewGroup#requestDisallowInterceptTouchEvent}
 * to make sure that {@link ViewGroup#onInterceptTouchEvent} will get events even when some child
 * view start intercepting it. In case when no child view is interested in handling some particular
 * touch event this view's {@link View#onTouchEvent} will still return true in order to be notified
 * about all subsequent touch events related to that gesture (in case when JS code want to handle
 * that gesture).
 */
public class ReactRootView extends SizeMonitoringFrameLayout implements RootView {

  // Watches global layout changes to detect software-keyboard show/hide.
  private final KeyboardListener mKeyboardListener = new KeyboardListener();

  private @Nullable ReactInstanceManager mReactInstanceManager;
  private @Nullable String mJSModuleName;
  private @Nullable Bundle mLaunchOptions;

  // React tag of the view targeted by the in-flight gesture; -1 when no
  // gesture is being tracked.
  private int mTargetTag = -1;
  private boolean mChildIsHandlingNativeGesture = false;
  private boolean mWasMeasured = false;
  // Set when startReactApplication ran before the first measure/attach; the
  // actual attach is then performed from onMeasure.
  private boolean mAttachScheduled = false;
  private boolean mIsAttachedToWindow = false;
  private boolean mIsAttachedToInstance = false;

  public ReactRootView(Context context) {
    super(context);
  }

  public ReactRootView(Context context, AttributeSet attrs) {
    super(context, attrs);
  }

  public ReactRootView(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
  }

  @Override
  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    // The root view refuses to guess its own size - the parent must fix it.
    if (widthMode == MeasureSpec.UNSPECIFIED || heightMode == MeasureSpec.UNSPECIFIED) {
      throw new IllegalStateException(
          "The root catalyst view must have a width and height given to it by it's parent view. " +
          "You can do this by specifying MATCH_PARENT or explicit width and height in the layout.");
    }
    setMeasuredDimension(
        MeasureSpec.getSize(widthMeasureSpec),
        MeasureSpec.getSize(heightMeasureSpec));
    mWasMeasured = true;
    if (mAttachScheduled && mReactInstanceManager != null && mIsAttachedToWindow) {
      // Scheduled from {@link #startReactApplication} call in case when the view measurements are
      // not available
      mAttachScheduled = false;
      // Enqueue it to UIThread not to block onMeasure waiting for the catalyst instance creation
      UiThreadUtil.runOnUiThread(new Runnable() {
        @Override
        public void run() {
          Assertions.assertNotNull(mReactInstanceManager)
              .attachMeasuredRootView(ReactRootView.this);
          mIsAttachedToInstance = true;
          getViewTreeObserver().addOnGlobalLayoutListener(mKeyboardListener);
        }
      });
    }
  }

  /**
   * Main catalyst view is responsible for collecting and sending touch events to JS. This method
   * reacts for an incoming android native touch events ({@link MotionEvent}) and calls into
   * {@link com.facebook.react.uimanager.events.EventDispatcher} when appropriate.
   * It uses {@link com.facebook.react.uimanager.TouchTargetManagerHelper#findTouchTargetView}
   * helper method for figuring out a react view ID in the case of ACTION_DOWN
   * event (when the gesture starts).
   */
  private void handleTouchEvent(MotionEvent ev) {
    if (mReactInstanceManager == null || !mIsAttachedToInstance ||
        mReactInstanceManager.getCurrentReactContext() == null) {
      FLog.w(
          ReactConstants.TAG,
          "Unable to handle touch in JS as the catalyst instance has not been attached");
      return;
    }
    int action = ev.getAction() & MotionEvent.ACTION_MASK;
    ReactContext reactContext = mReactInstanceManager.getCurrentReactContext();
    EventDispatcher eventDispatcher = reactContext.getNativeModule(UIManagerModule.class)
        .getEventDispatcher();
    if (action == MotionEvent.ACTION_DOWN) {
      if (mTargetTag != -1) {
        FLog.e(
            ReactConstants.TAG,
            "Got DOWN touch before receiving UP or CANCEL from last gesture");
      }
      // First event for this gesture. We expect tag to be set to -1, and we use helper method
      // {@link #findTargetTagForTouch} to find react view ID that will be responsible for handling
      // this gesture
      mChildIsHandlingNativeGesture = false;
      mTargetTag = TouchTargetHelper.findTargetTagForTouch(ev.getY(), ev.getX(), this);
      eventDispatcher.dispatchEvent(
          new TouchEvent(mTargetTag, SystemClock.uptimeMillis(), TouchEventType.START, ev));
    } else if (mChildIsHandlingNativeGesture) {
      // If the touch was intercepted by a child, we've already sent a cancel event to JS for this
      // gesture, so we shouldn't send any more touches related to it.
      return;
    } else if (mTargetTag == -1) {
      // All the subsequent action types are expected to be called after ACTION_DOWN thus target
      // is supposed to be set for them.
      FLog.e(
          ReactConstants.TAG,
          "Unexpected state: received touch event but didn't get starting ACTION_DOWN for this " +
              "gesture before");
    } else if (action == MotionEvent.ACTION_UP) {
      // End of the gesture. We reset target tag to -1 and expect no further event associated with
      // this gesture.
      eventDispatcher.dispatchEvent(
          new TouchEvent(mTargetTag, SystemClock.uptimeMillis(), TouchEventType.END, ev));
      mTargetTag = -1;
    } else if (action == MotionEvent.ACTION_MOVE) {
      // Update pointer position for current gesture
      eventDispatcher.dispatchEvent(
          new TouchEvent(mTargetTag, SystemClock.uptimeMillis(), TouchEventType.MOVE, ev));
    } else if (action == MotionEvent.ACTION_POINTER_DOWN) {
      // New pointer goes down, this can only happen after ACTION_DOWN is sent for the first pointer
      eventDispatcher.dispatchEvent(
          new TouchEvent(mTargetTag, SystemClock.uptimeMillis(), TouchEventType.START, ev));
    } else if (action == MotionEvent.ACTION_POINTER_UP) {
      // Exactly one of the pointers goes up
      eventDispatcher.dispatchEvent(
          new TouchEvent(mTargetTag, SystemClock.uptimeMillis(), TouchEventType.END, ev));
    } else if (action == MotionEvent.ACTION_CANCEL) {
      dispatchCancelEvent(ev);
      mTargetTag = -1;
    } else {
      FLog.w(
          ReactConstants.TAG,
          "Warning : touch event was ignored. Action=" + action + " Target=" + mTargetTag);
    }
  }

  @Override
  public void onChildStartedNativeGesture(MotionEvent androidEvent) {
    if (mChildIsHandlingNativeGesture) {
      // This means we previously had another child start handling this native gesture and now a
      // different native parent of that child has decided to intercept the touch stream and handle
      // the gesture itself. Example where this can happen: HorizontalScrollView in a ScrollView.
      return;
    }
    dispatchCancelEvent(androidEvent);
    mChildIsHandlingNativeGesture = true;
    mTargetTag = -1;
  }

  // Sends a CANCEL touch event for the gesture in flight (if any) so JS stops
  // tracking it.
  private void dispatchCancelEvent(MotionEvent androidEvent) {
    // This means the gesture has already ended, via some other CANCEL or UP event. This is not
    // expected to happen very often as it would mean some child View has decided to intercept the
    // touch stream and start a native gesture only upon receiving the UP/CANCEL event.
    if (mTargetTag == -1) {
      FLog.w(
          ReactConstants.TAG,
          "Can't cancel already finished gesture. Is a child View trying to start a gesture from " +
              "an UP/CANCEL event?");
      return;
    }
    EventDispatcher eventDispatcher = mReactInstanceManager.getCurrentReactContext()
        .getNativeModule(UIManagerModule.class)
        .getEventDispatcher();
    Assertions.assertCondition(
        !mChildIsHandlingNativeGesture,
        "Expected to not have already sent a cancel for this gesture");
    Assertions.assertNotNull(eventDispatcher).dispatchEvent(
        new TouchEvent(
            mTargetTag,
            SystemClock.uptimeMillis(),
            TouchEventType.CANCEL,
            androidEvent));
  }

  @Override
  public boolean onInterceptTouchEvent(MotionEvent ev) {
    // Forward every event to JS, then let the normal interception logic run.
    handleTouchEvent(ev);
    return super.onInterceptTouchEvent(ev);
  }

  @Override
  public boolean onTouchEvent(MotionEvent ev) {
    handleTouchEvent(ev);
    super.onTouchEvent(ev);
    // In case when there is no children interested in handling touch event, we return true from
    // the root view in order to receive subsequent events related to that gesture
    return true;
  }

  @Override
  public void requestDisallowInterceptTouchEvent(boolean disallowIntercept) {
    // No-op - override in order to still receive events to onInterceptTouchEvent
    // even when some other view disallow that
  }

  @Override
  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
    // No-op since UIManagerModule handles actually laying out children.
  }

  @Override
  protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    mIsAttachedToWindow = false;
    if (mReactInstanceManager != null && !mAttachScheduled) {
      mReactInstanceManager.detachRootView(this);
      mIsAttachedToInstance = false;
      getViewTreeObserver().removeOnGlobalLayoutListener(mKeyboardListener);
    }
  }

  @Override
  protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    mIsAttachedToWindow = true;
    // If the view re-attached and catalyst instance has been set before, we'd attach again to the
    // catalyst instance (expecting measure to be called after {@link onAttachedToWindow})
    if (mReactInstanceManager != null) {
      mAttachScheduled = true;
    }
  }

  /**
   * {@see #startReactApplication(ReactInstanceManager, String, android.os.Bundle)}
   */
  public void startReactApplication(ReactInstanceManager reactInstanceManager, String moduleName) {
    startReactApplication(reactInstanceManager, moduleName, null);
  }

  /**
   * Schedule rendering of the react component rendered by the JS application from the given JS
   * module (@{param moduleName}) using provided {@param reactInstanceManager} to attach to the
   * JS context of that manager. Extra parameter {@param launchOptions} can be used to pass initial
   * properties for the react component.
   */
  public void startReactApplication(
      ReactInstanceManager reactInstanceManager,
      String moduleName,
      @Nullable Bundle launchOptions) {
    // TODO(6788889): Use POJO instead of bundle here, apparently we can't just use WritableMap
    // here as it may be deallocated in native after passing via JNI bridge, but we want to reuse
    // it in the case of re-creating the catalyst instance
    Assertions.assertCondition(
        mReactInstanceManager == null,
        "This root view has already " +
            "been attached to a catalyst instance manager");
    mReactInstanceManager = reactInstanceManager;
    mJSModuleName = moduleName;
    mLaunchOptions = launchOptions;
    // We need to wait for the initial onMeasure, if this view has not yet been measured, we set
    // mAttachScheduled flag, which will make this view startReactApplication itself to instance
    // manager once onMeasure is called.
    if (mWasMeasured && mIsAttachedToWindow) {
      mReactInstanceManager.attachMeasuredRootView(this);
      mIsAttachedToInstance = true;
      getViewTreeObserver().addOnGlobalLayoutListener(mKeyboardListener);
    } else {
      mAttachScheduled = true;
    }
  }

  /* package */ String getJSModuleName() {
    return Assertions.assertNotNull(mJSModuleName);
  }

  /* package */ @Nullable Bundle getLaunchOptions() {
    return mLaunchOptions;
  }

  /**
   * Is used by unit test to setup mWasMeasured and mIsAttachedToWindow flags, that will let this
   * view to be properly attached to catalyst instance by startReactApplication call
   */
  @VisibleForTesting
  /* package */ void simulateAttachForTesting() {
    mIsAttachedToWindow = true;
    mIsAttachedToInstance = true;
    mWasMeasured = true;
  }

  // Translates global-layout callbacks into keyboardDidShow/keyboardDidHide JS
  // events by comparing the visible display frame against the screen height.
  private class KeyboardListener implements ViewTreeObserver.OnGlobalLayoutListener {
    private int mKeyboardHeight = 0;
    private final Rect mVisibleViewArea = new Rect();

    @Override
    public void onGlobalLayout() {
      if (mReactInstanceManager == null || !mIsAttachedToInstance ||
          mReactInstanceManager.getCurrentReactContext() == null) {
        FLog.w(
            ReactConstants.TAG,
            "Unable to dispatch keyboard events in JS as the react instance has not been attached");
        return;
      }

      getRootView().getWindowVisibleDisplayFrame(mVisibleViewArea);
      // Space hidden below the visible frame is assumed to be the keyboard.
      final int heightDiff =
          DisplayMetricsHolder.getDisplayMetrics().heightPixels - mVisibleViewArea.bottom;
      if (mKeyboardHeight != heightDiff && heightDiff > 0) {
        // keyboard is now showing, or the keyboard height has changed
        mKeyboardHeight = heightDiff;
        WritableMap params = Arguments.createMap();
        WritableMap coordinates = Arguments.createMap();
        coordinates.putDouble("screenY", PixelUtil.toDIPFromPixel(mVisibleViewArea.bottom));
        coordinates.putDouble("screenX", PixelUtil.toDIPFromPixel(mVisibleViewArea.left));
        coordinates.putDouble("width", PixelUtil.toDIPFromPixel(mVisibleViewArea.width()));
        coordinates.putDouble("height", PixelUtil.toDIPFromPixel(mKeyboardHeight));
        params.putMap("endCoordinates", coordinates);
        sendEvent("keyboardDidShow", params);
      } else if (mKeyboardHeight != 0 && heightDiff == 0) {
        // keyboard is now hidden
        mKeyboardHeight = heightDiff;
        sendEvent("keyboardDidHide", null);
      }
    }

    private void sendEvent(String eventName, @Nullable WritableMap params) {
      if (mReactInstanceManager != null) {
        mReactInstanceManager.getCurrentReactContext()
            .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
            .emit(eventName, params);
      }
    }
  }
}
/* * Copyright (c) 2009-2010, Sergey Karakovskiy and Julian Togelius * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Mario AI nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package ch.idsia.utils.statistics; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.Vector; /** * This class is used to model the statistics * of a fix of numbers. 
For the statistics
 * we choose here it is not necessary to store
 * all the numbers - just keeping a running total
 * of how many, the sum and the sum of the squares
 * is sufficient (plus max and min, for max and min).
 * <p>
 * Warning: the geometric mean calculation is only valid if all numbers
 * added to the summary are positive (>0) - no warnings
 * are given if this is not the case - you'll just get a wrong answer
 * for the gm() !!!
 */
public class StatisticalSummary implements java.io.Serializable {

    // a temporary fix for an immediate need
    // this should really be handled with a more general
    // predicate class
    // following line can cause prog to hang - bug in Java?
    // protected long serialVersionUID = new Double("-1490108905720833569").longValue();
    // protected long serialVersionUID = 123;

    public String name; // defaults to ""
    public Watch watch; // optional observer counting occurrences of one value
    int n; // how many samples have been added
    boolean valid; // whether mean/gm/sd are up to date (lazily recomputed)
    private double logsum; // for calculating the geometric mean
    private double sum;
    private double sumsq; // running sum of squares
    private double min;
    private double max;
    private double mean;
    // trick class loader into loading this now
    // private static StatisticalTests dummy = new StatisticalTests();
    private double gm; // geometric mean
    private double sd;

    public StatisticalSummary() {
        this("");
        // System.out.println("Exited default...");
    }

    /**
     * Creates an empty summary with the given display name.
     */
    public StatisticalSummary(String name) {
        // System.out.println("Creating SS");
        this.name = name;
        n = 0;
        sum = 0;
        sumsq = 0;
        // ensure that the first number to be
        // added will fix up min and max to
        // be that number
        min = Double.POSITIVE_INFINITY;
        max = Double.NEGATIVE_INFINITY;
        // System.out.println("Finished Creating SS");
        watch = null;
        valid = false;
    }

    /**
     * Unpaired t-test statistic comparing the two summaries (delegates to
     * StatisticalTests, declared elsewhere in this project).
     */
    public static double sigDiff(StatisticalSummary s1, StatisticalSummary s2) {
        return StatisticalTests.tNotPaired(
                s1.mean(), s2.mean(),
                s1.sumSquareDiff(), s2.sumSquareDiff(),
                s1.n, s2.n, true);
    }

    /**
     * Deserializes a summary previously written with {@link #save};
     * returns null (after printing the exception) on any failure.
     */
    public static StatisticalSummary load(String path) {
        try {
            ObjectInputStream ois = new ObjectInputStream(
                    new FileInputStream(path));
            StatisticalSummary ss = (StatisticalSummary) ois.readObject();
            ois.close();
            return ss;
        } catch (Exception e) {
            System.out.println(e);
            return null;
        }
    }

    /**
     * Ad-hoc demo / scratch main. NOTE(review): everything after the first
     * System.exit(0) is dead scratch code kept for reference - it never runs.
     */
    public static void main(String[] args) throws Exception {
        // demonstrate some possible usage...
        StatisticalSummary ts1 = new StatisticalSummary();
        StatisticalSummary ts2 = new StatisticalSummary();
        for (int i = 0; i < 100; i++) {
            // NOTE(review): i / 10 is integer division - samples are the
            // integers 0..9, not tenths; presumably intentional for the demo.
            ts1.add(i / 10);
            ts2.add(i / 10 + new Double(args[0]).doubleValue());
        }
        System.out.println(ts1);
        System.out.println(ts2);
        System.out.println(StatisticalSummary.sigDiff(ts1, ts2));
        System.out.println((ts2.mean() - ts1.mean()) / ts1.stdErr());
        System.exit(0);
        // ---- dead scratch code below (never reached) ----
        System.out.println("Creating summaries");
        StatisticalSummary trainSummary = new StatisticalSummary();
        System.out.println("1");
        // StatisticalSummary testSummary = new VisualSummary("EA");
        System.out.println("2");
        // testSummary.watch = new StatisticalSummary.Watch( 1.0 );
        System.out.println("3");
        // StatisticalSummary ostiaTrainSummary = new StatisticalSummary();
        System.out.println("4");
        // ostiaTestSummary = new VisualSummary("OSTIA");
        System.out.println("5");
        // ostiaTestSummary.watch = new StatisticalSummary.Watch( 1.0 );
        System.out.println("Created summaries");
        StatisticalSummary s10 = new StatisticalSummary();
        StatisticalSummary s20 = new StatisticalSummary();
        StatisticalSummary s3 = new StatisticalSummary();
        StatisticalSummary s4 = new StatisticalSummary();
        StatisticalSummary s5 = new StatisticalSummary();
        StatisticalSummary ss = new StatisticalSummary("Hello");
        for (int i = 0; i < 20; i++) {
            ss.add(0.71);
        }
        System.out.println(ss);
        System.exit(0);
        StatisticalSummary s1 = new StatisticalSummary();
        StatisticalSummary s2 = new StatisticalSummary();
        System.out.println(sigDiff(s1, s2));
        for (int i = 0; i < 20; i++) {
            s1.add(Math.random());
            s2.add(Math.random() + 0.5);
            // s1.add(i);
            // s2.add(i+2);
            System.out.println(sigDiff(s1, s2));
        }
    }

    /**
     * Clears all accumulated statistics (and the Watch, if any).
     */
    public final void reset() {
        n = 0;
        sum = 0;
        sumsq = 0;
        logsum = 0;
        // ensure that the first number to be
        // added will fix up min and max to
        // be that number
        min = Double.POSITIVE_INFINITY;
        max = Double.NEGATIVE_INFINITY;
        if (watch != null) {
            watch.reset();
        }
    }

    public double max() {
        return max;
    }

    public double min() {
        return min;
    }

    /* erroneous
    public static double sigDiff( StatisticalSummary s1 , StatisticalSummary s2 ) {
        return StatisticalTests.tNotPaired( s1.mean(), s2.mean(),
                s1.sumsq, s2.sumsq,
                s1.n, s2.n, true);
    }
    */

    public double mean() {
        if (!valid)
            computeStats();
        return mean;
    }

    /** Geometric mean - only meaningful if all added values were > 0. */
    public double gm() {
        if (!valid)
            computeStats();
        return gm;
    }

    /**
     * returns the sum of the squares of the differences
     * between the mean and the ith values
     */
    public double sumSquareDiff() {
        return sumsq - n * mean() * mean();
    }

    // Lazily recomputes mean, geometric mean and (sample) standard deviation.
    // NOTE(review): with n == 0 this produces NaN, and with n == 1 sd divides
    // by zero - callers are expected to have added data first.
    private void computeStats() {
        if (!valid) {
            mean = sum / n;
            gm = Math.exp(logsum / n);
            double num = sumsq - (n * mean * mean);
            if (num < 0) {
                // avoids tiny negative numbers possible through imprecision
                num = 0;
            }
            // System.out.println("Num = " + num);
            sd = Math.sqrt(num / (n - 1));
            // System.out.println(" Test: sd = " + sd);
            // System.out.println(" Test: n = " + n);
            valid = true;
        }
    }

    /** Sample standard deviation (n - 1 denominator). */
    public double sd() {
        if (!valid)
            computeStats();
        return sd;
    }

    public int n() {
        return n;
    }

    /** Standard error of the mean: sd / sqrt(n). */
    public double stdErr() {
        return sd() / Math.sqrt(n);
    }

    /**
     * Merges another summary's running totals into this one.
     */
    public void add(StatisticalSummary ss) {
        // implications for Watch?
        n += ss.n;
        sum += ss.sum;
        sumsq += ss.sumsq;
        logsum += ss.logsum;
        max = Math.max(max, ss.max);
        min = Math.min(min, ss.min);
        valid = false;
    }

    /**
     * Adds a single sample, updating all running totals and invalidating the
     * cached statistics.
     */
    public void add(double d) {
        n++;
        sum += d;
        sumsq += d * d;
        if (d > 0) {
            // log only of positive values; non-positive samples silently skew gm()
            logsum += Math.log(d);
        }
        min = Math.min(min, d);
        max = Math.max(max, d);
        if (watch != null) {
            watch.note(d);
        }
        valid = false;
    }

    public void add(Number n) {
        add(n.doubleValue());
    }

    public void add(double[] d) {
        for (int i = 0; i < d.length; i++) {
            add(d[i]);
        }
    }

    /**
     * Adds every Number in the vector; non-Number elements are silently
     * skipped (the empty catch is a deliberate best-effort filter).
     */
    public void add(Vector v) {
        for (int i = 0; i < v.size(); i++) {
            try {
                add(((Number) v.elementAt(i)).doubleValue());
            } catch (Exception e) {
            }
        }
    }

    public String toString() {
        String s = (name == null) ? "" : name + "\n";
        s += " min = " + min() + "\n" +
                " max = " + max() + "\n" +
                " ave = " + mean() + "\n" +
                " sd = " + sd() + "\n" +
                // " se = " + stdErr() + "\n" +
                // " sum = " + sum + "\n" +
                // " sumsq = " + sumsq + "\n" +
                // " watch = " + watch + "\n" +
                " n = " + n;
        return s;
    }

    /**
     * Serializes this summary to the given path; failures are printed and
     * otherwise ignored.
     */
    public void save(String path) {
        try {
            ObjectOutputStream oos = new ObjectOutputStream(
                    new FileOutputStream(path));
            oos.writeObject(this);
            oos.close();
        } catch (Exception e) {
            System.out.println(e);
        }
    }

    /**
     * Counts exact occurrences of one particular value among the added samples.
     */
    public static class Watch {

        public int count;
        double x; // the value being watched for

        public Watch(double x) {
            this.x = x;
            count = 0;
        }

        public void note(double val) {
            if (val == x) {
                count++;
            }
        }

        public String toString() {
            return x + " occured " + count + " times ";
        }

        public void reset() {
            count = 0;
        }
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package tests.gl_320.texture;

import com.jogamp.opengl.GL;
import static com.jogamp.opengl.GL2ES3.*;
import com.jogamp.opengl.GL3;
import com.jogamp.opengl.util.GLBuffers;
import com.jogamp.opengl.util.glsl.ShaderCode;
import com.jogamp.opengl.util.glsl.ShaderProgram;
import framework.BufferUtils;
import glm.glm;
import glm.mat._4.Mat4;
import framework.Profile;
import framework.Semantic;
import framework.Test;
import glf.Vertex_v2fv2f;
import glm.vec._2.Vec2;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;
import jgli.Texture2d;

/**
 * OpenGL 3.2 core-profile sample: uploads a DDS texture to the GPU by
 * streaming its pixels through a pixel unpack buffer object, then draws a
 * textured quad each frame using a uniform buffer for the MVP transform.
 *
 * @author GBarbieri
 */
public class Gl_320_texture_streaming extends Test {

    public static void main(String[] args) {
        // Constructing the Test subclass starts the demo (framework convention).
        Gl_320_texture_streaming gl_320_texture_streaming = new Gl_320_texture_streaming();
    }

    public Gl_320_texture_streaming() {
        // Request an OpenGL 3.2 core-profile context.
        super("gl-320-texture-streaming", Profile.CORE, 3, 2);
    }

    // Shader base name and on-disk locations of shaders / textures.
    private final String SHADERS_SOURCE = "texture-2d";
    private final String SHADERS_ROOT = "src/data/gl_320/texture";
    private final String TEXTURE_DIFFUSE = "kueken7_rgba8_srgb.dds";

    // With DDS textures, v texture coordinate are reversed, from top to bottom
    // Two triangles forming a fullscreen quad; each vertex is (x, y, u, v).
    private int vertexCount = 6;
    private int vertexSize = vertexCount * Vertex_v2fv2f.SIZE;
    private float[] vertexData = {
        -1.0f, -1.0f,/**/ 0.0f, 1.0f,
        +1.0f, -1.0f,/**/ 1.0f, 1.0f,
        +1.0f, +1.0f,/**/ 1.0f, 0.0f,
        +1.0f, +1.0f,/**/ 1.0f, 0.0f,
        -1.0f, +1.0f,/**/ 0.0f, 0.0f,
        -1.0f, -1.0f,/**/ 0.0f, 1.0f};

    // Indices into bufferName for the vertex buffer and the transform UBO.
    private class Buffer {

        public static final int VERTEX = 0;
        public static final int TRANSFORM = 1;
        public static final int MAX = 2;
    }

    // Indices for the two shader stages (vertex / fragment).
    private class Shader {

        public static final int VERT = 0;
        public static final int FRAG = 1;
        public static final int MAX = 2;
    }

    // GL object names, kept in direct buffers as required by the JOGL API.
    private IntBuffer vertexArrayName = GLBuffers.newDirectIntBuffer(1),
            textureName = GLBuffers.newDirectIntBuffer(1),
            bufferName = GLBuffers.newDirectIntBuffer(Buffer.MAX);
    private int programName, uniformTransform, uniformDiffuse;

    /**
     * One-time initialization: program, buffers, VAO and texture, in order.
     * Stops at the first step that fails.
     */
    @Override
    protected boolean begin(GL gl) {

        GL3 gl3 = (GL3) gl;

        boolean validated = true;

        if (validated) {
            validated = initProgram(gl3);
        }
        if (validated) {
            validated = initBuffer(gl3);
        }
        if (validated) {
            validated = initVertexArray(gl3);
        }
        if (validated) {
            validated = initTexture(gl3);
        }

        return validated;
    }

    // Compiles and links the vertex/fragment shaders, binds attribute and
    // fragment-output locations, and looks up the uniform locations.
    private boolean initProgram(GL3 gl3) {

        boolean validated = true;

        if (validated) {

            ShaderCode vertShaderCode = ShaderCode.create(gl3, GL_VERTEX_SHADER, this.getClass(), SHADERS_ROOT,
                    null, SHADERS_SOURCE, "vert", null, true);
            ShaderCode fragShaderCode = ShaderCode.create(gl3, GL_FRAGMENT_SHADER, this.getClass(), SHADERS_ROOT,
                    null, SHADERS_SOURCE, "frag", null, true);

            ShaderProgram shaderProgram = new ShaderProgram();
            shaderProgram.add(vertShaderCode);
            shaderProgram.add(fragShaderCode);

            shaderProgram.init(gl3);

            programName = shaderProgram.program();

            // Locations must be bound before linking to take effect.
            gl3.glBindAttribLocation(programName, Semantic.Attr.POSITION, "position");
            gl3.glBindAttribLocation(programName, Semantic.Attr.TEXCOORD, "texCoord");
            gl3.glBindFragDataLocation(programName, Semantic.Frag.COLOR, "color");

            shaderProgram.link(gl3, System.out);
        }

        if (validated) {

            uniformTransform = gl3.glGetUniformBlockIndex(programName, "Transform");
            uniformDiffuse = gl3.glGetUniformLocation(programName, "diffuse");
        }

        return validated & checkError(gl3, "initProgram");
    }

    // Creates the static vertex buffer and the dynamic uniform buffer, sizing
    // the latter to respect the driver's UBO offset alignment.
    private boolean initBuffer(GL3 gl3) {

        FloatBuffer vertexBuffer = GLBuffers.newDirectFloatBuffer(vertexData);
        IntBuffer uniformBufferOffset = GLBuffers.newDirectIntBuffer(1);

        gl3.glGenBuffers(Buffer.MAX, bufferName);

        gl3.glBindBuffer(GL_ARRAY_BUFFER, bufferName.get(Buffer.VERTEX));
        gl3.glBufferData(GL_ARRAY_BUFFER, vertexSize, vertexBuffer, GL_STATIC_DRAW);
        gl3.glBindBuffer(GL_ARRAY_BUFFER, 0);

        gl3.glGetIntegerv(GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT, uniformBufferOffset);
        int uniformBlockSize = Math.max(Mat4.SIZE, uniformBufferOffset.get(0));

        gl3.glBindBuffer(GL_UNIFORM_BUFFER, bufferName.get(Buffer.TRANSFORM));
        gl3.glBufferData(GL_UNIFORM_BUFFER, uniformBlockSize, null, GL_DYNAMIC_DRAW);
        gl3.glBindBuffer(GL_UNIFORM_BUFFER, 0);

        BufferUtils.destroyDirectBuffer(vertexBuffer);
        BufferUtils.destroyDirectBuffer(uniformBufferOffset);

        return true;
    }

    // Loads the DDS texture and uploads it by streaming the pixels through a
    // temporary pixel-unpack buffer object (PBO) rather than a direct upload.
    // NOTE(review): TEXTURE_ROOT is presumably inherited from the Test
    // superclass — confirm against framework.Test.
    private boolean initTexture(GL3 gl3) {
        try {
            jgli.Texture2d texture = new Texture2d(jgli.Load.load(TEXTURE_ROOT + "/" + TEXTURE_DIFFUSE));
            jgli.Gl.Format format = jgli.Gl.translate(texture.format());

            gl3.glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

            gl3.glGenTextures(1, textureName);

            gl3.glActiveTexture(GL_TEXTURE0);
            gl3.glBindTexture(GL_TEXTURE_2D, textureName.get(0));
            gl3.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
            gl3.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

            // Allocate texture storage without data; pixels arrive via the PBO.
            gl3.glTexImage2D(GL_TEXTURE_2D, 0, format.internal.value,
                    texture.dimensions()[0], texture.dimensions()[1],
                    0, format.external.value, format.type.value, null);

            int textureSize = texture.size(0);

            int[] pixelBuffer = {0};
            gl3.glGenBuffers(1, pixelBuffer, 0);
            gl3.glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pixelBuffer[0]);
            gl3.glBufferData(GL_PIXEL_UNPACK_BUFFER, textureSize, null, GL_STREAM_DRAW);

            // Map the PBO, copy the mip-level-0 pixels in, then unmap.
            ByteBuffer pointer = gl3.glMapBufferRange(GL_PIXEL_UNPACK_BUFFER, 0, textureSize, GL_MAP_WRITE_BIT);
            pointer.put(texture.data(0)).rewind();
            gl3.glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER);

            // Offset 0 is interpreted relative to the bound PBO, not a pointer.
            gl3.glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
                    texture.dimensions()[0], texture.dimensions()[1],
                    format.external.value, format.type.value, 0);

            gl3.glDeleteBuffers(1, pixelBuffer, 0);

            gl3.glPixelStorei(GL_UNPACK_ALIGNMENT, 4);

        } catch (IOException ex) {
            Logger.getLogger(Gl_320_texture_streaming.class.getName()).log(Level.SEVERE, null, ex);
        }
        return true;
    }

    // Records the vertex layout (position + texcoord interleaved) in a VAO.
    private boolean initVertexArray(GL3 gl3) {

        gl3.glGenVertexArrays(1, vertexArrayName);
        gl3.glBindVertexArray(vertexArrayName.get(0));
        {
            gl3.glBindBuffer(GL_ARRAY_BUFFER, bufferName.get(Buffer.VERTEX));
            gl3.glVertexAttribPointer(Semantic.Attr.POSITION, 2, GL_FLOAT, false, Vertex_v2fv2f.SIZE, 0);
            gl3.glVertexAttribPointer(Semantic.Attr.TEXCOORD, 2, GL_FLOAT, false, Vertex_v2fv2f.SIZE, Vec2.SIZE);
            gl3.glBindBuffer(GL_ARRAY_BUFFER, 0);

            gl3.glEnableVertexAttribArray(Semantic.Attr.POSITION);
            gl3.glEnableVertexAttribArray(Semantic.Attr.TEXCOORD);
        }
        gl3.glBindVertexArray(0);

        return true;
    }

    /**
     * Per-frame: updates the MVP matrix in the uniform buffer, then draws the
     * textured quad.
     * NOTE(review): windowSize and viewMat4() appear to come from the Test
     * superclass — confirm against framework.Test.
     */
    @Override
    protected boolean render(GL gl) {

        GL3 gl3 = (GL3) gl;

        {
            gl3.glBindBuffer(GL_UNIFORM_BUFFER, bufferName.get(Buffer.TRANSFORM));
            ByteBuffer pointer = gl3.glMapBufferRange(
                    GL_UNIFORM_BUFFER, 0, Mat4.SIZE,
                    GL_MAP_WRITE_BIT | GL_MAP_INVALIDATE_BUFFER_BIT);

            Mat4 projection = glm.perspective_((float) Math.PI * 0.25f, 4.0f / 3.0f, 0.1f, 100.0f);
            Mat4 model = new Mat4(1.0f);
            Mat4 mvp = projection.mul(viewMat4()).mul(model);
            pointer.asFloatBuffer().put(mvp.toFa_());

            // Make sure the uniform buffer is uploaded
            gl3.glUnmapBuffer(GL_UNIFORM_BUFFER);
        }

        gl3.glViewport(0, 0, windowSize.x, windowSize.y);
        gl3.glClearBufferfv(GL_COLOR, 0, new float[]{0.0f, 0.0f, 0.0f, 1.0f}, 0);

        gl3.glUseProgram(programName);
        gl3.glUniform1i(uniformDiffuse, 0);
        gl3.glUniformBlockBinding(programName, uniformTransform, Semantic.Uniform.TRANSFORM0);

        gl3.glActiveTexture(GL_TEXTURE0);
        gl3.glBindTexture(GL_TEXTURE_2D, textureName.get(0));
        gl3.glBindBufferBase(GL_UNIFORM_BUFFER, Semantic.Uniform.TRANSFORM0, bufferName.get(Buffer.TRANSFORM));
        gl3.glBindVertexArray(vertexArrayName.get(0));

        gl3.glDrawArraysInstanced(GL_TRIANGLES, 0, vertexCount, 1);

        return true;
    }

    /**
     * Tear-down: deletes all GL objects and frees the direct NIO buffers.
     */
    @Override
    protected boolean end(GL gl) {

        GL3 gl3 = (GL3) gl;

        gl3.glDeleteBuffers(Buffer.MAX, bufferName);
        gl3.glDeleteProgram(programName);
        gl3.glDeleteTextures(1, textureName);
        gl3.glDeleteVertexArrays(1, vertexArrayName);

        BufferUtils.destroyDirectBuffer(bufferName);
        BufferUtils.destroyDirectBuffer(textureName);
        BufferUtils.destroyDirectBuffer(vertexArrayName);

        return true;
    }
}
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.TypeValidator.TypeMismatch;
import com.google.javascript.jscomp.graph.AdjacencyGraph;
import com.google.javascript.jscomp.graph.Annotation;
import com.google.javascript.jscomp.graph.GraphColoring;
import com.google.javascript.jscomp.graph.GraphColoring.GreedyGraphColoring;
import com.google.javascript.jscomp.graph.GraphNode;
import com.google.javascript.jscomp.graph.SubGraph;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;

import java.util.BitSet;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Logger;

/**
 * Renames unrelated properties to the same name, using type information.
 * This allows better compression as more properties can be given short names.
 *
 * <p>Properties are considered unrelated if they are never referenced from the
 * same type or from a subtype of each others' types, thus this pass is only
 * effective if type checking is enabled.
 *
 * Example:
 * <code>
 *   Foo.fooprop = 0;
 *   Foo.fooprop2 = 0;
 *   Bar.barprop = 0;
 * </code>
 *
 * becomes:
 *
 * <code>
 *   Foo.a = 0;
 *   Foo.b = 0;
 *   Bar.a = 0;
 * </code>
 *
 */
class AmbiguateProperties implements CompilerPass {
  private static final Logger logger = Logger.getLogger(
      AmbiguateProperties.class.getName());

  private final AbstractCompiler compiler;

  // STRING nodes whose property name will be rewritten after coloring.
  private final List<Node> stringNodesToRename = Lists.newArrayList();
  // Characters the name generator must not use in generated names.
  private final char[] reservedCharacters;

  /** Map from property name to Property object */
  private final Map<String, Property> propertyMap = Maps.newHashMap();

  /** Property names that don't get renamed */
  private final Set<String> externedNames = Sets.newHashSet();

  /** Names to which properties shouldn't be renamed, to avoid name conflicts */
  private final Set<String> quotedNames = Sets.newHashSet();

  /** Map from original property name to new name. */
  private final Map<String, String> renamingMap = Maps.newHashMap();

  /**
   * Sorts Property objects by their count, breaking ties alphabetically to
   * ensure a deterministic total ordering.
   */
  private static final Comparator<Property> FREQUENCY_COMPARATOR =
    new Comparator<Property>() {
      @Override
      public int compare(Property p1, Property p2) {
        // Descending by occurrence count, then ascending by old name.
        if (p1.numOccurrences != p2.numOccurrences) {
          return p2.numOccurrences - p1.numOccurrences;
        }
        return p1.oldName.compareTo(p2.oldName);
      }
    };

  /** A map from JSType to a unique representative Integer. */
  private BiMap<JSType, Integer> intForType = HashBiMap.create();

  /**
   * A map from JSType to JSTypeBitSet representing the types related
   * to the type.
   */
  private Map<JSType, JSTypeBitSet> relatedBitsets = Maps.newHashMap();

  /** A set of types that invalidate properties from ambiguation. */
  private final Set<JSType> invalidatingTypes;

  /**
   * Prefix of properties to skip renaming. These should be renamed in the
   * RenameProperties pass.
   */
  static final String SKIP_PREFIX = "JSAbstractCompiler";

  AmbiguateProperties(AbstractCompiler compiler,
      char[] reservedCharacters) {
    Preconditions.checkState(compiler.getLifeCycleStage().isNormalized());
    this.compiler = compiler;
    this.reservedCharacters = reservedCharacters;

    // Native/structural types whose properties must never be ambiguated.
    JSTypeRegistry r = compiler.getTypeRegistry();
    invalidatingTypes = Sets.newHashSet(
        r.getNativeType(JSTypeNative.ALL_TYPE),
        r.getNativeType(JSTypeNative.NO_OBJECT_TYPE),
        r.getNativeType(JSTypeNative.NO_TYPE),
        r.getNativeType(JSTypeNative.NULL_TYPE),
        r.getNativeType(JSTypeNative.VOID_TYPE),
        r.getNativeType(JSTypeNative.FUNCTION_FUNCTION_TYPE),
        r.getNativeType(JSTypeNative.FUNCTION_INSTANCE_TYPE),
        r.getNativeType(JSTypeNative.FUNCTION_PROTOTYPE),
        r.getNativeType(JSTypeNative.GLOBAL_THIS),
        r.getNativeType(JSTypeNative.OBJECT_TYPE),
        r.getNativeType(JSTypeNative.OBJECT_PROTOTYPE),
        r.getNativeType(JSTypeNative.OBJECT_FUNCTION_TYPE),
        r.getNativeType(JSTypeNative.TOP_LEVEL_PROTOTYPE),
        r.getNativeType(JSTypeNative.UNKNOWN_TYPE));

    // Types involved in a type-checker mismatch are unreliable: invalidate.
    for (TypeMismatch mis : compiler.getTypeValidator().getMismatches()) {
      addInvalidatingType(mis.typeA);
      addInvalidatingType(mis.typeB);
    }
  }

  /**
   * Invalidates the given type, so that no properties on it will be renamed.
   */
  private void addInvalidatingType(JSType type) {
    type = type.restrictByNotNullOrUndefined();
    if (type.isUnionType()) {
      // Invalidate each alternative of a union individually.
      for (JSType alt : type.toMaybeUnionType().getAlternates()) {
        addInvalidatingType(alt);
      }
    }

    invalidatingTypes.add(type);
    ObjectType objType = ObjectType.cast(type);
    if (objType != null && objType.isInstanceType()) {
      // Also invalidate the prototype, since its properties are shared.
      invalidatingTypes.add(objType.getImplicitPrototype());
    }
  }

  Map<String, String> getRenamingMap() {
    return renamingMap;
  }

  /** Returns an integer that uniquely identifies a JSType. */
  private int getIntForType(JSType type) {
    if (intForType.containsKey(type)) {
      return intForType.get(type).intValue();
    }
    // Ids start at 1; bit 0 of the JSTypeBitSets is never used.
    int newInt = intForType.size() + 1;
    intForType.put(type, newInt);
    return newInt;
  }

  @Override
  public void process(Node externs, Node root) {
    // Step 1: collect extern names and all property references.
    NodeTraversal.traverse(compiler, externs, new ProcessExterns());
    NodeTraversal.traverse(compiler, root, new ProcessProperties());

    // Generated names must avoid extern names and quoted-access names.
    Set<String> reservedNames =
        new HashSet<String>(externedNames.size() + quotedNames.size());
    reservedNames.addAll(externedNames);
    reservedNames.addAll(quotedNames);

    int numRenamedPropertyNames = 0;
    int numSkippedPropertyNames = 0;
    Set<Property> propsByFreq = new TreeSet<Property>(FREQUENCY_COMPARATOR);
    for (Property p : propertyMap.values()) {
      if (!p.skipAmbiguating) {
        ++numRenamedPropertyNames;
        propsByFreq.add(p);
      } else {
        ++numSkippedPropertyNames;
        reservedNames.add(p.oldName);
      }
    }

    // Step 2: color the property graph; properties with the same color can
    // safely share a new short name.
    PropertyGraph graph = new PropertyGraph(Lists.newLinkedList(propsByFreq));
    GraphColoring<Property, Void> coloring =
        new GreedyGraphColoring<Property, Void>(graph, FREQUENCY_COMPARATOR);
    int numNewPropertyNames = coloring.color();

    // Step 3: generate one short name per color.
    NameGenerator nameGen = new NameGenerator(
        reservedNames, "", reservedCharacters);
    Map<Integer, String> colorMap = Maps.newHashMap();
    for (int i = 0; i < numNewPropertyNames; ++i) {
      colorMap.put(i, nameGen.generateNextName());
    }
    for (GraphNode<Property, Void> node : graph.getNodes()) {
      node.getValue().newName = colorMap.get(node.getAnnotation().hashCode());
      renamingMap.put(node.getValue().oldName, node.getValue().newName);
    }

    // Update the string nodes.
    for (Node n : stringNodesToRename) {
      String oldName = n.getString();
      Property p = propertyMap.get(oldName);
      if (p != null && p.newName != null) {
        Preconditions.checkState(oldName.equals(p.oldName));
        if (!p.newName.equals(oldName)) {
          n.setString(p.newName);
          compiler.reportCodeChange();
        }
      }
    }

    logger.fine("Collapsed " + numRenamedPropertyNames + " properties into "
                + numNewPropertyNames + " and skipped renaming "
                + numSkippedPropertyNames + " properties.");
  }

  private BitSet getRelatedTypesOnNonUnion(JSType type) {
    // All of the types we encounter should have been added to the
    // relatedBitsets via computeRelatedTypes.
    if (relatedBitsets.containsKey(type)) {
      return relatedBitsets.get(type);
    } else {
      throw new RuntimeException("Related types should have been computed for"
                                 + " type: " + type + " but have not been.");
    }
  }

  /**
   * Adds subtypes - and implementors, in the case of interfaces - of the type
   * to its JSTypeBitSet of related types. Union types are decomposed into their
   * alternative types.
   *
   * <p>The 'is related to' relationship is best understood graphically. Draw an
   * arrow from each instance type to the prototype of each of its
   * subclass. Draw an arrow from each prototype to its instance type. Draw an
   * arrow from each interface to its implementors. A type is related to another
   * if there is a directed path in the graph from the type to other. Thus, the
   * 'is related to' relationship is reflexive and transitive.
   *
   * <p>Example with Foo extends Bar which extends Baz and Bar implements I:
   * <pre>
   * Foo -> Bar.prototype -> Bar -> Baz.prototype -> Baz
   *                          ^
   *                          |
   *                          I
   * </pre>
   *
   * <p>Note that we don't need to correctly handle the relationships between
   * functions, because the function type is invalidating (i.e. its properties
   * won't be ambiguated).
   */
  private void computeRelatedTypes(JSType type) {
    if (type.isUnionType()) {
      type = type.restrictByNotNullOrUndefined();
      if (type.isUnionType()) {
        for (JSType alt : type.toMaybeUnionType().getAlternates()) {
          computeRelatedTypes(alt);
        }
        return;
      }
    }

    if (relatedBitsets.containsKey(type)) {
      // We only need to generate the bit set once.
      return;
    }

    JSTypeBitSet related = new JSTypeBitSet(intForType.size());
    relatedBitsets.put(type, related);
    related.set(getIntForType(type));

    // A prototype is related to its instance.
    if (type.isFunctionPrototypeType()) {
      addRelatedInstance(((ObjectType) type).getOwnerFunction(), related);
      return;
    }

    // An instance is related to its subclasses.
    FunctionType constructor = type.toObjectType().getConstructor();
    if (constructor != null && constructor.getSubTypes() != null) {
      for (FunctionType subType : constructor.getSubTypes()) {
        addRelatedInstance(subType, related);
      }
    }

    // An interface is related to its implementors.
    for (FunctionType implementor : compiler.getTypeRegistry()
        .getDirectImplementors(type.toObjectType())) {
      addRelatedInstance(implementor, related);
    }
  }

  /**
   * Adds the instance of the given constructor, its implicit prototype and all
   * its related types to the given bit set.
   */
  private void addRelatedInstance(
      FunctionType constructor, JSTypeBitSet related) {
    // TODO(user): A constructor which doesn't have an instance type
    // (e.g. it's missing the @constructor annotation) should be an invalidating
    // type which doesn't reach this code path.
    if (constructor.hasInstanceType()) {
      ObjectType instanceType = constructor.getInstanceType();
      related.set(getIntForType(instanceType.getImplicitPrototype()));
      computeRelatedTypes(instanceType);
      related.or(relatedBitsets.get(instanceType));
    }
  }

  // Adapts the property set to the graph-coloring interface; edges are implied
  // by intersecting related-type bit sets rather than stored explicitly.
  class PropertyGraph implements AdjacencyGraph<Property, Void> {
    protected final Map<Property, PropertyGraphNode> nodes = Maps.newHashMap();

    PropertyGraph(Collection<Property> props) {
      for (Property prop : props) {
        nodes.put(prop, new PropertyGraphNode(prop));
      }
    }

    @Override
    public List<GraphNode<Property, Void>> getNodes() {
      return Lists.<GraphNode<Property, Void>>newArrayList(nodes.values());
    }

    @Override
    public GraphNode<Property, Void> getNode(Property property) {
      return nodes.get(property);
    }

    @Override
    public SubGraph<Property, Void> newSubGraph() {
      return new PropertySubGraph();
    }

    @Override
    public void clearNodeAnnotations() {
      for (PropertyGraphNode node : nodes.values()) {
        node.setAnnotation(null);
      }
    }

    @Override
    public int getWeight(Property value) {
      return value.numOccurrences;
    }
  }

  /**
   * A {@link SubGraph} that represents properties. The related types of
   * the properties are used to efficiently calculate adjacency information.
   */
  class PropertySubGraph implements SubGraph<Property, Void> {
    /** Types related to properties referenced in this subgraph. */
    JSTypeBitSet relatedTypes = new JSTypeBitSet(intForType.size());

    /**
     * Returns true if prop is in an independent set from all properties in this
     * sub graph.  That is, if none of its related types intersects with the
     * related types for this sub graph.
     */
    @Override
    public boolean isIndependentOf(Property prop) {
      return !relatedTypes.intersects(prop.relatedTypes);
    }

    /**
     * Adds the node to the sub graph, adding all its related types to the
     * related types for the sub graph.
     */
    @Override
    public void addNode(Property prop) {
      relatedTypes.or(prop.relatedTypes);
    }
  }

  // Simple graph node wrapper carrying the coloring annotation.
  class PropertyGraphNode implements GraphNode<Property, Void> {
    Property property;
    protected Annotation annotation;

    PropertyGraphNode(Property property) {
      this.property = property;
    }

    @Override
    public Property getValue() {
      return property;
    }

    @Override
    @SuppressWarnings("unchecked")
    public <A extends Annotation> A getAnnotation() {
      return (A) annotation;
    }

    @Override
    public void setAnnotation(Annotation data) {
      annotation = data;
    }
  }

  /** A traversal callback that collects externed property names. */
  private class ProcessExterns extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      switch (n.getType()) {
        case Token.GETPROP:
          // The property name is the second child of a GETPROP.
          Node dest = n.getFirstChild().getNext();
          externedNames.add(dest.getString());
          break;
        case Token.OBJECTLIT:
          for (Node child = n.getFirstChild();
               child != null;
               child = child.getNext()) {
            // names: STRING, GET, SET
            externedNames.add(child.getString());
          }
          break;
      }
    }
  }

  /** Finds all property references, recording the types on which they occur. */
  private class ProcessProperties extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      switch (n.getType()) {
        case Token.GETPROP: {
          Node propNode = n.getFirstChild().getNext();
          JSType jstype = getJSType(n.getFirstChild());
          maybeMarkCandidate(propNode, jstype, t);
          break;
        }
        case Token.OBJECTLIT:
          // The children of an OBJECTLIT node are keys, where the values
          // are the children of the keys.
          for (Node key = n.getFirstChild(); key != null;
               key = key.getNext()) {
            // We only want keys that were unquoted.
            // Keys are STRING, GET, SET
            if (!key.isQuotedString()) {
              JSType jstype = getJSType(n.getFirstChild());
              maybeMarkCandidate(key, jstype, t);
            } else {
              // Ensure that we never rename some other property in a way
              // that could conflict with this quoted key.
              quotedNames.add(key.getString());
            }
          }
          break;
        case Token.GETELEM:
          // If this is a quoted property access (e.g. x['myprop']), we need to
          // ensure that we never rename some other property in a way that
          // could conflict with this quoted name.
          Node child = n.getLastChild();
          if (child.isString()) {
            quotedNames.add(child.getString());
          }
          break;
      }
    }

    /**
     * If a property node is eligible for renaming, stashes a reference to it
     * and increments the property name's access count.
     *
     * @param n The STRING node for a property
     * @param t The traversal
     */
    private void maybeMarkCandidate(Node n, JSType type, NodeTraversal t) {
      String name = n.getString();
      if (!externedNames.contains(name)) {
        stringNodesToRename.add(n);
        recordProperty(name, type);
      }
    }

    // Records one occurrence of the property on the given type.
    private Property recordProperty(String name, JSType type) {
      Property prop = getProperty(name);
      prop.addType(type);
      return prop;
    }
  }

  /** Returns true if properties on this type should not be renamed. */
  private boolean isInvalidatingType(JSType type) {
    if (type.isUnionType()) {
      type = type.restrictByNotNullOrUndefined();
      if (type.isUnionType()) {
        // A union is invalidating if any alternative is.
        for (JSType alt : type.toMaybeUnionType().getAlternates()) {
          if (isInvalidatingType(alt)) {
            return true;
          }
        }
        return false;
      }
    }
    ObjectType objType = ObjectType.cast(type);
    return objType == null
        || invalidatingTypes.contains(objType)
        || !objType.hasReferenceName()
        || objType.isUnknownType()
        || objType.isEmptyType() /* unresolved types */
        || objType.isEnumType()
        || objType.autoboxesTo() != null;
  }

  // Returns the Property record for the name, creating it on first sight.
  private Property getProperty(String name) {
    Property prop = propertyMap.get(name);
    if (prop == null) {
      prop = new Property(name);
      propertyMap.put(name, prop);
    }
    return prop;
  }

  /**
   * This method gets the JSType from the Node argument and verifies that it is
   * present.
   */
  private JSType getJSType(Node n) {
    JSType jsType = n.getJSType();
    if (jsType == null) {
      // TODO(user): This branch indicates a compiler bug, not worthy of
      // halting the compilation but we should log this and analyze to track
      // down why it happens. This is not critical and will be resolved over
      // time as the type checker is extended.
      return compiler.getTypeRegistry().getNativeType(
          JSTypeNative.UNKNOWN_TYPE);
    } else {
      return jsType;
    }
  }

  /** Encapsulates the information needed for renaming a property. */
  private class Property {
    final String oldName;
    String newName;
    int numOccurrences;
    boolean skipAmbiguating;
    JSTypeBitSet relatedTypes = new JSTypeBitSet(intForType.size());

    Property(String name) {
      this.oldName = name;

      // Properties with this suffix are handled in RenameProperties.
      if (name.startsWith(SKIP_PREFIX)) {
        skipAmbiguating = true;
      }
    }

    /** Add this type to this property, calculating */
    void addType(JSType newType) {
      if (skipAmbiguating) {
        return;
      }

      ++numOccurrences;

      if (newType.isUnionType()) {
        newType = newType.restrictByNotNullOrUndefined();
        if (newType.isUnionType()) {
          for (JSType alt : newType.toMaybeUnionType().getAlternates()) {
            addNonUnionType(alt);
          }
          return;
        }
      }
      addNonUnionType(newType);
    }

    private void addNonUnionType(JSType newType) {
      if (skipAmbiguating || isInvalidatingType(newType)) {
        // Once invalidated, the property is permanently excluded.
        skipAmbiguating = true;
        return;
      }

      if (!relatedTypes.get(getIntForType(newType))) {
        computeRelatedTypes(newType);
        relatedTypes.or(getRelatedTypesOnNonUnion(newType));
      }
    }
  }

  // A BitSet that stores type info. Adds pretty-print routines.
  private class JSTypeBitSet extends BitSet {
    private static final long serialVersionUID = 1L;

    private JSTypeBitSet(int size) {
      super(size);
    }

    private JSTypeBitSet() {
      super();
    }

    /**
     * Pretty-printing, for diagnostic purposes.
     */
    @Override
    public String toString() {
      int from = 0;
      int current = 0;
      List<String> types = Lists.newArrayList();
      while (-1 != (current = nextSetBit(from))) {
        types.add(intForType.inverse().get(current).toString());
        from = current + 1;
      }
      return Joiner.on(" && ").join(types);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.txn; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreThread; import org.apache.hadoop.hive.metastore.api.*; import org.apache.log4j.Level; import org.apache.log4j.LogManager; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static junit.framework.Assert.*; /** * Tests for TxnHandler. 
*/
public class TestTxnHandler {
  static final private String CLASS_NAME = TxnHandler.class.getName();
  static final private Log LOG = LogFactory.getLog(CLASS_NAME);

  // Backing metastore DB is configured through TxnDbUtil; txnHandler itself is
  // assigned in a setUp method defined later in this file.
  private HiveConf conf = new HiveConf();
  private TxnHandler txnHandler;

  public TestTxnHandler() throws Exception {
    TxnDbUtil.setConfValues(conf);
    LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG);
    // NOTE(review): tearDown() is called from the constructor, presumably to
    // clear leftover state from a previous run -- confirm against the
    // tearDown implementation later in this file.
    tearDown();
  }

  /** A fresh store reports a high water mark of 0 and no open transactions. */
  @Test
  public void testValidTxnsEmpty() throws Exception {
    GetOpenTxnsInfoResponse txnsInfo = txnHandler.getOpenTxnsInfo();
    assertEquals(0L, txnsInfo.getTxn_high_water_mark());
    assertTrue(txnsInfo.getOpen_txns().isEmpty());
    GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
    assertEquals(0L, txns.getTxn_high_water_mark());
    assertTrue(txns.getOpen_txns().isEmpty());
  }

  /** Opening transactions assigns sequential ids starting at 1, reported as OPEN. */
  @Test
  public void testOpenTxn() throws Exception {
    long first = openTxn();
    assertEquals(1L, first);
    long second = openTxn();
    assertEquals(2L, second);
    GetOpenTxnsInfoResponse txnsInfo = txnHandler.getOpenTxnsInfo();
    assertEquals(2L, txnsInfo.getTxn_high_water_mark());
    assertEquals(2, txnsInfo.getOpen_txns().size());
    assertEquals(1L, txnsInfo.getOpen_txns().get(0).getId());
    assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(0).getState());
    assertEquals(2L, txnsInfo.getOpen_txns().get(1).getId());
    assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(1).getState());
    assertEquals("me", txnsInfo.getOpen_txns().get(1).getUser());
    assertEquals("localhost", txnsInfo.getOpen_txns().get(1).getHostname());
    GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
    assertEquals(2L, txns.getTxn_high_water_mark());
    assertEquals(2, txns.getOpen_txns().size());
    // Mark each returned txn id as seen; ids are 1 and 2, so index 0 is unused.
    boolean[] saw = new boolean[3];
    for (int i = 0; i < saw.length; i++) saw[i] = false;
    for (Long tid : txns.getOpen_txns()) {
      saw[tid.intValue()] = true;
    }
    for (int i = 1; i < saw.length; i++) assertTrue(saw[i]);
  }

  /** Aborting one of two open txns marks it ABORTED but keeps it in both listings. */
  @Test
  public void testAbortTxn() throws Exception {
    OpenTxnsResponse openedTxns = txnHandler.openTxns(new OpenTxnRequest(2, "me", "localhost"));
    List<Long> txnList = openedTxns.getTxn_ids();
    long first = txnList.get(0);
    assertEquals(1L, first);
    long second = txnList.get(1);
    assertEquals(2L, second);
    txnHandler.abortTxn(new AbortTxnRequest(1));
    GetOpenTxnsInfoResponse txnsInfo = txnHandler.getOpenTxnsInfo();
    assertEquals(2L, txnsInfo.getTxn_high_water_mark());
    assertEquals(2, txnsInfo.getOpen_txns().size());
    assertEquals(1L, txnsInfo.getOpen_txns().get(0).getId());
    assertEquals(TxnState.ABORTED, txnsInfo.getOpen_txns().get(0).getState());
    assertEquals(2L, txnsInfo.getOpen_txns().get(1).getId());
    assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(1).getState());
    GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
    assertEquals(2L, txns.getTxn_high_water_mark());
    assertEquals(2, txns.getOpen_txns().size());
    boolean[] saw = new boolean[3];
    for (int i = 0; i < saw.length; i++) saw[i] = false;
    for (Long tid : txns.getOpen_txns()) {
      saw[tid.intValue()] = true;
    }
    // Aborted txns still appear in getOpenTxns(): readers must treat them as invalid.
    for (int i = 1; i < saw.length; i++) assertTrue(saw[i]);
  }

  /** Aborting a txn id that was never opened raises NoSuchTxnException. */
  @Test
  public void testAbortInvalidTxn() throws Exception {
    boolean caught = false;
    try {
      txnHandler.abortTxn(new AbortTxnRequest(195L));
    } catch (NoSuchTxnException e) {
      caught = true;
    }
    assertTrue(caught);
  }

  /** Committed txns disappear from the open list while the high water mark stays. */
  @Test
  public void testValidTxnsNoneOpen() throws Exception {
    txnHandler.openTxns(new OpenTxnRequest(2, "me", "localhost"));
    txnHandler.commitTxn(new CommitTxnRequest(1));
    txnHandler.commitTxn(new CommitTxnRequest(2));
    GetOpenTxnsInfoResponse txnsInfo = txnHandler.getOpenTxnsInfo();
    assertEquals(2L, txnsInfo.getTxn_high_water_mark());
    assertEquals(0, txnsInfo.getOpen_txns().size());
    GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
    assertEquals(2L, txns.getTxn_high_water_mark());
    assertEquals(0, txns.getOpen_txns().size());
  }

  /** Mixed states: committed txn drops out; aborted and still-open txns remain listed. */
  @Test
  public void testValidTxnsSomeOpen() throws Exception {
    txnHandler.openTxns(new OpenTxnRequest(3, "me", "localhost"));
    txnHandler.abortTxn(new AbortTxnRequest(1));
    txnHandler.commitTxn(new CommitTxnRequest(2));
    GetOpenTxnsInfoResponse txnsInfo =
txnHandler.getOpenTxnsInfo();
    assertEquals(3L, txnsInfo.getTxn_high_water_mark());
    assertEquals(2, txnsInfo.getOpen_txns().size());
    assertEquals(1L, txnsInfo.getOpen_txns().get(0).getId());
    assertEquals(TxnState.ABORTED, txnsInfo.getOpen_txns().get(0).getState());
    assertEquals(3L, txnsInfo.getOpen_txns().get(1).getId());
    assertEquals(TxnState.OPEN, txnsInfo.getOpen_txns().get(1).getState());
    GetOpenTxnsResponse txns = txnHandler.getOpenTxns();
    assertEquals(3L, txns.getTxn_high_water_mark());
    assertEquals(2, txns.getOpen_txns().size());
    // Txn 2 was committed, so only 1 (aborted) and 3 (open) should be reported.
    boolean[] saw = new boolean[4];
    for (int i = 0; i < saw.length; i++) saw[i] = false;
    for (Long tid : txns.getOpen_txns()) {
      saw[tid.intValue()] = true;
    }
    assertTrue(saw[1]);
    assertFalse(saw[2]);
    assertTrue(saw[3]);
  }

  /** Exclusive locks on two different databases do not conflict. */
  @Test
  public void testLockDifferentDBs() throws Exception {
    // Test that two different databases don't collide on their locks
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "yourdb");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** A second exclusive lock on the same database must wait. */
  @Test
  public void testLockSameDB() throws Exception {
    // Two exclusive locks on the same database DO collide: the second one waits.
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** An exclusive DB lock blocks a subsequent lock on a table inside that DB. */
  @Test
  public void testLockDbLocksTable() throws Exception {
    // Test that locking a database prevents locking of tables in the database
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** An exclusive DB lock does not block a table lock in a different database. */
  @Test
  public void testLockDbDoesNotLockTableInDifferentDB() throws Exception {
    // Locking one database must not block tables of another database.
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "yourdb");
    comp.setTablename("mytable");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** Exclusive locks on two different tables do not conflict. */
  @Test
  public void testLockDifferentTables() throws Exception {
    // Test that two different tables don't collide on their locks
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new
LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("yourtable");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** A second exclusive lock on the same table must wait. */
  @Test
  public void testLockSameTable() throws Exception {
    // Two exclusive locks on the same table DO collide: the second one waits.
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** An exclusive table lock blocks a lock on a partition of that table. */
  @Test
  public void testLockTableLocksPartition() throws Exception {
    // Test that locking a table prevents locking of partitions of the table
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** Locking one table does not block partitions of a different table. */
  @Test
  public void testLockDifferentTableDoesntLockPartition() throws Exception {
    // A lock on "mytable" must not block a partition of "yourtable".
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("yourtable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** Exclusive locks on two different partitions do not conflict. */
  @Test
  public void testLockDifferentPartitions() throws Exception {
    // Test that two different partitions don't collide on their locks
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("yourpartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** A second exclusive lock on the same partition must wait. */
  @Test
  public void testLockSamePartition() throws Exception {
    // Two exclusive locks on the same partition DO collide: the second one waits.
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** Two shared-read locks can hold the same partition simultaneously. */
  @Test
  public void testLockSRSR() throws Exception {
    // Test that two shared read locks can share a partition
    LockComponent comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** An exclusive lock makes both subsequent shared-read requests wait. */
  @Test
  public void testLockESRSR() throws Exception {
    // Test that exclusive lock blocks shared reads
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** A shared-write lock can be acquired while a shared-read lock is held. */
  @Test
  public void testLockSRSW() throws Exception {
    // Test that write can acquire after read
    LockComponent comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** An exclusive lock makes subsequent shared-read and shared-write requests wait. */
  @Test
  public void testLockESRSW() throws Exception {
    // Test that exclusive lock blocks read and write
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** A shared-read lock makes a subsequent exclusive request wait. */
  @Test
  public void testLockSRE() throws Exception {
    // Test that read blocks exclusive
    LockComponent comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** An exclusive lock makes subsequent shared-read and exclusive requests wait. */
  @Test
  public void testLockESRE() throws Exception {
    // Test that exclusive blocks read and exclusive
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** A shared-read lock can be acquired while a shared-write lock is held. */
  @Test
  public void testLockSWSR() throws Exception {
    // Test that read can acquire after write
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** A shared-write lock blocks another shared-write but still admits a shared-read. */
  @Test
  public void testLockSWSWSR() throws Exception {
    // Test that write blocks write but read can still acquire
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** A shared-write lock makes two subsequent shared-write requests wait. */
  @Test
  public void testLockSWSWSW() throws Exception {
    // Test that write blocks two writes
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** An exclusive lock makes subsequent exclusive and shared-write requests wait. */
  @Test
  public void testLockEESW() throws Exception {
    // Test that exclusive blocks exclusive and write
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** An exclusive lock makes subsequent exclusive and shared-read requests wait. */
  @Test
  public void testLockEESR() throws Exception {
    // Test that exclusive blocks exclusive and read
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.WAITING);
  }

  /** A waiting lock transitions to ACQUIRED once the blocking lock is released. */
  @Test
  public void testCheckLockAcquireAfterWaiting() throws Exception {
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse
res = txnHandler.lock(req);
    long lockid1 = res.getLockid();
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components.clear();
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    long lockid2 = res.getLockid();
    assertTrue(res.getState() == LockState.WAITING);
    // Release the blocker; the waiting lock should now be grantable.
    txnHandler.unlock(new UnlockRequest(lockid1));
    res = txnHandler.checkLock(new CheckLockRequest(lockid2));
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** Checking a lock id that was never issued raises NoSuchLockException. */
  @Test
  public void testCheckLockNoSuchLock() throws Exception {
    try {
      txnHandler.checkLock(new CheckLockRequest(23L));
      fail("Allowed to check lock on non-existent lock");
    } catch (NoSuchLockException e) {
      // expected
    }
  }

  /** Aborting a txn deletes its locks, so a later checkLock sees no such lock. */
  @Test
  public void testCheckLockTxnAborted() throws Exception {
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    LockResponse res = txnHandler.lock(req);
    long lockid = res.getLockid();
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
    try {
      // This will throw NoSuchLockException (even though it's the
      // transaction we've closed) because that will have deleted the lock.
      txnHandler.checkLock(new CheckLockRequest(lockid));
      fail("Allowed to check lock on aborted transaction.");
    } catch (NoSuchLockException e) {
      // expected
    }
  }

  /** A single request may carry several components; they are granted and released together. */
  @Test
  public void testMultipleLock() throws Exception {
    // Test more than one lock can be handled in a lock request
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(2);
    components.add(comp);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("anotherpartition");
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    long lockid = res.getLockid();
    assertTrue(res.getState() == LockState.ACQUIRED);
    res = txnHandler.checkLock(new CheckLockRequest(lockid));
    assertTrue(res.getState() == LockState.ACQUIRED);
    txnHandler.unlock(new UnlockRequest(lockid));
    assertEquals(0, txnHandler.numLocksInLockTable());
  }

  /** A request waits if any of its components conflicts, then acquires on release. */
  @Test
  public void testMultipleLockWait() throws Exception {
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(2);
    components.add(comp);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("anotherpartition");
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    long lockid1 = res.getLockid();
    assertTrue(res.getState() == LockState.ACQUIRED);
    comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    components = new ArrayList<LockComponent>(1);
    components.add(comp);
    req = new
LockRequest(components, "me", "localhost");
    res = txnHandler.lock(req);
    long lockid2 = res.getLockid();
    assertTrue(res.getState() == LockState.WAITING);
    txnHandler.unlock(new UnlockRequest(lockid1));
    res = txnHandler.checkLock(new CheckLockRequest(lockid2));
    assertTrue(res.getState() == LockState.ACQUIRED);
  }

  /** Committing a transaction releases all locks it holds. */
  @Test
  public void testUnlockOnCommit() throws Exception {
    // Test that committing unlocks
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    txnHandler.commitTxn(new CommitTxnRequest(txnid));
    assertEquals(0, txnHandler.numLocksInLockTable());
  }

  /** Aborting a transaction releases all locks it holds. */
  @Test
  public void testUnlockOnAbort() throws Exception {
    // Test that aborting unlocks
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    txnHandler.abortTxn(new AbortTxnRequest(txnid));
    assertEquals(0, txnHandler.numLocksInLockTable());
  }

  /** Explicitly unlocking a lock owned by an open transaction is rejected. */
  @Test
  public void testUnlockWithTxn() throws Exception {
    LOG.debug("Starting testUnlockWithTxn");
    // Test that attempting to unlock locks associated with a transaction
    // generates an error
    long txnid = openTxn();
    LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    LockResponse res = txnHandler.lock(req);
    long lockid = res.getLockid();
    try {
      txnHandler.unlock(new UnlockRequest(lockid));
      fail("Allowed to unlock lock associated with transaction.");
    } catch (TxnOpenException e) {
      // expected
    }
  }

  /** Heartbeating an aborted transaction raises TxnAbortedException. */
  @Test
  public void testHeartbeatTxnAborted() throws Exception {
    // Test that when a transaction is aborted, the heartbeat fails
    openTxn();
    txnHandler.abortTxn(new AbortTxnRequest(1));
    HeartbeatRequest h = new HeartbeatRequest();
    h.setTxnid(1);
    try {
      txnHandler.heartbeat(h);
      fail("Told there was a txn, when it should have been aborted.");
    } catch (TxnAbortedException e) {
      // expected
    }
  }

  /** Heartbeating a transaction id that never existed raises NoSuchTxnException. */
  @Test
  public void testHeartbeatNoTxn() throws Exception {
    // Heartbeat of an unknown txn id must fail.
    HeartbeatRequest h = new HeartbeatRequest();
    h.setTxnid(939393L);
    try {
      txnHandler.heartbeat(h);
      fail("Told there was a txn, when there wasn't.");
    } catch (NoSuchTxnException e) {
      // expected
    }
  }

  /** Repeated heartbeats keep a lock alive past the configured timeout. */
  @Test
  public void testHeartbeatLock() throws Exception {
    conf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 1, TimeUnit.SECONDS);
    HeartbeatRequest h = new HeartbeatRequest();
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    h.setLockid(res.getLockid());
    for (int i = 0; i < 30; i++) {
      try {
        txnHandler.heartbeat(h);
      } catch (NoSuchLockException e) {
        fail("Told there was no lock, when the heartbeat should have kept it.");
      }
    }
  }

  /** Heartbeating a range of all-open txns reports nothing aborted or missing. */
  @Test
  public void heartbeatTxnRange() throws Exception {
    long txnid = openTxn();
    assertEquals(1, txnid);
    txnid = openTxn();
    txnid = openTxn();
    HeartbeatTxnRangeResponse rsp = txnHandler.heartbeatTxnRange(new HeartbeatTxnRangeRequest(1, 3));
    assertEquals(0, rsp.getAborted().size());
    assertEquals(0,
rsp.getNosuch().size());
  }

  /** A committed txn inside the heartbeat range is reported in "nosuch". */
  @Test
  public void heartbeatTxnRangeOneCommitted() throws Exception {
    long txnid = openTxn();
    assertEquals(1, txnid);
    txnHandler.commitTxn(new CommitTxnRequest(1));
    txnid = openTxn();
    txnid = openTxn();
    HeartbeatTxnRangeResponse rsp = txnHandler.heartbeatTxnRange(new HeartbeatTxnRangeRequest(1, 3));
    assertEquals(1, rsp.getNosuchSize());
    Long txn = rsp.getNosuch().iterator().next();
    assertEquals(1L, (long)txn);
    assertEquals(0, rsp.getAborted().size());
  }

  /** An aborted txn inside the heartbeat range is reported in "aborted". */
  @Test
  public void heartbeatTxnRangeOneAborted() throws Exception {
    long txnid = openTxn();
    assertEquals(1, txnid);
    txnid = openTxn();
    txnid = openTxn();
    txnHandler.abortTxn(new AbortTxnRequest(3));
    HeartbeatTxnRangeResponse rsp = txnHandler.heartbeatTxnRange(new HeartbeatTxnRangeRequest(1, 3));
    assertEquals(1, rsp.getAbortedSize());
    Long txn = rsp.getAborted().iterator().next();
    assertEquals(3L, (long)txn);
    assertEquals(0, rsp.getNosuch().size());
  }

  /** A lock that is never heartbeated times out and disappears from the lock table. */
  @Test
  public void testLockTimeout() throws Exception {
    // Shrink the lock timeout for this test; the previous value is restored
    // in the finally block so later tests are unaffected.
    long timeout = txnHandler.setTimeout(1);
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    comp.setTablename("mytable");
    comp.setPartitionname("mypartition");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    LockResponse res = txnHandler.lock(req);
    assertTrue(res.getState() == LockState.ACQUIRED);
    // Fixed: sleep() is static; the original Thread.currentThread().sleep(10)
    // called it through an instance reference, which misleadingly suggests it
    // sleeps that particular thread. Thread.sleep always sleeps the caller.
    Thread.sleep(10);
    try {
      txnHandler.checkLock(new CheckLockRequest(res.getLockid()));
      fail("Told there was a lock, when it should have timed out.");
    } catch (NoSuchLockException e) {
      // expected: the lock expired
    } finally {
      txnHandler.setTimeout(timeout);
    }
  }

  /** Heartbeating a lock id that was never issued raises NoSuchLockException. */
  @Test
  public void testHeartbeatNoLock() throws Exception {
    HeartbeatRequest h = new HeartbeatRequest();
    h.setLockid(29389839L);
    try {
      txnHandler.heartbeat(h);
      fail("Told there was a lock, when there wasn't.");
    } catch (NoSuchLockException e) {
      // expected
    }
  }

  /** A major compaction request with a partition is recorded and visible via showCompact. */
  @Test
  public void testCompactMajorWithPartition() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MAJOR);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);
    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    assertEquals(1, compacts.size());
    ShowCompactResponseElement c = compacts.get(0);
    assertEquals("foo", c.getDbname());
    assertEquals("bar", c.getTablename());
    assertEquals("ds=today", c.getPartitionname());
    assertEquals(CompactionType.MAJOR, c.getType());
    assertEquals("initiated", c.getState());
    assertEquals(0L, c.getStart());
  }

  /** A minor compaction without a partition records a null partition and the run-as user. */
  @Test
  public void testCompactMinorNoPartition() throws Exception {
    CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
    rqst.setRunas("fred");
    txnHandler.compact(rqst);
    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    assertEquals(1, compacts.size());
    ShowCompactResponseElement c = compacts.get(0);
    assertEquals("foo", c.getDbname());
    assertEquals("bar", c.getTablename());
    assertNull(c.getPartitionname());
    assertEquals(CompactionType.MINOR, c.getType());
    assertEquals("initiated", c.getState());
    assertEquals(0L, c.getStart());
    assertEquals("fred", c.getRunAs());
  }

  /** showLocks reports txn-owned and txn-less locks with state, type, owner and timestamps. */
  @Test
  public void showLocks() throws Exception {
    long begining = System.currentTimeMillis();  // [sic] lower bound for the timestamps below
    long txnid = openTxn();
    // Lock 1: exclusive DB lock owned by txn 1 (acquired, per the asserts below).
    LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB, "mydb");
    List<LockComponent> components = new ArrayList<LockComponent>(1);
    components.add(comp);
    LockRequest req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    LockResponse res = txnHandler.lock(req);
    // Lock 2: shared-read table lock owned by txn 2 (waits, per the asserts below).
    txnid = openTxn();
    comp = new LockComponent(LockType.SHARED_READ, LockLevel.TABLE, "mydb");
    comp.setTablename("mytable");
    components = new ArrayList<LockComponent>(1);
    components.add(comp);
    req = new LockRequest(components, "me", "localhost");
    req.setTxnid(txnid);
    res = txnHandler.lock(req);
    // Lock 3: shared-write partition lock not associated with any txn.
    components = new ArrayList<LockComponent>(1);
    comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.PARTITION, "yourdb");
    comp.setTablename("yourtable");
    comp.setPartitionname("yourpartition");
    components.add(comp);
    req = new LockRequest(components, "you", "remotehost");
    res = txnHandler.lock(req);
    ShowLocksResponse rsp = txnHandler.showLocks(new ShowLocksRequest());
    List<ShowLocksResponseElement> locks = rsp.getLocks();
    assertEquals(3, locks.size());
    boolean[] saw = new boolean[locks.size()];
    for (int i = 0; i < saw.length; i++) saw[i] = false;
    for (ShowLocksResponseElement lock : locks) {
      if (lock.getLockid() == 1) {
        assertEquals(1, lock.getTxnid());
        assertEquals("mydb", lock.getDbname());
        assertNull(lock.getTablename());
        assertNull(lock.getPartname());
        assertEquals(LockState.ACQUIRED, lock.getState());
        assertEquals(LockType.EXCLUSIVE, lock.getType());
        assertTrue(begining <= lock.getLastheartbeat() &&
            System.currentTimeMillis() >= lock.getLastheartbeat());
        assertTrue("Expected acquired at " + lock.getAcquiredat() + " to be between " + begining +
            " and " + System.currentTimeMillis(),
            begining <= lock.getAcquiredat() && System.currentTimeMillis() >= lock.getAcquiredat());
        assertEquals("me", lock.getUser());
        assertEquals("localhost", lock.getHostname());
        saw[0] = true;
      } else if (lock.getLockid() == 2) {
        assertEquals(2, lock.getTxnid());
        assertEquals("mydb", lock.getDbname());
        assertEquals("mytable", lock.getTablename());
        assertNull(lock.getPartname());
        assertEquals(LockState.WAITING, lock.getState());
        assertEquals(LockType.SHARED_READ, lock.getType());
        assertTrue(begining <= lock.getLastheartbeat() &&
            System.currentTimeMillis() >= lock.getLastheartbeat());
        // Never acquired, so the acquired-at timestamp is unset (0).
        assertEquals(0, lock.getAcquiredat());
        assertEquals("me", lock.getUser());
        assertEquals("localhost", lock.getHostname());
        saw[1] = true;
      } else if (lock.getLockid() == 3) {
        assertEquals(0, lock.getTxnid());
        assertEquals("yourdb", lock.getDbname());
assertEquals("yourtable", lock.getTablename()); assertEquals("yourpartition", lock.getPartname()); assertEquals(LockState.ACQUIRED, lock.getState()); assertEquals(LockType.SHARED_WRITE, lock.getType()); assertTrue(begining <= lock.getLastheartbeat() && System.currentTimeMillis() >= lock.getLastheartbeat()); assertTrue(begining <= lock.getAcquiredat() && System.currentTimeMillis() >= lock.getAcquiredat()); assertEquals("you", lock.getUser()); assertEquals("remotehost", lock.getHostname()); saw[2] = true; } else { fail("Unknown lock id"); } } for (int i = 0; i < saw.length; i++) assertTrue("Didn't see lock id " + i, saw[i]); } @Test @Ignore public void deadlockDetected() throws Exception { LOG.debug("Starting deadlock test"); Connection conn = txnHandler.getDbConn(Connection.TRANSACTION_SERIALIZABLE); Statement stmt = conn.createStatement(); long now = txnHandler.getDbTime(conn); stmt.executeUpdate("insert into TXNS (txn_id, txn_state, txn_started, txn_last_heartbeat, " + "txn_user, txn_host) values (1, 'o', " + now + ", " + now + ", 'shagy', " + "'scooby.com')"); stmt.executeUpdate("insert into HIVE_LOCKS (hl_lock_ext_id, hl_lock_int_id, hl_txnid, " + "hl_db, hl_table, hl_partition, hl_lock_state, hl_lock_type, hl_last_heartbeat, " + "hl_user, hl_host) values (1, 1, 1, 'mydb', 'mytable', 'mypartition', '" + txnHandler.LOCK_WAITING + "', '" + txnHandler.LOCK_EXCLUSIVE + "', " + now + ", 'fred', " + "'scooby.com')"); conn.commit(); txnHandler.closeDbConn(conn); final AtomicBoolean sawDeadlock = new AtomicBoolean(); final Connection conn1 = txnHandler.getDbConn(Connection.TRANSACTION_SERIALIZABLE); final Connection conn2 = txnHandler.getDbConn(Connection.TRANSACTION_SERIALIZABLE); try { for (int i = 0; i < 5; i++) { Thread t1 = new Thread() { @Override public void run() { try { try { updateTxns(conn1); updateLocks(conn1); Thread.sleep(1000); conn1.commit(); LOG.debug("no exception, no deadlock"); } catch (SQLException e) { try { txnHandler.checkRetryable(conn1, e, 
"thread t1"); LOG.debug("Got an exception, but not a deadlock, SQLState is " + e.getSQLState() + " class of exception is " + e.getClass().getName() + " msg is <" + e.getMessage() + ">"); } catch (TxnHandler.RetryException de) { LOG.debug("Forced a deadlock, SQLState is " + e.getSQLState() + " class of " + "exception is " + e.getClass().getName() + " msg is <" + e .getMessage() + ">"); sawDeadlock.set(true); } } conn1.rollback(); } catch (Exception e) { throw new RuntimeException(e); } } }; Thread t2 = new Thread() { @Override public void run() { try { try { updateLocks(conn2); updateTxns(conn2); Thread.sleep(1000); conn2.commit(); LOG.debug("no exception, no deadlock"); } catch (SQLException e) { try { txnHandler.checkRetryable(conn2, e, "thread t2"); LOG.debug("Got an exception, but not a deadlock, SQLState is " + e.getSQLState() + " class of exception is " + e.getClass().getName() + " msg is <" + e.getMessage() + ">"); } catch (TxnHandler.RetryException de) { LOG.debug("Forced a deadlock, SQLState is " + e.getSQLState() + " class of " + "exception is " + e.getClass().getName() + " msg is <" + e .getMessage() + ">"); sawDeadlock.set(true); } } conn2.rollback(); } catch (Exception e) { throw new RuntimeException(e); } } }; t1.start(); t2.start(); t1.join(); t2.join(); if (sawDeadlock.get()) break; } assertTrue(sawDeadlock.get()); } finally { conn1.rollback(); txnHandler.closeDbConn(conn1); conn2.rollback(); txnHandler.closeDbConn(conn2); } } private void updateTxns(Connection conn) throws SQLException { Statement stmt = conn.createStatement(); stmt.executeUpdate("update TXNS set txn_last_heartbeat = txn_last_heartbeat + 1"); } private void updateLocks(Connection conn) throws SQLException { Statement stmt = conn.createStatement(); stmt.executeUpdate("update HIVE_LOCKS set hl_last_heartbeat = hl_last_heartbeat + 1"); } @Before public void setUp() throws Exception { TxnDbUtil.prepDb(); txnHandler = new TxnHandler(conf); } @After public void tearDown() throws Exception 
{ TxnDbUtil.cleanDb(); } private long openTxn() throws MetaException { List<Long> txns = txnHandler.openTxns(new OpenTxnRequest(1, "me", "localhost")).getTxn_ids(); return txns.get(0); } }
/* $Id: LockObject.java 988245 2010-08-23 18:39:35Z kwright $ */

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.core.lockmanager;

import org.apache.manifoldcf.core.interfaces.*;
import org.apache.manifoldcf.core.system.ManifoldCF;
import org.apache.manifoldcf.core.system.Logging;
import java.io.*;
import java.nio.charset.StandardCharsets;

/** One instance of this object exists for each lock on each JVM!
* This is the file-system version of the lock.
*
* Cross-JVM state lives in two files under the synch directory:
* a ".lock" file used as a mutex (created/deleted atomically via
* File.createNewFile/delete) and a ".file" file holding an integer
* status word. The status word encoding, as implemented by the
* obtain*/clear* methods below, is:
*   0    : unlocked (the ".file" file is deleted rather than written as "0")
*   -1   : held by one exclusive writer (STATUS_WRITELOCKED)
*   < -1 : held by (-status - 1) non-exclusive writers
*   > 0  : held by (status) readers
*/
public class FileLockObject extends LockObject
{
  public static final String _rcsid = "@(#)$Id: LockObject.java 988245 2010-08-23 18:39:35Z kwright $";

  // Status-word value marking an exclusive write lock; also the boundary
  // value for the non-exclusive-writer count range (see class comment).
  private final static int STATUS_WRITELOCKED = -1;

  // Despite the name, this is the ".lock" mutex FILE (not a directory);
  // it is created/deleted to serialize access to the status file.
  private File lockDirectoryName = null;
  // The ".file" status file holding the integer lock status word.
  private File lockFileName = null;
  private boolean isSync;                  // True if we need to be synchronizing across JVM's

  private final static String DOTLOCK = ".lock";
  private final static String DOTFILE = ".file";
  private final static String SLASH = "/";

  /** Create a lock object.
  *@param lockPool is the pool this lock belongs to.
  *@param lockKey identifies the lock; its hashCode picks the two-level
  * subdirectory, and its toString() (made file-safe) names the lock files.
  *@param synchDir is the cross-JVM synchronization directory, or null to
  * disable cross-JVM synchronization entirely (all the obtain*/clear*
  * methods become no-ops in that case).
  */
  public FileLockObject(LockPool lockPool, Object lockKey, File synchDir)
  {
    super(lockPool,lockKey);
    this.isSync = (synchDir != null);
    if (isSync)
    {
      // Hash the filename into a two-level directory fan-out (1024 x 1024)
      // to keep any single directory from accumulating too many lock files.
      int hashcode = lockKey.hashCode();
      int outerDirNumber = (hashcode & (1023));
      int innerDirNumber = ((hashcode >> 10) & (1023));
      String fullDir = synchDir.toString();
      if (fullDir.length() == 0 || !fullDir.endsWith(SLASH))
        fullDir = fullDir + SLASH;
      fullDir = fullDir + Integer.toString(outerDirNumber)+SLASH+Integer.toString(innerDirNumber);
      (new File(fullDir)).mkdirs();
      String filename = createFileName(lockKey);

      lockDirectoryName = new File(fullDir,filename+DOTLOCK);
      lockFileName = new File(fullDir,filename+DOTFILE);
    }
  }

  /** Build the file-safe base name for a lock key. */
  private static String createFileName(Object lockKey)
  {
    return "lock-"+ManifoldCF.safeFileName(lockKey.toString());
  }

  /** Try once to obtain the global exclusive write lock.
  * Succeeds only when the status word is 0 (completely unlocked).
  *@throws LockException if any other JVM holds any kind of lock.
  */
  @Override
  protected void obtainGlobalWriteLockNoWait()
    throws ManifoldCFException, LockException, InterruptedException
  {
    if (isSync)
    {
      grabFileLock();
      try
      {
        int status = readFile();
        if (status != 0)
        {
          throw new LockException(LOCKEDANOTHERJVM);
        }
        writeFile(STATUS_WRITELOCKED);
      }
      finally
      {
        releaseFileLock();
      }
    }
  }

  /** Release the global exclusive write lock (status word back to 0). */
  @Override
  protected void clearGlobalWriteLockNoWait()
    throws ManifoldCFException, LockException, InterruptedException
  {
    if (isSync)
    {
      grabFileLock();
      try
      {
        writeFile(0);
      }
      finally
      {
        releaseFileLock();
      }
    }
  }

  /** Try once to obtain a global non-exclusive write lock.
  * Fails if an exclusive writer (-1) or any readers (> 0) hold the lock;
  * otherwise decrements the status word further below -1 so that each
  * non-exclusive writer is counted.
  */
  @Override
  protected void obtainGlobalNonExWriteLockNoWait()
    throws ManifoldCFException, LockException, InterruptedException
  {
    // Attempt to obtain a global write lock
    if (isSync)
    {
      grabFileLock();
      try
      {
        int status = readFile();
        if (status == STATUS_WRITELOCKED || status > 0)
        {
          throw new LockException(LOCKEDANOTHERJVM);
        }
        // Unlocked (0) maps to -1 first, then the decrement below yields -2
        // (one non-exclusive writer); each further writer decrements again.
        if (status == 0)
          status = STATUS_WRITELOCKED;
        status--;
        writeFile(status);
      }
      finally
      {
        releaseFileLock();
      }
    }
  }

  /** Release one global non-exclusive write lock (increment the counter;
  * the last release maps -1 back to the unlocked value 0).
  */
  @Override
  protected void clearGlobalNonExWriteLockNoWait()
    throws ManifoldCFException, LockException, InterruptedException
  {
    if (isSync)
    {
      grabFileLock();
      try
      {
        int status = readFile();
        // Anything >= -1 means we do not currently hold a non-ex write lock.
        if (status >= STATUS_WRITELOCKED)
          throw new RuntimeException("JVM error: File lock is not in expected state for object "+this.toString());
        status++;
        if (status == STATUS_WRITELOCKED)
          status = 0;
        writeFile(status);
      }
      finally
      {
        releaseFileLock();
      }
    }
  }

  /** Try once to obtain a global read lock.
  * Fails when any writer holds the lock (status <= -1); otherwise
  * increments the reader count.
  */
  @Override
  protected void obtainGlobalReadLockNoWait()
    throws ManifoldCFException, LockException, InterruptedException
  {
    // Attempt to obtain a global read lock
    if (isSync)
    {
      grabFileLock();
      try
      {
        int status = readFile();
        if (status <= STATUS_WRITELOCKED)
        {
          throw new LockException(LOCKEDANOTHERJVM);
        }
        status++;
        writeFile(status);
      }
      finally
      {
        releaseFileLock();
      }
    }
  }

  /** Release one global read lock (decrement the reader count). */
  @Override
  protected void clearGlobalReadLockNoWait()
    throws ManifoldCFException, LockException, InterruptedException
  {
    if (isSync)
    {
      grabFileLock();
      try
      {
        int status = readFile();
        // System.out.println(" Read status = "+Integer.toString(status));
        if (status == 0)
          throw new RuntimeException("JVM error: File lock is not in expected state for object "+this.toString());
        status--;
        writeFile(status);
        // System.out.println(" Wrote status = "+Integer.toString(status));
      }
      finally
      {
        releaseFileLock();
      }
    }
  }

  private final static String FILELOCKED = "File locked";

  /** Acquire the ".lock" mutex file that guards the status file.
  * File.createNewFile() is atomic, so whichever process creates the file
  * first owns the mutex; a false return means another process holds it.
  * Spins (with 100ms sleeps) only on I/O errors, which some platforms
  * (e.g. Windows) throw transiently instead of returning false.
  *@throws LockException if the mutex is already held by someone else.
  */
  private synchronized void grabFileLock()
    throws LockException, InterruptedException
  {
    while (true)
    {
      // Try to create the lock file
      try
      {
        if (lockDirectoryName.createNewFile() == false)
          throw new LockException(FILELOCKED);
        break;
      }
      catch (InterruptedIOException e)
      {
        throw new InterruptedException("Interrupted IO: "+e.getMessage());
      }
      catch (IOException e)
      {
        // Log this if possible
        try
        {
          Logging.lock.warn("Attempt to set file lock '"+lockDirectoryName.toString()+"' failed: "+e.getMessage(),e);
        }
        catch (Throwable e2)
        {
          e.printStackTrace();
        }
        // Winnt sometimes throws an exception when you can't do the lock
        ManifoldCF.sleep(100);
        continue;
      }
    }
  }

  /** Release the ".lock" mutex file.
  * A failed delete is unrecoverable for the lock system as a whole, so a
  * clean-but-failed delete deliberately terminates the process (exit -100).
  * Error/RuntimeException during the delete are retried after a short
  * sleep, then rethrown once the delete finally succeeds.
  */
  private synchronized void releaseFileLock()
    throws InterruptedException
  {
    Throwable ie = null;
    while (true)
    {
      try
      {
        if (lockDirectoryName.delete())
          break;

        try
        {
          Logging.lock.fatal("Failure deleting file lock '"+lockDirectoryName.toString()+"'");
        }
        catch (Throwable e2)
        {
          System.out.println("Failure deleting file lock '"+lockDirectoryName.toString()+"'");
        }
        // Fail hard
        System.exit(-100);
      }
      catch (Error e)
      {
        // An error - must try again to delete
        // Attempting to log this to the log may not work due to disk being full, but try anyway.
        String message = "Error deleting file lock '"+lockDirectoryName.toString()+"': "+e.getMessage();
        try
        {
          Logging.lock.error(message,e);
        }
        catch (Throwable e2)
        {
          // Ok, we failed, send it to standard out
          System.out.println(message);
          e.printStackTrace();
        }
        ie = e;
        ManifoldCF.sleep(100);
        continue;
      }
      catch (RuntimeException e)
      {
        // A runtime exception - try again to delete
        // Attempting to log this to the log may not work due to disk being full, but try anyway.
        String message = "Error deleting file lock '"+lockDirectoryName.toString()+"': "+e.getMessage();
        try
        {
          Logging.lock.error(message,e);
        }
        catch (Throwable e2)
        {
          // Ok, we failed, send it to standard out
          System.out.println(message);
          e.printStackTrace();
        }
        ie = e;
        ManifoldCF.sleep(100);
        continue;
      }
    }
    // Succeeded finally - but we need to rethrow any exceptions we got
    // NOTE(review): ie can only ever hold an Error or RuntimeException here
    // (the InterruptedException branch below is unreachable as written).
    if (ie != null)
    {
      if (ie instanceof InterruptedException)
        throw (InterruptedException)ie;
      if (ie instanceof Error)
        throw (Error)ie;
      if (ie instanceof RuntimeException)
        throw (RuntimeException)ie;
    }
  }

  /** Read the status word from the ".file" status file.
  * Must be called while holding the ".lock" mutex.
  *@return the status word, or 0 if the file does not exist or cannot be
  * read (a missing file is the canonical "unlocked" representation).
  */
  private synchronized int readFile()
    throws InterruptedException
  {
    try
    {
      InputStreamReader isr = new InputStreamReader(new FileInputStream(lockFileName), StandardCharsets.UTF_8);
      try
      {
        BufferedReader x = new BufferedReader(isr);
        try
        {
          StringBuilder sb = new StringBuilder();
          while (true)
          {
            int rval = x.read();
            if (rval == -1)
              break;
            sb.append((char)rval);
          }
          try
          {
            return Integer.parseInt(sb.toString());
          }
          catch (NumberFormatException e)
          {
            // We should never be in a situation where we can't parse a number we have supposedly written.
            // But, print a stack trace and throw IOException, so we recover.
            throw new IOException("Lock number read was not valid: "+e.getMessage());
          }
        }
        finally
        {
          x.close();
        }
      }
      catch (InterruptedIOException e)
      {
        throw new InterruptedException("Interrupted IO: "+e.getMessage());
      }
      catch (IOException e)
      {
        String message = "Could not read from lock file: '"+lockFileName.toString()+"'";
        try
        {
          Logging.lock.error(message,e);
        }
        catch (Throwable e2)
        {
          System.out.println(message);
          e.printStackTrace();
        }
        // Don't fail hard or there is no way to recover
        throw e;
      }
      finally
      {
        isr.close();
      }
    }
    catch (InterruptedIOException e)
    {
      throw new InterruptedException("Interrupted IO: "+e.getMessage());
    }
    catch (IOException e)
    {
      // No/unreadable status file means "unlocked".
      return 0;
    }
  }

  /** Write the status word to the ".file" status file.
  * Must be called while holding the ".lock" mutex. A value of 0 is stored
  * by deleting the file. A write failure leaves the cross-JVM lock state
  * unknowable, so all hard-failure paths terminate the process (exit -100)
  * rather than risk dangling locks being silently trusted.
  *@param value is the status word to persist.
  */
  private synchronized void writeFile(int value)
    throws InterruptedException
  {
    try
    {
      if (value == 0)
      {
        if (lockFileName.delete() == false)
          throw new IOException("Could not delete file '"+lockFileName.toString()+"'");
      }
      else
      {
        OutputStreamWriter osw = new OutputStreamWriter(new FileOutputStream(lockFileName), StandardCharsets.UTF_8);
        try
        {
          BufferedWriter x = new BufferedWriter(osw);
          try
          {
            x.write(Integer.toString(value));
          }
          finally
          {
            x.close();
          }
        }
        finally
        {
          osw.close();
        }
      }
    }
    catch (Error e)
    {
      // Couldn't write for some reason!  Write to BOTH stdout and the log, since we
      // can't be sure we will succeed at the latter.
      String message = "Couldn't write to lock file; hard error occurred. Shutting down process; locks may be left dangling. You must cleanup before restarting.";
      try
      {
        Logging.lock.error(message,e);
      }
      catch (Throwable e2)
      {
        System.out.println(message);
        e.printStackTrace();
      }
      System.exit(-100);
    }
    catch (RuntimeException e)
    {
      // Couldn't write for some reason!  Write to BOTH stdout and the log, since we
      // can't be sure we will succeed at the latter.
      String message = "Couldn't write to lock file; JVM error. Shutting down process; locks may be left dangling. You must cleanup before restarting.";
      try
      {
        Logging.lock.error(message,e);
      }
      catch (Throwable e2)
      {
        System.out.println(message);
        e.printStackTrace();
      }
      System.exit(-100);
    }
    catch (InterruptedIOException e)
    {
      throw new InterruptedException("Interrupted IO: "+e.getMessage());
    }
    catch (IOException e)
    {
      // Couldn't write for some reason!  Write to BOTH stdout and the log, since we
      // can't be sure we will succeed at the latter.
      String message = "Couldn't write to lock file; disk may be full. Shutting down process; locks may be left dangling. You must cleanup before restarting.";
      try
      {
        Logging.lock.error(message,e);
      }
      catch (Throwable e2)
      {
        System.out.println(message);
        e.printStackTrace();
      }
      System.exit(-100);
      // Hard failure is called for
      // throw new Error("Lock management system failure",e);
    }
  }

}
package com.saulpower.GreenWireTest.database;

import java.util.List;
import de.greenrobot.dao.DaoException;

// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT. Enable "keep" sections if you want to edit.
/**
 * Entity mapped to table LEDGER_GROUP.
 *
 * Active entity: once attached to a {@link DaoSession} (via __setDaoSession),
 * it can lazily resolve its to-one/to-many relations and perform
 * delete/update/refresh operations on itself.
 */
public class LedgerGroup extends SyncBase {

    private String guid;
    private String name;
    private String externalID;
    private Integer sortOrder;
    private String tagString;
    private Long tenantID;
    // Foreign key to SAVE_RESULT (not-null; primitive long).
    private transient long saveResultSaveResultId;
    private String dateLastModified;
    private transient Long syncBaseId;
    private Boolean isDeleted;
    private Integer version;
    private Long id;
    private String dateCreated;

    /** Used to resolve relations */
    private transient DaoSession daoSession;

    /** Used for active entity operations. */
    private transient LedgerGroupDao myDao;

    // Cached to-one target plus the key it was resolved for; a key change
    // invalidates the cache (see getSaveResult()).
    private SaveResult saveResult;
    private Long saveResult__resolvedKey;

    // Lazily-loaded to-many relations (see getAccounts()/getCustomValues()).
    private List<LedgerAccount> accounts;
    private List<CustomValue> customValues;

    /** No-arg constructor required by greenDAO. */
    public LedgerGroup() {
    }

    /** Key-only constructor; also records the concrete entity type on the SyncBase. */
    public LedgerGroup(Long id) {
        this.id = id;
        setDerivedEntityType(getClass().getCanonicalName());
    }

    /** All-fields constructor used internally by greenDAO when reading rows. */
    LedgerGroup(String guid, String name, String externalID, Integer sortOrder, String tagString, Long tenantID, long saveResultSaveResultId, String dateLastModified, Long syncBaseId, Boolean isDeleted, Integer version, Long id, String dateCreated) {
        this.guid = guid;
        this.name = name;
        this.externalID = externalID;
        this.sortOrder = sortOrder;
        this.tagString = tagString;
        this.tenantID = tenantID;
        this.saveResultSaveResultId = saveResultSaveResultId;
        this.dateLastModified = dateLastModified;
        this.syncBaseId = syncBaseId;
        this.isDeleted = isDeleted;
        this.version = version;
        this.id = id;
        this.dateCreated = dateCreated;
    }

    /** Public all-fields constructor (without syncBaseId); records the concrete entity type. */
    public LedgerGroup(String guid, String name, String externalID, Integer sortOrder, String tagString, Long tenantID, long saveResultSaveResultId, String dateLastModified, Boolean isDeleted, Integer version, Long id, String dateCreated) {
        this.guid = guid;
        this.name = name;
        this.externalID = externalID;
        this.sortOrder = sortOrder;
        this.tagString = tagString;
        this.tenantID = tenantID;
        this.saveResultSaveResultId = saveResultSaveResultId;
        this.dateLastModified = dateLastModified;
        this.isDeleted = isDeleted;
        this.version = version;
        this.id = id;
        this.dateCreated = dateCreated;
        setDerivedEntityType(getClass().getCanonicalName());
    }

    /** called by internal mechanisms, do not call yourself. */
    @Override
    public void __setDaoSession(DaoSession daoSession) {
        super.__setDaoSession(daoSession);
        this.daoSession = daoSession;
        myDao = daoSession != null ? daoSession.getLedgerGroupDao() : null;
    }

    public String getGuid() {
        return guid;
    }

    public void setGuid(String guid) {
        this.guid = guid;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getExternalID() {
        return externalID;
    }

    public void setExternalID(String externalID) {
        this.externalID = externalID;
    }

    public Integer getSortOrder() {
        return sortOrder;
    }

    public void setSortOrder(Integer sortOrder) {
        this.sortOrder = sortOrder;
    }

    public String getTagString() {
        return tagString;
    }

    public void setTagString(String tagString) {
        this.tagString = tagString;
    }

    public Long getTenantID() {
        return tenantID;
    }

    public void setTenantID(Long tenantID) {
        this.tenantID = tenantID;
    }

    public long getSaveResultSaveResultId() {
        return saveResultSaveResultId;
    }

    public void setSaveResultSaveResultId(long saveResultSaveResultId) {
        this.saveResultSaveResultId = saveResultSaveResultId;
    }

    public String getDateLastModified() {
        return dateLastModified;
    }

    public void setDateLastModified(String dateLastModified) {
        this.dateLastModified = dateLastModified;
    }

    public Long getSyncBaseId() {
        return syncBaseId;
    }

    public void setSyncBaseId(Long syncBaseId) {
        this.syncBaseId = syncBaseId;
    }

    public Boolean getIsDeleted() {
        return isDeleted;
    }

    public void setIsDeleted(Boolean isDeleted) {
        this.isDeleted = isDeleted;
    }

    public Integer getVersion() {
        return version;
    }

    public void setVersion(Integer version) {
        this.version = version;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getDateCreated() {
        return dateCreated;
    }

    public void setDateCreated(String dateCreated) {
        this.dateCreated = dateCreated;
    }

    /** To-one relationship, resolved on first access.
     * Re-resolves whenever the foreign key no longer matches the cached key.
     * @throws DaoException if the entity is detached (no DaoSession). */
    public SaveResult getSaveResult() {
        long __key = this.saveResultSaveResultId;
        if (saveResult__resolvedKey == null || !saveResult__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            SaveResultDao targetDao = daoSession.getSaveResultDao();
            SaveResult saveResultNew = targetDao.load(__key);
            synchronized (this) {
                saveResult = saveResultNew;
                saveResult__resolvedKey = __key;
            }
        }
        return saveResult;
    }

    /** Sets the to-one target and keeps the foreign key + resolved-key cache in sync.
     * @throws DaoException if saveResult is null (the column has a not-null constraint). */
    public void setSaveResult(SaveResult saveResult) {
        if (saveResult == null) {
            throw new DaoException("To-one property 'saveResultSaveResultId' has not-null constraint; cannot set to-one to null");
        }
        synchronized (this) {
            this.saveResult = saveResult;
            saveResultSaveResultId = saveResult.getId();
            saveResult__resolvedKey = saveResultSaveResultId;
        }
    }

    /** To-many relationship, resolved on first access (and after reset). Changes to to-many relations are not persisted, make changes to the target entity.
     * @throws DaoException if the entity is detached (no DaoSession). */
    public List<LedgerAccount> getAccounts() {
        if (accounts == null) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            LedgerAccountDao targetDao = daoSession.getLedgerAccountDao();
            List<LedgerAccount> accountsNew = targetDao._queryLedgerGroup_Accounts(id);
            synchronized (this) {
                // Double-checked under the lock so a concurrent resolver wins only once.
                if(accounts == null) {
                    accounts = accountsNew;
                }
            }
        }
        return accounts;
    }

    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    public synchronized void resetAccounts() {
        accounts = null;
    }

    /** To-many relationship, resolved on first access (and after reset). Changes to to-many relations are not persisted, make changes to the target entity.
     * @throws DaoException if the entity is detached (no DaoSession). */
    public List<CustomValue> getCustomValues() {
        if (customValues == null) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            CustomValueDao targetDao = daoSession.getCustomValueDao();
            List<CustomValue> customValuesNew = targetDao._queryLedgerGroup_CustomValues(id);
            synchronized (this) {
                // Double-checked under the lock so a concurrent resolver wins only once.
                if(customValues == null) {
                    customValues = customValuesNew;
                }
            }
        }
        return customValues;
    }

    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    public synchronized void resetCustomValues() {
        customValues = null;
    }

    /** Convenient call for {@link AbstractDao#delete(Object)}. Entity must be attached to an entity context. */
    public void delete() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.delete(this);
    }

    /** Convenient call for {@link AbstractDao#update(Object)}. Entity must be attached to an entity context. */
    public void update() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.update(this);
    }

    /** Convenient call for {@link AbstractDao#refresh(Object)}. Entity must be attached to an entity context. */
    public void refresh() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.refresh(this);
    }

}
/*******************************************************************************
 * Copyright 2019 See AUTHORS file
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package org.mini2Dx.core.collections;

import org.mini2Dx.core.exception.MdxException;

import java.util.Iterator;
import java.util.NoSuchElementException;

/** A resizable double-ended queue of primitive booleans, backed by a circular
 * array. Head and tail indices wrap around the backing array; the array grows
 * (doubling) only when full. Not thread-safe. */
public class BooleanQueue {
	/** Contains the values in the queue. Head and tail indices go in a circle around this array, wrapping at the end. */
	protected boolean[] values;

	/** Index of first element. Logically smaller than tail. Unless empty, it points to a valid element inside queue. */
	protected int head = 0;

	/** Index of last element. Logically bigger than head. Usually points to an empty position, but points to the head when full
	 * (size == values.length). */
	protected int tail = 0;

	/** Number of elements in the queue. */
	public int size = 0;

	// Lazily-created, reused iterable (see iterator()); iterators are recycled
	// in pairs so nested iteration is detected and rejected.
	private BooleanQueueIterable iterable;

	/** Creates a new Queue which can hold 16 values without needing to resize backing array. */
	public BooleanQueue () {
		this(16);
	}

	/** Creates a new Queue which can hold the specified number of values without needing to resize backing array. */
	public BooleanQueue (int initialSize) {
		this.values = new boolean[initialSize];
	}

	/** Append given object to the tail. (enqueue to tail) Unless backing array needs resizing, operates in O(1) time.
	 * @param object the value to append */
	public void addLast (boolean object) {
		boolean[] values = this.values;

		if (size == values.length) {
			resize(values.length << 1);// * 2
			values = this.values;
		}

		values[tail++] = object;
		if (tail == values.length) {
			tail = 0;
		}
		size++;
	}

	/** Prepend given object to the head. (enqueue to head) Unless backing array needs resizing, operates in O(1) time.
	 * @see #addLast(boolean)
	 * @param object the value to prepend */
	public void addFirst (boolean object) {
		boolean[] values = this.values;

		if (size == values.length) {
			resize(values.length << 1);// * 2
			values = this.values;
		}

		int head = this.head;
		head--;
		if (head == -1) {
			head = values.length - 1;
		}
		values[head] = object;

		this.head = head;
		this.size++;
	}

	/** Increases the size of the backing array to accommodate the specified number of additional items. Useful before adding many
	 * items to avoid multiple backing array resizes. */
	public void ensureCapacity (int additional) {
		final int needed = size + additional;
		if (values.length < needed) {
			resize(needed);
		}
	}

	/** Resize backing array. newSize must be bigger than current size.
	 * After the copy the queue is re-based so that head == 0 and tail == size. */
	protected void resize (int newSize) {
		final boolean[] values = this.values;
		final int head = this.head;
		final int tail = this.tail;

		final boolean[] newArray = new boolean[newSize];
		if (head < tail) {
			// Continuous
			System.arraycopy(values, head, newArray, 0, tail - head);
		} else if (size > 0) {
			// Wrapped: copy [head..end) then [0..tail) into one contiguous run.
			final int rest = values.length - head;
			System.arraycopy(values, head, newArray, 0, rest);
			System.arraycopy(values, 0, newArray, rest, tail);
		}
		this.values = newArray;
		this.head = 0;
		this.tail = size;
	}

	/** Remove the first item from the queue. (dequeue from head) Always O(1).
	 * @return removed object
	 * @throws NoSuchElementException when queue is empty */
	public boolean removeFirst () {
		if (size == 0) {
			// Underflow
			throw new NoSuchElementException("Queue is empty.");
		}

		final boolean[] values = this.values;

		final boolean result = values[head];
		head++;
		if (head == values.length) {
			head = 0;
		}
		size--;

		return result;
	}

	/** Remove the last item from the queue. (dequeue from tail) Always O(1).
	 * @see #removeFirst()
	 * @return removed object
	 * @throws NoSuchElementException when queue is empty */
	public boolean removeLast () {
		if (size == 0) {
			throw new NoSuchElementException("Queue is empty.");
		}

		final boolean[] values = this.values;
		int tail = this.tail;
		tail--;
		if (tail == -1) {
			tail = values.length - 1;
		}
		final boolean result = values[tail];
		this.tail = tail;
		size--;

		return result;
	}

	/** Returns the index of first occurrence of value in the queue, or -1 if no such value exists.
	 * Indexing is logical (0 == head), not a backing-array index.
	 * @return An index of first occurrence of value in queue or -1 if no such value exists */
	public int indexOf (boolean value) {
		if (size == 0) return -1;
		boolean[] values = this.values;
		final int head = this.head, tail = this.tail;
		if (head < tail) {
			for (int i = head; i < tail; i++)
				if (values[i] == value) return i - head;
		} else {
			// Wrapped: scan [head..end) then [0..tail).
			for (int i = head, n = values.length; i < n; i++)
				if (values[i] == value) return i - head;
			for (int i = 0; i < tail; i++)
				if (values[i] == value) return i + values.length - head;
		}
		return -1;
	}

	/** Removes the first instance of the specified value in the queue.
	 * @return true if value was found and removed, false otherwise */
	public boolean removeValue (boolean value) {
		int index = indexOf(value);
		if (index == -1) return false;
		removeIndex(index);
		return true;
	}

	/** Removes and returns the item at the specified (logical, 0 == head) index.
	 * Shifts the shorter side of the buffer to fill the gap.
	 * @throws IndexOutOfBoundsException when the index is negative or >= size */
	public boolean removeIndex (int index) {
		if (index < 0) throw new IndexOutOfBoundsException("index can't be < 0: " + index);
		if (index >= size) throw new IndexOutOfBoundsException("index can't be >= size: " + index + " >= " + size);

		boolean[] values = this.values;
		int head = this.head, tail = this.tail;
		index += head;
		boolean value;
		if (head < tail) { // index is between head and tail.
			value = values[index];
			System.arraycopy(values, index + 1, values, index, tail - index);
			this.tail--;
		} else if (index >= values.length) { // index is between 0 and tail.
			index -= values.length;
			value = values[index];
			System.arraycopy(values, index + 1, values, index, tail - index);
			this.tail--;
		} else { // index is between head and values.length.
			value = values[index];
			System.arraycopy(values, head, values, head + 1, index - head);
			this.head++;
			if (this.head == values.length) {
				this.head = 0;
			}
		}
		size--;
		return value;
	}

	/** Returns true if the queue has one or more items. */
	public boolean notEmpty () {
		return size > 0;
	}

	/** Returns true if the queue is empty. */
	public boolean isEmpty () {
		return size == 0;
	}

	/** Returns the first (head) item in the queue (without removing it).
	 * @see #addFirst(boolean)
	 * @see #removeFirst()
	 * @throws NoSuchElementException when queue is empty */
	public boolean first () {
		if (size == 0) {
			// Underflow
			throw new NoSuchElementException("Queue is empty.");
		}
		return values[head];
	}

	/** Returns the last (tail) item in the queue (without removing it).
	 * @see #addLast(boolean)
	 * @see #removeLast()
	 * @throws NoSuchElementException when queue is empty */
	public boolean last () {
		if (size == 0) {
			// Underflow
			throw new NoSuchElementException("Queue is empty.");
		}
		final boolean[] values = this.values;
		int tail = this.tail;
		tail--;
		if (tail == -1) {
			tail = values.length - 1;
		}
		return values[tail];
	}

	/** Retrieves the value in queue without removing it. Indexing is from the front to back, zero based. Therefore get(0) is the
	 * same as {@link #first()}.
	 * @throws IndexOutOfBoundsException when the index is negative or greater or equal than size */
	public boolean get (int index) {
		if (index < 0) throw new IndexOutOfBoundsException("index can't be < 0: " + index);
		if (index >= size) throw new IndexOutOfBoundsException("index can't be >= size: " + index + " >= " + size);
		final boolean[] values = this.values;

		int i = head + index;
		if (i >= values.length) {
			i -= values.length;
		}
		return values[i];
	}

	/** Removes all values from this queue. The backing array holds primitives,
	 * so no per-element clearing is needed; only the indices are reset (O(1)). */
	public void clear () {
		if (size == 0) return;

		this.head = 0;
		this.tail = 0;
		this.size = 0;
	}

	/** Returns an iterator for the items in the queue. Remove is supported.
	 * Iterators are reused in pairs; nested use of the same pair throws MdxException. */
	public Iterator<Boolean> iterator () {
		if (iterable == null) iterable = new BooleanQueueIterable(this);
		return iterable.iterator();
	}

	public String toString () {
		if (size == 0) {
			return "[]";
		}
		final boolean[] values = this.values;
		final int head = this.head;
		final int tail = this.tail;

		StringBuilder sb = new StringBuilder(64);
		sb.append('[');
		sb.append(values[head]);
		for (int i = (head + 1) % values.length; i != tail; i = (i + 1) % values.length) {
			sb.append(", ").append(values[i]);
		}
		sb.append(']');
		return sb.toString();
	}

	/** Like {@link #toString()} but with a custom separator and no brackets;
	 * returns the empty string for an empty queue. */
	public String toString (String separator) {
		if (size == 0) return "";
		final boolean[] values = this.values;
		final int head = this.head;
		final int tail = this.tail;

		StringBuilder sb = new StringBuilder(64);
		sb.append(values[head]);
		for (int i = (head + 1) % values.length; i != tail; i = (i + 1) % values.length)
			sb.append(separator).append(values[i]);
		return sb.toString();
	}

	/** Order-sensitive hash over the logical contents (independent of where
	 * head happens to sit in the backing array). */
	public int hashCode () {
		final int size = this.size;
		final boolean[] values = this.values;
		final int backingLength = values.length;
		int index = this.head;

		int hash = size + 1;
		for (int s = 0; s < size; s++) {
			final boolean value = values[index];
			hash *= 2;
			hash += value ? 1 : 0;
			index++;
			if (index == backingLength) index = 0;
		}

		return hash;
	}

	/** Two queues are equal when they hold the same logical sequence,
	 * regardless of backing-array layout. */
	public boolean equals (Object o) {
		if (this == o) return true;
		if (!(o instanceof BooleanQueue)) return false;

		BooleanQueue q = (BooleanQueue)o;
		final int size = this.size;

		if (q.size != size) return false;

		final boolean[] myValues = this.values;
		final int myBackingLength = myValues.length;
		final boolean[] itsValues = q.values;
		final int itsBackingLength = itsValues.length;

		int myIndex = head;
		int itsIndex = q.head;
		for (int s = 0; s < size; s++) {
			boolean myValue = myValues[myIndex];
			boolean itsValue = itsValues[itsIndex];

			if (myValue != itsValue) return false;
			myIndex++;
			itsIndex++;
			if (myIndex == myBackingLength) myIndex = 0;
			if (itsIndex == itsBackingLength) itsIndex = 0;
		}
		return true;
	}

	/** Iterator over a BooleanQueue by logical index. The {@code valid} flag is
	 * managed by BooleanQueueIterable to detect nested iteration. */
	static public class BooleanQueueIterator implements Iterator<Boolean>, Iterable<Boolean> {
		private final BooleanQueue queue;
		private final boolean allowRemove;
		int index;
		boolean valid = true;

		public BooleanQueueIterator (BooleanQueue queue) {
			this(queue, true);
		}

		public BooleanQueueIterator (BooleanQueue queue, boolean allowRemove) {
			this.queue = queue;
			this.allowRemove = allowRemove;
		}

		public boolean hasNext () {
			if (!valid) {
				throw new MdxException("#iterator() cannot be used nested.");
			}
			return index < queue.size;
		}

		public Boolean next () {
			if (index >= queue.size) throw new NoSuchElementException(String.valueOf(index));
			if (!valid) {
				throw new MdxException("#iterator() cannot be used nested.");
			}
			return queue.get(index++);
		}

		public void remove () {
			if (!allowRemove) throw new MdxException("Remove not allowed.");
			// Step back to the element just returned by next(), then remove it.
			index--;
			queue.removeIndex(index);
		}

		public void reset () {
			index = 0;
		}

		public Iterator<Boolean> iterator () {
			return this;
		}
	}

	/** Iterable that recycles two iterators, alternating between them so that
	 * starting a new iteration invalidates the previous one (nested-use guard). */
	static public class BooleanQueueIterable implements Iterable<Boolean> {
		private final BooleanQueue queue;
		private final boolean allowRemove;
		private BooleanQueueIterator iterator1, iterator2;

		public BooleanQueueIterable (BooleanQueue queue) {
			this(queue, true);
		}

		public BooleanQueueIterable (BooleanQueue queue, boolean allowRemove) {
			this.queue = queue;
			this.allowRemove = allowRemove;
		}

		public Iterator<Boolean> iterator () {
			if (iterator1 == null) {
				iterator1 = new BooleanQueueIterator(queue, allowRemove);
				iterator2 = new BooleanQueueIterator(queue, allowRemove);
			}
			if (!iterator1.valid) {
				iterator1.index = 0;
				iterator1.valid = true;
				iterator2.valid = false;
				return iterator1;
			}
			iterator2.index = 0;
			iterator2.valid = true;
			iterator1.valid = false;
			return iterator2;
		}
	}
}
/* * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.user.store.configuration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.w3c.dom.Attr; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.core.AbstractAdmin; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.user.store.configuration.beans.RandomPassword; import org.wso2.carbon.identity.user.store.configuration.beans.RandomPasswordContainer; import org.wso2.carbon.identity.user.store.configuration.cache.RandomPasswordContainerCache; import org.wso2.carbon.identity.user.store.configuration.dto.PropertyDTO; import org.wso2.carbon.identity.user.store.configuration.dto.UserStoreDTO; import org.wso2.carbon.identity.user.store.configuration.internal.UserStoreConfigListenersHolder; import org.wso2.carbon.identity.user.store.configuration.listener.UserStoreConfigListener; import org.wso2.carbon.identity.user.store.configuration.utils.IdentityUserStoreMgtException; import org.wso2.carbon.identity.user.store.configuration.utils.SecondaryUserStoreConfigurationUtil; import 
org.wso2.carbon.identity.user.store.configuration.utils.UserStoreConfigurationConstant; import org.wso2.carbon.ndatasource.common.DataSourceException; import org.wso2.carbon.ndatasource.core.DataSourceManager; import org.wso2.carbon.ndatasource.core.services.WSDataSourceMetaInfo; import org.wso2.carbon.ndatasource.core.services.WSDataSourceMetaInfo.WSDataSourceDefinition; import org.wso2.carbon.ndatasource.rdbms.RDBMSConfiguration; import org.wso2.carbon.user.api.Properties; import org.wso2.carbon.user.api.Property; import org.wso2.carbon.user.api.RealmConfiguration; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.core.UserCoreConstants; import org.wso2.carbon.user.core.UserStoreConfigConstants; import org.wso2.carbon.user.core.common.AbstractUserStoreManager; import org.wso2.carbon.user.core.config.XMLProcessorUtils; import org.wso2.carbon.user.core.jdbc.JDBCRealmConstants; import org.wso2.carbon.user.core.tenant.TenantCache; import org.wso2.carbon.user.core.tenant.TenantIdKey; import org.wso2.carbon.user.core.tracker.UserStoreManagerRegistry; import org.wso2.carbon.user.core.util.UserCoreUtil; import org.wso2.carbon.utils.CarbonUtils; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.xml.sax.SAXException; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import 
java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

/**
 * Admin service for managing secondary user store configurations: listing, adding, editing,
 * renaming, enabling/disabling and deleting user store definitions that are persisted as
 * per-domain XML files (per tenant, or under the super-tenant deployment directory).
 */
public class UserStoreConfigAdminService extends AbstractAdmin {
    public static final Log log = LogFactory.getLog(UserStoreConfigAdminService.class);
    public static final String DISABLED = "Disabled";
    public static final String DESCRIPTION = "Description";
    public static final String USERSTORES = "userstores";
    // NOTE(review): no File.separator between the repository path and USERSTORES —
    // presumably getCarbonRepository() returns a path ending with a separator; confirm.
    private static final String deploymentDirectory = CarbonUtils.getCarbonRepository() + USERSTORES;
    XMLProcessorUtils xmlProcessorUtils = new XMLProcessorUtils();

    /**
     * Get details of current secondary user store configurations.
     *
     * Real passwords are never sent to the caller: each encrypt-marked property is replaced by a
     * random phrase, and the random-phrase/password pairs are cached server-side (keyed by the
     * store's unique id) so a later edit can map the phrase back to the original password.
     *
     * @return : Details of all the configured secondary user stores, or null when only the
     *           primary store exists
     * @throws IdentityUserStoreMgtException on failure to read the realm configuration
     */
    public UserStoreDTO[] getSecondaryRealmConfigurations() throws IdentityUserStoreMgtException {
        ArrayList<UserStoreDTO> domains = new ArrayList<UserStoreDTO>();
        RealmConfiguration secondaryRealmConfiguration = null;
        try {
            secondaryRealmConfiguration = CarbonContext.getThreadLocalCarbonContext().getUserRealm().
                    getRealmConfiguration().getSecondaryRealmConfig();
        } catch (UserStoreException e) {
            String errorMessage = "Error while retrieving user store configurations";
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }
        //not editing primary store
        if (secondaryRealmConfiguration == null) {
            return null;
        } else {
            // Walk the chain of secondary realm configs; each link becomes one UserStoreDTO.
            do {
                Map<String, String> userStoreProperties = secondaryRealmConfiguration.getUserStoreProperties();
                UserStoreDTO userStoreDTO = new UserStoreDTO();
                // Reuse the persisted unique id when present; otherwise mint a fresh one.
                String uuid = userStoreProperties.get(UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT);
                if (uuid == null) {
                    uuid = UUID.randomUUID().toString();
                }
                String randomPhrase = UserStoreConfigurationConstant.RANDOM_PHRASE_PREFIX + uuid;
                String className = secondaryRealmConfiguration.getUserStoreClass();
                userStoreDTO.setClassName(secondaryRealmConfiguration.getUserStoreClass());
                userStoreDTO.setDescription(secondaryRealmConfiguration.getUserStoreProperty(DESCRIPTION));
                userStoreDTO.setDomainId(secondaryRealmConfiguration.getUserStoreProperty(UserStoreConfigConstants.DOMAIN_NAME));
                if (userStoreProperties.get(DISABLED) != null) {
                    userStoreDTO.setDisabled(Boolean.valueOf(userStoreProperties.get(DISABLED)));
                }
                userStoreProperties.put("Class", className);
                userStoreProperties.put(UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT, uuid);
                // Cache random-phrase <-> real-password pairs so edits can be decoded later.
                RandomPassword[] randomPasswords = getRandomPasswordProperties(className, randomPhrase,
                        secondaryRealmConfiguration);
                if (randomPasswords != null) {
                    updatePasswordContainer(randomPasswords, uuid);
                }
                // Temporarily mask password properties with the random phrase while the map is
                // converted into the outgoing DTO, then restore the originals below.
                String originalPassword = null;
                if (userStoreProperties.containsKey(UserStoreConfigConstants.connectionPassword)) {
                    originalPassword = userStoreProperties.get(UserStoreConfigConstants.connectionPassword);
                    userStoreProperties.put(UserStoreConfigConstants.connectionPassword, randomPhrase);
                }
                if (userStoreProperties.containsKey(JDBCRealmConstants.PASSWORD)) {
                    originalPassword = userStoreProperties.get(JDBCRealmConstants.PASSWORD);
                    userStoreProperties.put(JDBCRealmConstants.PASSWORD, randomPhrase);
                }
                userStoreDTO.setProperties(convertMapToArray(userStoreProperties));

                //Now revert back to original password
                if (userStoreProperties.containsKey(UserStoreConfigConstants.connectionPassword)) {
                    if (originalPassword != null) {
                        userStoreProperties.put(UserStoreConfigConstants.connectionPassword, originalPassword);
                    }
                }
                if (userStoreProperties.containsKey(JDBCRealmConstants.PASSWORD)) {
                    if (originalPassword != null) {
                        userStoreProperties.put(JDBCRealmConstants.PASSWORD, originalPassword);
                    }
                }

                domains.add(userStoreDTO);
                secondaryRealmConfiguration = secondaryRealmConfiguration.getSecondaryRealmConfig();
            } while (secondaryRealmConfiguration != null);
        }
        return domains.toArray(new UserStoreDTO[domains.size()]);
    }

    /**
     * Get user store properties of a given active user store manager as an array.
     *
     * @param properties: properties of the user store
     * @return key#value pairs as PropertyDTO objects
     */
    private PropertyDTO[] convertMapToArray(Map<String, String> properties) {
        Set<Map.Entry<String, String>> propertyEntries = properties.entrySet();
        ArrayList<PropertyDTO> propertiesList = new ArrayList<PropertyDTO>();
        String key;
        String value;
        for (Map.Entry<String, String> entry : propertyEntries) {
            key = (String) entry.getKey();
            value = (String) entry.getValue();
            PropertyDTO propertyDTO = new PropertyDTO(key, value);
            propertiesList.add(propertyDTO);
        }
        return propertiesList.toArray(new PropertyDTO[propertiesList.size()]);
    }

    /**
     * Get available user store manager implementations.
     *
     * @return: Available implementations for user store managers
     */
    public String[] getAvailableUserStoreClasses() throws IdentityUserStoreMgtException {
        Set<String> classNames = UserStoreManagerRegistry.getUserStoreManagerClasses();
        return classNames.toArray(new String[classNames.size()]);
    }

    /**
     * Get User Store Manager default properties for a given implementation.
     *
     * Ensures the unique-id property is always present in the optional property list so the
     * UI/consumer can round-trip it even when the implementation does not declare it.
     *
     * @param className:Implementation class name for the user store
     * @return : list of default properties(mandatory+optional)
     */
    public Properties getUserStoreManagerProperties(String className) throws IdentityUserStoreMgtException {
        Properties properties = UserStoreManagerRegistry.getUserStoreProperties(className);

        if (properties != null && properties.getOptionalProperties() != null) {

            Property[] optionalProperties = properties.getOptionalProperties();

            boolean foundUniqueIDProperty = false;
            for (Property property : optionalProperties) {
                if (UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT.equals(property.getName())) {
                    foundUniqueIDProperty = true;
                    break;
                }
            }
            if (!foundUniqueIDProperty) {
                if (log.isDebugEnabled()) {
                    log.debug("Inserting property : " + UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT +
                            " since " + UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT +
                            " property not defined as an optional property in " + className + " class");
                }
                List<Property> optionalPropertyList = new ArrayList<>(Arrays.asList(optionalProperties));
                Property uniqueIDProperty = new Property(
                        UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT, "", "", null);
                optionalPropertyList.add(uniqueIDProperty);

                properties.setOptionalProperties(
                        optionalPropertyList.toArray(new Property[optionalPropertyList.size()]));
            }
        }

        return properties;
    }

    /**
     * Save the sent configuration to xml file.
     *
     * @param userStoreDTO: Represent the configuration of user store
     * @throws IdentityUserStoreMgtException when the domain is invalid, already exists, or the
     *                                       configuration cannot be written
     */
    public void addUserStore(UserStoreDTO userStoreDTO) throws IdentityUserStoreMgtException {
        String domainName = userStoreDTO.getDomainId();
        try {
            // Validate the domain name and reject reserved 'FEDERATED' prefixes before writing.
            xmlProcessorUtils.isValidDomain(domainName, true);
            validateForFederatedDomain(domainName);
        } catch (UserStoreException e) {
            String errorMessage = e.getMessage();
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        File userStoreConfigFile = createConfigurationFile(domainName);
        // This is a redundant check
        if (userStoreConfigFile.exists()) {
            String errorMessage = "Cannot add user store " + domainName + ". User store already exists.";
            log.error(errorMessage);
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        try {
            writeUserMgtXMLFile(userStoreConfigFile, userStoreDTO, false);
            if (log.isDebugEnabled()) {
                log.debug("New user store successfully written to the file" + userStoreConfigFile.getAbsolutePath());
            }
        } catch (IdentityUserStoreMgtException e) {
            String errorMessage = e.getMessage();
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }
    }

    /**
     * Edit currently existing user store.
     *
     * @param userStoreDTO: Represent the configuration of user store
     * @throws IdentityUserStoreMgtException when the domain is invalid or the file cannot be written
     */
    public void editUserStore(UserStoreDTO userStoreDTO) throws IdentityUserStoreMgtException {
        String domainName = userStoreDTO.getDomainId();
        boolean isValidDomain = false;
        try {
            isValidDomain = xmlProcessorUtils.isValidDomain(domainName, false);
            validateForFederatedDomain(domainName);
        } catch (UserStoreException e) {
            String errorMessage = e.getMessage();
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        if (isValidDomain) {
            File userStoreConfigFile = createConfigurationFile(domainName);
            // Editing requires that the configuration file already exists on disk.
            if (!userStoreConfigFile.exists()) {
                String msg = "Cannot edit user store " + domainName + ". User store cannot be edited.";
                log.error(msg);
                throw new IdentityUserStoreMgtException(msg);
            }
            try {
                writeUserMgtXMLFile(userStoreConfigFile, userStoreDTO, true);
                if (log.isDebugEnabled()) {
                    log.debug("Edited user store successfully written to the file" + userStoreConfigFile.getAbsolutePath());
                }
            } catch (IdentityUserStoreMgtException e) {
                String errorMessage = e.getMessage();
                log.error(errorMessage, e);
                throw new IdentityUserStoreMgtException(errorMessage);
            }
        } else {
            String errorMessage = "Trying to edit an invalid domain : " + domainName;
            throw new IdentityUserStoreMgtException(errorMessage);
        }
    }

    /**
     * Edit currently existing user store with a change of its domain name.
     *
     * Renames the persisted domain (listeners + UM_DOMAIN table), deletes the old config file and
     * writes a new one under the new name.
     *
     * @param userStoreDTO: Represent the configuration of new user store
     * @param previousDomainName the domain name being replaced
     * @throws IdentityUserStoreMgtException when either name is invalid, the previous file is
     *                                       missing, the new file exists, or persistence fails
     */
    public void editUserStoreWithDomainName(String previousDomainName, UserStoreDTO userStoreDTO)
            throws IdentityUserStoreMgtException{
        boolean isDebugEnabled = log.isDebugEnabled();
        String domainName = userStoreDTO.getDomainId();
        if (isDebugEnabled) {
            log.debug("Changing user store " + previousDomainName + " to " + domainName);
        }
        try {
            validateForFederatedDomain(domainName);
        } catch (UserStoreException e) {
            String errorMessage = e.getMessage();
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        File userStoreConfigFile = null;
        File previousUserStoreConfigFile = null;

        // Domain names map onto file names with '.' replaced by '_'.
        String fileName = domainName.replace(".", "_");
        String previousFileName = previousDomainName.replace(".", "_");

        // Reject names that could escape the userstores directory (path traversal guard).
        if(!IdentityUtil.isValidFileName(fileName)){
            String message = "Provided domain name : '" + domainName + "' is invalid.";
            log.error(message);
            throw new IdentityUserStoreMgtException(message);
        }

        if(!IdentityUtil.isValidFileName(previousFileName)){
            String message = "Provided domain name : '" + previousDomainName + "' is invalid.";
            log.error(message);
            throw new IdentityUserStoreMgtException(message);
        }

        int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
        if (tenantId == MultitenantConstants.SUPER_TENANT_ID) {
            // Super tenant keeps user store configs under the carbon deployment directory.
            File userStore = new File(deploymentDirectory);
            if (!userStore.exists()) {
                if (new File(deploymentDirectory).mkdir()) {
                    //folder 'userstores' created
                    log.info("folder 'userstores' created for super tenant " + fileName);
                } else {
                    log.error("Error at creating 'userstores' directory to store configurations for super tenant");
                }
            }
            userStoreConfigFile = new File(deploymentDirectory + File.separator + fileName + ".xml");
            previousUserStoreConfigFile = new File(deploymentDirectory + File.separator + previousFileName + ".xml");
        } else {
            // Other tenants keep configs under <tenants-dir>/<tenantId>/userstores.
            String tenantFilePath = CarbonUtils.getCarbonTenantsDirPath();
            tenantFilePath = tenantFilePath + File.separator + tenantId + File.separator + USERSTORES;
            File userStore = new File(tenantFilePath);
            if (!userStore.exists()) {
                if (new File(tenantFilePath).mkdir()) {
                    //folder 'userstores' created
                    log.info("folder 'userstores' created for tenant " + tenantId);
                } else {
                    log.error("Error at creating 'userstores' directory to store configurations for tenant:" + tenantId);
                }
            }
            userStoreConfigFile = new File(tenantFilePath + File.separator + fileName + ".xml");
            previousUserStoreConfigFile = new File(tenantFilePath + File.separator + previousFileName + ".xml");
        }

        if (!previousUserStoreConfigFile.exists()) {
            String errorMessage = "Cannot update user store domain name. Previous domain name " + previousDomainName + " does not exists.";
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        if (userStoreConfigFile.exists()) {
            String errorMessage = "Cannot update user store domain name. An user store already exists with new domain " + domainName + ".";
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        try {
            // Run pre user-store name update listeners
            List<UserStoreConfigListener> userStoreConfigListeners = UserStoreConfigListenersHolder.getInstance()
                    .getUserStoreConfigListeners();
            for (UserStoreConfigListener userStoreConfigListener : userStoreConfigListeners) {
                userStoreConfigListener.onUserStoreNamePreUpdate(CarbonContext.getThreadLocalCarbonContext().getTenantId
                        (), previousDomainName, domainName);
            }

            // Update persisted domain name
            AbstractUserStoreManager userStoreManager = (AbstractUserStoreManager) CarbonContext.
                    getThreadLocalCarbonContext().getUserRealm().getUserStoreManager();
            userStoreManager.updatePersistedDomain(previousDomainName, domainName);
            if (log.isDebugEnabled()) {
                log.debug("Renamed persisted domain name from" + previousDomainName + " to " + domainName +
                        " of tenant:" + tenantId + " from UM_DOMAIN.");
            }
        } catch (UserStoreException e) {
            String errorMessage = "Error while updating user store domain : " + domainName;
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        try {
            // NOTE(review): delete() result is ignored — a failed delete leaves both old and new
            // config files on disk; consider logging the failure.
            previousUserStoreConfigFile.delete();
            writeUserMgtXMLFile(userStoreConfigFile, userStoreDTO, true);
        } catch (IdentityUserStoreMgtException e) {
            String errorMessage = e.getMessage();
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }
    }

    /**
     * Deletes the user store specified.
     *
     * @param domainName: domain name of the user stores to be deleted
     */
    public void deleteUserStore(String domainName) throws IdentityUserStoreMgtException {
        deleteUserStoresSet(new String[] {domainName});
    }

    /**
     * Delete the given list of user stores.
     *
     * For each domain: runs pre-delete listeners, removes the persisted domain, evicts realm and
     * tenant caches, then deletes the on-disk configuration file.
     *
     * @param domains: domain names of user stores to be deleted
     */
    public void deleteUserStoresSet(String[] domains) throws IdentityUserStoreMgtException {
        boolean isDebugEnabled = log.isDebugEnabled();

        if (domains == null || domains.length <= 0) {
            throw new IdentityUserStoreMgtException("No selected user stores to delete");
        }

        // A user may not delete the user store their own account lives in.
        if (!validateDomainsForDelete(domains)) {
            if (log.isDebugEnabled()) {
                log.debug("Failed to delete user store : No privileges to delete own user store configurations ");
            }
            throw new IdentityUserStoreMgtException("No privileges to delete own user store configurations.");
        }

        int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();

        String path;
        if (tenantId == MultitenantConstants.SUPER_TENANT_ID) {
            path = deploymentDirectory;
        } else {
            path = CarbonUtils.getCarbonTenantsDirPath() + File.separator + tenantId + File.separator + USERSTORES;
        }
        File file = new File(path);
        for (String domainName : domains) {
            if (isDebugEnabled) {
                log.debug("Deleting, .... " + domainName + " domain.");
            }

            try {
                // Run pre user-store name update listeners
                List<UserStoreConfigListener> userStoreConfigListeners = UserStoreConfigListenersHolder.getInstance()
                        .getUserStoreConfigListeners();
                for (UserStoreConfigListener userStoreConfigListener : userStoreConfigListeners) {
                    userStoreConfigListener.onUserStorePreDelete(CarbonContext.getThreadLocalCarbonContext().getTenantId
                            (), domainName);
                }

                // Delete persisted domain name
                AbstractUserStoreManager userStoreManager = (AbstractUserStoreManager) CarbonContext.
                        getThreadLocalCarbonContext().getUserRealm().getUserStoreManager();
                userStoreManager.deletePersistedDomain(domainName);
                if (isDebugEnabled) {
                    log.debug("Removed persisted domain name: " + domainName + " of tenant:" + tenantId + " from " + "UM_DOMAIN.");
                }
                //clear cache to make the modification effective
                UserCoreUtil.getRealmService().clearCachedUserRealm(tenantId);
                TenantCache.getInstance().clearCacheEntry(new TenantIdKey(tenantId));
            } catch (UserStoreException e) {
                String errorMessage = "Error while deleting user store : " + domainName;
                log.error(errorMessage, e);
                throw new IdentityUserStoreMgtException(errorMessage);
            }

            // Delete file
            deleteFile(file, domainName.replace(".", "_").concat(".xml"));
        }
    }

    /**
     * Returns false when the deletion set contains the domain of the currently authenticated user
     * (a user must not delete their own user store).
     */
    private boolean validateDomainsForDelete(String[] domains) {
        String userDomain = IdentityUtil.extractDomainFromName(PrivilegedCarbonContext.getThreadLocalCarbonContext()
                .getUsername());
        for (String domain : domains) {
            if (domain.equalsIgnoreCase(userDomain)) {
                //Trying to delete own domain
                return false;
            }
        }
        return true;
    }

    /**
     * Should not allow to have domain prefixed with 'FEDERATED', to avoid conflicting with federated user domain.
     *
     * @param domain : domain name
     * @throws UserStoreException when the domain uses the reserved prefix
     */
    private void validateForFederatedDomain(String domain) throws UserStoreException {
        if (IdentityUtil.isNotBlank(domain) && domain.toUpperCase().startsWith("FEDERATED")) {
            throw new UserStoreException("'FEDERATED' is a reserved user domain prefix. " +
                    "Please start the domain name in a different manner.");
        }
    }

    /**
     * Adds an array of properties as child <Property> elements.
     *
     * When editing, masked password values (random phrases) are first mapped back to the real
     * passwords via the server-side cache; encrypt-marked properties are then encrypted before
     * being written.
     *
     * @param userStoreClass user store implementation class, used to look up mandatory properties
     * @param propertyDTOs : List of user store properties
     * @param doc: Document
     * @param parent : Parent element of the properties to be added
     * @param editSecondaryUserStore true when this write is an edit of an existing store
     */
    private void addProperties(String userStoreClass, PropertyDTO[] propertyDTOs, Document doc, Element parent,
                               boolean editSecondaryUserStore) throws IdentityUserStoreMgtException {

        RandomPasswordContainer randomPasswordContainer = null;
        if (editSecondaryUserStore) {
            String uniqueID = getUniqueIDFromUserDTO(propertyDTOs);
            // getAndRemove: the cached container is single-use; a cache-expired entry aborts the edit.
            randomPasswordContainer = getAndRemoveRandomPasswordContainer(uniqueID);
            if (randomPasswordContainer == null) {
                String errorMsg = "randomPasswordContainer is null for uniqueID therefore " +
                        "proceeding without encryption=" + uniqueID;
                log.error(errorMsg);//need this error log to further identify the reason for throwing this exception
                throw new IdentityUserStoreMgtException("Longer delay causes the edit operation be to " +
                        "abandoned");
            }
        }

        //First check for mandatory field with #encrypt
        Property[] mandatoryProperties = getMandatoryProperties(userStoreClass);
        for (PropertyDTO propertyDTO : propertyDTOs) {
            String propertyDTOName = propertyDTO.getName();
            // The unique id is bookkeeping only; never persist it as a <Property>.
            if (UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT.equalsIgnoreCase(propertyDTOName)) {
                continue;
            }

            String propertyDTOValue = propertyDTO.getValue();
            if (propertyDTOValue != null) {
                boolean encrypted = false;
                if (isPropertyToBeEncrypted(mandatoryProperties, propertyDTOName)) {
                    if (randomPasswordContainer != null) {
                        // If the client echoed back the random phrase, substitute the real password.
                        RandomPassword randomPassword = getRandomPassword(randomPasswordContainer, propertyDTOName);
                        if (randomPassword != null) {
                            if (propertyDTOValue.equalsIgnoreCase(randomPassword.getRandomPhrase())) {
                                propertyDTOValue = randomPassword.getPassword();
                            }
                        }
                    }
                    try {
                        propertyDTOValue = SecondaryUserStoreConfigurationUtil.encryptPlainText(propertyDTOValue);
                        encrypted = true;
                    } catch (IdentityUserStoreMgtException e) {
                        log.error("addProperties failed to encrypt", e);
                        //its ok to continue from here
                    }
                }
                addProperty(propertyDTOName, propertyDTOValue, doc, parent, encrypted);
            }
        }
    }

    /**
     * Adds a property element.
     *
     * @param name: Name of property
     * @param value: Value
     * @param doc: Document
     * @param parent: Parent element of the property to be added as a child
     * @param encrypted when true, marks the element with encrypted="true"
     */
    private void addProperty(String name, String value, Document doc, Element parent, boolean encrypted) {
        Element property = doc.createElement("Property");
        Attr attr;
        if (encrypted) {
            attr = doc.createAttribute("encrypted");
            attr.setValue("true");
            property.setAttributeNode(attr);
        }

        attr = doc.createAttribute("name");
        attr.setValue(name);
        property.setAttributeNode(attr);

        property.setTextContent(value);
        parent.appendChild(property);
    }

    /**
     * Deletes the named user store configuration file from the given directory.
     *
     * @param file directory containing user store configuration files
     * @param userStoreName file name to delete (already domain-mangled, with .xml suffix)
     */
    private void deleteFile(File file, final String userStoreName) throws IdentityUserStoreMgtException {
        if(!IdentityUtil.isValidFileName(userStoreName)) {
            String message = "Provided domain name : '" + userStoreName + "' is invalid.";
            log.error(message);
            throw new IdentityUserStoreMgtException(message);
        }

        // NOTE(review): File.listFiles can return null on an I/O error or missing directory,
        // which would NPE in the loop below — consider a null guard.
        File[] deleteCandidates = file.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.equalsIgnoreCase(userStoreName);
            }
        });

        for (File file1 : deleteCandidates) {
            if (file1.delete()) {
                log.info("File " + file.getName() + " deleted successfully");
            } else {
                log.error("error at deleting file:" + file.getName());
            }
        }
    }

    /**
     * Update a domain to be disabled/enabled.
     *
     * Rewrites the "Disabled" property inside the store's XML configuration file in place.
     *
     * @param domain: Name of the domain to be updated
     * @param isDisable : Whether to disable/enable domain(true/false)
     */
    public void changeUserStoreState(String domain, Boolean isDisable) throws IdentityUserStoreMgtException,
            TransformerConfigurationException {
        String currentAuthorizedUserName = CarbonContext.getThreadLocalCarbonContext().getUsername();
        int index = currentAuthorizedUserName.indexOf(UserCoreConstants.DOMAIN_SEPARATOR);
        String currentUserDomain = null;
        if (index > 0) {
            currentUserDomain = currentAuthorizedUserName.substring(0, index);
        }

        // Guard: disabling the store that the current user belongs to would lock them out.
        if (currentUserDomain != null && currentUserDomain.equalsIgnoreCase(domain) && isDisable) {
            log.error("Error while disabling user store from a user who is in the same user store.");
            throw new IdentityUserStoreMgtException("Error while updating user store state.");
        }

        File userStoreConfigFile = createConfigurationFile(domain);
        StreamResult result = new StreamResult(userStoreConfigFile);
        if (!userStoreConfigFile.exists()) {
            String errorMessage = "Cannot edit user store." + domain + " does not exist.";
            throw new IdentityUserStoreMgtException(errorMessage);
        }

        DocumentBuilderFactory documentFactory = IdentityUtil.getSecuredDocumentBuilderFactory();
        DocumentBuilder documentBuilder = null;
        try {
            documentBuilder = documentFactory.newDocumentBuilder();
            Document doc = documentBuilder.parse(userStoreConfigFile);

            NodeList elements = doc.getElementsByTagName("Property");
            for (int i = 0; i < elements.getLength(); i++) {
                //Assumes a property element only have attribute 'name'
                if ("Disabled".compareToIgnoreCase(elements.item(i).getAttributes().item(0).getNodeValue()) == 0) {
                    elements.item(i).setTextContent(String.valueOf(isDisable));
                    break;
                }
            }
            DOMSource source = new DOMSource(doc);

            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
            transformer.setOutputProperty(OutputKeys.METHOD, "xml");
            transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "6");
            transformer.transform(source, result);

            if (log.isDebugEnabled()) {
                log.debug("New state :" + isDisable + " of the user store \'" + domain + "\' successfully written to the file system");
            }
        } catch (ParserConfigurationException | SAXException | TransformerException | IOException e) {
            log.error(e.getMessage(),e);
            throw new IdentityUserStoreMgtException("Error while updating user store state",e);
        }
    }

    /**
     * Check the connection health for JDBC userstores.
     *
     * If a messageID is supplied and the caller echoed back the random phrase in place of the
     * password, the real password is recovered from the server-side cache before testing.
     *
     * @param domainName
     * @param driverName
     * @param connectionURL
     * @param username
     * @param connectionPassword
     * @param messageID unique id of the cached RandomPasswordContainer, or null
     * @return true when a test connection can be established
     * @throws IdentityUserStoreMgtException on marshalling or connection failure
     */
    public boolean testRDBMSConnection(String domainName, String driverName, String connectionURL, String username,
                                       String connectionPassword, String messageID) throws IdentityUserStoreMgtException {

        RandomPasswordContainer randomPasswordContainer;
        if (messageID != null) {
            randomPasswordContainer = getRandomPasswordContainer(messageID);
            if (randomPasswordContainer != null) {
                RandomPassword randomPassword = getRandomPassword(randomPasswordContainer, JDBCRealmConstants.PASSWORD);
                if (randomPassword != null) {
                    if (connectionPassword.equalsIgnoreCase(randomPassword.getRandomPhrase())) {
                        // Caller sent the mask, not a new password: substitute the cached original.
                        connectionPassword = randomPassword.getPassword();
                    }
                }
            }
        }

        // Build a transient data source definition from the supplied JDBC parameters.
        WSDataSourceMetaInfo wSDataSourceMetaInfo = new WSDataSourceMetaInfo();

        RDBMSConfiguration rdbmsConfiguration = new RDBMSConfiguration();
        rdbmsConfiguration.setUrl(connectionURL);
        rdbmsConfiguration.setUsername(username);
        rdbmsConfiguration.setPassword(connectionPassword);
        rdbmsConfiguration.setDriverClassName(driverName);

        WSDataSourceDefinition wSDataSourceDefinition = new WSDataSourceDefinition();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JAXBContext context;
        try {
            context = JAXBContext.newInstance(RDBMSConfiguration.class);
            Marshaller marshaller = context.createMarshaller();
            marshaller.marshal(rdbmsConfiguration, out);
        } catch (JAXBException e) {
            String errorMessage = "Error while checking RDBMS connection health";
            log.error(errorMessage, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }
        wSDataSourceDefinition.setDsXMLConfiguration(out.toString());
        wSDataSourceDefinition.setType("RDBMS");
        wSDataSourceMetaInfo.setName(domainName);
        wSDataSourceMetaInfo.setDefinition(wSDataSourceDefinition);
        try {
            return DataSourceManager.getInstance().getDataSourceRepository().testDataSourceConnection(wSDataSourceMetaInfo.
                    extractDataSourceMetaInfo());
        } catch (DataSourceException e) {
            String errorMessage = e.getMessage();
            // Does not print the error log since the log is already printed by DataSourceRepository
            // log.error(message, e);
            throw new IdentityUserStoreMgtException(errorMessage);
        }
    }

    /**
     * Resolves (and creates the parent directory for, if needed) the per-tenant configuration
     * file for the given domain. The file itself is not created.
     *
     * @param domainName domain whose config file path is required
     * @return the configuration File handle (may not exist yet)
     * @throws IdentityUserStoreMgtException when the derived file name is invalid
     */
    private File createConfigurationFile(String domainName) throws IdentityUserStoreMgtException {
        String fileName = domainName.replace(".", "_");
        if(!IdentityUtil.isValidFileName(fileName)){
            String message = "Provided domain name : '" + domainName + "' is invalid.";
            log.error(message);
            throw new IdentityUserStoreMgtException(message);
        }
        File userStoreConfigFile;
        int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
        if (tenantId == MultitenantConstants.SUPER_TENANT_ID) {
            File userStore = new File(deploymentDirectory);
            if (!userStore.exists()) {
                if (new File(deploymentDirectory).mkdir()) {
                    //folder 'userstores' created
                    log.info("folder 'userstores' created to store configurations for super tenant");
                } else {
                    log.error("Error at creating 'userstores' directory to store configurations for super tenant");
                }
            }
            userStoreConfigFile = new File(deploymentDirectory + File.separator + fileName + ".xml");
        } else {
            String tenantFilePath = CarbonUtils.getCarbonTenantsDirPath();
            tenantFilePath = tenantFilePath + File.separator + tenantId + File.separator + USERSTORES;
            File userStore = new File(tenantFilePath);
            if (!userStore.exists()) {
                if (new File(tenantFilePath).mkdir()) {
                    //folder 'userstores' created
                    log.info("folder 'userstores' created to store configurations for tenant = " + tenantId);
                } else {
                    log.error("Error at creating 'userstores' directory to store configurations for tenant:" + tenantId);
                }
            }
            userStoreConfigFile = new File(tenantFilePath + File.separator + fileName + ".xml");
        }
        return userStoreConfigFile;
    }

    /**
     * Serializes the given user store configuration as a UserStoreManager XML document into the
     * given file, encrypting password-type properties via addProperties.
     *
     * @param userStoreConfigFile destination file
     * @param userStoreDTO configuration to persist
     * @param editSecondaryUserStore true when overwriting an existing store (enables password unmasking)
     */
    private void writeUserMgtXMLFile(File userStoreConfigFile, UserStoreDTO userStoreDTO, boolean editSecondaryUserStore)
            throws IdentityUserStoreMgtException {
        StreamResult result = new StreamResult(userStoreConfigFile);
        DocumentBuilderFactory docFactory = IdentityUtil.getSecuredDocumentBuilderFactory();

        try {
            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
            Document doc = docBuilder.newDocument();

            //create UserStoreManager element
            Element userStoreElement = doc.createElement(UserCoreConstants.RealmConfig.LOCAL_NAME_USER_STORE_MANAGER);
            doc.appendChild(userStoreElement);

            Attr attrClass = doc.createAttribute("class");
            attrClass.setValue(userStoreDTO.getClassName());
            userStoreElement.setAttributeNode(attrClass);

            addProperties(userStoreDTO.getClassName(), userStoreDTO.getProperties(), doc, userStoreElement,
                    editSecondaryUserStore);
            addProperty(UserStoreConfigConstants.DOMAIN_NAME, userStoreDTO.getDomainId(), doc, userStoreElement, false);
            addProperty(DESCRIPTION, userStoreDTO.getDescription(), doc, userStoreElement, false);
            DOMSource source = new DOMSource(doc);

            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
            transformer.setOutputProperty(OutputKeys.METHOD, "xml");
            transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "6");
            transformer.transform(source, result);
        } catch (ParserConfigurationException e) {
            String errMsg = " Error occurred due to serious parser configuration exception of " + userStoreConfigFile;
            throw new IdentityUserStoreMgtException(errMsg, e);
        } catch (TransformerException e) {
            String errMsg = " Error occurred during the transformation process of " + userStoreConfigFile;
            throw new IdentityUserStoreMgtException(errMsg, e);
        }
    }

    /**
     * Obtains the mandatory properties for a given userStoreClass.
     *
     * @param userStoreClass userStoreClass name
     * @return Property[] of Mandatory Properties
     */
    private Property[] getMandatoryProperties(String userStoreClass) {
        return UserStoreManagerRegistry.getUserStoreProperties(userStoreClass).getMandatoryProperties();
    }

    /**
     * Check whether the given property should be encrypted or not.
     *
     * @param mandatoryProperties mandatory property array
     * @param propertyName property name
     * @return returns true if the property should be encrypted
     */
    private boolean isPropertyToBeEncrypted(Property[] mandatoryProperties, String propertyName) {
        for (Property property : mandatoryProperties) {
            if (propertyName.equalsIgnoreCase(property.getName())) {
                // Properties whose description carries the #encrypt marker must be encrypted.
                return property.getDescription().contains(UserStoreConfigurationConstant.ENCRYPT_TEXT);
            }
        }
        return false;
    }

    /**
     * Generate the RandomPassword[] from secondaryRealmConfiguration for given userStoreClass.
     *
     * @param userStoreClass Extract the mandatory properties of this class
     * @param randomPhrase The randomly generated keyword which will be stored in
     *                     RandomPassword object
     * @param secondaryRealmConfiguration RealmConfiguration object consists the properties
     * @return RandomPassword[] array for each encrypt-marked property
     */
    private RandomPassword[] getRandomPasswordProperties(String userStoreClass, String randomPhrase,
                                                         RealmConfiguration secondaryRealmConfiguration) {
        //First check for mandatory field with #encrypt
        Property[] mandatoryProperties = getMandatoryProperties(userStoreClass);
        ArrayList<RandomPassword> randomPasswordArrayList = new ArrayList<RandomPassword>();
        for (Property property : mandatoryProperties) {
            String propertyName = property.getName();
            if (property.getDescription().contains(UserStoreConfigurationConstant.ENCRYPT_TEXT)) {
                RandomPassword randomPassword = new RandomPassword();
                randomPassword.setPropertyName(propertyName);
                randomPassword.setPassword(secondaryRealmConfiguration.getUserStoreProperty(propertyName));
                randomPassword.setRandomPhrase(randomPhrase);
                randomPasswordArrayList.add(randomPassword);
            }
        }
        return randomPasswordArrayList.toArray(new RandomPassword[randomPasswordArrayList.size()]);
    }

    /**
     * Create and update the RandomPasswordContainer with given unique ID and randomPasswords array.
     *
     * @param randomPasswords array contains the elements to be encrypted with their random
     *                        password phrase, password and unique id
     * @param uuid Unique id of the RandomPasswordContainer
     */
    private void updatePasswordContainer(RandomPassword[] randomPasswords, String uuid) {
        if (randomPasswords != null) {
            if (log.isDebugEnabled()) {
                log.debug("updatePasswordContainer reached for number of random password properties length = " +
                        randomPasswords.length);
            }
            RandomPasswordContainer randomPasswordContainer = new RandomPasswordContainer();
            randomPasswordContainer.setRandomPasswords(randomPasswords);
            randomPasswordContainer.setUniqueID(uuid);
            RandomPasswordContainerCache.getInstance().getRandomPasswordContainerCache().put(uuid,
                    randomPasswordContainer);
        }
    }

    /**
     * Get the RandomPasswordContainer object from the cache for given unique id, removing it
     * from the cache (single-use retrieval).
     *
     * @param uniqueID Get and Remove the unique id for that particular cache
     * @return RandomPasswordContainer of particular unique ID
     */
    private RandomPasswordContainer getAndRemoveRandomPasswordContainer(String uniqueID) {
        return RandomPasswordContainerCache.getInstance().getRandomPasswordContainerCache().getAndRemove(uniqueID);
    }

    /**
     * Get the RandomPasswordContainer object from the cache for given unique id (non-destructive).
     *
     * @param uniqueID Get the unique id for that particular cache
     * @return RandomPasswordContainer of particular unique ID
     */
    private RandomPasswordContainer getRandomPasswordContainer(String uniqueID) {
        return RandomPasswordContainerCache.getInstance().getRandomPasswordContainerCache().get(uniqueID);
    }

    /**
     * Obtain the UniqueID ID constant value from the propertyDTO object which was set well
     * before sending the edit request.
* * @param propertyDTOs PropertyDTO[] object passed from JSP page * @return unique id string value */ private String getUniqueIDFromUserDTO(PropertyDTO[] propertyDTOs) { int length = propertyDTOs.length; for (int i = length - 1; i >= 0; i--) { PropertyDTO propertyDTO = propertyDTOs[i]; if (propertyDTO != null && propertyDTO.getName() != null && propertyDTO.getName() .equalsIgnoreCase(UserStoreConfigurationConstant.UNIQUE_ID_CONSTANT)) { return propertyDTO.getValue(); } } return null; } /** * Finds the RandomPassword object for a given propertyName in the RandomPasswordContainer * ( Which is unique per uniqueID ) * * @param randomPasswordContainer RandomPasswordContainer object of an unique id * @param propertyName RandomPassword object to be obtained for that property * @return Returns the RandomPassword object from the */ private RandomPassword getRandomPassword(RandomPasswordContainer randomPasswordContainer, String propertyName) { RandomPassword[] randomPasswords = randomPasswordContainer.getRandomPasswords(); if (randomPasswords != null) { for (RandomPassword randomPassword : randomPasswords) { if (randomPassword.getPropertyName().equalsIgnoreCase(propertyName)) { return randomPassword; } } } return null; } /** * * @param message * @param e * @throws IdentityUserStoreMgtException */ private void handleException(String message, Exception e) throws IdentityUserStoreMgtException { log.error(message, e); throw new IdentityUserStoreMgtException(message); } }
package org.surrel.facebooknotifications;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlarmManager;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.provider.Settings;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.webkit.JavascriptInterface;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.ShareActionProvider;
import android.widget.Toast;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Single-activity wrapper around a {@link WebView} that displays the mobile
 * Facebook site ({@link #FB_URL}).
 *
 * <p>Responsibilities visible in this class:
 * <ul>
 *   <li>loads either {@link #FB_URL} or a {@code "url"} extra passed in the
 *       launching intent;</li>
 *   <li>injects a small JavaScript snippet after each page load to scrape the
 *       logout link, reported back through {@link #processLogoutStr(String)};</li>
 *   <li>implements {@code onShowFileChooser} so file-upload inputs offer a
 *       camera capture and an image picker;</li>
 *   <li>exposes a share action whose intent always carries the current URL.</li>
 * </ul>
 */
public class MainActivity extends Activity {

    public static final int AlarmType = AlarmManager.ELAPSED_REALTIME_WAKEUP;
    public static final int SETTINGS_MENU = 0;
    /** Result code used by the settings activity to force a menu refresh. */
    public static final int RESULT_REDRAW_MENU = 2;
    /** Preference key controlling visibility of the share action. */
    public static final String SHOW_SHARE_BUTTON = "show_share_button";
    public static final String FB_URL = "https://m.facebook.com";

    private WebView webview;
    private ShareActionProvider mShareActionProvider;
    private Intent shareIntent;
    private SharedPreferences mPrefs;
    private Menu mMenu;
    private MenuItem shareAction;
    // "file:" URI of the last camera capture target, if any.
    private String mCM;
    // Pre-Lollipop and Lollipop+ file-chooser callbacks, respectively.
    private ValueCallback<Uri> mUM;
    private ValueCallback<Uri[]> mUMA;
    // Request code for the file-chooser result.
    private final static int FCR = 1;
    // Logout URL scraped from the page; empty until processLogoutStr() runs.
    private String logoutUrl = "";

    @SuppressLint("SetJavaScriptEnabled")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (!Settings.canDrawOverlays(this)) {
                Intent intent = new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION,
                        Uri.parse("package:" + getPackageName()));
                startActivityForResult(intent, 0);
            }
        }
        PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
        mPrefs = PreferenceManager.getDefaultSharedPreferences(this);
        setContentView(R.layout.activity_main);

        // The launching intent may override the start URL (e.g. from a notification).
        String targetURL = FB_URL;
        if (getIntent().getExtras() != null) {
            String url = getIntent().getExtras().getString("url", "");
            if (!"".equals(url)) {
                targetURL = url;
            }
        }
        WakeupManager.updateNotificationSystem(this);

        shareIntent = new Intent();
        shareIntent.setAction(Intent.ACTION_SEND);
        shareIntent.setType("text/plain");

        webview = new WebView(this);
        // Temporary plain client + placeholder page while the real page loads;
        // this client is replaced by the anonymous WebViewClient below.
        webview.setWebViewClient(new WebViewClient());
        webview.loadData("<h1>" + getString(R.string.request_pending) + "</h1>", "text/html", "UTF-8");
        webview.getSettings().setJavaScriptEnabled(true);
        webview.addJavascriptInterface(this, "customInterface");
        webview.setWebViewClient(new WebViewClient() {
            @Override
            public void onPageFinished(WebView view, String url) {
                // Javascript URL injection is defined in UpdateService.
                // Scrapes the page's logout link and hands it to processLogoutStr().
                webview.loadUrl("javascript:getLogout=function(){elt=document.querySelector(\"[href*='/logout']\"); return elt.href;};" +
                        "window.customInterface.processLogoutStr(getLogout());");
                updateShareIntent();
            }
        });
        webview.setWebChromeClient(new WebChromeClient() {
            @Override
            public boolean onShowFileChooser(
                    WebView webView, ValueCallback<Uri[]> filePathCallback,
                    FileChooserParams fileChooserParams) {
                // Cancel any still-pending chooser before starting a new one.
                if (mUMA != null) {
                    mUMA.onReceiveValue(null);
                }
                mUMA = filePathCallback;
                Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
                if (takePictureIntent.resolveActivity(MainActivity.this.getPackageManager()) != null) {
                    File photoFile = null;
                    try {
                        photoFile = createImageFile();
                        takePictureIntent.putExtra("PhotoPath", mCM);
                    } catch (IOException ex) {
                        Log.e("fbn", "Image file creation failed", ex);
                    }
                    if (photoFile != null) {
                        mCM = "file:" + photoFile.getAbsolutePath();
                        // NOTE(review): Uri.fromFile() triggers FileUriExposedException
                        // on API 24+ when handed to another app; a FileProvider content
                        // URI is the usual replacement — confirm the targetSdkVersion.
                        takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(photoFile));
                    } else {
                        // No writable target file: drop the camera option entirely.
                        takePictureIntent = null;
                    }
                }
                Intent contentSelectionIntent = new Intent(Intent.ACTION_GET_CONTENT);
                contentSelectionIntent.addCategory(Intent.CATEGORY_OPENABLE);
                contentSelectionIntent.setType("image/*");
                Intent[] intentArray;
                if (takePictureIntent != null) {
                    intentArray = new Intent[]{takePictureIntent};
                } else {
                    intentArray = new Intent[0];
                }
                Intent chooserIntent = new Intent(Intent.ACTION_CHOOSER);
                chooserIntent.putExtra(Intent.EXTRA_INTENT, contentSelectionIntent);
                chooserIntent.putExtra(Intent.EXTRA_TITLE, "Image Chooser");
                chooserIntent.putExtra(Intent.EXTRA_INITIAL_INTENTS, intentArray);
                startActivityForResult(chooserIntent, FCR);
                return true;
            }
        });
        WebSettings webSettings = webview.getSettings();
        webSettings.setBlockNetworkImage(false);
        webSettings.setUserAgentString(getString(R.string.app_name));
        webview.loadUrl(targetURL);
        setContentView(webview);
        _dMsg("Debug build, timestamp " + BuildConfig.TIMESTAMP);
    }

    /**
     * Receives the logout link scraped by the injected JavaScript.
     * Only links that contain {@code facebook.com} are kept.
     */
    @SuppressWarnings("unused")
    @JavascriptInterface
    public void processLogoutStr(String logoutStr) {
        Log.i("fbn.MainActivity", logoutStr);
        if (logoutStr.contains("facebook.com")) {
            this.logoutUrl = logoutStr;
        }
    }

    /** Back key navigates the WebView history; at history root it closes the activity. */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (event.getAction() == KeyEvent.ACTION_DOWN && keyCode == KeyEvent.KEYCODE_BACK) {
            if (webview.canGoBack()) {
                webview.goBack();
            } else {
                finish();
            }
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        MenuItem shareItem = menu.findItem(R.id.menu_item_share);
        shareAction = menu.findItem(R.id.menu_action_share);
        shareAction.setVisible(mPrefs.getBoolean(SHOW_SHARE_BUTTON, false));
        mShareActionProvider = (ShareActionProvider) shareItem.getActionProvider();
        updateShareIntent();
        mMenu = menu;
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        // Re-read the preference every time the menu opens.
        shareAction.setVisible(mPrefs.getBoolean(SHOW_SHARE_BUTTON, false));
        return super.onPrepareOptionsMenu(menu);
    }

    /**
     * Handles the known menu items; anything else is delegated to the framework.
     *
     * <p>BUGFIX: the previous version returned {@code true} for every item,
     * which silently consumed unhandled selections (including items the
     * framework adds itself) instead of calling
     * {@code super.onOptionsItemSelected(item)}.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int itemId = item.getItemId();
        if (itemId == R.id.menu_item_settings) {
            startActivityForResult(new Intent(MainActivity.this, PrefsActivity.class), SETTINGS_MENU);
            return true;
        }
        if (itemId == R.id.menu_item_logout) {
            // No-op (but still consumed) until a logout URL has been scraped.
            if (logoutUrl != null && !logoutUrl.isEmpty()) {
                webview.loadUrl(logoutUrl);
            }
            return true;
        }
        if (itemId == R.id.menu_item_quit) {
            finish();
            return true;
        }
        if (itemId == R.id.menu_item_open_browser) {
            Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(webview.getUrl()));
            startActivity(Intent.createChooser(browserIntent, ""));
            return true;
        }
        if (itemId == R.id.menu_item_problems) {
            Intent dontkillmyapp = new Intent(Intent.ACTION_VIEW, Uri.parse("https://dontkillmyapp.com/"));
            startActivity(dontkillmyapp);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Routes results from the settings screen (menu redraw) and from the
     * file chooser (camera capture or picked image) back to the WebView.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
        if (resultCode == RESULT_REDRAW_MENU) {
            onPrepareOptionsMenu(mMenu);
        } else if (requestCode == FCR) {
            if (Build.VERSION.SDK_INT >= 21) {
                Uri[] results = null;
                // Check if response is positive.
                if (resultCode == Activity.RESULT_OK) {
                    if (null == mUMA) {
                        return;
                    }
                    if (intent == null) {
                        // No data intent: assume the camera capture was used.
                        if (mCM != null) {
                            results = new Uri[]{Uri.parse(mCM)};
                        }
                    } else {
                        String dataString = intent.getDataString();
                        if (dataString != null) {
                            results = new Uri[]{Uri.parse(dataString)};
                        }
                    }
                }
                mUMA.onReceiveValue(results);
                mUMA = null;
            } else {
                if (null == mUM) {
                    return;
                }
                Uri result = intent == null || resultCode != RESULT_OK ? null : intent.getData();
                mUM.onReceiveValue(result);
                mUM = null;
            }
        }
    }

    /** Shows a short toast in every build type. */
    protected void _msg(CharSequence text) {
        Toast.makeText(this, text, Toast.LENGTH_SHORT).show();
    }

    /** Shows a short toast in debug builds only. */
    protected void _dMsg(CharSequence text) {
        if (BuildConfig.BUILD_TYPE.equals("debug")) {
            Toast.makeText(this, text, Toast.LENGTH_SHORT).show();
        }
    }

    /** Keeps the share intent's EXTRA_TEXT in sync with the currently shown URL. */
    protected void updateShareIntent() {
        shareIntent.putExtra(Intent.EXTRA_TEXT, webview.getUrl());
        if (mShareActionProvider != null) {
            mShareActionProvider.setShareIntent(shareIntent);
        }
    }

    @Override
    public void onPause() {
        super.onPause();
    }

    /** Creates a uniquely named temp .jpg in the public Pictures directory for camera capture. */
    private File createImageFile() throws IOException {
        @SuppressLint("SimpleDateFormat")
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        String imageFileName = "img_" + timeStamp + "_";
        File storageDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
        return File.createTempFile(imageFileName, ".jpg", storageDir);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
    }
}
/*
 * Copyright 2011 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.devtools.j2objc.gen;

import com.google.common.io.LineReader;

import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.CompilationUnit;

import java.io.IOException;
import java.io.StringReader;

/**
 * Builds source text. This is similar to a StringBuilder, but tracks line
 * numbers and outputs them as CPP line directives when directed.
 *
 * @author Tom Ball
 */
public class SourceBuilder {
  private final StringBuilder buffer = new StringBuilder();
  private final CompilationUnit unit;
  // Current indentation depth, in units of DEFAULT_INDENTION spaces.
  private int indention = 0;
  // Line number of the output text being generated, or BEGINNING_OF_FILE.
  private int currentLine = -1;

  /**
   * If true, generate CPP line directives. It's necessary to store this
   * here rather than directly use Options.getLineDirectives(), so that the
   * header generator doesn't generate them.
   */
  private final boolean emitLineDirectives;

  public static final int DEFAULT_INDENTION = 2;
  public static final int BEGINNING_OF_FILE = -1;

  /**
   * Create a new SourceBuilder.
   *
   * @param unit the compilation unit this source is based upon
   * @param emitLineDirectives if true, generate CPP line directives
   */
  public SourceBuilder(CompilationUnit unit, boolean emitLineDirectives) {
    this(unit, emitLineDirectives, BEGINNING_OF_FILE);
  }

  /**
   * Create a new SourceBuilder, specifying the initial line number to begin
   * syncing. This is normally only used for building complex statements,
   * where the generated source is used in another builder.
   *
   * @param unit the compilation unit this source is based upon
   * @param emitLineDirectives if true, generate CPP line directives
   * @param startLine the initial line number, or -1 if at start of file
   */
  public SourceBuilder(CompilationUnit unit, boolean emitLineDirectives, int startLine) {
    this.unit = unit;
    this.emitLineDirectives = emitLineDirectives;
    this.currentLine = startLine;
  }

  /**
   * Constructor used when line numbers are never needed, such as tests.
   */
  public SourceBuilder() {
    this(null, false);
  }

  /**
   * Copy constructor. Shares no state with the original; only the unit,
   * directive flag and current line are carried over.
   */
  public SourceBuilder(SourceBuilder original) {
    this(original.unit, original.emitLineDirectives, original.currentLine);
  }

  @Override
  public String toString() {
    return buffer.toString();
  }

  // NOTE(review): multi-character strings are appended without scanning for
  // '\n', so currentLine is not advanced here; line sync is re-established by
  // newline()/printf()/syncLineNumbers() — confirm this is intentional.
  public void print(String s) {
    buffer.append(s);
  }

  public void print(char c) {
    buffer.append(c);
    if (c == '\n') {
      currentLine++;
    }
  }

  public void print(int i) {
    buffer.append(i);
  }

  /**
   * Formats and appends text, keeping the line counter in sync.
   *
   * <p>BUGFIX: newlines are now counted in the fully expanded text. The
   * previous version counted them in the raw format string, missing any
   * '\n' characters contributed by the arguments themselves.
   */
  public void printf(String format, Object... args) {
    String formatted = String.format(format, args);
    print(formatted);
    currentLine += countNewLines(formatted);
  }

  public void println(String s) {
    print(s);
    newline();
  }

  public void newline() {
    buffer.append('\n');
    currentLine++;
  }

  public void indent() {
    indention++;
  }

  public void unindent() {
    indention--;
    if (indention < 0) {
      throw new AssertionError("unbalanced indents");
    }
  }

  /** Appends whitespace for the current indentation depth. */
  public void printIndent() {
    buffer.append(pad(indention * DEFAULT_INDENTION));
  }

  // StringBuilder compatibility.
  public SourceBuilder append(char c) {
    print(c);
    return this;
  }

  public SourceBuilder append(int i) {
    print(i);
    return this;
  }

  public SourceBuilder append(String s) {
    print(s);
    return this;
  }

  public char charAt(int i) {
    return buffer.charAt(i);
  }

  public int length() {
    return buffer.length();
  }

  /**
   * Returns an array of n spaces; negative n is clamped to an empty array.
   */
  public char[] pad(int n) {
    if (n < 0) {
      n = 0;
    }
    char[] result = new char[n];
    java.util.Arrays.fill(result, ' ');
    return result;
  }

  /** Discards all buffered text (line counter and indention are untouched). */
  public void reset() {
    buffer.setLength(0);
  }

  /**
   * Emits a "#line" directive when the AST node's source line differs from
   * the line currently being generated. No-op unless emitLineDirectives.
   */
  public void syncLineNumbers(ASTNode node) {
    if (emitLineDirectives) {
      int position = node.getStartPosition();
      if (position != -1) {
        int sourceLine = unit.getLineNumber(position);
        if (currentLine != sourceLine) {
          buffer.append(String.format("#line %d\n", sourceLine));
          currentLine = sourceLine;
        }
      }
    }
  }

  /** Emits the initial "#line 1" directive naming the source file. */
  public void printStart(String path) {
    if (emitLineDirectives) {
      buffer.append(String.format("#line 1 \"%s\"\n\n", path));
    }
  }

  /**
   * Returns the number of line endings in a string.
   */
  private int countNewLines(String s) {
    int c = 0;
    int i = -1;
    while ((i = s.indexOf('\n', i + 1)) != -1) {
      c++;
    }
    return c;
  }

  /**
   * Fix line indention, based on brace count. First strips all leading and
   * trailing whitespace from each line, then re-indents by tracking '{' /
   * '}' at line boundaries, starting from this builder's current depth.
   */
  public String reindent(String code) {
    try {
      // Remove indention from each line.
      StringBuilder sb = new StringBuilder();
      LineReader lr = new LineReader(new StringReader(code));
      String line = lr.readLine();
      while (line != null) {
        sb.append(line.trim());
        line = lr.readLine();
        if (line != null) {
          sb.append('\n');
        }
      }
      String strippedCode = sb.toString();

      // Now indent it again.
      int indent = indention * DEFAULT_INDENTION;
      sb.setLength(0);  // reset buffer
      lr = new LineReader(new StringReader(strippedCode));
      line = lr.readLine();
      while (line != null) {
        if (line.startsWith("}")) {
          indent -= DEFAULT_INDENTION;
        }
        // #line directives must stay in column zero.
        if (!line.startsWith("#line")) {
          sb.append(pad(indent));
        }
        sb.append(line);
        if (line.endsWith("{")) {
          indent += DEFAULT_INDENTION;
        }
        line = lr.readLine();
        if (line != null) {
          sb.append('\n');
        }
      }
      return sb.toString();
    } catch (IOException e) {
      // Should never happen with string readers.
      throw new AssertionError(e);
    }
  }

  public int getCurrentLine() {
    return currentLine;
  }
}
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http.websocketx;

import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpContentCompressor;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpRequestDecoder;
import io.netty.handler.codec.http.HttpResponseEncoder;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.util.internal.EmptyArrays;
import io.netty.util.internal.StringUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;

/**
 * Base class for server side web socket opening and closing handshakes.
 *
 * <p>Version-specific subclasses provide the handshake response and the
 * frame codec via {@link #newHandshakeResponse}, {@link #newWebsocketDecoder}
 * and {@link #newWebSocketEncoder}; this class performs the pipeline
 * rewiring that switches the channel from HTTP to WebSocket framing.
 */
public abstract class WebSocketServerHandshaker {
    protected static final InternalLogger logger =
            InternalLoggerFactory.getInstance(WebSocketServerHandshaker.class);

    private final String uri;

    // Supported sub-protocols; empty (never null) when none are configured.
    private final String[] subprotocols;

    private final WebSocketVersion version;

    private final int maxFramePayloadLength;

    // Set by selectSubprotocol(); null until a handshake has matched one.
    private String selectedSubprotocol;

    /**
     * Use this as wildcard to support all requested sub-protocols
     */
    public static final String SUB_PROTOCOL_WILDCARD = "*";

    /**
     * Constructor specifying the destination web socket location
     *
     * @param version
     *            the protocol version
     * @param uri
     *            URL for web socket communications. e.g "ws://myhost.com/mypath". Subsequent web socket frames will be
     *            sent to this URL.
     * @param subprotocols
     *            CSV of supported protocols. Null if sub protocols not supported.
     * @param maxFramePayloadLength
     *            Maximum length of a frame's payload
     */
    protected WebSocketServerHandshaker(
            WebSocketVersion version, String uri, String subprotocols,
            int maxFramePayloadLength) {
        this.version = version;
        this.uri = uri;
        if (subprotocols != null) {
            // Split the CSV and trim each entry so " chat, superchat " matches.
            String[] subprotocolArray = StringUtil.split(subprotocols, ',');
            for (int i = 0; i < subprotocolArray.length; i++) {
                subprotocolArray[i] = subprotocolArray[i].trim();
            }
            this.subprotocols = subprotocolArray;
        } else {
            this.subprotocols = EmptyArrays.EMPTY_STRINGS;
        }
        this.maxFramePayloadLength = maxFramePayloadLength;
    }

    /**
     * Returns the URL of the web socket.
     */
    public String uri() {
        return uri;
    }

    /**
     * Returns the supported sub protocols as an insertion-ordered set.
     */
    public Set<String> subprotocols() {
        Set<String> ret = new LinkedHashSet<String>();
        Collections.addAll(ret, subprotocols);
        return ret;
    }

    /**
     * Returns the version of the specification being supported.
     */
    public WebSocketVersion version() {
        return version;
    }

    /**
     * Gets the maximum length for any frame's payload.
     *
     * @return The maximum length for a frame's payload
     */
    public int maxFramePayloadLength() {
        return maxFramePayloadLength;
    }

    /**
     * Performs the opening handshake. When call this method you <strong>MUST NOT</strong> retain the
     * {@link FullHttpRequest} which is passed in.
     *
     * @param channel
     *            Channel
     * @param req
     *            HTTP Request
     * @return future
     *            The {@link ChannelFuture} which is notified once the opening handshake completes
     */
    public ChannelFuture handshake(Channel channel, FullHttpRequest req) {
        return handshake(channel, req, null, channel.newPromise());
    }

    /**
     * Performs the opening handshake
     *
     * When call this method you <strong>MUST NOT</strong> retain the {@link FullHttpRequest} which is passed in.
     *
     * @param channel
     *            Channel
     * @param req
     *            HTTP Request
     * @param responseHeaders
     *            Extra headers to add to the handshake response or {@code null} if no extra headers should be added
     * @param promise
     *            the {@link ChannelPromise} to be notified when the opening handshake is done
     * @return future
     *            the {@link ChannelFuture} which is notified when the opening handshake is done
     */
    public final ChannelFuture handshake(Channel channel, FullHttpRequest req,
                                         HttpHeaders responseHeaders, final ChannelPromise promise) {

        if (logger.isDebugEnabled()) {
            logger.debug(String.format("%s WS Version %s server handshake", channel, version()));
        }
        FullHttpResponse response = newHandshakeResponse(req, responseHeaders);
        ChannelPipeline p = channel.pipeline();
        // HTTP-only helpers have no place in a WebSocket pipeline.
        if (p.get(HttpObjectAggregator.class) != null) {
            p.remove(HttpObjectAggregator.class);
        }
        if (p.get(HttpContentCompressor.class) != null) {
            p.remove(HttpContentCompressor.class);
        }
        ChannelHandlerContext ctx = p.context(HttpRequestDecoder.class);
        final String encoderName;
        if (ctx == null) {
            // this means the user use a HttpServerCodec
            ctx = p.context(HttpServerCodec.class);
            if (ctx == null) {
                promise.setFailure(
                        new IllegalStateException("No HttpDecoder and no HttpServerCodec in the pipeline"));
                return promise;
            }
            // The combined codec cannot be replaced piecemeal, so the WS codec
            // is inserted before it and the whole HttpServerCodec is removed
            // later (encoderName refers to the codec itself here).
            p.addBefore(ctx.name(), "wsdecoder", newWebsocketDecoder());
            p.addBefore(ctx.name(), "wsencoder", newWebSocketEncoder());
            encoderName = ctx.name();
        } else {
            // Separate decoder/encoder handlers: swap the decoder in place,
            // and remember the HTTP response encoder for deferred removal.
            p.replace(ctx.name(), "wsdecoder", newWebsocketDecoder());

            encoderName = p.context(HttpResponseEncoder.class).name();
            p.addBefore(encoderName, "wsencoder", newWebSocketEncoder());
        }
        // The HTTP encoder is removed only after the handshake response has
        // been written and flushed, since that response still needs it.
        channel.writeAndFlush(response).addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                if (future.isSuccess()) {
                    ChannelPipeline p = future.channel().pipeline();
                    p.remove(encoderName);
                    promise.setSuccess();
                } else {
                    promise.setFailure(future.cause());
                }
            }
        });
        return promise;
    }

    /**
     * Returns a new {@link FullHttpResponse} which will be used for as response to the handshake request.
     */
    protected abstract FullHttpResponse newHandshakeResponse(FullHttpRequest req,
                                         HttpHeaders responseHeaders);

    /**
     * Performs the closing handshake
     *
     * @param channel
     *            Channel
     * @param frame
     *            Closing Frame that was received
     */
    public ChannelFuture close(Channel channel, CloseWebSocketFrame frame) {
        if (channel == null) {
            throw new NullPointerException("channel");
        }
        return close(channel, frame, channel.newPromise());
    }

    /**
     * Performs the closing handshake
     *
     * @param channel
     *            Channel
     * @param frame
     *            Closing Frame that was received
     * @param promise
     *            the {@link ChannelPromise} to be notified when the closing handshake is done
     */
    public ChannelFuture close(Channel channel, CloseWebSocketFrame frame, ChannelPromise promise) {
        if (channel == null) {
            throw new NullPointerException("channel");
        }
        // Per the protocol, the channel is closed once the close frame is flushed.
        return channel.writeAndFlush(frame, promise).addListener(ChannelFutureListener.CLOSE);
    }

    /**
     * Selects the first matching supported sub protocol
     *
     * @param requestedSubprotocols
     *            CSV of protocols to be supported. e.g. "chat, superchat"
     * @return First matching supported sub protocol. Null if not found.
     */
    protected String selectSubprotocol(String requestedSubprotocols) {
        if (requestedSubprotocols == null || subprotocols.length == 0) {
            return null;
        }
        String[] requestedSubprotocolArray = StringUtil.split(requestedSubprotocols, ',');
        // Client preference order wins: iterate requested protocols first.
        for (String p: requestedSubprotocolArray) {
            String requestedSubprotocol = p.trim();

            for (String supportedSubprotocol: subprotocols) {
                if (SUB_PROTOCOL_WILDCARD.equals(supportedSubprotocol)
                        || requestedSubprotocol.equals(supportedSubprotocol)) {
                    // Record the match so selectedSubprotocol() can report it.
                    selectedSubprotocol = requestedSubprotocol;
                    return requestedSubprotocol;
                }
            }
        }

        // No match found
        return null;
    }

    /**
     * Returns the selected subprotocol. Null if no subprotocol has been selected.
     * <p>
     * This is only available AFTER <tt>handshake()</tt> has been called.
     * </p>
     */
    public String selectedSubprotocol() {
        return selectedSubprotocol;
    }

    /**
     * Returns the decoder to use after handshake is complete.
     */
    protected abstract WebSocketFrameDecoder newWebsocketDecoder();

    /**
     * Returns the encoder to use after the handshake is complete.
     */
    protected abstract WebSocketFrameEncoder newWebSocketEncoder();
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.glaf.core.tree.component; public abstract class TreeBase implements Component { // ~ Instance fields // ======================================================== /** Holds value of property action, that is, Struts Logical Action Name. */ protected String action; /** * Align menu 'left','right','top','bottom' ...and other alignment of * particular menu system */ protected String align; /** Holds value of property altImage. */ protected String altImage; protected boolean checked; private String cls; protected String code; /** Holds value of property description. */ protected String description; /** Holds value of property forward. */ protected String forward; /** Holds value of property height. */ protected String height; /** Holds value of property name. */ protected String id; /** Holds value of property image. */ protected String image; /** Holds value of property location. */ protected String location; /** * Holds value of property module; a Struts module prefix that overrides the * current module. * * <p> * The default module is specified by <code>""</code>. Any non-default * module should begin with <code>"/"</code>. */ protected String module; /** Holds value of property onclick. 
 */
protected String onclick;

/** Holds value of property onContextTree. */
protected String onContextTree;

/** Holds value of property ondblclick. */
protected String ondblclick;

/** Holds value of property onmouseout. */
protected String onmouseout;

/** Holds value of property onmouseover. */
protected String onmouseover;

/** Holds value of property page. */
protected String page;

/** Holds value of property roles. */
protected String roles;

/** Holds value of property target. */
protected String target;

/** Holds value of property title. */
protected String title;

/** Holds value of property toolTip. */
protected String toolTip;

// NOTE(review): undocumented in the original — presumably links this item to a tree widget; confirm.
protected String treeId;

/** Holds parsed (with variables) url that is used to render a link. */
private String url;

/** Holds value of property width. */
protected String width;

// ~ Methods
// ================================================================

/**
 * Returns the value for action.
 *
 * @return Value of property action.
 */
public String getAction() { return this.action; }

/**
 * Returns the value for align.
 *
 * @return Value of property align.
 */
public String getAlign() { return align; }

/**
 * Getter for property altImage.
 *
 * @return Value of property altImage.
 */
public String getAltImage() { return altImage; }

/** @return Value of property cls. */
public String getCls() { return cls; }

/** @return Value of property code. */
public String getCode() { return code; }

/**
 * Getter for property description.
 *
 * @return Value of property description.
 */
public String getDescription() { return description; }

/** @return Value of property forward. */
public String getForward() { return forward; }

/** @return Value of property height. */
public String getHeight() { return height; }

/**
 * Getter for property id.
 *
 * @return Value of property id.
 */
public String getId() { return id; }

/**
 * Getter for property image.
 *
 * @return Value of property image.
 */
public String getImage() { return image; }

/**
 * Getter for property location.
 *
 * @return Value of property location.
 */
public String getLocation() { return location; }

/** @return Value of property module. */
public String getModule() { return module; }

/**
 * Getter for property onclick.
 *
 * @return Value of property onclick.
 */
public String getOnclick() { return onclick; }

/** @return Value of property onContextTree. */
public String getOnContextTree() { return onContextTree; }

/**
 * Returns the ondblclick.
 *
 * @return Value of property ondblclick.
 */
public String getOndblclick() { return ondblclick; }

/**
 * Getter for property onmouseout.
 *
 * @return Value of property onmouseout.
 */
public String getOnmouseout() { return onmouseout; }

/**
 * Getter for property onmouseover.
 *
 * @return Value of property onmouseover.
 */
public String getOnmouseover() { return onmouseover; }

/**
 * Returns the value for page.
 *
 * @return Value of property page.
 */
public String getPage() { return this.page; }

/**
 * Returns the roles.
 *
 * @return Value of property roles.
 */
public String getRoles() { return roles; }

/**
 * Getter for property target.
 *
 * @return Value of property target.
 */
public String getTarget() { return target; }

/**
 * Getter for property title.
 *
 * @return Value of property title.
 */
public String getTitle() { return title; }

/**
 * Getter for property toolTip.
 *
 * @return Value of property toolTip.
 */
public String getToolTip() { return toolTip; }

/** @return Value of property treeId. */
public String getTreeId() { return treeId; }

/** @return the parsed url used to render a link. */
public String getUrl() { return url; }

/** @return Value of property width. */
public String getWidth() { return width; }

/** @return Value of property checked. */
public boolean isChecked() { return checked; }

/**
 * Sets the value for action.
 *
 * @param action New value of property action.
 */
public void setAction(String action) { this.action = action; }

/**
 * Sets the value for align.
 *
 * @param align New value of property align.
 */
public void setAlign(String align) { this.align = align; }

/**
 * Setter for property altImage.
 *
 * @param altImage New value of property altImage.
 */
public void setAltImage(String altImage) { this.altImage = altImage; }

/** @param checked New value of property checked. */
public void setChecked(boolean checked) { this.checked = checked; }

/** @param cls New value of property cls. */
public void setCls(String cls) { this.cls = cls; }

/** @param code New value of property code. */
public void setCode(String code) { this.code = code; }

/**
 * Setter for property description.
 *
 * @param description New value of property description.
 */
public void setDescription(String description) { this.description = description; }

/**
 * Sets the forward.
 *
 * @param forward The forward to set.
 */
public void setForward(String forward) { this.forward = forward; }

/** @param height New value of property height. */
public void setHeight(String height) { this.height = height; }

/**
 * Setter for property id.
 *
 * @param id New value of property id.
 */
public void setId(String id) { this.id = id; }

/**
 * Setter for property image.
 *
 * @param image New value of property image.
 */
public void setImage(String image) { this.image = image; }

/**
 * Setter for property location.
 *
 * @param location New value of property location.
 */
public void setLocation(String location) { this.location = location; }

/** @param module New value of property module. */
public void setModule(String module) { this.module = module; }

/**
 * Setter for property onclick.
 *
 * @param onclick New value of property onclick.
 */
public void setOnclick(String onclick) { this.onclick = onclick; }

/** @param string New value of property onContextTree. */
public void setOnContextTree(String string) { onContextTree = string; }

/**
 * Sets the ondblclick.
 *
 * @param ondblclick The ondblclick to set.
 */
public void setOndblclick(String ondblclick) { this.ondblclick = ondblclick; }

/**
 * Setter for property onmouseout.
 *
 * @param onmouseout New value of property onmouseout.
 */
public void setOnmouseout(String onmouseout) { this.onmouseout = onmouseout; }

/**
 * Setter for property onmouseover.
 *
 * @param onmouseover New value of property onmouseover.
 */
public void setOnmouseover(String onmouseover) { this.onmouseover = onmouseover; }

/**
 * Sets the value for page.
 *
 * @param page New value of property page.
 */
public void setPage(String page) { this.page = page; }

/**
 * Sets the roles.
 *
 * @param roles The roles to set.
 */
public void setRoles(String roles) { this.roles = roles; }

/**
 * Setter for property target.
 *
 * @param target New value of property target.
 */
public void setTarget(String target) { this.target = target; }

/**
 * Setter for property title.
 *
 * @param title New value of property title.
 */
public void setTitle(String title) { this.title = title; }

/**
 * Setter for property toolTip.
 *
 * @param toolTip New value of property toolTip.
 */
public void setToolTip(String toolTip) { this.toolTip = toolTip; }

/** @param treeId New value of property treeId. */
public void setTreeId(String treeId) { this.treeId = treeId; }

/** @param url The parsed url to render. */
public void setUrl(String url) { this.url = url; }

/**
 * Sets the width.
 *
 * @param width The width to set.
 */
public void setWidth(String width) { this.width = width; }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.util;

import java.util.*;

import junit.framework.TestCase;

/**
 * Unit tests for the one-to-many map, covering construction, copy
 * semantics, the query operations (contains / get / getAll), the views
 * (entrySet / keySet / values), bulk insertion, removal, and sizing.
 */
public class TestOneToManyMap extends TestCase {
    // Fixture strings shared by every test case.
    private String s0 = "s0";
    private String s1 = "s1";
    private String s2 = "s2";
    private String s3 = "s3";
    private String s4 = "s4";

    @Override
    public void setUp() {}

    @Override
    public void tearDown() {}

    /** A freshly constructed map, and a copy of it, are both empty. */
    public void testConstruct0() {
        OneToManyMap<String, Integer> original = new OneToManyMap<>();
        assertNotNull(original);
        assertTrue(original.isEmpty());

        OneToManyMap<String, Integer> copy = new OneToManyMap<>(original);
        assertNotNull(copy);
        assertTrue(copy.isEmpty());
    }

    /** The copy constructor snapshots entries; later puts do not leak across. */
    public void testConstruct1() {
        OneToManyMap<String, String> original = new OneToManyMap<>();
        original.put(s0, s1);
        assertTrue(original.contains(s0, s1));

        OneToManyMap<String, String> copy = new OneToManyMap<>(original);
        assertTrue(copy.contains(s0, s1));

        original.put(s0, s2);
        assertTrue(original.contains(s0, s2));
        assertFalse(copy.contains(s0, s2));
        assertFalse(original.contains(s1, s2));
    }

    /** clear() empties the map. */
    public void testClear() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        m.put(s0, s1);
        assertTrue(m.contains(s0, s1));
        assertFalse(m.isEmpty());

        m.clear();
        assertFalse(m.contains(s0, s1));
        assertTrue(m.isEmpty());
    }

    /** containsKey reports only keys, never values. */
    public void testContainsKey() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        assertFalse(m.containsKey(s0));
        assertFalse(m.containsKey(s1));

        m.put(s0, s1);
        assertTrue(m.containsKey(s0));
        assertFalse(m.containsKey(s1));
    }

    /** containsValue reports every value stored under any key. */
    public void testContainsValue() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        assertFalse(m.containsValue(s0));
        assertFalse(m.containsValue(s1));
        assertFalse(m.containsValue(s2));

        m.put(s0, s1);
        assertFalse(m.containsValue(s0));
        assertTrue(m.containsValue(s1));
        assertFalse(m.containsValue(s2));

        m.put(s0, s2);
        assertFalse(m.containsValue(s0));
        assertTrue(m.containsValue(s1));
        assertTrue(m.containsValue(s2));
    }

    /** contains(k, v) tests a specific key/value pairing. */
    public void testContains() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        assertFalse(m.contains(s0, s1));
        assertFalse(m.contains(s0, s2));
        assertFalse(m.contains(s1, s2));

        m.put(s0, s1);
        assertTrue(m.contains(s0, s1));
        assertFalse(m.contains(s0, s2));
        assertFalse(m.contains(s1, s2));

        m.put(s0, s2);
        assertTrue(m.contains(s0, s1));
        assertTrue(m.contains(s0, s2));
        assertFalse(m.contains(s1, s2));
    }

    /** entrySet exposes one entry per key/value pairing, including duplicates of a key. */
    public void testEntrySet() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        m.put(s0, s1);
        m.put(s0, s2);
        m.put(s3, s4);

        boolean sawS0S1 = false;
        boolean sawS0S2 = false;
        boolean sawS3S4 = false;

        Iterator<Map.Entry<String, String>> it = m.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().equals(s0) && e.getValue().equals(s1)) {
                sawS0S1 = true;
            }
            else if (e.getKey().equals(s0) && e.getValue().equals(s2)) {
                sawS0S2 = true;
            }
            else if (e.getKey().equals(s3) && e.getValue().equals(s4)) {
                sawS3S4 = true;
            }
            else {
                // Any entry we did not put in is a failure.
                throw new IllegalArgumentException("unexpected: " + e);
            }
        }

        assertTrue(sawS0S1);
        assertTrue(sawS0S2);
        assertTrue(sawS3S4);
    }

    /** Equality is insensitive to insertion order but sensitive to content. */
    public void testEquals() {
        OneToManyMap<String, String> a = new OneToManyMap<>();
        a.put(s0, s1);
        a.put(s0, s2);
        a.put(s3, s4);

        OneToManyMap<String, String> b = new OneToManyMap<>();
        b.put(s3, s4);
        b.put(s0, s1);
        b.put(s0, s2);

        OneToManyMap<String, String> c = new OneToManyMap<>();
        c.put(s0, s2);
        c.put(s3, s4);

        assertTrue(a.equals(b));
        assertTrue(b.equals(a));
        assertTrue(a.hashCode() == b.hashCode());

        assertFalse(a.equals(c));
        assertFalse(c.equals(a));
    }

    /** get returns null for a missing key, otherwise some value stored under the key. */
    public void testGet() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        assertNull(m.get(s0));

        m.put(s0, s1);
        assertEquals(s1, m.get(s0));

        m.put(s0, s2);
        // With two values for s0, get may return either one.
        assertTrue(m.get(s0).equals(s1) || m.get(s0).equals(s2));
    }

    /** getAll iterates every value stored under a key (empty iterator when absent). */
    public void testGetAll() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        Iterator<String> values = m.getAll(s0);
        assertNotNull(values);
        assertFalse(values.hasNext());

        m.put(s0, s1);
        values = m.getAll(s0);
        assertNotNull(values);
        assertTrue(values.hasNext());
        assertEquals(s1, values.next());
        assertFalse(values.hasNext());

        m.put(s0, s2);
        values = m.getAll(s0);
        assertNotNull(values);

        boolean sawS1 = false, sawS2 = false;
        while (values.hasNext()) {
            Object v = values.next();
            if (v.equals(s1)) {
                sawS1 = true;
            }
            else if (v.equals(s2)) {
                sawS2 = true;
            }
            else {
                throw new IllegalArgumentException(v.toString());
            }
        }
        assertTrue(sawS1);
        assertTrue(sawS2);
    }

    /** keySet lists each key exactly once, however many values it has. */
    public void testKeySet() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        Set<String> expected = new HashSet<>();
        assertEquals(expected, m.keySet());

        m.put(s0, s1);
        expected.add(s0);
        assertEquals(expected, m.keySet());

        m.put(s2, s1);
        expected.add(s2);
        assertEquals(expected, m.keySet());
    }

    /** putAll from another OneToManyMap copies every pairing. */
    public void testPutAll0() {
        OneToManyMap<String, String> expected = new OneToManyMap<>();
        expected.put(s0, s1);
        expected.put(s0, s2);
        expected.put(s3, s4);

        OneToManyMap<String, String> source = new OneToManyMap<>();
        source.put(s0, s2);
        source.put(s3, s4);
        source.put(s0, s1);

        OneToManyMap<String, String> target = new OneToManyMap<>();
        target.putAll(source);
        assertEquals(expected, target);
    }

    /** putAll from a plain java.util.Map copies every entry. */
    public void testPutAll1() {
        OneToManyMap<String, String> expected = new OneToManyMap<>();
        expected.put(s0, s1);
        expected.put(s3, s4);

        Map<String, String> source = new HashMap<>();
        source.put(s3, s4);
        source.put(s0, s1);

        OneToManyMap<String, String> target = new OneToManyMap<>();
        target.putAll(source);
        assertEquals(expected, target);
    }

    /** remove(key) discards a key together with its values. */
    public void testRemove0() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        m.put(s0, s1);
        m.put(s3, s4);

        m.remove(s0);
        m.remove(s3);
        assertTrue(m.isEmpty());
    }

    /** remove(key, value) discards a single pairing, leaving siblings intact. */
    public void testRemove1() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        m.put(s0, s1);
        m.put(s0, s2);
        m.put(s3, s4);

        m.remove(s0, s2);
        m.remove(s3, s4);
        assertFalse(m.isEmpty());

        m.remove(s0, s1);
        assertTrue(m.isEmpty());
    }

    /** size counts key/value pairings, not distinct keys. */
    public void testSize() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        assertEquals(0, m.size());
        m.put(s0, s1);
        assertEquals(1, m.size());
        m.put(s0, s2);
        assertEquals(2, m.size());
        m.put(s3, s4);
        assertEquals(3, m.size());
        m.remove(s0, s2);
        assertEquals(2, m.size());
        m.remove(s3, s4);
        assertEquals(1, m.size());
        m.remove(s0, s1);
        assertEquals(0, m.size());
    }

    /** values is a set view: a value stored twice still appears once. */
    public void testValues() {
        OneToManyMap<String, String> m = new OneToManyMap<>();
        Set<String> expected = new HashSet<>();
        assertEquals(expected, m.values());

        m.put(s0, s1);
        expected.add(s1);
        assertEquals(expected, m.values());

        m.put(s2, s1);
        assertEquals(expected, m.values());

        m.put(s2, s3);
        expected.add(s3);
        assertEquals(expected, m.values());
    }
}
package de.unihamburg.swk.parsing.antlr4.java8; // Generated from Java8.g4 by ANTLR 4.7 import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) public class Java8Lexer extends Lexer { static { RuntimeMetaData.checkVersion("4.7", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int ABSTRACT=1, ASSERT=2, BOOLEAN=3, BREAK=4, BYTE=5, CASE=6, CATCH=7, CHAR=8, CLASS=9, CONST=10, CONTINUE=11, DEFAULT=12, DO=13, DOUBLE=14, ELSE=15, ENUM=16, EXTENDS=17, FINAL=18, FINALLY=19, FLOAT=20, FOR=21, IF=22, GOTO=23, IMPLEMENTS=24, IMPORT=25, INSTANCEOF=26, INT=27, INTERFACE=28, LONG=29, NATIVE=30, NEW=31, PACKAGE=32, PRIVATE=33, PROTECTED=34, PUBLIC=35, RETURN=36, SHORT=37, STATIC=38, STRICTFP=39, SUPER=40, SWITCH=41, SYNCHRONIZED=42, THIS=43, THROW=44, THROWS=45, TRANSIENT=46, TRY=47, VOID=48, VOLATILE=49, WHILE=50, IntegerLiteral=51, FloatingPointLiteral=52, BooleanLiteral=53, CharacterLiteral=54, StringLiteral=55, NullLiteral=56, LPAREN=57, RPAREN=58, LBRACE=59, RBRACE=60, LBRACK=61, RBRACK=62, SEMI=63, COMMA=64, DOT=65, ASSIGN=66, GT=67, LT=68, BANG=69, TILDE=70, QUESTION=71, COLON=72, EQUAL=73, LE=74, GE=75, NOTEQUAL=76, AND=77, OR=78, INC=79, DEC=80, ADD=81, SUB=82, MUL=83, DIV=84, BITAND=85, BITOR=86, CARET=87, MOD=88, ARROW=89, COLONCOLON=90, ADD_ASSIGN=91, SUB_ASSIGN=92, MUL_ASSIGN=93, DIV_ASSIGN=94, AND_ASSIGN=95, OR_ASSIGN=96, XOR_ASSIGN=97, MOD_ASSIGN=98, LSHIFT_ASSIGN=99, RSHIFT_ASSIGN=100, URSHIFT_ASSIGN=101, Identifier=102, AT=103, ELLIPSIS=104, WS=105, COMMENT=106, LINE_COMMENT=107; public static String[] channelNames = { 
"DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { "DEFAULT_MODE" }; public static final String[] ruleNames = { "ABSTRACT", "ASSERT", "BOOLEAN", "BREAK", "BYTE", "CASE", "CATCH", "CHAR", "CLASS", "CONST", "CONTINUE", "DEFAULT", "DO", "DOUBLE", "ELSE", "ENUM", "EXTENDS", "FINAL", "FINALLY", "FLOAT", "FOR", "IF", "GOTO", "IMPLEMENTS", "IMPORT", "INSTANCEOF", "INT", "INTERFACE", "LONG", "NATIVE", "NEW", "PACKAGE", "PRIVATE", "PROTECTED", "PUBLIC", "RETURN", "SHORT", "STATIC", "STRICTFP", "SUPER", "SWITCH", "SYNCHRONIZED", "THIS", "THROW", "THROWS", "TRANSIENT", "TRY", "VOID", "VOLATILE", "WHILE", "IntegerLiteral", "DecimalIntegerLiteral", "HexIntegerLiteral", "OctalIntegerLiteral", "BinaryIntegerLiteral", "IntegerTypeSuffix", "DecimalNumeral", "Digits", "Digit", "NonZeroDigit", "DigitsAndUnderscores", "DigitOrUnderscore", "Underscores", "HexNumeral", "HexDigits", "HexDigit", "HexDigitsAndUnderscores", "HexDigitOrUnderscore", "OctalNumeral", "OctalDigits", "OctalDigit", "OctalDigitsAndUnderscores", "OctalDigitOrUnderscore", "BinaryNumeral", "BinaryDigits", "BinaryDigit", "BinaryDigitsAndUnderscores", "BinaryDigitOrUnderscore", "FloatingPointLiteral", "DecimalFloatingPointLiteral", "ExponentPart", "ExponentIndicator", "SignedInteger", "Sign", "FloatTypeSuffix", "HexadecimalFloatingPointLiteral", "HexSignificand", "BinaryExponent", "BinaryExponentIndicator", "BooleanLiteral", "CharacterLiteral", "SingleCharacter", "StringLiteral", "StringCharacters", "StringCharacter", "EscapeSequence", "OctalEscape", "ZeroToThree", "UnicodeEscape", "NullLiteral", "LPAREN", "RPAREN", "LBRACE", "RBRACE", "LBRACK", "RBRACK", "SEMI", "COMMA", "DOT", "ASSIGN", "GT", "LT", "BANG", "TILDE", "QUESTION", "COLON", "EQUAL", "LE", "GE", "NOTEQUAL", "AND", "OR", "INC", "DEC", "ADD", "SUB", "MUL", "DIV", "BITAND", "BITOR", "CARET", "MOD", "ARROW", "COLONCOLON", "ADD_ASSIGN", "SUB_ASSIGN", "MUL_ASSIGN", "DIV_ASSIGN", "AND_ASSIGN", "OR_ASSIGN", "XOR_ASSIGN", "MOD_ASSIGN", 
"LSHIFT_ASSIGN", "RSHIFT_ASSIGN", "URSHIFT_ASSIGN", "Identifier", "JavaLetter", "JavaLetterOrDigit", "AT", "ELLIPSIS", "WS", "COMMENT", "LINE_COMMENT" }; private static final String[] _LITERAL_NAMES = { null, "'abstract'", "'assert'", "'boolean'", "'break'", "'byte'", "'case'", "'catch'", "'char'", "'class'", "'const'", "'continue'", "'default'", "'do'", "'double'", "'else'", "'enum'", "'extends'", "'final'", "'finally'", "'float'", "'for'", "'if'", "'goto'", "'implements'", "'import'", "'instanceof'", "'int'", "'interface'", "'long'", "'native'", "'new'", "'package'", "'private'", "'protected'", "'public'", "'return'", "'short'", "'static'", "'strictfp'", "'super'", "'switch'", "'synchronized'", "'this'", "'throw'", "'throws'", "'transient'", "'try'", "'void'", "'volatile'", "'while'", null, null, null, null, null, "'null'", "'('", "')'", "'{'", "'}'", "'['", "']'", "';'", "','", "'.'", "'='", "'>'", "'<'", "'!'", "'~'", "'?'", "':'", "'=='", "'<='", "'>='", "'!='", "'&&'", "'||'", "'++'", "'--'", "'+'", "'-'", "'*'", "'/'", "'&'", "'|'", "'^'", "'%'", "'->'", "'::'", "'+='", "'-='", "'*='", "'/='", "'&='", "'|='", "'^='", "'%='", "'<<='", "'>>='", "'>>>='", null, "'@'", "'...'" }; private static final String[] _SYMBOLIC_NAMES = { null, "ABSTRACT", "ASSERT", "BOOLEAN", "BREAK", "BYTE", "CASE", "CATCH", "CHAR", "CLASS", "CONST", "CONTINUE", "DEFAULT", "DO", "DOUBLE", "ELSE", "ENUM", "EXTENDS", "FINAL", "FINALLY", "FLOAT", "FOR", "IF", "GOTO", "IMPLEMENTS", "IMPORT", "INSTANCEOF", "INT", "INTERFACE", "LONG", "NATIVE", "NEW", "PACKAGE", "PRIVATE", "PROTECTED", "PUBLIC", "RETURN", "SHORT", "STATIC", "STRICTFP", "SUPER", "SWITCH", "SYNCHRONIZED", "THIS", "THROW", "THROWS", "TRANSIENT", "TRY", "VOID", "VOLATILE", "WHILE", "IntegerLiteral", "FloatingPointLiteral", "BooleanLiteral", "CharacterLiteral", "StringLiteral", "NullLiteral", "LPAREN", "RPAREN", "LBRACE", "RBRACE", "LBRACK", "RBRACK", "SEMI", "COMMA", "DOT", "ASSIGN", "GT", "LT", "BANG", "TILDE", "QUESTION", 
"COLON", "EQUAL", "LE", "GE", "NOTEQUAL", "AND", "OR", "INC", "DEC", "ADD", "SUB", "MUL", "DIV", "BITAND", "BITOR", "CARET", "MOD", "ARROW", "COLONCOLON", "ADD_ASSIGN", "SUB_ASSIGN", "MUL_ASSIGN", "DIV_ASSIGN", "AND_ASSIGN", "OR_ASSIGN", "XOR_ASSIGN", "MOD_ASSIGN", "LSHIFT_ASSIGN", "RSHIFT_ASSIGN", "URSHIFT_ASSIGN", "Identifier", "AT", "ELLIPSIS", "WS", "COMMENT", "LINE_COMMENT" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); /** * @deprecated Use {@link #VOCABULARY} instead. */ @Deprecated public static final String[] tokenNames; static { tokenNames = new String[_SYMBOLIC_NAMES.length]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = VOCABULARY.getLiteralName(i); if (tokenNames[i] == null) { tokenNames[i] = VOCABULARY.getSymbolicName(i); } if (tokenNames[i] == null) { tokenNames[i] = "<INVALID>"; } } } @Override @Deprecated public String[] getTokenNames() { return tokenNames; } @Override public Vocabulary getVocabulary() { return VOCABULARY; } public Java8Lexer(CharStream input) { super(input); _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } @Override public String getGrammarFileName() { return "Java8.g4"; } @Override public String[] getRuleNames() { return ruleNames; } @Override public String getSerializedATN() { return _serializedATN; } @Override public String[] getChannelNames() { return channelNames; } @Override public String[] getModeNames() { return modeNames; } @Override public ATN getATN() { return _ATN; } @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 146: return JavaLetter_sempred((RuleContext)_localctx, predIndex); case 147: return JavaLetterOrDigit_sempred((RuleContext)_localctx, predIndex); } return true; } private boolean JavaLetter_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 0: return Character.isJavaIdentifierStart(_input.LA(-1)); case 1: return 
Character.isJavaIdentifierStart(Character.toCodePoint((char)_input.LA(-2), (char)_input.LA(-1))); } return true; } private boolean JavaLetterOrDigit_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 2: return Character.isJavaIdentifierPart(_input.LA(-1)); case 3: return Character.isJavaIdentifierPart(Character.toCodePoint((char)_input.LA(-2), (char)_input.LA(-1))); } return true; } public static final String _serializedATN = "\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2m\u0448\b\1\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t="+ "\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+ "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+ "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\ti\4j\tj\4k\t"+ "k\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4"+ "w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t\u0080"+ "\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084\t\u0084\4\u0085"+ "\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089"+ "\4\u008a\t\u008a\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e"+ "\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091\4\u0092\t\u0092"+ "\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095\t\u0095\4\u0096\t\u0096\4\u0097"+ "\t\u0097\4\u0098\t\u0098\4\u0099\t\u0099\4\u009a\t\u009a\3\2\3\2\3\2\3"+ 
"\2\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4"+ "\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3"+ "\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n"+ "\3\n\3\13\3\13\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f"+ "\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3"+ "\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\22\3"+ "\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3"+ "\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3"+ "\26\3\26\3\26\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3"+ "\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3"+ "\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3"+ "\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3"+ "\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3!\3!\3!"+ "\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3"+ "#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3"+ "\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3"+ ")\3*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3,\3,\3"+ ",\3,\3,\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3"+ "/\3/\3/\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3"+ "\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3"+ "\64\3\64\5\64\u028d\n\64\3\65\3\65\5\65\u0291\n\65\3\66\3\66\5\66\u0295"+ "\n\66\3\67\3\67\5\67\u0299\n\67\38\38\58\u029d\n8\39\39\3:\3:\3:\5:\u02a4"+ "\n:\3:\3:\3:\5:\u02a9\n:\5:\u02ab\n:\3;\3;\5;\u02af\n;\3;\5;\u02b2\n;"+ "\3<\3<\5<\u02b6\n<\3=\3=\3>\6>\u02bb\n>\r>\16>\u02bc\3?\3?\5?\u02c1\n"+ "?\3@\6@\u02c4\n@\r@\16@\u02c5\3A\3A\3A\3A\3B\3B\5B\u02ce\nB\3B\5B\u02d1"+ "\nB\3C\3C\3D\6D\u02d6\nD\rD\16D\u02d7\3E\3E\5E\u02dc\nE\3F\3F\5F\u02e0"+ "\nF\3F\3F\3G\3G\5G\u02e6\nG\3G\5G\u02e9\nG\3H\3H\3I\6I\u02ee\nI\rI\16"+ 
"I\u02ef\3J\3J\5J\u02f4\nJ\3K\3K\3K\3K\3L\3L\5L\u02fc\nL\3L\5L\u02ff\n"+ "L\3M\3M\3N\6N\u0304\nN\rN\16N\u0305\3O\3O\5O\u030a\nO\3P\3P\5P\u030e\n"+ "P\3Q\3Q\3Q\5Q\u0313\nQ\3Q\5Q\u0316\nQ\3Q\5Q\u0319\nQ\3Q\3Q\3Q\5Q\u031e"+ "\nQ\3Q\5Q\u0321\nQ\3Q\3Q\3Q\5Q\u0326\nQ\3Q\3Q\3Q\5Q\u032b\nQ\3R\3R\3R"+ "\3S\3S\3T\5T\u0333\nT\3T\3T\3U\3U\3V\3V\3W\3W\3W\5W\u033e\nW\3X\3X\5X"+ "\u0342\nX\3X\3X\3X\5X\u0347\nX\3X\3X\5X\u034b\nX\3Y\3Y\3Y\3Z\3Z\3[\3["+ "\3[\3[\3[\3[\3[\3[\3[\5[\u035b\n[\3\\\3\\\3\\\3\\\3\\\3\\\3\\\3\\\5\\"+ "\u0365\n\\\3]\3]\3^\3^\5^\u036b\n^\3^\3^\3_\6_\u0370\n_\r_\16_\u0371\3"+ "`\3`\5`\u0376\n`\3a\3a\3a\3a\5a\u037c\na\3b\3b\3b\3b\3b\3b\3b\3b\3b\3"+ "b\3b\5b\u0389\nb\3c\3c\3d\3d\3d\3d\3d\3d\3d\3e\3e\3e\3e\3e\3f\3f\3g\3"+ "g\3h\3h\3i\3i\3j\3j\3k\3k\3l\3l\3m\3m\3n\3n\3o\3o\3p\3p\3q\3q\3r\3r\3"+ "s\3s\3t\3t\3u\3u\3v\3v\3v\3w\3w\3w\3x\3x\3x\3y\3y\3y\3z\3z\3z\3{\3{\3"+ "{\3|\3|\3|\3}\3}\3}\3~\3~\3\177\3\177\3\u0080\3\u0080\3\u0081\3\u0081"+ "\3\u0082\3\u0082\3\u0083\3\u0083\3\u0084\3\u0084\3\u0085\3\u0085\3\u0086"+ "\3\u0086\3\u0086\3\u0087\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088\3\u0089"+ "\3\u0089\3\u0089\3\u008a\3\u008a\3\u008a\3\u008b\3\u008b\3\u008b\3\u008c"+ "\3\u008c\3\u008c\3\u008d\3\u008d\3\u008d\3\u008e\3\u008e\3\u008e\3\u008f"+ "\3\u008f\3\u008f\3\u0090\3\u0090\3\u0090\3\u0090\3\u0091\3\u0091\3\u0091"+ "\3\u0091\3\u0092\3\u0092\3\u0092\3\u0092\3\u0092\3\u0093\3\u0093\7\u0093"+ "\u040e\n\u0093\f\u0093\16\u0093\u0411\13\u0093\3\u0094\3\u0094\3\u0094"+ "\3\u0094\3\u0094\3\u0094\5\u0094\u0419\n\u0094\3\u0095\3\u0095\3\u0095"+ "\3\u0095\3\u0095\3\u0095\5\u0095\u0421\n\u0095\3\u0096\3\u0096\3\u0097"+ "\3\u0097\3\u0097\3\u0097\3\u0098\6\u0098\u042a\n\u0098\r\u0098\16\u0098"+ "\u042b\3\u0098\3\u0098\3\u0099\3\u0099\3\u0099\3\u0099\7\u0099\u0434\n"+ "\u0099\f\u0099\16\u0099\u0437\13\u0099\3\u0099\3\u0099\3\u0099\3\u0099"+ "\3\u0099\3\u009a\3\u009a\3\u009a\3\u009a\7\u009a\u0442\n\u009a\f\u009a"+ 
"\16\u009a\u0445\13\u009a\3\u009a\3\u009a\3\u0435\2\u009b\3\3\5\4\7\5\t"+ "\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23"+ "%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G"+ "%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\2k\2m\2o\2q\2s\2u\2w\2"+ "y\2{\2}\2\177\2\u0081\2\u0083\2\u0085\2\u0087\2\u0089\2\u008b\2\u008d"+ "\2\u008f\2\u0091\2\u0093\2\u0095\2\u0097\2\u0099\2\u009b\2\u009d\2\u009f"+ "\66\u00a1\2\u00a3\2\u00a5\2\u00a7\2\u00a9\2\u00ab\2\u00ad\2\u00af\2\u00b1"+ "\2\u00b3\2\u00b5\67\u00b78\u00b9\2\u00bb9\u00bd\2\u00bf\2\u00c1\2\u00c3"+ "\2\u00c5\2\u00c7\2\u00c9:\u00cb;\u00cd<\u00cf=\u00d1>\u00d3?\u00d5@\u00d7"+ "A\u00d9B\u00dbC\u00ddD\u00dfE\u00e1F\u00e3G\u00e5H\u00e7I\u00e9J\u00eb"+ "K\u00edL\u00efM\u00f1N\u00f3O\u00f5P\u00f7Q\u00f9R\u00fbS\u00fdT\u00ff"+ "U\u0101V\u0103W\u0105X\u0107Y\u0109Z\u010b[\u010d\\\u010f]\u0111^\u0113"+ "_\u0115`\u0117a\u0119b\u011bc\u011dd\u011fe\u0121f\u0123g\u0125h\u0127"+ "\2\u0129\2\u012bi\u012dj\u012fk\u0131l\u0133m\3\2\30\4\2NNnn\3\2\63;\4"+ "\2ZZzz\5\2\62;CHch\3\2\629\4\2DDdd\3\2\62\63\4\2GGgg\4\2--//\6\2FFHHf"+ "fhh\4\2RRrr\4\2))^^\4\2$$^^\n\2$$))^^ddhhppttvv\3\2\62\65\6\2&&C\\aac"+ "|\4\2\2\u0081\ud802\udc01\3\2\ud802\udc01\3\2\udc02\ue001\7\2&&\62;C\\"+ "aac|\5\2\13\f\16\17\"\"\4\2\f\f\17\17\2\u0456\2\3\3\2\2\2\2\5\3\2\2\2"+ "\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3"+ "\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2"+ "\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2"+ "\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2"+ "\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2"+ "\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2"+ "\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y"+ "\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2"+ "\2\2\2g\3\2\2\2\2\u009f\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00bb"+ 
"\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2"+ "\2\2\u00d1\3\2\2\2\2\u00d3\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9"+ "\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2"+ "\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb"+ "\3\2\2\2\2\u00ed\3\2\2\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2"+ "\2\2\u00f5\3\2\2\2\2\u00f7\3\2\2\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd"+ "\3\2\2\2\2\u00ff\3\2\2\2\2\u0101\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2"+ "\2\2\u0107\3\2\2\2\2\u0109\3\2\2\2\2\u010b\3\2\2\2\2\u010d\3\2\2\2\2\u010f"+ "\3\2\2\2\2\u0111\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\2\u0117\3\2\2"+ "\2\2\u0119\3\2\2\2\2\u011b\3\2\2\2\2\u011d\3\2\2\2\2\u011f\3\2\2\2\2\u0121"+ "\3\2\2\2\2\u0123\3\2\2\2\2\u0125\3\2\2\2\2\u012b\3\2\2\2\2\u012d\3\2\2"+ "\2\2\u012f\3\2\2\2\2\u0131\3\2\2\2\2\u0133\3\2\2\2\3\u0135\3\2\2\2\5\u013e"+ "\3\2\2\2\7\u0145\3\2\2\2\t\u014d\3\2\2\2\13\u0153\3\2\2\2\r\u0158\3\2"+ "\2\2\17\u015d\3\2\2\2\21\u0163\3\2\2\2\23\u0168\3\2\2\2\25\u016e\3\2\2"+ "\2\27\u0174\3\2\2\2\31\u017d\3\2\2\2\33\u0185\3\2\2\2\35\u0188\3\2\2\2"+ "\37\u018f\3\2\2\2!\u0194\3\2\2\2#\u0199\3\2\2\2%\u01a1\3\2\2\2\'\u01a7"+ "\3\2\2\2)\u01af\3\2\2\2+\u01b5\3\2\2\2-\u01b9\3\2\2\2/\u01bc\3\2\2\2\61"+ "\u01c1\3\2\2\2\63\u01cc\3\2\2\2\65\u01d3\3\2\2\2\67\u01de\3\2\2\29\u01e2"+ "\3\2\2\2;\u01ec\3\2\2\2=\u01f1\3\2\2\2?\u01f8\3\2\2\2A\u01fc\3\2\2\2C"+ "\u0204\3\2\2\2E\u020c\3\2\2\2G\u0216\3\2\2\2I\u021d\3\2\2\2K\u0224\3\2"+ "\2\2M\u022a\3\2\2\2O\u0231\3\2\2\2Q\u023a\3\2\2\2S\u0240\3\2\2\2U\u0247"+ "\3\2\2\2W\u0254\3\2\2\2Y\u0259\3\2\2\2[\u025f\3\2\2\2]\u0266\3\2\2\2_"+ "\u0270\3\2\2\2a\u0274\3\2\2\2c\u0279\3\2\2\2e\u0282\3\2\2\2g\u028c\3\2"+ "\2\2i\u028e\3\2\2\2k\u0292\3\2\2\2m\u0296\3\2\2\2o\u029a\3\2\2\2q\u029e"+ "\3\2\2\2s\u02aa\3\2\2\2u\u02ac\3\2\2\2w\u02b5\3\2\2\2y\u02b7\3\2\2\2{"+ "\u02ba\3\2\2\2}\u02c0\3\2\2\2\177\u02c3\3\2\2\2\u0081\u02c7\3\2\2\2\u0083"+ 
"\u02cb\3\2\2\2\u0085\u02d2\3\2\2\2\u0087\u02d5\3\2\2\2\u0089\u02db\3\2"+ "\2\2\u008b\u02dd\3\2\2\2\u008d\u02e3\3\2\2\2\u008f\u02ea\3\2\2\2\u0091"+ "\u02ed\3\2\2\2\u0093\u02f3\3\2\2\2\u0095\u02f5\3\2\2\2\u0097\u02f9\3\2"+ "\2\2\u0099\u0300\3\2\2\2\u009b\u0303\3\2\2\2\u009d\u0309\3\2\2\2\u009f"+ "\u030d\3\2\2\2\u00a1\u032a\3\2\2\2\u00a3\u032c\3\2\2\2\u00a5\u032f\3\2"+ "\2\2\u00a7\u0332\3\2\2\2\u00a9\u0336\3\2\2\2\u00ab\u0338\3\2\2\2\u00ad"+ "\u033a\3\2\2\2\u00af\u034a\3\2\2\2\u00b1\u034c\3\2\2\2\u00b3\u034f\3\2"+ "\2\2\u00b5\u035a\3\2\2\2\u00b7\u0364\3\2\2\2\u00b9\u0366\3\2\2\2\u00bb"+ "\u0368\3\2\2\2\u00bd\u036f\3\2\2\2\u00bf\u0375\3\2\2\2\u00c1\u037b\3\2"+ "\2\2\u00c3\u0388\3\2\2\2\u00c5\u038a\3\2\2\2\u00c7\u038c\3\2\2\2\u00c9"+ "\u0393\3\2\2\2\u00cb\u0398\3\2\2\2\u00cd\u039a\3\2\2\2\u00cf\u039c\3\2"+ "\2\2\u00d1\u039e\3\2\2\2\u00d3\u03a0\3\2\2\2\u00d5\u03a2\3\2\2\2\u00d7"+ "\u03a4\3\2\2\2\u00d9\u03a6\3\2\2\2\u00db\u03a8\3\2\2\2\u00dd\u03aa\3\2"+ "\2\2\u00df\u03ac\3\2\2\2\u00e1\u03ae\3\2\2\2\u00e3\u03b0\3\2\2\2\u00e5"+ "\u03b2\3\2\2\2\u00e7\u03b4\3\2\2\2\u00e9\u03b6\3\2\2\2\u00eb\u03b8\3\2"+ "\2\2\u00ed\u03bb\3\2\2\2\u00ef\u03be\3\2\2\2\u00f1\u03c1\3\2\2\2\u00f3"+ "\u03c4\3\2\2\2\u00f5\u03c7\3\2\2\2\u00f7\u03ca\3\2\2\2\u00f9\u03cd\3\2"+ "\2\2\u00fb\u03d0\3\2\2\2\u00fd\u03d2\3\2\2\2\u00ff\u03d4\3\2\2\2\u0101"+ "\u03d6\3\2\2\2\u0103\u03d8\3\2\2\2\u0105\u03da\3\2\2\2\u0107\u03dc\3\2"+ "\2\2\u0109\u03de\3\2\2\2\u010b\u03e0\3\2\2\2\u010d\u03e3\3\2\2\2\u010f"+ "\u03e6\3\2\2\2\u0111\u03e9\3\2\2\2\u0113\u03ec\3\2\2\2\u0115\u03ef\3\2"+ "\2\2\u0117\u03f2\3\2\2\2\u0119\u03f5\3\2\2\2\u011b\u03f8\3\2\2\2\u011d"+ "\u03fb\3\2\2\2\u011f\u03fe\3\2\2\2\u0121\u0402\3\2\2\2\u0123\u0406\3\2"+ "\2\2\u0125\u040b\3\2\2\2\u0127\u0418\3\2\2\2\u0129\u0420\3\2\2\2\u012b"+ "\u0422\3\2\2\2\u012d\u0424\3\2\2\2\u012f\u0429\3\2\2\2\u0131\u042f\3\2"+ "\2\2\u0133\u043d\3\2\2\2\u0135\u0136\7c\2\2\u0136\u0137\7d\2\2\u0137\u0138"+ 
"\7u\2\2\u0138\u0139\7v\2\2\u0139\u013a\7t\2\2\u013a\u013b\7c\2\2\u013b"+ "\u013c\7e\2\2\u013c\u013d\7v\2\2\u013d\4\3\2\2\2\u013e\u013f\7c\2\2\u013f"+ "\u0140\7u\2\2\u0140\u0141\7u\2\2\u0141\u0142\7g\2\2\u0142\u0143\7t\2\2"+ "\u0143\u0144\7v\2\2\u0144\6\3\2\2\2\u0145\u0146\7d\2\2\u0146\u0147\7q"+ "\2\2\u0147\u0148\7q\2\2\u0148\u0149\7n\2\2\u0149\u014a\7g\2\2\u014a\u014b"+ "\7c\2\2\u014b\u014c\7p\2\2\u014c\b\3\2\2\2\u014d\u014e\7d\2\2\u014e\u014f"+ "\7t\2\2\u014f\u0150\7g\2\2\u0150\u0151\7c\2\2\u0151\u0152\7m\2\2\u0152"+ "\n\3\2\2\2\u0153\u0154\7d\2\2\u0154\u0155\7{\2\2\u0155\u0156\7v\2\2\u0156"+ "\u0157\7g\2\2\u0157\f\3\2\2\2\u0158\u0159\7e\2\2\u0159\u015a\7c\2\2\u015a"+ "\u015b\7u\2\2\u015b\u015c\7g\2\2\u015c\16\3\2\2\2\u015d\u015e\7e\2\2\u015e"+ "\u015f\7c\2\2\u015f\u0160\7v\2\2\u0160\u0161\7e\2\2\u0161\u0162\7j\2\2"+ "\u0162\20\3\2\2\2\u0163\u0164\7e\2\2\u0164\u0165\7j\2\2\u0165\u0166\7"+ "c\2\2\u0166\u0167\7t\2\2\u0167\22\3\2\2\2\u0168\u0169\7e\2\2\u0169\u016a"+ "\7n\2\2\u016a\u016b\7c\2\2\u016b\u016c\7u\2\2\u016c\u016d\7u\2\2\u016d"+ "\24\3\2\2\2\u016e\u016f\7e\2\2\u016f\u0170\7q\2\2\u0170\u0171\7p\2\2\u0171"+ "\u0172\7u\2\2\u0172\u0173\7v\2\2\u0173\26\3\2\2\2\u0174\u0175\7e\2\2\u0175"+ "\u0176\7q\2\2\u0176\u0177\7p\2\2\u0177\u0178\7v\2\2\u0178\u0179\7k\2\2"+ "\u0179\u017a\7p\2\2\u017a\u017b\7w\2\2\u017b\u017c\7g\2\2\u017c\30\3\2"+ "\2\2\u017d\u017e\7f\2\2\u017e\u017f\7g\2\2\u017f\u0180\7h\2\2\u0180\u0181"+ "\7c\2\2\u0181\u0182\7w\2\2\u0182\u0183\7n\2\2\u0183\u0184\7v\2\2\u0184"+ "\32\3\2\2\2\u0185\u0186\7f\2\2\u0186\u0187\7q\2\2\u0187\34\3\2\2\2\u0188"+ "\u0189\7f\2\2\u0189\u018a\7q\2\2\u018a\u018b\7w\2\2\u018b\u018c\7d\2\2"+ "\u018c\u018d\7n\2\2\u018d\u018e\7g\2\2\u018e\36\3\2\2\2\u018f\u0190\7"+ "g\2\2\u0190\u0191\7n\2\2\u0191\u0192\7u\2\2\u0192\u0193\7g\2\2\u0193 "+ "\3\2\2\2\u0194\u0195\7g\2\2\u0195\u0196\7p\2\2\u0196\u0197\7w\2\2\u0197"+ "\u0198\7o\2\2\u0198\"\3\2\2\2\u0199\u019a\7g\2\2\u019a\u019b\7z\2\2\u019b"+ 
"\u019c\7v\2\2\u019c\u019d\7g\2\2\u019d\u019e\7p\2\2\u019e\u019f\7f\2\2"+ "\u019f\u01a0\7u\2\2\u01a0$\3\2\2\2\u01a1\u01a2\7h\2\2\u01a2\u01a3\7k\2"+ "\2\u01a3\u01a4\7p\2\2\u01a4\u01a5\7c\2\2\u01a5\u01a6\7n\2\2\u01a6&\3\2"+ "\2\2\u01a7\u01a8\7h\2\2\u01a8\u01a9\7k\2\2\u01a9\u01aa\7p\2\2\u01aa\u01ab"+ "\7c\2\2\u01ab\u01ac\7n\2\2\u01ac\u01ad\7n\2\2\u01ad\u01ae\7{\2\2\u01ae"+ "(\3\2\2\2\u01af\u01b0\7h\2\2\u01b0\u01b1\7n\2\2\u01b1\u01b2\7q\2\2\u01b2"+ "\u01b3\7c\2\2\u01b3\u01b4\7v\2\2\u01b4*\3\2\2\2\u01b5\u01b6\7h\2\2\u01b6"+ "\u01b7\7q\2\2\u01b7\u01b8\7t\2\2\u01b8,\3\2\2\2\u01b9\u01ba\7k\2\2\u01ba"+ "\u01bb\7h\2\2\u01bb.\3\2\2\2\u01bc\u01bd\7i\2\2\u01bd\u01be\7q\2\2\u01be"+ "\u01bf\7v\2\2\u01bf\u01c0\7q\2\2\u01c0\60\3\2\2\2\u01c1\u01c2\7k\2\2\u01c2"+ "\u01c3\7o\2\2\u01c3\u01c4\7r\2\2\u01c4\u01c5\7n\2\2\u01c5\u01c6\7g\2\2"+ "\u01c6\u01c7\7o\2\2\u01c7\u01c8\7g\2\2\u01c8\u01c9\7p\2\2\u01c9\u01ca"+ "\7v\2\2\u01ca\u01cb\7u\2\2\u01cb\62\3\2\2\2\u01cc\u01cd\7k\2\2\u01cd\u01ce"+ "\7o\2\2\u01ce\u01cf\7r\2\2\u01cf\u01d0\7q\2\2\u01d0\u01d1\7t\2\2\u01d1"+ "\u01d2\7v\2\2\u01d2\64\3\2\2\2\u01d3\u01d4\7k\2\2\u01d4\u01d5\7p\2\2\u01d5"+ "\u01d6\7u\2\2\u01d6\u01d7\7v\2\2\u01d7\u01d8\7c\2\2\u01d8\u01d9\7p\2\2"+ "\u01d9\u01da\7e\2\2\u01da\u01db\7g\2\2\u01db\u01dc\7q\2\2\u01dc\u01dd"+ "\7h\2\2\u01dd\66\3\2\2\2\u01de\u01df\7k\2\2\u01df\u01e0\7p\2\2\u01e0\u01e1"+ "\7v\2\2\u01e18\3\2\2\2\u01e2\u01e3\7k\2\2\u01e3\u01e4\7p\2\2\u01e4\u01e5"+ "\7v\2\2\u01e5\u01e6\7g\2\2\u01e6\u01e7\7t\2\2\u01e7\u01e8\7h\2\2\u01e8"+ "\u01e9\7c\2\2\u01e9\u01ea\7e\2\2\u01ea\u01eb\7g\2\2\u01eb:\3\2\2\2\u01ec"+ "\u01ed\7n\2\2\u01ed\u01ee\7q\2\2\u01ee\u01ef\7p\2\2\u01ef\u01f0\7i\2\2"+ "\u01f0<\3\2\2\2\u01f1\u01f2\7p\2\2\u01f2\u01f3\7c\2\2\u01f3\u01f4\7v\2"+ "\2\u01f4\u01f5\7k\2\2\u01f5\u01f6\7x\2\2\u01f6\u01f7\7g\2\2\u01f7>\3\2"+ "\2\2\u01f8\u01f9\7p\2\2\u01f9\u01fa\7g\2\2\u01fa\u01fb\7y\2\2\u01fb@\3"+ "\2\2\2\u01fc\u01fd\7r\2\2\u01fd\u01fe\7c\2\2\u01fe\u01ff\7e\2\2\u01ff"+ 
"\u0200\7m\2\2\u0200\u0201\7c\2\2\u0201\u0202\7i\2\2\u0202\u0203\7g\2\2"+ "\u0203B\3\2\2\2\u0204\u0205\7r\2\2\u0205\u0206\7t\2\2\u0206\u0207\7k\2"+ "\2\u0207\u0208\7x\2\2\u0208\u0209\7c\2\2\u0209\u020a\7v\2\2\u020a\u020b"+ "\7g\2\2\u020bD\3\2\2\2\u020c\u020d\7r\2\2\u020d\u020e\7t\2\2\u020e\u020f"+ "\7q\2\2\u020f\u0210\7v\2\2\u0210\u0211\7g\2\2\u0211\u0212\7e\2\2\u0212"+ "\u0213\7v\2\2\u0213\u0214\7g\2\2\u0214\u0215\7f\2\2\u0215F\3\2\2\2\u0216"+ "\u0217\7r\2\2\u0217\u0218\7w\2\2\u0218\u0219\7d\2\2\u0219\u021a\7n\2\2"+ "\u021a\u021b\7k\2\2\u021b\u021c\7e\2\2\u021cH\3\2\2\2\u021d\u021e\7t\2"+ "\2\u021e\u021f\7g\2\2\u021f\u0220\7v\2\2\u0220\u0221\7w\2\2\u0221\u0222"+ "\7t\2\2\u0222\u0223\7p\2\2\u0223J\3\2\2\2\u0224\u0225\7u\2\2\u0225\u0226"+ "\7j\2\2\u0226\u0227\7q\2\2\u0227\u0228\7t\2\2\u0228\u0229\7v\2\2\u0229"+ "L\3\2\2\2\u022a\u022b\7u\2\2\u022b\u022c\7v\2\2\u022c\u022d\7c\2\2\u022d"+ "\u022e\7v\2\2\u022e\u022f\7k\2\2\u022f\u0230\7e\2\2\u0230N\3\2\2\2\u0231"+ "\u0232\7u\2\2\u0232\u0233\7v\2\2\u0233\u0234\7t\2\2\u0234\u0235\7k\2\2"+ "\u0235\u0236\7e\2\2\u0236\u0237\7v\2\2\u0237\u0238\7h\2\2\u0238\u0239"+ "\7r\2\2\u0239P\3\2\2\2\u023a\u023b\7u\2\2\u023b\u023c\7w\2\2\u023c\u023d"+ "\7r\2\2\u023d\u023e\7g\2\2\u023e\u023f\7t\2\2\u023fR\3\2\2\2\u0240\u0241"+ "\7u\2\2\u0241\u0242\7y\2\2\u0242\u0243\7k\2\2\u0243\u0244\7v\2\2\u0244"+ "\u0245\7e\2\2\u0245\u0246\7j\2\2\u0246T\3\2\2\2\u0247\u0248\7u\2\2\u0248"+ "\u0249\7{\2\2\u0249\u024a\7p\2\2\u024a\u024b\7e\2\2\u024b\u024c\7j\2\2"+ "\u024c\u024d\7t\2\2\u024d\u024e\7q\2\2\u024e\u024f\7p\2\2\u024f\u0250"+ "\7k\2\2\u0250\u0251\7|\2\2\u0251\u0252\7g\2\2\u0252\u0253\7f\2\2\u0253"+ "V\3\2\2\2\u0254\u0255\7v\2\2\u0255\u0256\7j\2\2\u0256\u0257\7k\2\2\u0257"+ "\u0258\7u\2\2\u0258X\3\2\2\2\u0259\u025a\7v\2\2\u025a\u025b\7j\2\2\u025b"+ "\u025c\7t\2\2\u025c\u025d\7q\2\2\u025d\u025e\7y\2\2\u025eZ\3\2\2\2\u025f"+ "\u0260\7v\2\2\u0260\u0261\7j\2\2\u0261\u0262\7t\2\2\u0262\u0263\7q\2\2"+ 
"\u0263\u0264\7y\2\2\u0264\u0265\7u\2\2\u0265\\\3\2\2\2\u0266\u0267\7v"+ "\2\2\u0267\u0268\7t\2\2\u0268\u0269\7c\2\2\u0269\u026a\7p\2\2\u026a\u026b"+ "\7u\2\2\u026b\u026c\7k\2\2\u026c\u026d\7g\2\2\u026d\u026e\7p\2\2\u026e"+ "\u026f\7v\2\2\u026f^\3\2\2\2\u0270\u0271\7v\2\2\u0271\u0272\7t\2\2\u0272"+ "\u0273\7{\2\2\u0273`\3\2\2\2\u0274\u0275\7x\2\2\u0275\u0276\7q\2\2\u0276"+ "\u0277\7k\2\2\u0277\u0278\7f\2\2\u0278b\3\2\2\2\u0279\u027a\7x\2\2\u027a"+ "\u027b\7q\2\2\u027b\u027c\7n\2\2\u027c\u027d\7c\2\2\u027d\u027e\7v\2\2"+ "\u027e\u027f\7k\2\2\u027f\u0280\7n\2\2\u0280\u0281\7g\2\2\u0281d\3\2\2"+ "\2\u0282\u0283\7y\2\2\u0283\u0284\7j\2\2\u0284\u0285\7k\2\2\u0285\u0286"+ "\7n\2\2\u0286\u0287\7g\2\2\u0287f\3\2\2\2\u0288\u028d\5i\65\2\u0289\u028d"+ "\5k\66\2\u028a\u028d\5m\67\2\u028b\u028d\5o8\2\u028c\u0288\3\2\2\2\u028c"+ "\u0289\3\2\2\2\u028c\u028a\3\2\2\2\u028c\u028b\3\2\2\2\u028dh\3\2\2\2"+ "\u028e\u0290\5s:\2\u028f\u0291\5q9\2\u0290\u028f\3\2\2\2\u0290\u0291\3"+ "\2\2\2\u0291j\3\2\2\2\u0292\u0294\5\u0081A\2\u0293\u0295\5q9\2\u0294\u0293"+ "\3\2\2\2\u0294\u0295\3\2\2\2\u0295l\3\2\2\2\u0296\u0298\5\u008bF\2\u0297"+ "\u0299\5q9\2\u0298\u0297\3\2\2\2\u0298\u0299\3\2\2\2\u0299n\3\2\2\2\u029a"+ "\u029c\5\u0095K\2\u029b\u029d\5q9\2\u029c\u029b\3\2\2\2\u029c\u029d\3"+ "\2\2\2\u029dp\3\2\2\2\u029e\u029f\t\2\2\2\u029fr\3\2\2\2\u02a0\u02ab\7"+ "\62\2\2\u02a1\u02a8\5y=\2\u02a2\u02a4\5u;\2\u02a3\u02a2\3\2\2\2\u02a3"+ "\u02a4\3\2\2\2\u02a4\u02a9\3\2\2\2\u02a5\u02a6\5\177@\2\u02a6\u02a7\5"+ "u;\2\u02a7\u02a9\3\2\2\2\u02a8\u02a3\3\2\2\2\u02a8\u02a5\3\2\2\2\u02a9"+ "\u02ab\3\2\2\2\u02aa\u02a0\3\2\2\2\u02aa\u02a1\3\2\2\2\u02abt\3\2\2\2"+ "\u02ac\u02b1\5w<\2\u02ad\u02af\5{>\2\u02ae\u02ad\3\2\2\2\u02ae\u02af\3"+ "\2\2\2\u02af\u02b0\3\2\2\2\u02b0\u02b2\5w<\2\u02b1\u02ae\3\2\2\2\u02b1"+ "\u02b2\3\2\2\2\u02b2v\3\2\2\2\u02b3\u02b6\7\62\2\2\u02b4\u02b6\5y=\2\u02b5"+ "\u02b3\3\2\2\2\u02b5\u02b4\3\2\2\2\u02b6x\3\2\2\2\u02b7\u02b8\t\3\2\2"+ 
"\u02b8z\3\2\2\2\u02b9\u02bb\5}?\2\u02ba\u02b9\3\2\2\2\u02bb\u02bc\3\2"+ "\2\2\u02bc\u02ba\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd|\3\2\2\2\u02be\u02c1"+ "\5w<\2\u02bf\u02c1\7a\2\2\u02c0\u02be\3\2\2\2\u02c0\u02bf\3\2\2\2\u02c1"+ "~\3\2\2\2\u02c2\u02c4\7a\2\2\u02c3\u02c2\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5"+ "\u02c3\3\2\2\2\u02c5\u02c6\3\2\2\2\u02c6\u0080\3\2\2\2\u02c7\u02c8\7\62"+ "\2\2\u02c8\u02c9\t\4\2\2\u02c9\u02ca\5\u0083B\2\u02ca\u0082\3\2\2\2\u02cb"+ "\u02d0\5\u0085C\2\u02cc\u02ce\5\u0087D\2\u02cd\u02cc\3\2\2\2\u02cd\u02ce"+ "\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf\u02d1\5\u0085C\2\u02d0\u02cd\3\2\2"+ "\2\u02d0\u02d1\3\2\2\2\u02d1\u0084\3\2\2\2\u02d2\u02d3\t\5\2\2\u02d3\u0086"+ "\3\2\2\2\u02d4\u02d6\5\u0089E\2\u02d5\u02d4\3\2\2\2\u02d6\u02d7\3\2\2"+ "\2\u02d7\u02d5\3\2\2\2\u02d7\u02d8\3\2\2\2\u02d8\u0088\3\2\2\2\u02d9\u02dc"+ "\5\u0085C\2\u02da\u02dc\7a\2\2\u02db\u02d9\3\2\2\2\u02db\u02da\3\2\2\2"+ "\u02dc\u008a\3\2\2\2\u02dd\u02df\7\62\2\2\u02de\u02e0\5\177@\2\u02df\u02de"+ "\3\2\2\2\u02df\u02e0\3\2\2\2\u02e0\u02e1\3\2\2\2\u02e1\u02e2\5\u008dG"+ "\2\u02e2\u008c\3\2\2\2\u02e3\u02e8\5\u008fH\2\u02e4\u02e6\5\u0091I\2\u02e5"+ "\u02e4\3\2\2\2\u02e5\u02e6\3\2\2\2\u02e6\u02e7\3\2\2\2\u02e7\u02e9\5\u008f"+ "H\2\u02e8\u02e5\3\2\2\2\u02e8\u02e9\3\2\2\2\u02e9\u008e\3\2\2\2\u02ea"+ "\u02eb\t\6\2\2\u02eb\u0090\3\2\2\2\u02ec\u02ee\5\u0093J\2\u02ed\u02ec"+ "\3\2\2\2\u02ee\u02ef\3\2\2\2\u02ef\u02ed\3\2\2\2\u02ef\u02f0\3\2\2\2\u02f0"+ "\u0092\3\2\2\2\u02f1\u02f4\5\u008fH\2\u02f2\u02f4\7a\2\2\u02f3\u02f1\3"+ "\2\2\2\u02f3\u02f2\3\2\2\2\u02f4\u0094\3\2\2\2\u02f5\u02f6\7\62\2\2\u02f6"+ "\u02f7\t\7\2\2\u02f7\u02f8\5\u0097L\2\u02f8\u0096\3\2\2\2\u02f9\u02fe"+ "\5\u0099M\2\u02fa\u02fc\5\u009bN\2\u02fb\u02fa\3\2\2\2\u02fb\u02fc\3\2"+ "\2\2\u02fc\u02fd\3\2\2\2\u02fd\u02ff\5\u0099M\2\u02fe\u02fb\3\2\2\2\u02fe"+ "\u02ff\3\2\2\2\u02ff\u0098\3\2\2\2\u0300\u0301\t\b\2\2\u0301\u009a\3\2"+ "\2\2\u0302\u0304\5\u009dO\2\u0303\u0302\3\2\2\2\u0304\u0305\3\2\2\2\u0305"+ 
"\u0303\3\2\2\2\u0305\u0306\3\2\2\2\u0306\u009c\3\2\2\2\u0307\u030a\5\u0099"+ "M\2\u0308\u030a\7a\2\2\u0309\u0307\3\2\2\2\u0309\u0308\3\2\2\2\u030a\u009e"+ "\3\2\2\2\u030b\u030e\5\u00a1Q\2\u030c\u030e\5\u00adW\2\u030d\u030b\3\2"+ "\2\2\u030d\u030c\3\2\2\2\u030e\u00a0\3\2\2\2\u030f\u0310\5u;\2\u0310\u0312"+ "\7\60\2\2\u0311\u0313\5u;\2\u0312\u0311\3\2\2\2\u0312\u0313\3\2\2\2\u0313"+ "\u0315\3\2\2\2\u0314\u0316\5\u00a3R\2\u0315\u0314\3\2\2\2\u0315\u0316"+ "\3\2\2\2\u0316\u0318\3\2\2\2\u0317\u0319\5\u00abV\2\u0318\u0317\3\2\2"+ "\2\u0318\u0319\3\2\2\2\u0319\u032b\3\2\2\2\u031a\u031b\7\60\2\2\u031b"+ "\u031d\5u;\2\u031c\u031e\5\u00a3R\2\u031d\u031c\3\2\2\2\u031d\u031e\3"+ "\2\2\2\u031e\u0320\3\2\2\2\u031f\u0321\5\u00abV\2\u0320\u031f\3\2\2\2"+ "\u0320\u0321\3\2\2\2\u0321\u032b\3\2\2\2\u0322\u0323\5u;\2\u0323\u0325"+ "\5\u00a3R\2\u0324\u0326\5\u00abV\2\u0325\u0324\3\2\2\2\u0325\u0326\3\2"+ "\2\2\u0326\u032b\3\2\2\2\u0327\u0328\5u;\2\u0328\u0329\5\u00abV\2\u0329"+ "\u032b\3\2\2\2\u032a\u030f\3\2\2\2\u032a\u031a\3\2\2\2\u032a\u0322\3\2"+ "\2\2\u032a\u0327\3\2\2\2\u032b\u00a2\3\2\2\2\u032c\u032d\5\u00a5S\2\u032d"+ "\u032e\5\u00a7T\2\u032e\u00a4\3\2\2\2\u032f\u0330\t\t\2\2\u0330\u00a6"+ "\3\2\2\2\u0331\u0333\5\u00a9U\2\u0332\u0331\3\2\2\2\u0332\u0333\3\2\2"+ "\2\u0333\u0334\3\2\2\2\u0334\u0335\5u;\2\u0335\u00a8\3\2\2\2\u0336\u0337"+ "\t\n\2\2\u0337\u00aa\3\2\2\2\u0338\u0339\t\13\2\2\u0339\u00ac\3\2\2\2"+ "\u033a\u033b\5\u00afX\2\u033b\u033d\5\u00b1Y\2\u033c\u033e\5\u00abV\2"+ "\u033d\u033c\3\2\2\2\u033d\u033e\3\2\2\2\u033e\u00ae\3\2\2\2\u033f\u0341"+ "\5\u0081A\2\u0340\u0342\7\60\2\2\u0341\u0340\3\2\2\2\u0341\u0342\3\2\2"+ "\2\u0342\u034b\3\2\2\2\u0343\u0344\7\62\2\2\u0344\u0346\t\4\2\2\u0345"+ "\u0347\5\u0083B\2\u0346\u0345\3\2\2\2\u0346\u0347\3\2\2\2\u0347\u0348"+ "\3\2\2\2\u0348\u0349\7\60\2\2\u0349\u034b\5\u0083B\2\u034a\u033f\3\2\2"+ "\2\u034a\u0343\3\2\2\2\u034b\u00b0\3\2\2\2\u034c\u034d\5\u00b3Z\2\u034d"+ 
"\u034e\5\u00a7T\2\u034e\u00b2\3\2\2\2\u034f\u0350\t\f\2\2\u0350\u00b4"+ "\3\2\2\2\u0351\u0352\7v\2\2\u0352\u0353\7t\2\2\u0353\u0354\7w\2\2\u0354"+ "\u035b\7g\2\2\u0355\u0356\7h\2\2\u0356\u0357\7c\2\2\u0357\u0358\7n\2\2"+ "\u0358\u0359\7u\2\2\u0359\u035b\7g\2\2\u035a\u0351\3\2\2\2\u035a\u0355"+ "\3\2\2\2\u035b\u00b6\3\2\2\2\u035c\u035d\7)\2\2\u035d\u035e\5\u00b9]\2"+ "\u035e\u035f\7)\2\2\u035f\u0365\3\2\2\2\u0360\u0361\7)\2\2\u0361\u0362"+ "\5\u00c1a\2\u0362\u0363\7)\2\2\u0363\u0365\3\2\2\2\u0364\u035c\3\2\2\2"+ "\u0364\u0360\3\2\2\2\u0365\u00b8\3\2\2\2\u0366\u0367\n\r\2\2\u0367\u00ba"+ "\3\2\2\2\u0368\u036a\7$\2\2\u0369\u036b\5\u00bd_\2\u036a\u0369\3\2\2\2"+ "\u036a\u036b\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036d\7$\2\2\u036d\u00bc"+ "\3\2\2\2\u036e\u0370\5\u00bf`\2\u036f\u036e\3\2\2\2\u0370\u0371\3\2\2"+ "\2\u0371\u036f\3\2\2\2\u0371\u0372\3\2\2\2\u0372\u00be\3\2\2\2\u0373\u0376"+ "\n\16\2\2\u0374\u0376\5\u00c1a\2\u0375\u0373\3\2\2\2\u0375\u0374\3\2\2"+ "\2\u0376\u00c0\3\2\2\2\u0377\u0378\7^\2\2\u0378\u037c\t\17\2\2\u0379\u037c"+ "\5\u00c3b\2\u037a\u037c\5\u00c7d\2\u037b\u0377\3\2\2\2\u037b\u0379\3\2"+ "\2\2\u037b\u037a\3\2\2\2\u037c\u00c2\3\2\2\2\u037d\u037e\7^\2\2\u037e"+ "\u0389\5\u008fH\2\u037f\u0380\7^\2\2\u0380\u0381\5\u008fH\2\u0381\u0382"+ "\5\u008fH\2\u0382\u0389\3\2\2\2\u0383\u0384\7^\2\2\u0384\u0385\5\u00c5"+ "c\2\u0385\u0386\5\u008fH\2\u0386\u0387\5\u008fH\2\u0387\u0389\3\2\2\2"+ "\u0388\u037d\3\2\2\2\u0388\u037f\3\2\2\2\u0388\u0383\3\2\2\2\u0389\u00c4"+ "\3\2\2\2\u038a\u038b\t\20\2\2\u038b\u00c6\3\2\2\2\u038c\u038d\7^\2\2\u038d"+ "\u038e\7w\2\2\u038e\u038f\5\u0085C\2\u038f\u0390\5\u0085C\2\u0390\u0391"+ "\5\u0085C\2\u0391\u0392\5\u0085C\2\u0392\u00c8\3\2\2\2\u0393\u0394\7p"+ "\2\2\u0394\u0395\7w\2\2\u0395\u0396\7n\2\2\u0396\u0397\7n\2\2\u0397\u00ca"+ "\3\2\2\2\u0398\u0399\7*\2\2\u0399\u00cc\3\2\2\2\u039a\u039b\7+\2\2\u039b"+ "\u00ce\3\2\2\2\u039c\u039d\7}\2\2\u039d\u00d0\3\2\2\2\u039e\u039f\7\177"+ 
"\2\2\u039f\u00d2\3\2\2\2\u03a0\u03a1\7]\2\2\u03a1\u00d4\3\2\2\2\u03a2"+ "\u03a3\7_\2\2\u03a3\u00d6\3\2\2\2\u03a4\u03a5\7=\2\2\u03a5\u00d8\3\2\2"+ "\2\u03a6\u03a7\7.\2\2\u03a7\u00da\3\2\2\2\u03a8\u03a9\7\60\2\2\u03a9\u00dc"+ "\3\2\2\2\u03aa\u03ab\7?\2\2\u03ab\u00de\3\2\2\2\u03ac\u03ad\7@\2\2\u03ad"+ "\u00e0\3\2\2\2\u03ae\u03af\7>\2\2\u03af\u00e2\3\2\2\2\u03b0\u03b1\7#\2"+ "\2\u03b1\u00e4\3\2\2\2\u03b2\u03b3\7\u0080\2\2\u03b3\u00e6\3\2\2\2\u03b4"+ "\u03b5\7A\2\2\u03b5\u00e8\3\2\2\2\u03b6\u03b7\7<\2\2\u03b7\u00ea\3\2\2"+ "\2\u03b8\u03b9\7?\2\2\u03b9\u03ba\7?\2\2\u03ba\u00ec\3\2\2\2\u03bb\u03bc"+ "\7>\2\2\u03bc\u03bd\7?\2\2\u03bd\u00ee\3\2\2\2\u03be\u03bf\7@\2\2\u03bf"+ "\u03c0\7?\2\2\u03c0\u00f0\3\2\2\2\u03c1\u03c2\7#\2\2\u03c2\u03c3\7?\2"+ "\2\u03c3\u00f2\3\2\2\2\u03c4\u03c5\7(\2\2\u03c5\u03c6\7(\2\2\u03c6\u00f4"+ "\3\2\2\2\u03c7\u03c8\7~\2\2\u03c8\u03c9\7~\2\2\u03c9\u00f6\3\2\2\2\u03ca"+ "\u03cb\7-\2\2\u03cb\u03cc\7-\2\2\u03cc\u00f8\3\2\2\2\u03cd\u03ce\7/\2"+ "\2\u03ce\u03cf\7/\2\2\u03cf\u00fa\3\2\2\2\u03d0\u03d1\7-\2\2\u03d1\u00fc"+ "\3\2\2\2\u03d2\u03d3\7/\2\2\u03d3\u00fe\3\2\2\2\u03d4\u03d5\7,\2\2\u03d5"+ "\u0100\3\2\2\2\u03d6\u03d7\7\61\2\2\u03d7\u0102\3\2\2\2\u03d8\u03d9\7"+ "(\2\2\u03d9\u0104\3\2\2\2\u03da\u03db\7~\2\2\u03db\u0106\3\2\2\2\u03dc"+ "\u03dd\7`\2\2\u03dd\u0108\3\2\2\2\u03de\u03df\7\'\2\2\u03df\u010a\3\2"+ "\2\2\u03e0\u03e1\7/\2\2\u03e1\u03e2\7@\2\2\u03e2\u010c\3\2\2\2\u03e3\u03e4"+ "\7<\2\2\u03e4\u03e5\7<\2\2\u03e5\u010e\3\2\2\2\u03e6\u03e7\7-\2\2\u03e7"+ "\u03e8\7?\2\2\u03e8\u0110\3\2\2\2\u03e9\u03ea\7/\2\2\u03ea\u03eb\7?\2"+ "\2\u03eb\u0112\3\2\2\2\u03ec\u03ed\7,\2\2\u03ed\u03ee\7?\2\2\u03ee\u0114"+ "\3\2\2\2\u03ef\u03f0\7\61\2\2\u03f0\u03f1\7?\2\2\u03f1\u0116\3\2\2\2\u03f2"+ "\u03f3\7(\2\2\u03f3\u03f4\7?\2\2\u03f4\u0118\3\2\2\2\u03f5\u03f6\7~\2"+ "\2\u03f6\u03f7\7?\2\2\u03f7\u011a\3\2\2\2\u03f8\u03f9\7`\2\2\u03f9\u03fa"+ "\7?\2\2\u03fa\u011c\3\2\2\2\u03fb\u03fc\7\'\2\2\u03fc\u03fd\7?\2\2\u03fd"+ 
"\u011e\3\2\2\2\u03fe\u03ff\7>\2\2\u03ff\u0400\7>\2\2\u0400\u0401\7?\2"+ "\2\u0401\u0120\3\2\2\2\u0402\u0403\7@\2\2\u0403\u0404\7@\2\2\u0404\u0405"+ "\7?\2\2\u0405\u0122\3\2\2\2\u0406\u0407\7@\2\2\u0407\u0408\7@\2\2\u0408"+ "\u0409\7@\2\2\u0409\u040a\7?\2\2\u040a\u0124\3\2\2\2\u040b\u040f\5\u0127"+ "\u0094\2\u040c\u040e\5\u0129\u0095\2\u040d\u040c\3\2\2\2\u040e\u0411\3"+ "\2\2\2\u040f\u040d\3\2\2\2\u040f\u0410\3\2\2\2\u0410\u0126\3\2\2\2\u0411"+ "\u040f\3\2\2\2\u0412\u0419\t\21\2\2\u0413\u0414\n\22\2\2\u0414\u0419\6"+ "\u0094\2\2\u0415\u0416\t\23\2\2\u0416\u0417\t\24\2\2\u0417\u0419\6\u0094"+ "\3\2\u0418\u0412\3\2\2\2\u0418\u0413\3\2\2\2\u0418\u0415\3\2\2\2\u0419"+ "\u0128\3\2\2\2\u041a\u0421\t\25\2\2\u041b\u041c\n\22\2\2\u041c\u0421\6"+ "\u0095\4\2\u041d\u041e\t\23\2\2\u041e\u041f\t\24\2\2\u041f\u0421\6\u0095"+ "\5\2\u0420\u041a\3\2\2\2\u0420\u041b\3\2\2\2\u0420\u041d\3\2\2\2\u0421"+ "\u012a\3\2\2\2\u0422\u0423\7B\2\2\u0423\u012c\3\2\2\2\u0424\u0425\7\60"+ "\2\2\u0425\u0426\7\60\2\2\u0426\u0427\7\60\2\2\u0427\u012e\3\2\2\2\u0428"+ "\u042a\t\26\2\2\u0429\u0428\3\2\2\2\u042a\u042b\3\2\2\2\u042b\u0429\3"+ "\2\2\2\u042b\u042c\3\2\2\2\u042c\u042d\3\2\2\2\u042d\u042e\b\u0098\2\2"+ "\u042e\u0130\3\2\2\2\u042f\u0430\7\61\2\2\u0430\u0431\7,\2\2\u0431\u0435"+ "\3\2\2\2\u0432\u0434\13\2\2\2\u0433\u0432\3\2\2\2\u0434\u0437\3\2\2\2"+ "\u0435\u0436\3\2\2\2\u0435\u0433\3\2\2\2\u0436\u0438\3\2\2\2\u0437\u0435"+ "\3\2\2\2\u0438\u0439\7,\2\2\u0439\u043a\7\61\2\2\u043a\u043b\3\2\2\2\u043b"+ "\u043c\b\u0099\2\2\u043c\u0132\3\2\2\2\u043d\u043e\7\61\2\2\u043e\u043f"+ "\7\61\2\2\u043f\u0443\3\2\2\2\u0440\u0442\n\27\2\2\u0441\u0440\3\2\2\2"+ "\u0442\u0445\3\2\2\2\u0443\u0441\3\2\2\2\u0443\u0444\3\2\2\2\u0444\u0446"+ "\3\2\2\2\u0445\u0443\3\2\2\2\u0446\u0447\b\u009a\2\2\u0447\u0134\3\2\2"+ "\28\2\u028c\u0290\u0294\u0298\u029c\u02a3\u02a8\u02aa\u02ae\u02b1\u02b5"+ "\u02bc\u02c0\u02c5\u02cd\u02d0\u02d7\u02db\u02df\u02e5\u02e8\u02ef\u02f3"+ 
"\u02fb\u02fe\u0305\u0309\u030d\u0312\u0315\u0318\u031d\u0320\u0325\u032a"+ "\u0332\u033d\u0341\u0346\u034a\u035a\u0364\u036a\u0371\u0375\u037b\u0388"+ "\u040f\u0418\u0420\u042b\u0435\u0443\3\b\2\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); } } }
/* * Copyright (c) 2015 Celestibytes * * Maintainer: Okkapel * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package celestibytes.miscutils.util; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Iterator; import java.util.LinkedList; import java.util.List; /** Binary I/O Utils */ public class Bytes { public static boolean bo(byte[] data) { if(data[1] == 0) { return false; } return true; } public static byte by(byte[] data) { return data[1]; } // Always Little Endian public static int i(byte[] data) { int ret = 0; ret |= data[1]; ret |= data[2] << 8; ret |= data[3] << 16; ret |= data[4] << 24; return ret; } // Always Little Endian public static short sh(byte[] data) { short ret = 0; ret |= data[1]; ret |= data[2] << 8; return ret; } // Always Little Endian public static long l(byte[] data) { long ret = 0; ret |= data[1]; ret |= data[2] << 8; ret |= data[3] << 16; ret |= data[4] << 24; ret |= data[5] << 32; ret |= data[6] << 40; ret |= data[7] << 48; ret |= data[8] << 56; return ret; } // Always Little Endian public static float f(byte[] data) { int ret = 0; ret |= data[1]; ret |= data[2] << 8; ret |= data[3] << 16; ret |= data[4] << 24; return Float.intBitsToFloat(ret); } // Always Little Endian public static double d(byte[] data) { long ret = 0; ret |= data[1]; ret |= data[2] << 8; ret |= data[3] << 16; ret |= data[4] << 24; ret |= data[5] << 32; ret |= data[6] << 40; ret |= data[7] << 48; ret |= data[8] << 56; return Double.longBitsToDouble(ret); } public static String st(byte[] data) { if(data.length == 1) { return null; } StringBuilder sb = new StringBuilder(); for(int i=0;i<data.length;i++) { sb.append(data[i]); } return sb.toString(); } /** Validate byte */ public static boolean vby(byte[] data) { return data.length == 2 && data[0] == 'b'; } /** Validate boolean */ public static boolean vbo(byte[] data) { return data.length == 2 && data[0] == 'z'; } /** Validate float */ public static boolean vf(byte[] data) { return data.length == 5 && data[0] == 'f'; } /** Validate string */ public static boolean vst(byte[] 
data) { return data.length > 0 && data[0] == 'x'; } /** Validate double */ public static boolean vd(byte[] data) { return data.length == 9 && data[0] == 'd'; } /** Validate short */ public static boolean vsh(byte[] data) { return data.length == 3 && data[0] == 's'; } /** Validate int */ public static boolean vi(byte[] data) { return data.length == 5 && data[0] == 'i'; } /** Validate long */ public static boolean vl(byte[] data) { return data.length == 9 && data[0] == 'l'; } /** Write byte with type indicator */ public static void wtby(byte v, OutputStream os) throws IOException { os.write('b'); os.write(v); } /** Write boolean with type indicator */ public static void wtbo(boolean v, OutputStream os) throws IOException { os.write('z'); os.write(v ? 0x61 : 0); } /** Write short with type indicator */ public static void wtsh(short v, OutputStream os) throws IOException { os.write('s'); os.write(v); os.write(v >> 8); } /** Write int with type indicator */ public static void wti(int v, OutputStream os) throws IOException { os.write('i'); os.write(v); os.write(v >> 8); os.write(v >> 16); os.write(v >> 24); } /** Write long with type indicator */ public static void wtl(long v, OutputStream os) throws IOException { os.write('l'); os.write((int)(v & 0x255)); os.write((int)((v >> 8) & 0x255)); os.write((int)((v >> 16) & 0x255)); os.write((int)((v >> 24) & 0x255)); os.write((int)((v >> 32) & 0x255)); os.write((int)((v >> 40) & 0x255)); os.write((int)((v >> 48) & 0x255)); os.write((int)((v >> 56) & 0x255)); } /** Write float with type indicator */ public static void wtf(float vv, OutputStream os) throws IOException { os.write('i'); int v = Float.floatToIntBits(vv); os.write(v); os.write(v >> 8); os.write(v >> 16); os.write(v >> 24); } /** Write double with type indicator */ public static void wtd(double vv, OutputStream os) throws IOException { os.write('d'); long v = Double.doubleToLongBits(vv); os.write((int)(v & 0x255)); os.write((int)((v >> 8) & 0x255)); os.write((int)((v 
>> 16) & 0x255)); os.write((int)((v >> 24) & 0x255)); os.write((int)((v >> 32) & 0x255)); os.write((int)((v >> 40) & 0x255)); os.write((int)((v >> 48) & 0x255)); os.write((int)((v >> 56) & 0x255)); } /** Write string with type indicator, null-terminated, any nulls in the string will be ignored! */ public static void wtst(String v, OutputStream os) throws IOException { os.write('x'); char c; for(int i = 0; i < v.length(); i++) { c = v.charAt(i); if(c != '\0') { os.write(c); // Note; all values above 127 are ignored } } os.write('\0'); } /** Write byte */ public static void wby(byte v, OutputStream os) throws IOException { os.write(v); } /** Write boolean */ public static void wbo(boolean v, OutputStream os) throws IOException { os.write(v ? 0x61 : 0); } /** Write short */ public static void wsh(short v, OutputStream os) throws IOException { os.write(v); os.write(v >> 8); } /** Write int */ public static void wi(int v, OutputStream os) throws IOException { os.write(v); os.write(v >> 8); os.write(v >> 16); os.write(v >> 24); } /** Write long */ public static void wl(long v, OutputStream os) throws IOException { os.write((int)(v & 0x255)); os.write((int)((v >> 8) & 0x255)); os.write((int)((v >> 16) & 0x255)); os.write((int)((v >> 24) & 0x255)); os.write((int)((v >> 32) & 0x255)); os.write((int)((v >> 40) & 0x255)); os.write((int)((v >> 48) & 0x255)); os.write((int)((v >> 56) & 0x255)); } /** Write float */ public static void wf(float vv, OutputStream os) throws IOException { int v = Float.floatToIntBits(vv); os.write(v); os.write(v >> 8); os.write(v >> 16); os.write(v >> 24); } /** Write double */ public static void wd(double vv, OutputStream os) throws IOException { long v = Double.doubleToLongBits(vv); os.write((int)(v & 0x255)); os.write((int)((v >> 8) & 0x255)); os.write((int)((v >> 16) & 0x255)); os.write((int)((v >> 24) & 0x255)); os.write((int)((v >> 32) & 0x255)); os.write((int)((v >> 40) & 0x255)); os.write((int)((v >> 48) & 0x255)); os.write((int)((v >> 56) & 
0x255)); } /** Write string, null-terminated, any nulls in the string will be ignored! */ public static void wst(String v, OutputStream os) throws IOException { char c; for(int i = 0; i < v.length(); i++) { c = v.charAt(i); if(c != '\0') { os.write(c); // Note; all values above 127 are ignored } } os.write('\0'); } /** Read byte with type indicator, returns null if end of stream has been reached */ public static byte[] read(InputStream is) throws IOException { int buf = is.read(); int toread = 0; if(buf == -1) { return null; } if(buf == 'b') { toread = 1; } else if(buf == 'z') { // boolean toread = 1; } else if(buf == 's') { // short toread = 2; } else if(buf == 'i') { // int toread = 4; } else if(buf == 'l') { // long toread = 8; } else if(buf == 'f') { // float toread = 4; } else if(buf == 'd') { // double toread = 8; } else if(buf == 'x') { // string List<byte[]> bytes = new LinkedList<byte[]>(); int count = 0; byte[] byt = new byte[8]; int pos = 0; while(buf != '\0') { if(pos > 7) { bytes.add(byt); count++; byt = new byte[8]; pos = 0; } byt[pos] = (byte)buf; pos++; buf = is.read(); if(buf == -1) { return null; } } byte[] ret = new byte[count * 8 + pos - 1]; System.arraycopy(byt, 0, ret, count * 8, pos - 1); Iterator<byte[]> bite = bytes.iterator(); int i = 0; while(bite.hasNext()) { byt = bite.next(); System.arraycopy(byt, 0, ret, i, 8); i += 8; } return ret; } else { // other? 
return null; } byte[] ret = new byte[toread + 1]; ret[0] = (byte) buf; for(int i = 0; i < toread; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i + 1] = (byte) buf; } return ret; } /** Read byte */ public static byte[] rby(InputStream is) throws IOException { byte[] ret = new byte[2]; ret[0] = 'b'; int buf; for(int i = 0; i < 1; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read char */ public static byte[] rc(InputStream is) throws IOException { byte[] ret = new byte[2]; ret[0] = 'c'; int buf; for(int i = 0; i < 1; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read boolean */ public static byte[] rbo(InputStream is) throws IOException { byte[] ret = new byte[2]; ret[0] = 'z'; int buf; for(int i = 0; i < 1; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read short */ public static byte[] rsh(InputStream is) throws IOException { byte[] ret = new byte[3]; ret[0] = 's'; int buf; for(int i = 0; i < 2; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read int */ public static byte[] ri(InputStream is) throws IOException { byte[] ret = new byte[5]; ret[0] = 'i'; int buf; for(int i = 0; i < 4; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read long */ public static byte[] rl(InputStream is) throws IOException { byte[] ret = new byte[9]; ret[0] = 'l'; int buf; for(int i = 0; i < 8; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read float */ public static byte[] rf(InputStream is) throws IOException { byte[] ret = new byte[5]; ret[0] = 'f'; int buf; for(int i = 0; i < 4; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read double */ public static byte[] rd(InputStream is) throws IOException { byte[] ret = new 
byte[9]; ret[0] = 'd'; int buf; for(int i = 0; i < 8; i++) { buf = is.read(); if(buf == -1) { return null; } ret[i+1] = (byte) buf; } return ret; } /** Read string */ public static byte[] rst(InputStream is) throws IOException { List<byte[]> bytes = new LinkedList<byte[]>(); int count = 0; byte[] byt = new byte[8]; byt[0] = 'x'; int pos = 1; int buf = 'A'; while(buf != '\0') { if(pos > 7) { bytes.add(byt); count++; byt = new byte[8]; pos = 0; } buf = is.read(); if(buf == -1) { return null; } byt[pos] = (byte)buf; pos++; } byte[] ret = new byte[count * 8 + pos - 1]; System.arraycopy(byt, 0, ret, count * 8, pos - 1); Iterator<byte[]> bite = bytes.iterator(); int i = 0; while(bite.hasNext()) { byt = bite.next(); System.arraycopy(byt, 0, ret, i, 8); i += 8; } return ret; } }
/* * Copyright 2016 Erik Wramner. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package name.wramner.jmstools; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.List; import java.util.Properties; import java.util.concurrent.TimeUnit; import javax.jms.JMSException; import org.kohsuke.args4j.CmdLineException; import org.kohsuke.args4j.CmdLineParser; import com.atomikos.icatch.config.UserTransactionService; import com.atomikos.icatch.config.UserTransactionServiceImp; /** * Base class for JMS producers and consumers with support for command line parsing and thread creation/joining. It also * initializes and stops the transaction manager for XA transactions if they are enabled. * * @author Erik Wramner * @param <T> configuration class. */ public abstract class JmsClient<T extends JmsClientConfiguration> { private UserTransactionService _userTransactionService; /** * Parse command line arguments and initialize configuration, create threads with workers, start the threads and * wait for completion. Exit on errors. * * @param args The command line. 
*/ public void run(String[] args) { T config = createConfiguration(); if (parseCommandLine(args, config)) { try { if (config.useXa()) { _userTransactionService = new UserTransactionServiceImp(); _userTransactionService.init(createAtomikosInitializationProperties(config)); } List<Thread> threads = createThreadsWithWorkers(config); startThreads(threads); waitForThreadsToComplete(threads); } catch (Exception e) { System.out.println("Failed with exception: " + e.getMessage()); e.printStackTrace(System.out); } finally { if (_userTransactionService != null) { int maxWaitSeconds = config.getJtaTimeoutSeconds() + 10; _userTransactionService.shutdown(TimeUnit.MILLISECONDS.convert(maxWaitSeconds, TimeUnit.SECONDS)); } } System.exit(0); } } /** * Create initialization properties for the Atomikos transaction manager. * * @param config The configuration. * @return initialization properties. * @throws UnknownHostException on failure to resolve local host. */ protected Properties createAtomikosInitializationProperties(T config) throws UnknownHostException { Properties props = new Properties(); if (config.isTmLogDisabled()) { props.setProperty("com.atomikos.icatch.enable_logging", "false"); } else { props.setProperty("com.atomikos.icatch.checkpoint_interval", String.valueOf( TimeUnit.MILLISECONDS.convert(config.getCheckpointIntervalSeconds(), TimeUnit.SECONDS))); props.setProperty("com.atomikos.icatch.recovery_delay", String.valueOf( TimeUnit.MILLISECONDS.convert(config.getRecoveryIntervalSeconds(), TimeUnit.SECONDS))); if (config.getXaLogBaseDir() != null) { props.setProperty("com.atomikos.icatch.log_base_dir", config.getXaLogBaseDir().getAbsolutePath()); } } props.setProperty("com.atomikos.icatch.automatic_resource_registration", "true"); props.setProperty("com.atomikos.icatch.max_actives", String.valueOf(config.getThreads() + 1)); String jtaTimeoutMillis = String .valueOf(TimeUnit.MILLISECONDS.convert(config.getJtaTimeoutSeconds(), TimeUnit.SECONDS)); 
props.setProperty("com.atomikos.icatch.max_timeout", jtaTimeoutMillis); props.setProperty("com.atomikos.icatch.default_jta_timeout", jtaTimeoutMillis); props.setProperty("com.atomikos.icatch.tm_unique_name", config.getTmName() != null ? config.getTmName() : createTmName()); return props; } /** * Build a reasonably unique transaction manager name. * * @return name. * @throws UnknownHostException on failure to find IP address for local host. */ protected String createTmName() throws UnknownHostException { return getClass().getSimpleName() + "-" + InetAddress.getLocalHost().getHostAddress(); } /** * Create configuration. Sub-classes should create provider-specific configuration classes. * * @return configuration. */ protected abstract T createConfiguration(); /** * Create threads with workers. * * @param config The initialized configuration. * @return list with threads, not started. * @throws JMSException on JMS errors. */ protected abstract List<Thread> createThreadsWithWorkers(T config) throws JMSException; /** * Parse the command line into the specified configuration. * * @param args The command line. * @param config The configuration class. * @return true if successful, false on errors such as missing arguments. */ protected boolean parseCommandLine(String[] args, T config) { CmdLineParser parser = new CmdLineParser(config); String validationError = null; try { parser.parseArgument(args); } catch (CmdLineException e) { validationError = e.getMessage(); } if (config.isPrintVersionRequested()) { printVersion(); } if (validationError != null || config.isHelpRequested()) { printUsage(parser); if (!config.isHelpRequested()) { System.out.println("Error: " + validationError); } return false; } return isConfigurationValid(config); } /** * Print program name and version. */ private void printVersion() { String version = getClass().getPackage().getImplementationVersion(); System.out.println(getClass().getSimpleName() + " " + (version != null ? 
version : "(unknown version)")); } /** * Check if configuration is valid and print error messages if not. * * @param config The configuration. * @return true if valid, false to abort. */ protected boolean isConfigurationValid(T config) { return true; } /** * Wait for all threads to complete, exit if interrupted. * * @param threads The list with threads. */ protected void waitForThreadsToComplete(List<Thread> threads) { for (Thread t : threads) { try { t.join(); } catch (InterruptedException e) { System.err.println("*** Interrupted - killing remaining threads!"); System.exit(0); } } } /** * Start all threads. * * @param threads The list with threads to start. */ protected void startThreads(List<Thread> threads) { threads.forEach(t -> t.start()); } /** * Print usage. All supported options are listed. * * @param parser The parser. */ protected void printUsage(CmdLineParser parser) { System.out.println("Usage: java " + getClass().getName() + " [options]"); System.out.println(); System.out.println("Where the options are:"); parser.printUsage(System.out); System.out.println(); } }
package cucumber.runtime;

import java.io.IOException;
import java.util.*;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cucumber.api.StepDefinitionReporter;
import cucumber.runtime.io.ResourceLoader;
import cucumber.runtime.java.JavaBackend;
import cucumber.runtime.java.ParameterizedJavaStepDefinition;
import cucumber.runtime.java.picocontainer.PicoFactory;
import cucumber.runtime.model.*;
import gherkin.formatter.Formatter;
import gherkin.formatter.Reporter;
import gherkin.formatter.model.Scenario;
import gherkin.formatter.model.Step;
import gherkin.formatter.model.Tag;
import gherkin.formatter.model.TagStatement;
import se.redmind.rmtest.cucumber.utils.Tags;
import se.redmind.utils.Fields;
import se.redmind.utils.Methods;

/**
 * A cucumber {@link Runtime} that adds support for "parameterized" scenarios:
 * scenarios tagged as parameterized are removed from the normal run, turned into
 * step-definition factories keyed by a regex {@link Pattern}, and their steps are
 * injected (inlined or wrapped, depending on the composition type) wherever a
 * regular step matches the pattern.
 *
 * <p>Instances created with a {@code name}/{@code parameters} pair additionally
 * rename the remaining scenarios (appending {@code name}) and register the given
 * parameter objects into the pico container before each scenario world is built.
 *
 * <p>NOTE(review): this class reaches into cucumber/gherkin internals via
 * reflection ({@code Fields.set}/{@code Fields.getValue}), so it is tightly
 * coupled to the cucumber 1.x model classes it manipulates.
 *
 * @author Jeremy Comte
 */
public class ParameterizableRuntime extends Runtime {

    /**
     * How parameterized steps are composed into the host scenario:
     * {@code replace} inlines the sub-scenario's steps in place of the matching step,
     * {@code full} keeps the matching step as a start marker and brackets the injected
     * steps with start/end markers.
     */
    public enum CompositionType {
        replace, full
    }

    private final Logger logger = LoggerFactory.getLogger(this.getClass());
    private final RuntimeOptions runtimeOptions;
    private final ClassLoader classLoader;
    private final ResourceLoader resourceLoader;
    // Optional suffix appended to every non-parameterized scenario name (null when unused).
    private final String name;
    // Instances registered into the pico container before each scenario (empty when unused).
    private final Object[] parameters;
    // Lazily resolved from the JavaBackend via reflection; see picoFactory().
    private PicoFactory picoFactory;

    public ParameterizableRuntime(ResourceLoader resourceLoader, ClassFinder classFinder, ClassLoader classLoader, RuntimeOptions runtimeOptions) {
        this(resourceLoader, classFinder, classLoader, runtimeOptions, null, new Object[0]);
    }

    public ParameterizableRuntime(ResourceLoader resourceLoader, ClassFinder classFinder, ClassLoader classLoader, RuntimeOptions runtimeOptions,
                                  String name, Object[] parameters) {
        super(resourceLoader, classFinder, classLoader, runtimeOptions);
        this.runtimeOptions = runtimeOptions;
        this.classLoader = classLoader;
        this.resourceLoader = resourceLoader;
        this.name = name;
        this.parameters = parameters;
    }

    /**
     * Runs the features returned by {@link #cucumberFeatures()} (which performs the
     * parameterized-scenario extraction/injection) through the configured
     * formatter/reporter, then prints the summary.
     *
     * @throws IOException propagated from feature loading.
     */
    @Override
    public void run() throws IOException {
        // Make sure all features parse before initialising any reporters/formatters
        List<CucumberFeature> features = cucumberFeatures();
        try (Formatter formatter = runtimeOptions.formatter(classLoader)) {
            Reporter reporter = runtimeOptions.reporter(classLoader);
            StepDefinitionReporter stepDefinitionReporter = runtimeOptions.stepDefinitionReporter(classLoader);
            getGlue().reportStepDefinitions(stepDefinitionReporter);
            features.forEach(cucumberFeature -> cucumberFeature.run(formatter, reporter, this));
            formatter.done();
        }
        printSummary();
    }

    /** @return the reporter configured in the runtime options. */
    public Reporter reporter() {
        return runtimeOptions.reporter(classLoader);
    }

    /** @return the formatter configured in the runtime options. */
    public Formatter formatter() {
        return runtimeOptions.formatter(classLoader);
    }

    /**
     * Loads and prepares the features to run.
     *
     * <p>Unlike default cucumber — which lets the {@code cucumber.filters} system
     * property override the {@code @CucumberOptions} filters — this appends the
     * property's filters to the existing ones. Parameterized scenarios are loaded
     * from the classpath, extracted into factories, and injected into the regular
     * features.
     *
     * @return the prepared feature list.
     */
    public List<CucumberFeature> cucumberFeatures() {
        // default cucumber overrides the filters given in the @CucumberOptions annotation
        // using the cucumber.filters System.property, one can extend the filters instead of overriding it.
        // Tokens come in ("--tags"|"--name", value) pairs; unknown types are silently ignored.
        List<String> extraFilters = Shellwords.parse(System.getProperty("cucumber.filters", ""));
        for (int i = 0; i < extraFilters.size(); i += 2) {
            String type = extraFilters.get(i).trim();
            switch (type) {
                case "--tags":
                    runtimeOptions.getFilters().add(extraFilters.get(i + 1).trim());
                    break;
                case "--name":
                    // Name filters are regex Patterns in cucumber's filter list.
                    runtimeOptions.getFilters().add(Pattern.compile(extraFilters.get(i + 1).trim()));
                    break;
            }
        }
        // A String filter containing '@' is a tag expression.
        boolean hasTags = runtimeOptions.getFilters().stream()
            .filter(filter -> filter instanceof String)
            .map(filter -> (String) filter)
            .filter(filter -> filter.contains("@"))
            .findFirst().map(filter -> true).orElse(false);
        // if we work with tags or if there is no filter we want to add the ~@parameterized and ~@ignore filters
        if (hasTags || runtimeOptions.getFilters().isEmpty()) {
            runtimeOptions.getFilters().add("~" + Tags.IGNORE);
            runtimeOptions.getFilters().add("~" + Tags.PARAMETERIZED);
        }
        List<CucumberFeature> cucumberFeatures = new ArrayList<>();
        // let's look for the @parameterized scenarios and add them
        cucumberFeatures.addAll(CucumberFeature.load(resourceLoader, Lists.newArrayList("classpath:"),
            Lists.newArrayList(Tags.PARAMETERIZED, "~" + Tags.IGNORE)));
        // the default features
        cucumberFeatures.addAll(runtimeOptions.cucumberFeatures(resourceLoader));
        // 1. Get the children from the parent class, intercept any parameterized scenario and instantiate their factories
        Map<Pattern, ParameterizedJavaStepDefinition.Factory> parameterizedScenarios = getParameterizedScenarios(cucumberFeatures);
        // 2. Iterate over all the normal steps, and if the scenario is not quiet, rewrite and add the parameterized steps as normal steps.
        if (!cucumberFeatures.isEmpty() && !parameterizedScenarios.isEmpty()) {
            inject(parameterizedScenarios, cucumberFeatures);
        }
        return cucumberFeatures;
    }

    /**
     * Registers the constructor-supplied parameter objects into the pico container
     * before delegating to the default world construction.
     */
    @Override
    public void buildBackendWorlds(Reporter reporter, Set<Tag> tags, Scenario gherkinScenario) {
        for (Object parameter : parameters) {
            picoFactory().addInstance(parameter);
        }
        super.buildBackendWorlds(reporter, tags, gherkinScenario);
    }

    /**
     * Extracts every parameterized statement from {@code features} into a step
     * factory, keyed by the factory's matching {@link Pattern}.
     *
     * <p>Side effects: parameterized statements are removed from their feature
     * (and emptied features from the list) in place; when {@link #name} is set,
     * the remaining scenarios are renamed (via reflection) by appending it.
     * Registered scenarios are logged in a table-like layout.
     *
     * @param features the loaded features; mutated in place.
     * @return factories in encounter order ({@link LinkedHashMap}).
     */
    public Map<Pattern, ParameterizedJavaStepDefinition.Factory> getParameterizedScenarios(List<CucumberFeature> features) {
        Map<Pattern, ParameterizedJavaStepDefinition.Factory> parameterizedScenarios = new LinkedHashMap<>();
        for (int i = 0; i < features.size(); i++) {
            CucumberFeature feature = features.get(i);
            List<CucumberTagStatement> statements = feature.getFeatureElements();
            for (int j = 0; j < statements.size(); j++) {
                CucumberTagStatement statement = statements.get(j);
                if (Tags.isParameterized(statement)) {
                    ParameterizedJavaStepDefinition.Factory stepFactory = ParameterizedJavaStepDefinition.from(statement, this);
                    parameterizedScenarios.put(stepFactory.pattern(), stepFactory);
                    // Remove in place and step the index back so the next element isn't skipped.
                    statements.remove(j--);
                } else if (name != null) {
                    // Reflectively rename the scenario since the gherkin model has no setter.
                    TagStatement tagStatement = statement.getGherkinModel();
                    Fields.set(tagStatement, "name", tagStatement.getName() + " " + name);
                }
            }
            if (statements.isEmpty()) {
                // Same remove-and-step-back trick at the feature level.
                features.remove(i--);
            }
        }
        if (!features.isEmpty() && !parameterizedScenarios.isEmpty()) {
            StringBuilder stringBuilder = new StringBuilder();
            // Longest visual name, used to pad the "# path:line" column into alignment.
            int maxLength = parameterizedScenarios.values().stream().map(f -> f.statement().getVisualName().length()).max(Integer::compareTo).orElse(0);
            parameterizedScenarios.values().forEach(factory -> {
                // "cucumberFeature" and "path" are private fields read via reflection.
                CucumberFeature cucumberFeature = Fields.getValue(factory.statement(), "cucumberFeature");
                String path = Fields.getValue(cucumberFeature, "path");
                String visualName = factory.statement().getVisualName().replaceAll("Scenario:", "");
                stringBuilder.append("\n ").append(visualName);
                // NOTE(review): maxLength is measured before the "Scenario:" prefix is stripped;
                // the -5 offset presumably compensates for that — confirm the alignment is intended.
                for (int i = 0; i < maxLength - visualName.length() - 5; i++) {
                    stringBuilder.append(" ");
                }
                stringBuilder.append("# ").append(path).append(":").append(factory.statement().getGherkinModel().getLine());
            });
            logger.info("\nregistering parameterized scenarios:" + stringBuilder.toString() + "\n");
        }
        return parameterizedScenarios;
    }

    /**
     * Rewrites steps that match a parameterized scenario's pattern by injecting
     * that scenario's steps, with the captured groups bound to its parameters.
     *
     * <p>Composition is controlled by the {@code cucumber.compositionType} system
     * property (default {@code replace}): {@code replace} substitutes the matching
     * step with the parameterized steps; {@code full} keeps the step as a start
     * marker, appends wrapped sub-steps, and closes with an end marker (whose step
     * definition — bound to "}" — is registered here). Injection repeats until no
     * step changes, so parameterized scenarios may themselves contain
     * parameterized steps.
     *
     * @param parameterizedScenarios factories keyed by their matching pattern.
     * @param features features whose step lists are mutated in place.
     * @throws RuntimeException declared for documentation only (unchecked).
     */
    public void inject(Map<Pattern, ParameterizedJavaStepDefinition.Factory> parameterizedScenarios, List<CucumberFeature> features) throws RuntimeException {
        CompositionType compositionType = CompositionType.valueOf(System.getProperty("cucumber.compositionType", CompositionType.replace.name()));
        if (compositionType == CompositionType.full) {
            picoFactory().addInstance(this);
            getGlue().addStepDefinition(new ParameterizedJavaStepDefinition(Methods.findMethod(this.getClass(), "endOfParameterizedScenario"),
                Pattern.compile("}"), 0, picoFactory()));
        }
        features.forEach(feature -> {
            List<StepContainer> stepContainers = new ArrayList<>(feature.getFeatureElements());
            // The background is a StepContainer too, but it is held in a private field.
            CucumberBackground cucumberBackground = Fields.getValue(feature, "cucumberBackground");
            if (cucumberBackground != null) {
                stepContainers.add(cucumberBackground);
            }
            // Parameterized scenarios can reference each other, so they get rewritten as well.
            parameterizedScenarios.values().forEach(scenario -> stepContainers.add(scenario.statement()));
            int modifiedSteps;
            // we need to keep trying as long as we find new parameterizable steps in order to support composite sub scenarios
            do {
                modifiedSteps = 0;
                for (StepContainer stepContainer : stepContainers) {
                    for (int i = 0; i < stepContainer.getSteps().size(); i++) {
                        Step step = stepContainer.getSteps().get(i);
                        if (step instanceof ParameterizedStep) {
                            // Start/Quiet markers were produced by a previous pass; don't expand them again.
                            if (((ParameterizedStep) step).getType() == ParameterizedStep.Type.Start
                                || ((ParameterizedStep) step).getType() == ParameterizedStep.Type.Quiet) {
                                continue;
                            }
                        }
                        String stepName = step.getName();
                        for (Map.Entry<Pattern, ParameterizedJavaStepDefinition.Factory> parameterizedScenario : parameterizedScenarios.entrySet()) {
                            Matcher matcher = parameterizedScenario.getKey().matcher(stepName);
                            if (matcher.matches()) {
                                Function<Step, ParameterizedStep> wrapper;
                                String[] names = parameterizedScenario.getValue().parameters();
                                Object[] scenarioParameters = new Object[names.length];
                                for (int k = 0; k < names.length; k++) {
                                    // Capture group k+1 holds parameter k; strip surrounding double quotes if present.
                                    String value = matcher.group(k + 1);
                                    if (value.startsWith("\"") && value.endsWith("\"")) {
                                        value = value.substring(1, value.length() - 1);
                                    }
                                    scenarioParameters[k] = value;
                                }
                                if (compositionType == CompositionType.full) {
                                    parameterizedScenario.getValue().addStartStepToGlue();
                                    // Keep the original step in place as a visible start marker.
                                    stepContainer.getSteps().set(i, ParameterizedStep.startOf(step));
                                    wrapper = parameterizedStep -> ParameterizedStep.asSubStep(parameterizedStep, names, scenarioParameters);
                                } else {
                                    // replace mode: drop the matched step and splice the sub-steps in at its slot.
                                    stepContainer.getSteps().remove(i--);
                                    wrapper = parameterizedStep -> ParameterizedStep.parameterize(parameterizedStep, names, scenarioParameters);
                                }
                                List<Step> newSteps = parameterizedScenario.getValue().statement().getSteps().stream()
                                    .map(wrapper)
                                    .collect(Collectors.toList());
                                stepContainer.getSteps().addAll(i + 1, newSteps);
                                // Skip past the freshly injected steps; the fixpoint loop revisits them next pass.
                                i += newSteps.size();
                                if (compositionType == CompositionType.full) {
                                    stepContainer.getSteps().add(++i, ParameterizedStep.endOf(step));
                                }
                                modifiedSteps++;
                            }
                        }
                    }
                }
            } while (modifiedSteps > 0);
        });
    }

    /**
     * this method is used as a target for the end of a parameterized scenario
     */
    public void endOfParameterizedScenario() {
    }

    /**
     * Lazily resolves the pico object factory from the runtime's
     * {@link JavaBackend} by reading the private "backends" and "objectFactory"
     * fields via reflection.
     *
     * @return the shared {@link PicoFactory}.
     * @throws RuntimeException if no {@link JavaBackend} is present (declaration is documentary — unchecked).
     */
    public PicoFactory picoFactory() throws RuntimeException {
        if (picoFactory == null) {
            Collection<? extends Backend> backends = Fields.getValue(this, "backends");
            Optional<JavaBackend> first = backends.stream()
                .filter(backend -> backend instanceof JavaBackend)
                .map(backend -> (JavaBackend) backend)
                .findFirst();
            if (first.isPresent()) {
                picoFactory = Fields.getValue(first.get(), "objectFactory");
            } else {
                throw new RuntimeException("can't find a javaBackend instance");
            }
        }
        return picoFactory;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.runtime; import groovy.lang.Closure; import groovy.lang.GeneratedGroovyProxy; import groovy.lang.GroovyClassLoader; import groovy.lang.GroovyObject; import groovy.lang.GroovyRuntimeException; import groovy.transform.Trait; import org.codehaus.groovy.ast.ClassHelper; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.classgen.asm.BytecodeHelper; import org.codehaus.groovy.classgen.asm.util.TypeUtil; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.CompilerConfiguration; import org.codehaus.groovy.control.ErrorCollector; import org.codehaus.groovy.control.Phases; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.tools.GroovyClass; import org.codehaus.groovy.transform.trait.Traits; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Label; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import 
java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; /** * A proxy generator responsible for mapping a map of closures to a class implementing a list of interfaces. For * example, the following code: * <pre> * abstract class Foo { * abstract void bar(); * abstract void baz(); * } * def dyn = [bar: { println 'hello' }, baz: { println 'world'}] as Foo * </pre> * will generate a proxy class which extends class <i>Foo</i> and delegates method calls to the provided closures. * * The generated proxy implements the {@link GroovyObject} interface. * * Additionally, this proxy generator supports delegation to another object. In that case, if a method is defined * both in the closure map and the delegate, the version from the map is preferred. This allows overriding methods * from delegates with ease. * * Internally, the proxy generator makes use of ASM to generate bytecode, for improved performance as compared * to the legacy proxy generation mechanism which made use of string templates. 
* * @since 2.0.0 */ public class ProxyGeneratorAdapter extends ClassVisitor implements Opcodes { private static final Map<String, Boolean> EMPTY_DELEGATECLOSURE_MAP = Collections.emptyMap(); private static final Set<String> EMPTY_STRING_SET = Collections.emptySet(); private static final String CLOSURES_MAP_FIELD = "$closures$delegate$map"; private static final String DELEGATE_OBJECT_FIELD = "$delegate"; private static List<Method> OBJECT_METHODS = getInheritedMethods(Object.class, new ArrayList<Method>()); private static List<Method> GROOVYOBJECT_METHODS = getInheritedMethods(GroovyObject.class, new ArrayList<Method>()); private static final AtomicLong pxyCounter = new AtomicLong(); private static final Set<String> GROOVYOBJECT_METHOD_NAMESS; private static final Object[] EMPTY_ARGS = new Object[0]; private static final String[] EMPTY_STRING_ARRAY = new String[0]; static { List<String> names = new ArrayList<String>(); for (Method method : GroovyObject.class.getMethods()) { names.add(method.getName()); } GROOVYOBJECT_METHOD_NAMESS = new HashSet<String>(names); } private final Class superClass; private final Class delegateClass; private final InnerLoader loader; private final String proxyName; private final LinkedHashSet<Class> classList; private final Map<String, Boolean> delegatedClosures; // if emptyBody == true, then we generate an empty body instead throwing error on unimplemented methods private final boolean emptyBody; private final boolean hasWildcard; private final boolean generateDelegateField; private final Set<String> objectDelegateMethods; private final Set<Object> visitedMethods; // cached class private final Class cachedClass; private final Constructor cachedNoArgConstructor; /** * Construct a proxy generator. This generator is used when we need to create a proxy object for a class or an * interface given a map of closures. 
* * @param closureMap the delegates implementations * @param superClass corresponding to the superclass class visitor * @param interfaces extra interfaces the proxy should implement * @param proxyLoader the class loader which should be used to load the generated proxy * @param delegateClass if not null, generate a delegate field with the corresponding class * @param emptyBody if set to true, the unimplemented abstract methods will receive an empty body instead of * throwing an {@link UnsupportedOperationException}. */ public ProxyGeneratorAdapter( final Map<Object, Object> closureMap, final Class superClass, final Class[] interfaces, final ClassLoader proxyLoader, final boolean emptyBody, final Class delegateClass) { super(CompilerConfiguration.ASM_API_VERSION, new ClassWriter(0)); this.loader = proxyLoader != null ? createInnerLoader(proxyLoader, interfaces) : findClassLoader(superClass, interfaces); this.visitedMethods = new LinkedHashSet<Object>(); this.delegatedClosures = closureMap.isEmpty() ? EMPTY_DELEGATECLOSURE_MAP : new HashMap<String, Boolean>(); boolean wildcard = false; for (Map.Entry<Object, Object> entry : closureMap.entrySet()) { String name = entry.getKey().toString(); if ("*".equals(name)) { wildcard = true; } this.delegatedClosures.put(name, Boolean.FALSE); } this.hasWildcard = wildcard; Class fixedSuperClass = adjustSuperClass(superClass, interfaces); // if we have to delegate to another object, generate the appropriate delegate field // and collect the name of the methods for which delegation is active this.generateDelegateField = delegateClass != null; this.objectDelegateMethods = generateDelegateField ? createDelegateMethodList(fixedSuperClass, delegateClass, interfaces) : EMPTY_STRING_SET; this.delegateClass = delegateClass; // a proxy is supposed to be a concrete class, so it cannot extend an interface. 
// If the provided superclass is an interface, then we replace the superclass with Object // and add this interface to the list of implemented interfaces this.superClass = fixedSuperClass; // create the base list of classes which have possible methods to be overloaded this.classList = new LinkedHashSet<Class>(); this.classList.add(superClass); if (generateDelegateField) { classList.add(delegateClass); Collections.addAll(this.classList, delegateClass.getInterfaces()); } if (interfaces != null) { Collections.addAll(this.classList, interfaces); } this.proxyName = proxyName(); this.emptyBody = emptyBody; // generate bytecode ClassWriter writer = (ClassWriter) cv; this.visit(Opcodes.V1_5, ACC_PUBLIC, proxyName, null, null, null); byte[] b = writer.toByteArray(); // CheckClassAdapter.verify(new ClassReader(b), true, new PrintWriter(System.err)); cachedClass = loader.defineClass(proxyName.replace('/', '.'), b); // cache no-arg constructor Class[] args = generateDelegateField ? new Class[]{Map.class, delegateClass} : new Class[]{Map.class}; Constructor constructor; try { constructor = cachedClass.getConstructor(args); } catch (NoSuchMethodException e) { constructor = null; } cachedNoArgConstructor = constructor; } private Class adjustSuperClass(Class superClass, final Class[] interfaces) { boolean isSuperClassAnInterface = superClass.isInterface(); if (!isSuperClassAnInterface) { return superClass; } Class result = Object.class; Set<ClassNode> traits = new LinkedHashSet<ClassNode>(); // check if it's a trait collectTraits(superClass, traits); if (interfaces != null) { for (Class anInterface : interfaces) { collectTraits(anInterface, traits); } } if (!traits.isEmpty()) { String name = superClass.getName() + "$TraitAdapter"; ClassNode cn = new ClassNode(name, ACC_PUBLIC | ACC_ABSTRACT, ClassHelper.OBJECT_TYPE, traits.toArray(ClassNode.EMPTY_ARRAY), null); CompilationUnit cu = new CompilationUnit(loader); CompilerConfiguration config = new CompilerConfiguration(); SourceUnit 
su = new SourceUnit(name + "wrapper", "", config, loader, new ErrorCollector(config)); cu.addSource(su); cu.compile(Phases.CONVERSION); su.getAST().addClass(cn); cu.compile(Phases.CLASS_GENERATION); @SuppressWarnings("unchecked") List<GroovyClass> classes = (List<GroovyClass>) cu.getClasses(); for (GroovyClass groovyClass : classes) { if (groovyClass.getName().equals(name)) { return loader.defineClass(name, groovyClass.getBytes()); } } } return result; } private static void collectTraits(final Class clazz, final Set<ClassNode> traits) { Annotation annotation = clazz.getAnnotation(Trait.class); if (annotation != null) { ClassNode trait = ClassHelper.make(clazz); traits.add(trait.getPlainNodeReference()); LinkedHashSet<ClassNode> selfTypes = new LinkedHashSet<ClassNode>(); Traits.collectSelfTypes(trait, selfTypes, true, true); for (ClassNode selfType : selfTypes) { if (Traits.isTrait(selfType)) { traits.add(selfType.getPlainNodeReference()); } } } } private static InnerLoader createInnerLoader(final ClassLoader parent, final Class[] interfaces) { return AccessController.doPrivileged((PrivilegedAction<InnerLoader>) () -> new InnerLoader(parent, interfaces)); } private InnerLoader findClassLoader(Class clazz, Class[] interfaces) { ClassLoader cl = clazz.getClassLoader(); if (cl == null) cl = this.getClass().getClassLoader(); return createInnerLoader(cl, interfaces); } private static Set<String> createDelegateMethodList(Class superClass, Class delegateClass, Class[] interfaces) { Set<String> selectedMethods = new HashSet<String>(); List<Method> interfaceMethods = new ArrayList<Method>(); List<Method> superClassMethods = new ArrayList<Method>(); Collections.addAll(superClassMethods, superClass.getDeclaredMethods()); if (interfaces != null) { for (Class thisInterface : interfaces) { getInheritedMethods(thisInterface, interfaceMethods); } for (Method method : interfaceMethods) { if (!(containsEquivalentMethod(superClassMethods, method))) { 
selectedMethods.add(method.getName() + Type.getMethodDescriptor(method)); } } } List<Method> additionalMethods = getInheritedMethods(delegateClass, new ArrayList<Method>()); for (Method method : additionalMethods) { if (method.getName().indexOf('$') != -1) continue; if (!containsEquivalentMethod(interfaceMethods, method) && !containsEquivalentMethod(OBJECT_METHODS, method) && !containsEquivalentMethod(GROOVYOBJECT_METHODS, method)) { selectedMethods.add(method.getName() + Type.getMethodDescriptor(method)); } } return selectedMethods; } private static List<Method> getInheritedMethods(Class baseClass, List<Method> methods) { Collections.addAll(methods, baseClass.getMethods()); Class currentClass = baseClass; while (currentClass != null) { Method[] protectedMethods = currentClass.getDeclaredMethods(); for (Method method : protectedMethods) { if (method.getName().indexOf('$') != -1) continue; if (Modifier.isProtected(method.getModifiers()) && !containsEquivalentMethod(methods, method)) methods.add(method); } currentClass = currentClass.getSuperclass(); } return methods; } private static boolean containsEquivalentMethod(Collection<Method> publicAndProtectedMethods, Method candidate) { for (Method method : publicAndProtectedMethods) { if (candidate.getName().equals(method.getName()) && candidate.getReturnType().equals(method.getReturnType()) && hasMatchingParameterTypes(candidate, method)) { return true; } } return false; } private static boolean hasMatchingParameterTypes(Method method, Method candidate) { Class[] candidateParamTypes = candidate.getParameterTypes(); Class[] methodParamTypes = method.getParameterTypes(); if (candidateParamTypes.length != methodParamTypes.length) return false; for (int i = 0; i < methodParamTypes.length; i++) { if (!candidateParamTypes[i].equals(methodParamTypes[i])) return false; } return true; } @Override public void visit(final int version, final int access, final String name, final String signature, final String superName, final 
String[] interfaces) { Set<String> interfacesSet = new LinkedHashSet<String>(); if (interfaces != null) Collections.addAll(interfacesSet, interfaces); for (Class extraInterface : classList) { if (extraInterface.isInterface()) interfacesSet.add(BytecodeHelper.getClassInternalName(extraInterface)); } final boolean addGroovyObjectSupport = !GroovyObject.class.isAssignableFrom(superClass); if (addGroovyObjectSupport) interfacesSet.add("groovy/lang/GroovyObject"); if (generateDelegateField) { classList.add(GeneratedGroovyProxy.class); interfacesSet.add("groovy/lang/GeneratedGroovyProxy"); } super.visit(V1_5, ACC_PUBLIC, proxyName, signature, BytecodeHelper.getClassInternalName(superClass), interfacesSet.toArray(EMPTY_STRING_ARRAY)); visitMethod(ACC_PUBLIC, "<init>", "()V", null, null); addDelegateFields(); if (addGroovyObjectSupport) { createGroovyObjectSupport(); } for (Class clazz : classList) { visitClass(clazz); } } /** * Visit every class/interface this proxy should implement, and generate the appropriate * bytecode for delegation if available. 
* * @param clazz an class for which to generate bytecode */ private void visitClass(final Class clazz) { Method[] methods = clazz.getDeclaredMethods(); for (Method method : methods) { Class<?>[] exceptionTypes = method.getExceptionTypes(); String[] exceptions = new String[exceptionTypes.length]; for (int i = 0; i < exceptions.length; i++) { exceptions[i] = BytecodeHelper.getClassInternalName(exceptionTypes[i]); } // for each method defined in the class, generate the appropriate delegation bytecode visitMethod(method.getModifiers(), method.getName(), BytecodeHelper.getMethodDescriptor(method.getReturnType(), method.getParameterTypes()), null, exceptions); } Constructor[] constructors = clazz.getDeclaredConstructors(); for (Constructor method : constructors) { Class<?>[] exceptionTypes = method.getExceptionTypes(); String[] exceptions = new String[exceptionTypes.length]; for (int i = 0; i < exceptions.length; i++) { exceptions[i] = BytecodeHelper.getClassInternalName(exceptionTypes[i]); } // for each method defined in the class, generate the appropriate delegation bytecode visitMethod(method.getModifiers(), "<init>", BytecodeHelper.getMethodDescriptor(Void.TYPE, method.getParameterTypes()), null, exceptions); } for (Class intf : clazz.getInterfaces()) { visitClass(intf); } Class superclass = clazz.getSuperclass(); if (superclass != null) visitClass(superclass); // Ultimately, methods can be available in the closure map which are not defined by the superclass // nor the interfaces for (Map.Entry<String, Boolean> entry : delegatedClosures.entrySet()) { Boolean visited = entry.getValue(); if (!visited) { String name = entry.getKey(); if (!"*".equals(name)) { // generate a new method visitMethod(ACC_PUBLIC, name, "([Ljava/lang/Object;)Ljava/lang/Object;", null, null); } } } } /** * When an object doesn't implement the GroovyObject interface, we generate bytecode for the * {@link GroovyObject} interface methods. Otherwise, the superclass is expected to implement them. 
*/ private void createGroovyObjectSupport() { visitField(ACC_PRIVATE + ACC_TRANSIENT, "metaClass", "Lgroovy/lang/MetaClass;", null, null); // getMetaClass MethodVisitor mv; { mv = super.visitMethod(ACC_PUBLIC, "getMetaClass", "()Lgroovy/lang/MetaClass;", null, null); mv.visitCode(); Label l0 = new Label(); mv.visitLabel(l0); mv.visitVarInsn(ALOAD, 0); mv.visitFieldInsn(GETFIELD, proxyName, "metaClass", "Lgroovy/lang/MetaClass;"); Label l1 = new Label(); mv.visitJumpInsn(IFNONNULL, l1); Label l2 = new Label(); mv.visitLabel(l2); mv.visitVarInsn(ALOAD, 0); mv.visitVarInsn(ALOAD, 0); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "getClass", "()Ljava/lang/Class;", false); mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/runtime/InvokerHelper", "getMetaClass", "(Ljava/lang/Class;)Lgroovy/lang/MetaClass;", false); mv.visitFieldInsn(PUTFIELD, proxyName, "metaClass", "Lgroovy/lang/MetaClass;"); mv.visitLabel(l1); mv.visitVarInsn(ALOAD, 0); mv.visitFieldInsn(GETFIELD, proxyName, "metaClass", "Lgroovy/lang/MetaClass;"); mv.visitInsn(ARETURN); mv.visitMaxs(2, 1); mv.visitEnd(); } // setMetaClass { mv = super.visitMethod(ACC_PUBLIC, "setMetaClass", "(Lgroovy/lang/MetaClass;)V", null, null); mv.visitCode(); Label l0 = new Label(); mv.visitLabel(l0); mv.visitVarInsn(ALOAD, 0); mv.visitVarInsn(ALOAD, 1); mv.visitFieldInsn(PUTFIELD, proxyName, "metaClass", "Lgroovy/lang/MetaClass;"); Label l1 = new Label(); mv.visitLabel(l1); mv.visitInsn(RETURN); Label l2 = new Label(); mv.visitLabel(l2); mv.visitMaxs(2, 2); mv.visitEnd(); } } /** * Creates delegate fields for every closure defined in the map. */ private void addDelegateFields() { visitField(ACC_PRIVATE + ACC_FINAL, CLOSURES_MAP_FIELD, "Ljava/util/Map;", null, null); if (generateDelegateField) { visitField(ACC_PRIVATE + ACC_FINAL, DELEGATE_OBJECT_FIELD, BytecodeHelper.getTypeDescription(delegateClass), null, null); } } private String proxyName() { String name = delegateClass != null ? 
delegateClass.getName() : superClass.getName();
        // Array class names look like "[Lfoo.Bar;"; turn them into a usable identifier suffix.
        if (name.startsWith("[") && name.endsWith(";")) {
            name = name.substring(1, name.length() - 1) + "_array";
        }
        // Strip the package prefix and append a unique counter so every generated proxy gets a fresh name.
        int index = name.lastIndexOf('.');
        if (index == -1) return name + pxyCounter.incrementAndGet() + "_groovyProxy";
        return name.substring(index + 1) + pxyCounter.incrementAndGet() + "_groovyProxy";
    }

    /**
     * Returns true if {@code clazz} (or one of its superclasses) provides a concrete
     * (non-abstract) implementation of the method identified by {@code name} and the
     * JVM method descriptor {@code desc}.
     */
    private static boolean isImplemented(Class clazz, String name, String desc) {
        Method[] methods = clazz.getDeclaredMethods();
        for (Method method : methods) {
            if (method.getName().equals(name)) {
                if (desc.equals(Type.getMethodDescriptor(method))) {
                    return !Modifier.isAbstract(method.getModifiers());
                }
            }
        }
        // Walk up the hierarchy; a concrete override anywhere above counts as implemented.
        Class parent = clazz.getSuperclass();
        return parent != null && isImplemented(parent, name, desc);
    }

    /**
     * ASM visitor callback for every method of the class being proxied. Decides, per method,
     * whether the generated proxy should: delegate to a closure from the closure map, delegate
     * to the delegate object, generate the special constructor/getProxyTarget, or stub out an
     * abstract method. Returns a MethodVisitor only when bytecode for the visited method must
     * be forwarded; returns null when this adapter fully handles (or skips) the method.
     */
    @Override
    public MethodVisitor visitMethod(final int access, final String name, final String desc, final String signature, final String[] exceptions) {
        // A method is identified by its name + descriptor; visit each one only once.
        Object key = Arrays.asList(name, desc);
        if (visitedMethods.contains(key)) return null;
        if (Modifier.isPrivate(access) || Modifier.isNative(access) || ((access & ACC_SYNTHETIC) != 0)) {
            // do not generate bytecode for private methods
            return null;
        }
        int accessFlags = access;
        visitedMethods.add(key);
        if ((objectDelegateMethods.contains(name + desc) || delegatedClosures.containsKey(name) || (!"<init>".equals(name) && hasWildcard)) && !Modifier.isStatic(access) && !Modifier.isFinal(access)) {
            if (!GROOVYOBJECT_METHOD_NAMESS.contains(name)) {
                if (Modifier.isAbstract(access)) {
                    // prevents the proxy from being abstract
                    accessFlags -= ACC_ABSTRACT;
                }
                if (delegatedClosures.containsKey(name) || (!"<init>".equals(name) && hasWildcard)) {
                    // Closure map (or wildcard "*") wins over the delegate object.
                    delegatedClosures.put(name, Boolean.TRUE);
                    return makeDelegateToClosureCall(name, desc, signature, exceptions, accessFlags);
                }
                if (generateDelegateField && objectDelegateMethods.contains(name + desc)) {
                    return makeDelegateCall(name, desc, signature, exceptions, accessFlags);
                }
                delegatedClosures.put(name, Boolean.TRUE);
                return makeDelegateToClosureCall(name, desc, signature, exceptions, accessFlags);
            }
        } else if ("getProxyTarget".equals(name) && "()Ljava/lang/Object;".equals(desc)) {
            return createGetProxyTargetMethod(access, name, desc, signature, exceptions);
        } else if ("<init>".equals(name) && (Modifier.isPublic(access) || Modifier.isProtected(access))) {
            return createConstructor(access, name, desc, signature, exceptions);
        } else if (Modifier.isAbstract(access) && !GROOVYOBJECT_METHOD_NAMESS.contains(name)) {
            if (isImplemented(superClass, name, desc)) {
                // A concrete implementation already exists up the hierarchy; nothing to generate.
                return null;
            }
            accessFlags -= ACC_ABSTRACT;
            MethodVisitor mv = super.visitMethod(accessFlags, name, desc, signature, exceptions);
            mv.visitCode();
            Type[] args = Type.getArgumentTypes(desc);
            if (emptyBody) {
                // Generate a body that returns the type's zero value (0 / 0L / 0.0f / 0.0d / null).
                Type returnType = Type.getReturnType(desc);
                if (returnType == Type.VOID_TYPE) {
                    mv.visitInsn(RETURN);
                } else {
                    int loadIns = getLoadInsn(returnType);
                    switch (loadIns) {
                        case ILOAD: mv.visitInsn(ICONST_0); break;
                        case LLOAD: mv.visitInsn(LCONST_0); break;
                        case FLOAD: mv.visitInsn(FCONST_0); break;
                        case DLOAD: mv.visitInsn(DCONST_0); break;
                        default: mv.visitInsn(ACONST_NULL);
                    }
                    mv.visitInsn(getReturnInsn(returnType));
                    mv.visitMaxs(2, registerLen(args) + 1);
                }
            } else {
                // for compatibility with the legacy proxy generator, we should throw an UnsupportedOperationException
                // instead of an AbstractMethodException
                mv.visitTypeInsn(NEW, "java/lang/UnsupportedOperationException");
                mv.visitInsn(DUP);
                mv.visitMethodInsn(INVOKESPECIAL, "java/lang/UnsupportedOperationException", "<init>", "()V", false);
                mv.visitInsn(ATHROW);
                mv.visitMaxs(2, registerLen(args) + 1);
            }
            mv.visitEnd();
        }
        return null;
    }

    /**
     * Emits {@code getProxyTarget()}: a final public method returning the delegate
     * object stored in the generated {@code DELEGATE_OBJECT_FIELD}.
     */
    private MethodVisitor createGetProxyTargetMethod(final int access, final String name, final String desc, final String signature, final String[] exceptions) {
        MethodVisitor mv = super.visitMethod(ACC_PUBLIC | ACC_FINAL, name, desc, signature, exceptions);
        mv.visitCode();
        mv.visitIntInsn(ALOAD, 0);
        mv.visitFieldInsn(GETFIELD, proxyName, DELEGATE_OBJECT_FIELD, BytecodeHelper.getTypeDescription(delegateClass));
        mv.visitInsn(ARETURN);
        mv.visitMaxs(1, 1);
        mv.visitEnd();
        return null;
    }

    /** Total number of JVM local-variable slots occupied by the given argument types. */
    private static int registerLen(Type[] args) {
        int i = 0;
        for (Type arg : args) {
            i += registerLen(arg);
        }
        return i;
    }

    /** Slots used by one value: doubles and longs take two slots, everything else one. */
    private static int registerLen(final Type arg) {
        return arg == Type.DOUBLE_TYPE || arg == Type.LONG_TYPE ? 2 : 1;
    }

    /**
     * Emits a proxy constructor whose descriptor is the super constructor's arguments
     * plus the closure map (and, if {@code generateDelegateField}, the delegate object).
     * The generated body stores the map/delegate into the proxy's fields, then calls
     * {@code super(<original args>)}.
     */
    private MethodVisitor createConstructor(final int access, final String name, final String desc, final String signature, final String[] exceptions) {
        Type[] args = Type.getArgumentTypes(desc);
        StringBuilder newDesc = new StringBuilder("(");
        for (Type arg : args) {
            newDesc.append(arg.getDescriptor());
        }
        newDesc.append("Ljava/util/Map;"); // the closure map
        if (generateDelegateField) {
            newDesc.append(BytecodeHelper.getTypeDescription(delegateClass));
        }
        newDesc.append(")V");
        MethodVisitor mv = super.visitMethod(access, name, newDesc.toString(), signature, exceptions);
        mv.visitCode();
        initializeDelegateClosure(mv, args);
        if (generateDelegateField) {
            initializeDelegateObject(mv, args);
        }
        mv.visitVarInsn(ALOAD, 0);
        // Reload the original constructor arguments (slot 0 is 'this').
        int idx = 1;
        for (Type arg : args) {
            if (isPrimitive(arg)) {
                mv.visitIntInsn(getLoadInsn(arg), idx);
            } else {
                mv.visitVarInsn(ALOAD, idx); // load argument i
            }
            idx += registerLen(arg);
        }
        // Chain to the super constructor with the ORIGINAL descriptor.
        mv.visitMethodInsn(INVOKESPECIAL, BytecodeHelper.getClassInternalName(superClass), "<init>", desc, false);
        mv.visitInsn(RETURN);
        int max = idx + 1 + (generateDelegateField ? 1 : 0);
        mv.visitMaxs(max, max);
        mv.visitEnd();
        return null;
    }

    /** Stores the closure-map constructor argument into {@code CLOSURES_MAP_FIELD}. */
    private void initializeDelegateClosure(final MethodVisitor mv, Type[] args) {
        int idx = 1 + getTypeArgsRegisterLength(args);
        mv.visitIntInsn(ALOAD, 0); // this
        mv.visitIntInsn(ALOAD, idx); // constructor arg n is the closure map
        mv.visitFieldInsn(PUTFIELD, proxyName, CLOSURES_MAP_FIELD, "Ljava/util/Map;");
    }

    /** Stores the delegate-object constructor argument (one slot after the map) into {@code DELEGATE_OBJECT_FIELD}. */
    private void initializeDelegateObject(final MethodVisitor mv, Type[] args) {
        int idx = 2 + getTypeArgsRegisterLength(args);
        mv.visitIntInsn(ALOAD, 0); // this
        mv.visitIntInsn(ALOAD, idx); // constructor arg n is the closure map
        mv.visitFieldInsn(PUTFIELD, proxyName, DELEGATE_OBJECT_FIELD, BytecodeHelper.getTypeDescription(delegateClass));
    }

    /** Sum of local-variable slots for the given types (see {@link #registerLen(Type)}). */
    private static int getTypeArgsRegisterLength(Type[] args) {
        int length = 0;
        for (Type type : args) {
            length += registerLen(type);
        }
        return length;
    }

    /**
     * Generate a call to the delegate object.
     * The generated body boxes all arguments into an Object[] and dispatches through
     * {@code InvokerHelper.invokeMethod(delegate, name, args)}, then unwraps the result
     * back to the declared return type.
     */
    protected MethodVisitor makeDelegateCall(final String name, final String desc, final String signature, final String[] exceptions, final int accessFlags) {
        MethodVisitor mv = super.visitMethod(accessFlags, name, desc, signature, exceptions);
        mv.visitVarInsn(ALOAD, 0); // load this
        mv.visitFieldInsn(GETFIELD, proxyName, DELEGATE_OBJECT_FIELD, BytecodeHelper.getTypeDescription(delegateClass)); // load delegate
        // using InvokerHelper to allow potential intercepted calls
        int size;
        mv.visitLdcInsn(name); // method name
        Type[] args = Type.getArgumentTypes(desc);
        BytecodeHelper.pushConstant(mv, args.length);
        mv.visitTypeInsn(ANEWARRAY, "java/lang/Object");
        size = 6;
        int idx = 1;
        for (int i = 0; i < args.length; i++) {
            Type arg = args[i];
            mv.visitInsn(DUP);
            BytecodeHelper.pushConstant(mv, i);
            // primitive types must be boxed
            boxPrimitiveType(mv, idx, arg);
            size = Math.max(size, 5 + registerLen(arg));
            idx += registerLen(arg);
            mv.visitInsn(AASTORE); // store value into array
        }
        mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/runtime/InvokerHelper", "invokeMethod", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", false);
        unwrapResult(mv, desc);
        mv.visitMaxs(size, registerLen(args) + 1);
        return mv;
    }

    /**
     * Generates a method body that looks up the closure for {@code name} in the closure
     * map (falling back to the "*" wildcard entry when absent), wraps it via
     * {@code ensureClosure}, calls it with the boxed arguments and unwraps the result.
     */
    protected MethodVisitor makeDelegateToClosureCall(final String name, final String desc, final String signature, final String[] exceptions, final int accessFlags) {
        MethodVisitor mv = super.visitMethod(accessFlags, name, desc, signature, exceptions);
        // TraceMethodVisitor tmv = new TraceMethodVisitor(mv);
        // mv = tmv;
        mv.visitCode();
        int stackSize = 0;
        // method body should be:
        // this.$delegate$closure$methodName.call(new Object[] { method arguments })
        Type[] args = Type.getArgumentTypes(desc);
        // First free local slot after 'this' + widest possible argument layout.
        int arrayStore = args.length + 1;
        BytecodeHelper.pushConstant(mv, args.length);
        mv.visitTypeInsn(ANEWARRAY, "java/lang/Object"); // stack size = 1
        stackSize = 1;
        int idx = 1;
        for (int i = 0; i < args.length; i++) {
            Type arg = args[i];
            mv.visitInsn(DUP); // stack size = 2
            BytecodeHelper.pushConstant(mv, i); // array index, stack size = 3
            // primitive types must be boxed
            boxPrimitiveType(mv, idx, arg);
            idx += registerLen(arg);
            stackSize = Math.max(4, 3 + registerLen(arg));
            mv.visitInsn(AASTORE); // store value into array
        }
        mv.visitVarInsn(ASTORE, arrayStore); // store array
        int arrayIndex = arrayStore;
        mv.visitVarInsn(ALOAD, 0); // load this
        mv.visitFieldInsn(GETFIELD, proxyName, CLOSURES_MAP_FIELD, "Ljava/util/Map;"); // load closure map
        mv.visitLdcInsn(name); // load method name
        mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;", true);
        arrayStore++;
        mv.visitVarInsn(ASTORE, arrayStore);
        // if null, test if wildcard exists
        Label notNull = new Label();
        mv.visitIntInsn(ALOAD, arrayStore);
        mv.visitJumpInsn(IFNONNULL, notNull);
        mv.visitVarInsn(ALOAD, 0); // load this
        mv.visitFieldInsn(GETFIELD, proxyName, CLOSURES_MAP_FIELD, "Ljava/util/Map;"); // load closure map
        mv.visitLdcInsn("*"); // load wildcard
        mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;", true);
        mv.visitVarInsn(ASTORE, arrayStore);
        mv.visitLabel(notNull);
        mv.visitVarInsn(ALOAD, arrayStore);
        // ensureClosure is a public static on this class, invoked from the generated bytecode.
        mv.visitMethodInsn(INVOKESTATIC, BytecodeHelper.getClassInternalName(this.getClass()), "ensureClosure", "(Ljava/lang/Object;)Lgroovy/lang/Closure;", false);
        mv.visitVarInsn(ALOAD, arrayIndex); // load argument array
        stackSize++;
        mv.visitMethodInsn(INVOKEVIRTUAL, "groovy/lang/Closure", "call", "([Ljava/lang/Object;)Ljava/lang/Object;", false); // call closure
        unwrapResult(mv, desc);
        mv.visitMaxs(stackSize, arrayStore + 1);
        mv.visitEnd();
        // System.out.println("tmv.getText() = " + tmv.getText());
        return null;
    }

    /**
     * Loads the argument at local slot {@code idx} onto the stack, boxing it through
     * {@code Wrapper.valueOf(primitive)} when {@code arg} is a primitive type.
     */
    private void boxPrimitiveType(MethodVisitor mv, int idx, Type arg) {
        if (isPrimitive(arg)) {
            mv.visitIntInsn(getLoadInsn(arg), idx);
            String wrappedType = getWrappedClassDescriptor(arg);
            mv.visitMethodInsn(INVOKESTATIC, wrappedType, "valueOf", "(" + arg.getDescriptor() + ")L" + wrappedType + ";", false);
        } else {
            mv.visitVarInsn(ALOAD, idx); // load argument i
        }
    }

    /**
     * Converts the Object result left on the stack to the return type declared in
     * {@code desc}: pops it for void, unboxes for primitives, or CHECKCASTs for
     * reference types, then emits the matching return instruction.
     */
    private static void unwrapResult(final MethodVisitor mv, final String desc) {
        Type returnType = Type.getReturnType(desc);
        if (returnType == Type.VOID_TYPE) {
            mv.visitInsn(POP);
            mv.visitInsn(RETURN);
        } else {
            if (isPrimitive(returnType)) {
                BytecodeHelper.unbox(mv, ClassHelper.make(returnType.getClassName()));
            } else {
                mv.visitTypeInsn(CHECKCAST, returnType.getInternalName());
            }
            mv.visitInsn(getReturnInsn(returnType));
        }
    }

    /**
     * Instantiates the generated proxy with the given closure map, appending the map
     * as the last constructor argument (see {@code createConstructor}).
     */
    @SuppressWarnings("unchecked")
    public GroovyObject proxy(Map<Object, Object> map, Object... constructorArgs) {
        if (constructorArgs == null && cachedNoArgConstructor != null) {
            // if there isn't any argument, we can make invocation faster using the cached constructor
            try {
                return (GroovyObject) cachedNoArgConstructor.newInstance(map);
            } catch (InstantiationException | InvocationTargetException | IllegalAccessException e) {
                throw new GroovyRuntimeException(e);
            }
        }
        if (constructorArgs == null) constructorArgs = EMPTY_ARGS;
        Object[] values = new Object[constructorArgs.length + 1];
        System.arraycopy(constructorArgs, 0, values, 0, constructorArgs.length);
        values[values.length - 1] = map;
        return DefaultGroovyMethods.<GroovyObject>newInstance(cachedClass, values);
    }

    /**
     * Instantiates the generated proxy with both a closure map and a delegate object;
     * the constructor expects them as the two trailing arguments, in that order.
     */
    @SuppressWarnings("unchecked")
    public GroovyObject delegatingProxy(Object delegate, Map<Object, Object> map, Object... constructorArgs) {
        if (constructorArgs == null && cachedNoArgConstructor != null) {
            // if there isn't any argument, we can make invocation faster using the cached constructor
            try {
                return (GroovyObject) cachedNoArgConstructor.newInstance(map, delegate);
            } catch (InstantiationException | InvocationTargetException | IllegalAccessException e) {
                throw new GroovyRuntimeException(e);
            }
        }
        if (constructorArgs == null) constructorArgs = EMPTY_ARGS;
        Object[] values = new Object[constructorArgs.length + 2];
        System.arraycopy(constructorArgs, 0, values, 0, constructorArgs.length);
        values[values.length - 2] = map;
        values[values.length - 1] = delegate;
        return DefaultGroovyMethods.<GroovyObject>newInstance(cachedClass, values);
    }

    /**
     * Ensures that the provided object is wrapped into a closure if it's not
     * a closure.
     * Do not trust IDEs, this method is used in bytecode.
     */
    @SuppressWarnings("unchecked")
    public static Closure ensureClosure(Object o) {
        if (o == null) throw new UnsupportedOperationException();
        if (o instanceof Closure) return (Closure) o;
        return new ReturnValueWrappingClosure(o);
    }

    /** Opcode used to load a value of the given type (ILOAD/LLOAD/FLOAD/DLOAD/ALOAD). */
    private static int getLoadInsn(Type type) {
        return TypeUtil.getLoadInsnByType(type);
    }

    /** Opcode used to return a value of the given type (IRETURN/.../ARETURN). */
    private static int getReturnInsn(Type type) {
        return TypeUtil.getReturnInsnByType(type);
    }

    /** Whether the given ASM type denotes a JVM primitive. */
    private static boolean isPrimitive(Type type) {
        return TypeUtil.isPrimitiveType(type);
    }

    /** Internal name of the wrapper class for a primitive type (e.g. java/lang/Integer). */
    private static String getWrappedClassDescriptor(Type type) {
        return TypeUtil.getWrappedClassDescriptor(type);
    }

    /**
     * Class loader used to define the generated proxy class. In addition to the parent
     * loader it can consult the loaders of the proxied interfaces, so interfaces loaded
     * by foreign class loaders still resolve.
     */
    private static class InnerLoader extends GroovyClassLoader {
        // Loaders of interfaces that were not loaded by the parent; lazily initialized.
        List<ClassLoader> internalClassLoaders = null;

        protected InnerLoader(final ClassLoader parent, final Class[] interfaces) {
            super(parent);
            if (interfaces != null) {
                for (Class c : interfaces) {
                    if (c.getClassLoader() != parent) {
                        if (internalClassLoaders == null) internalClassLoaders = new ArrayList<ClassLoader>(interfaces.length);
                        if (!internalClassLoaders.contains(c.getClassLoader())) {
                            internalClassLoaders.add(c.getClassLoader());
                        }
                    }
                }
            }
        }

        /** Defines the proxy class from raw bytecode. */
        public Class defineClass(String name, byte[] data) {
            return super.defineClass(name, data, 0, data.length);
        }

        public Class<?> loadClass(String name) throws ClassNotFoundException {
            // First check whether it's already been loaded, if so use it
            Class loadedClass = findLoadedClass(name);
            if (loadedClass != null) return loadedClass;

            // Check this class loader
            try {
                loadedClass = findClass(name);
            } catch (ClassNotFoundException ignore) {
            }
            if (loadedClass != null) return loadedClass;

            // Check parent classloader, keep the exception for future use
            ClassNotFoundException ex = null;
            try {
                loadedClass = super.loadClass(name);
            } catch (ClassNotFoundException e) {
                ex = e;
            }
            if (loadedClass != null) return loadedClass;

            // Not loaded, try to load it
            if (internalClassLoaders != null) {
                for (ClassLoader i : internalClassLoaders) {
                    try {
                        // Ignore parent delegation and just try to load locally
                        loadedClass = i.loadClass(name);
                        if (loadedClass != null) return loadedClass;
                    } catch (ClassNotFoundException e) {
                        // Swallow exception - does not exist locally
                    }
                }
            }

            // Throw earlier exception from parent loader if it exists, otherwise create a new exception
            if (ex != null) throw ex;
            throw new ClassNotFoundException(name);
        }
    }

    /**
     * Closure that ignores its arguments and always returns the value it was
     * constructed with; used by {@code ensureClosure} to adapt plain map values.
     */
    private static class ReturnValueWrappingClosure<V> extends Closure<V>{
        private static final long serialVersionUID = 1313135457715304501L;
        private final V value;

        public ReturnValueWrappingClosure(V returnValue) {
            super(null);
            value = returnValue;
        }

        @Override
        public V call(final Object... args) {
            return value;
        }
    }

}
/*
 * Copyright 2011-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lambdaworks.redis.cluster.api.async;

import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import com.lambdaworks.redis.RedisClusterAsyncConnection;
import com.lambdaworks.redis.RedisFuture;
import com.lambdaworks.redis.api.async.*;

/**
 * A complete asynchronous and thread-safe cluster Redis API with 400+ methods.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 * @author Mark Paluch
 * @since 4.0
 */
public interface RedisClusterAsyncCommands<K, V> extends RedisHashAsyncCommands<K, V>, RedisKeyAsyncCommands<K, V>,
        RedisStringAsyncCommands<K, V>, RedisListAsyncCommands<K, V>, RedisSetAsyncCommands<K, V>,
        RedisSortedSetAsyncCommands<K, V>, RedisScriptingAsyncCommands<K, V>, RedisServerAsyncCommands<K, V>,
        RedisHLLAsyncCommands<K, V>, RedisGeoAsyncCommands<K, V>, BaseRedisAsyncCommands<K, V>,
        RedisClusterAsyncConnection<K, V> {

    /**
     * Set the default timeout for operations.
     *
     * @param timeout the timeout value
     * @param unit the unit of the timeout value
     */
    void setTimeout(long timeout, TimeUnit unit);

    /**
     * Authenticate to the server.
     *
     * <p>Note: unlike the other commands in this interface, this method is declared to return the reply directly rather than
     * a {@code RedisFuture}.</p>
     *
     * @param password the password
     * @return String simple-string-reply
     */
    String auth(String password);

    /**
     * Generate a new config epoch, incrementing the current epoch, assign the new epoch to this node, WITHOUT any consensus and
     * persist the configuration on disk before sending packets with the new configuration.
     *
     * @return String simple-string-reply If the new config epoch is generated and assigned either BUMPED (epoch) or STILL
     *         (epoch) are returned.
     */
    RedisFuture<String> clusterBumpepoch();

    /**
     * Meet another cluster node to include the node into the cluster. The command starts the cluster handshake and returns with
     * {@literal OK} when the node was added to the cluster.
     *
     * @param ip IP address of the host
     * @param port port number.
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterMeet(String ip, int port);

    /**
     * Blacklist and remove the cluster node from the cluster.
     *
     * @param nodeId the node Id
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterForget(String nodeId);

    /**
     * Adds slots to the cluster node. The current node will become the master for the specified slots.
     *
     * @param slots one or more slots from {@literal 0} to {@literal 16384}
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterAddSlots(int... slots);

    /**
     * Removes slots from the cluster node.
     *
     * @param slots one or more slots from {@literal 0} to {@literal 16384}
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterDelSlots(int... slots);

    /**
     * Assign a slot to a node. The command migrates the specified slot from the current node to the specified node in
     * {@code nodeId}
     *
     * @param slot the slot
     * @param nodeId the id of the node that will become the master for the slot
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterSetSlotNode(int slot, String nodeId);

    /**
     * Clears migrating / importing state from the slot.
     *
     * @param slot the slot
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterSetSlotStable(int slot);

    /**
     * Flag a slot as {@literal MIGRATING} (outgoing) towards the node specified in {@code nodeId}. The slot must be handled by
     * the current node in order to be migrated.
     *
     * @param slot the slot
     * @param nodeId the id of the node is targeted to become the master for the slot
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterSetSlotMigrating(int slot, String nodeId);

    /**
     * Flag a slot as {@literal IMPORTING} (incoming) from the node specified in {@code nodeId}.
     *
     * @param slot the slot
     * @param nodeId the id of the node is the master of the slot
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterSetSlotImporting(int slot, String nodeId);

    /**
     * Get information and statistics about the cluster viewed by the current node.
     *
     * @return String bulk-string-reply as a collection of text lines.
     */
    RedisFuture<String> clusterInfo();

    /**
     * Obtain the nodeId for the currently connected node.
     *
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterMyId();

    /**
     * Obtain details about all cluster nodes. Can be parsed using
     * {@link com.lambdaworks.redis.cluster.models.partitions.ClusterPartitionParser#parse}
     *
     * @return String bulk-string-reply as a collection of text lines
     */
    RedisFuture<String> clusterNodes();

    /**
     * List slaves for a certain node identified by its {@code nodeId}. Can be parsed using
     * {@link com.lambdaworks.redis.cluster.models.partitions.ClusterPartitionParser#parse}
     *
     * @param nodeId node id of the master node
     * @return List&lt;String&gt; array-reply list of slaves. The command returns data in the same format as
     *         {@link #clusterNodes()} but one line per slave.
     */
    RedisFuture<List<String>> clusterSlaves(String nodeId);

    /**
     * Retrieve the list of keys within the {@code slot}.
     *
     * @param slot the slot
     * @param count maximal number of keys
     * @return List&lt;K&gt; array-reply list of keys
     */
    RedisFuture<List<K>> clusterGetKeysInSlot(int slot, int count);

    /**
     * Returns the number of keys in the specified Redis Cluster hash {@code slot}.
     *
     * @param slot the slot
     * @return Integer reply: The number of keys in the specified hash slot, or an error if the hash slot is invalid.
     */
    RedisFuture<Long> clusterCountKeysInSlot(int slot);

    /**
     * Returns the number of failure reports for the specified node. Failure reports are the way Redis Cluster uses in order to
     * promote a {@literal PFAIL} state, that means a node is not reachable, to a {@literal FAIL} state, that means that the
     * majority of masters in the cluster agreed within a window of time that the node is not reachable.
     *
     * @param nodeId the node id
     * @return Integer reply: The number of active failure reports for the node.
     */
    RedisFuture<Long> clusterCountFailureReports(String nodeId);

    /**
     * Returns an integer identifying the hash slot the specified key hashes to. This command is mainly useful for debugging and
     * testing, since it exposes via an API the underlying Redis implementation of the hashing algorithm. Basically the same as
     * {@link com.lambdaworks.redis.cluster.SlotHash#getSlot(byte[])}. If not, call Houston and report that we've got a problem.
     *
     * @param key the key.
     * @return Integer reply: The hash slot number.
     */
    RedisFuture<Long> clusterKeyslot(K key);

    /**
     * Forces a node to save the nodes.conf configuration on disk.
     *
     * @return String simple-string-reply: {@code OK} or an error if the operation fails.
     */
    RedisFuture<String> clusterSaveconfig();

    /**
     * This command sets a specific config epoch in a fresh node. It only works when:
     * <ul>
     * <li>The nodes table of the node is empty.</li>
     * <li>The node current config epoch is zero.</li>
     * </ul>
     *
     * @param configEpoch the config epoch
     * @return String simple-string-reply: {@code OK} or an error if the operation fails.
     */
    RedisFuture<String> clusterSetConfigEpoch(long configEpoch);

    /**
     * Get array of cluster slots to node mappings.
     *
     * @return RedisFuture&lt;List&lt;Object&gt;&gt; array-reply nested list of slot ranges with IP/Port mappings.
     */
    RedisFuture<List<Object>> clusterSlots();

    /**
     * The asking command is required after a {@code -ASK} redirection. The client should issue {@code ASKING} before to
     * actually send the command to the target instance. See the Redis Cluster specification for more information.
     *
     * @return String simple-string-reply
     */
    RedisFuture<String> asking();

    /**
     * Turn this node into a slave of the node with the id {@code nodeId}.
     *
     * @param nodeId master node id
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterReplicate(String nodeId);

    /**
     * Failover a cluster node. Turns the currently connected node into a master and the master into its slave.
     *
     * @param force do not coordinate with master if {@literal true}
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterFailover(boolean force);

    /**
     * Reset a node performing a soft or hard reset:
     * <ul>
     * <li>All other nodes are forgotten</li>
     * <li>All the assigned / open slots are released</li>
     * <li>If the node is a slave, it turns into a master</li>
     * <li>Only for hard reset: a new Node ID is generated</li>
     * <li>Only for hard reset: currentEpoch and configEpoch are set to 0</li>
     * <li>The new configuration is saved and the cluster state updated</li>
     * <li>If the node was a slave, the whole data set is flushed away</li>
     * </ul>
     *
     * @param hard {@literal true} for hard reset. Generates a new nodeId and currentEpoch/configEpoch are set to 0
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterReset(boolean hard);

    /**
     * Delete all the slots associated with the specified node. The number of deleted slots is returned.
     *
     * @return String simple-string-reply
     */
    RedisFuture<String> clusterFlushslots();

    /**
     * Tells a Redis cluster slave node that the client is ok reading possibly stale data and is not interested in running write
     * queries.
     *
     * @return String simple-string-reply
     */
    RedisFuture<String> readOnly();

    /**
     * Resets readOnly flag.
     *
     * @return String simple-string-reply
     */
    RedisFuture<String> readWrite();

    /**
     * Delete a key with pipelining. Cross-slot keys will result in multiple calls to the particular cluster nodes.
     *
     * @param keys the keys to delete
     * @return RedisFuture&lt;Long&gt; integer-reply The number of keys that were removed.
     */
    RedisFuture<Long> del(K... keys);

    /**
     * Get the values of all the given keys with pipelining. Cross-slot keys will result in multiple calls to the particular
     * cluster nodes.
     *
     * @param keys the keys to retrieve
     * @return RedisFuture&lt;List&lt;V&gt;&gt; array-reply list of values at the specified keys.
     */
    RedisFuture<List<V>> mget(K... keys);

    /**
     * Set multiple keys to multiple values with pipelining. Cross-slot keys will result in multiple calls to the particular
     * cluster nodes.
     *
     * @param map the map of keys and values to set
     * @return RedisFuture&lt;String&gt; simple-string-reply always {@code OK} since {@code MSET} can't fail.
     */
    RedisFuture<String> mset(Map<K, V> map);

    /**
     * Set multiple keys to multiple values, only if none of the keys exist with pipelining. Cross-slot keys will result in
     * multiple calls to the particular cluster nodes.
     *
     * @param map the map of keys and values to set
     * @return RedisFuture&lt;Boolean&gt; integer-reply specifically:
     *
     *         {@code 1} if the all the keys were set. {@code 0} if no key was set (at least one key already existed).
     */
    RedisFuture<Boolean> msetnx(Map<K, V> map);

}
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.fmi.advancedDataStructures.suffixArray; import java.util.Arrays; import java.util.LinkedList; import java.util.List; /** * ATTENTION: The code below is mighty ugly!!! * * But is reasonably fast. * * @author Dimitar */ public class SuffixArray { /* I was keeping all the suffixes in an array, but decided that it was taking too much memory. So now I'm keeping the whole text and when a need a suffix I call String.substring() */ private final String text; /* In this array we are keeping the indices, coresponding to the indices of the suffixes as if they were sorted. */ private final Integer[] indicesArray; /** * The constructor takes some text and builds the suffix array from it. For * example - let's say the text is: 'abbracaddabbra' * * <pre> * * The suffixes array And the sorted version looks * will look like that: like that: * * 0 abbracaddabbra 13 a * 1 bbracaddabbra 9 abbra * 2 bracaddabbra 0 abbracaddabbra * 3 racaddabbra 4 acaddabbra * 4 acaddabbra 6 addabbra * 5 caddabbra 10 bbra * 6 addabbra -> 1 bbracaddabbra * 7 ddabbra 11 bra * 8 dabbra 2 bracaddabbra * 9 abbra 5 caddabbra * 10 bbra 8 dabbra * 11 bra 7 ddabbra * 12 ra 12 ra * 13 a 3 racaddabbra * * </pre> * * In the indicesArray we keep only the sorted indices from the * suffixesArray. * * Having a sorted array we can use binary search! * * The current time complexity of building the array is O((n^2)*log(n)), * where n is the size of the text. * * If I have time I'll try to implement the DC3 algorithm that builds it for * O(n). 
* * @param text */ public SuffixArray(String text) { this.text = text; indicesArray = new Integer[text.length()]; for (int i = 0; i < indicesArray.length; i++) { indicesArray[i] = i; } Arrays.sort(indicesArray, (x, y) -> compare(x, y)); } /** * We're binary searching for the lower and upper bound where the pattern * matches. * * If there are such suffixes, we iterate through the interval and add their * indexes to the result list. * * The worst case scenario is when the text contains only equal characters * (for instance "aaaaaaaa") and we search for that character. In that * situation the lowerBound will be 0 and the upper - the length of the * text. * * Summary: We have two binary searches + one iteration through the range: * O(m*log(n)) + O(upperBound - lowerBound), where m is the length of the * pattern. * * @param pattern * @return List from all the indices from the original array. */ public List<Integer> search(String pattern) { List<Integer> result = new LinkedList<>(); int startIndex = lowerBound(pattern); int endIndex = upperBound(pattern); if (startIndex != -1 && endIndex != -1) { for (int i = startIndex; i < endIndex; i++) { result.add(indicesArray[i]); } } return result; } /** * Binary searches for the first appearances of the pattern. * * @param pattern * @return */ private int lowerBound(String pattern) { int start = 0; int end = indicesArray.length - 1; int mid = start + (end - start) / 2; while (true) { int cmp = compareToPattern(indicesArray[mid], pattern); if (cmp == 0 || cmp > 0 || startsWith(indicesArray[mid], pattern)) { end = mid - 1; if (end < start) { return mid; } } else { start = mid + 1; if (end < start) { return mid < indicesArray.length - 1 ? mid + 1 : -1; } } mid = start + (end - start) / 2; } } /** * Binary searches for the last appearances of the pattern. 
* * @param pattern * @return */ private int upperBound(String pattern) { int start = 0; int end = indicesArray.length - 1; int mid = start + (end - start) / 2; while (true) { int cmp = compareToPattern(indicesArray[mid], pattern); if (cmp == 0 || cmp < 0 || startsWith(indicesArray[mid], pattern)) { start = mid + 1; if (end < start) { return mid < indicesArray.length ? mid + 1 : -1; } } else { end = mid - 1; if (end < start) { return mid; } } mid = start + (end - start) / 2; } } /** * Below I'm defining 3 functions. * * I'm doing all this just to avoid using substring, which from Java 7 is * making a copy of the substring and is very slow. * * Just for comparison - while using substring the building of an array with * 1000000 elements took more than a minute. * * Now it takes less than a sec! * * * This first one compares two substring starting position x and y in the * text. * * @param x * @param y * @return */ private int compare(int x, int y) { int len1 = text.length() - x; int len2 = text.length() - y; int lim = Math.min(len1, len2); int k = 0; while (k < lim) { char c1 = text.charAt(x++); char c2 = text.charAt(y++); if (c1 != c2) { return c1 - c2; } k++; } return len1 - len2; } /** * This compares a given substring from the text with a given pattern. * * @param index * @param pattern * @return */ private int compareToPattern(int index, String pattern) { int len1 = text.length() - index; int len2 = pattern.length(); int lim = Math.min(len1, len2); int k = 0; while (k < lim) { char c1 = text.charAt(index++); char c2 = pattern.charAt(k++); if (c1 != c2) { return c1 - c2; } } return len1 - len2; } /** * Checks if a given substring from the text starts with a given pattern. * * @param index * @param pattern * @return */ private boolean startsWith(int index, String pattern) { if (text.length() - index < pattern.length()) { return false; } for (int i = 0; i < pattern.length(); i++) { if (pattern.charAt(i) != text.charAt(index + i)) { return false; } } return true; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.mybusinesslodging.v1.model; /** * Information about eating features in the living area. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the My Business Lodging API. For a detailed explanation * see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class LivingAreaEating extends com.google.api.client.json.GenericJson { /** * Coffee maker. An electric appliance that brews coffee by heating and forcing water through * ground coffee. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean coffeeMaker; /** * Coffee maker exception. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String coffeeMakerException; /** * Cookware. Kitchen pots, pans and utensils used in connection with the preparation of food. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean cookware; /** * Cookware exception. * The value may be {@code null}. 
*/
  @com.google.api.client.util.Key
  private java.lang.String cookwareException;

  /** Dishwasher: a cabinet with racks for dirty dishware, cookware and cutlery, attached to the plumbing for automatic cleaning. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean dishwasher;

  /** Dishwasher exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String dishwasherException;

  /** Indoor grill: metal grates built into an indoor cooktop, over an open flame or electric heat source. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean indoorGrill;

  /** Indoor grill exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String indoorGrillException;

  /** Kettle: a covered container with a handle and a spout used for boiling water. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean kettle;

  /** Kettle exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String kettleException;

  /** Kitchen available: a guestroom area for preparing and storing food (refrigerator, cook top, oven, sink, cutlery, dishes and cookware; usually small appliances such as a coffee maker and microwave; dishwasher optional). {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean kitchenAvailable;

  /** Kitchen available exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String kitchenAvailableException;

  /** Microwave: an electric oven that quickly cooks and heats food by microwave energy; smaller than a standing or wall-mounted oven. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean microwave;

  /** Microwave exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String microwaveException;

  /** Minibar: a small refrigerated in-room cabinet with drinks and snacks, usually for a fee. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean minibar;

  /** Minibar exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String minibarException;

  /** Outdoor grill (barbecue): metal grates over an open flame or electric heat source, part of an outdoor apparatus. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean outdoorGrill;

  /** Outdoor grill exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String outdoorGrillException;

  /** Oven: a temperature-controlled, heated metal cabinet powered by gas or electricity for cooking or reheating food. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean oven;

  /** Oven exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String ovenException;

  /** Refrigerator: a large climate-controlled electrical cabinet for chilling and storing perishable foods. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean refrigerator;

  /** Refrigerator exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String refrigeratorException;

  /** Sink: a basin with a faucet attached to a water source, used for washing and rinsing. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean sink;

  /** Sink exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String sinkException;

  /** Snackbar: a small in-room cabinet containing snacks, most commonly for a fee. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean snackbar;

  /** Snackbar exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String snackbarException;

  /** Stove (cooktop/hob): a gas- or electricity-powered appliance creating a flame or hot surface on which pots of food can be cooked. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean stove;

  /** Stove exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String stoveException;

  /** Tea station: a small area with the supplies needed to heat water and make tea. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean teaStation;

  /** Tea station exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String teaStationException;

  /** Toaster: a small temperature-controlled electric appliance with heated slots for browning slices of bread products. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.Boolean toaster;

  /** Toaster exception. {@code null} if unset. */
  @com.google.api.client.util.Key
  private java.lang.String toasterException;

  /**
   * Coffee maker: an electric appliance that brews coffee by heating and forcing water through
   * ground coffee.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getCoffeeMaker() {
    return coffeeMaker;
  }

  /**
   * Sets whether a coffee maker is present.
   *
   * @param coffeeMaker coffeeMaker or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setCoffeeMaker(java.lang.Boolean coffeeMaker) {
    this.coffeeMaker = coffeeMaker;
    return this;
  }

  /**
   * Coffee maker exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getCoffeeMakerException() {
    return coffeeMakerException;
  }

  /**
   * Sets the coffee maker exception.
   *
   * @param coffeeMakerException coffeeMakerException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setCoffeeMakerException(java.lang.String coffeeMakerException) {
    this.coffeeMakerException = coffeeMakerException;
    return this;
  }

  /**
   * Cookware: kitchen pots, pans and utensils used in connection with the preparation of food.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getCookware() {
    return cookware;
  }

  /**
   * Sets whether cookware is present.
   *
   * @param cookware cookware or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setCookware(java.lang.Boolean cookware) {
    this.cookware = cookware;
    return this;
  }

  /**
   * Cookware exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getCookwareException() {
    return cookwareException;
  }

  /**
   * Sets the cookware exception.
   *
   * @param cookwareException cookwareException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setCookwareException(java.lang.String cookwareException) {
    this.cookwareException = cookwareException;
    return this;
  }

  /**
   * Dishwasher: see {@link #dishwasher}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDishwasher() {
    return dishwasher;
  }

  /**
   * Sets whether a dishwasher is present.
   *
   * @param dishwasher dishwasher or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setDishwasher(java.lang.Boolean dishwasher) {
    this.dishwasher = dishwasher;
    return this;
  }

  /**
   * Dishwasher exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getDishwasherException() {
    return dishwasherException;
  }

  /**
   * Sets the dishwasher exception.
   *
   * @param dishwasherException dishwasherException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setDishwasherException(java.lang.String dishwasherException) {
    this.dishwasherException = dishwasherException;
    return this;
  }

  /**
   * Indoor grill: see {@link #indoorGrill}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getIndoorGrill() {
    return indoorGrill;
  }

  /**
   * Sets whether an indoor grill is present.
   *
   * @param indoorGrill indoorGrill or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setIndoorGrill(java.lang.Boolean indoorGrill) {
    this.indoorGrill = indoorGrill;
    return this;
  }

  /**
   * Indoor grill exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getIndoorGrillException() {
    return indoorGrillException;
  }

  /**
   * Sets the indoor grill exception.
   *
   * @param indoorGrillException indoorGrillException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setIndoorGrillException(java.lang.String indoorGrillException) {
    this.indoorGrillException = indoorGrillException;
    return this;
  }

  /**
   * Kettle: see {@link #kettle}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getKettle() {
    return kettle;
  }

  /**
   * Sets whether a kettle is present.
   *
   * @param kettle kettle or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setKettle(java.lang.Boolean kettle) {
    this.kettle = kettle;
    return this;
  }

  /**
   * Kettle exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getKettleException() {
    return kettleException;
  }

  /**
   * Sets the kettle exception.
   *
   * @param kettleException kettleException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setKettleException(java.lang.String kettleException) {
    this.kettleException = kettleException;
    return this;
  }

  /**
   * Kitchen available: see {@link #kitchenAvailable}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getKitchenAvailable() {
    return kitchenAvailable;
  }

  /**
   * Sets whether a kitchen is available.
   *
   * @param kitchenAvailable kitchenAvailable or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setKitchenAvailable(java.lang.Boolean kitchenAvailable) {
    this.kitchenAvailable = kitchenAvailable;
    return this;
  }

  /**
   * Kitchen available exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getKitchenAvailableException() {
    return kitchenAvailableException;
  }

  /**
   * Sets the kitchen available exception.
   *
   * @param kitchenAvailableException kitchenAvailableException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setKitchenAvailableException(java.lang.String kitchenAvailableException) {
    this.kitchenAvailableException = kitchenAvailableException;
    return this;
  }

  /**
   * Microwave: see {@link #microwave}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getMicrowave() {
    return microwave;
  }

  /**
   * Sets whether a microwave is present.
   *
   * @param microwave microwave or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setMicrowave(java.lang.Boolean microwave) {
    this.microwave = microwave;
    return this;
  }

  /**
   * Microwave exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getMicrowaveException() {
    return microwaveException;
  }

  /**
   * Sets the microwave exception.
   *
   * @param microwaveException microwaveException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setMicrowaveException(java.lang.String microwaveException) {
    this.microwaveException = microwaveException;
    return this;
  }

  /**
   * Minibar: see {@link #minibar}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getMinibar() {
    return minibar;
  }

  /**
   * Sets whether a minibar is present.
   *
   * @param minibar minibar or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setMinibar(java.lang.Boolean minibar) {
    this.minibar = minibar;
    return this;
  }

  /**
   * Minibar exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getMinibarException() {
    return minibarException;
  }

  /**
   * Sets the minibar exception.
   *
   * @param minibarException minibarException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setMinibarException(java.lang.String minibarException) {
    this.minibarException = minibarException;
    return this;
  }

  /**
   * Outdoor grill: see {@link #outdoorGrill}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getOutdoorGrill() {
    return outdoorGrill;
  }

  /**
   * Sets whether an outdoor grill is present.
   *
   * @param outdoorGrill outdoorGrill or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setOutdoorGrill(java.lang.Boolean outdoorGrill) {
    this.outdoorGrill = outdoorGrill;
    return this;
  }

  /**
   * Outdoor grill exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getOutdoorGrillException() {
    return outdoorGrillException;
  }

  /**
   * Sets the outdoor grill exception.
   *
   * @param outdoorGrillException outdoorGrillException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setOutdoorGrillException(java.lang.String outdoorGrillException) {
    this.outdoorGrillException = outdoorGrillException;
    return this;
  }

  /**
   * Oven: see {@link #oven}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getOven() {
    return oven;
  }

  /**
   * Sets whether an oven is present.
   *
   * @param oven oven or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setOven(java.lang.Boolean oven) {
    this.oven = oven;
    return this;
  }

  /**
   * Oven exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getOvenException() {
    return ovenException;
  }

  /**
   * Sets the oven exception.
   *
   * @param ovenException ovenException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setOvenException(java.lang.String ovenException) {
    this.ovenException = ovenException;
    return this;
  }

  /**
   * Refrigerator: see {@link #refrigerator}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getRefrigerator() {
    return refrigerator;
  }

  /**
   * Sets whether a refrigerator is present.
   *
   * @param refrigerator refrigerator or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setRefrigerator(java.lang.Boolean refrigerator) {
    this.refrigerator = refrigerator;
    return this;
  }

  /**
   * Refrigerator exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getRefrigeratorException() {
    return refrigeratorException;
  }

  /**
   * Sets the refrigerator exception.
   *
   * @param refrigeratorException refrigeratorException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setRefrigeratorException(java.lang.String refrigeratorException) {
    this.refrigeratorException = refrigeratorException;
    return this;
  }

  /**
   * Sink: see {@link #sink}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSink() {
    return sink;
  }

  /**
   * Sets whether a sink is present.
   *
   * @param sink sink or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setSink(java.lang.Boolean sink) {
    this.sink = sink;
    return this;
  }

  /**
   * Sink exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getSinkException() {
    return sinkException;
  }

  /**
   * Sets the sink exception.
   *
   * @param sinkException sinkException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setSinkException(java.lang.String sinkException) {
    this.sinkException = sinkException;
    return this;
  }

  /**
   * Snackbar: see {@link #snackbar}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSnackbar() {
    return snackbar;
  }

  /**
   * Sets whether a snackbar is present.
   *
   * @param snackbar snackbar or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setSnackbar(java.lang.Boolean snackbar) {
    this.snackbar = snackbar;
    return this;
  }

  /**
   * Snackbar exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getSnackbarException() {
    return snackbarException;
  }

  /**
   * Sets the snackbar exception.
   *
   * @param snackbarException snackbarException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setSnackbarException(java.lang.String snackbarException) {
    this.snackbarException = snackbarException;
    return this;
  }

  /**
   * Stove: see {@link #stove}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getStove() {
    return stove;
  }

  /**
   * Sets whether a stove is present.
   *
   * @param stove stove or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setStove(java.lang.Boolean stove) {
    this.stove = stove;
    return this;
  }

  /**
   * Stove exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getStoveException() {
    return stoveException;
  }

  /**
   * Sets the stove exception.
   *
   * @param stoveException stoveException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setStoveException(java.lang.String stoveException) {
    this.stoveException = stoveException;
    return this;
  }

  /**
   * Tea station: see {@link #teaStation}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getTeaStation() {
    return teaStation;
  }

  /**
   * Sets whether a tea station is present.
   *
   * @param teaStation teaStation or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setTeaStation(java.lang.Boolean teaStation) {
    this.teaStation = teaStation;
    return this;
  }

  /**
   * Tea station exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getTeaStationException() {
    return teaStationException;
  }

  /**
   * Sets the tea station exception.
   *
   * @param teaStationException teaStationException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setTeaStationException(java.lang.String teaStationException) {
    this.teaStationException = teaStationException;
    return this;
  }

  /**
   * Toaster: see {@link #toaster}.
   *
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getToaster() {
    return toaster;
  }

  /**
   * Sets whether a toaster is present.
   *
   * @param toaster toaster or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setToaster(java.lang.Boolean toaster) {
    this.toaster = toaster;
    return this;
  }

  /**
   * Toaster exception.
   *
   * @return value or {@code null} for none
   */
  public java.lang.String getToasterException() {
    return toasterException;
  }

  /**
   * Sets the toaster exception.
   *
   * @param toasterException toasterException or {@code null} for none
   * @return this object, for chaining
   */
  public LivingAreaEating setToasterException(java.lang.String toasterException) {
    this.toasterException = toasterException;
    return this;
  }

  @Override
  public LivingAreaEating set(String fieldName, Object value) {
    return (LivingAreaEating) super.set(fieldName, value);
  }

  @Override
  public LivingAreaEating clone() {
    return (LivingAreaEating) super.clone();
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.constraint.ConstraintException; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; @Category({MasterTests.class, MediumTests.class}) public class TestModifyNamespaceProcedure { private static final Log LOG = 
LogFactory.getLog(TestModifyNamespaceProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static void setupConf(Configuration conf) { conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1); } @BeforeClass public static void setupCluster() throws Exception { setupConf(UTIL.getConfiguration()); UTIL.startMiniCluster(1); } @AfterClass public static void cleanupTest() throws Exception { try { UTIL.shutdownMiniCluster(); } catch (Exception e) { LOG.warn("failure shutting down cluster", e); } } @Before public void setup() throws Exception { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false); } @After public void tearDown() throws Exception { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false); for (HTableDescriptor htd: UTIL.getAdmin().listTables()) { LOG.info("Tear down, remove table=" + htd.getTableName()); UTIL.deleteTable(htd.getTableName()); } } @Test(timeout = 60000) public void testModifyNamespace() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testModifyNamespace").build(); final String nsKey1 = "hbase.namespace.quota.maxregions"; final String nsValue1before = "1111"; final String nsValue1after = "9999"; final String nsKey2 = "hbase.namespace.quota.maxtables"; final String nsValue2 = "10"; final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); nsd.setConfiguration(nsKey1, nsValue1before); createNamespaceForTesting(nsd); // Before modify NamespaceDescriptor currentNsDescriptor = UTIL.getAdmin().getNamespaceDescriptor(nsd.getName()); assertEquals(currentNsDescriptor.getConfigurationValue(nsKey1), nsValue1before); assertNull(currentNsDescriptor.getConfigurationValue(nsKey2)); // Update nsd.setConfiguration(nsKey1, nsValue1after); nsd.setConfiguration(nsKey2, nsValue2); long procId1 = procExec.submitProcedure( new 
ModifyNamespaceProcedure(procExec.getEnvironment(), nsd)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId1); ProcedureTestingUtility.assertProcNotFailed(procExec, procId1); // Verify the namespace is updated. currentNsDescriptor = UTIL.getAdmin().getNamespaceDescriptor(nsd.getName()); assertEquals(nsd.getConfigurationValue(nsKey1), nsValue1after); assertEquals(currentNsDescriptor.getConfigurationValue(nsKey2), nsValue2); } @Test(timeout=60000) public void testModifyNonExistNamespace() throws Exception { final String namespaceName = "testModifyNonExistNamespace"; final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); try { NamespaceDescriptor nsDescriptor = UTIL.getAdmin().getNamespaceDescriptor(namespaceName); assertNull(nsDescriptor); } catch (NamespaceNotFoundException nsnfe) { // Expected LOG.debug("The namespace " + namespaceName + " does not exist. This is expected."); } final NamespaceDescriptor nsd = NamespaceDescriptor.create(namespaceName).build(); long procId = procExec.submitProcedure( new ModifyNamespaceProcedure(procExec.getEnvironment(), nsd)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId); // Expect fail with NamespaceNotFoundException Procedure<?> result = procExec.getResult(procId); assertTrue(result.isFailed()); LOG.debug("modify namespace failed with exception: " + result.getException()); assertTrue( ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceNotFoundException); } @Test(timeout=60000) public void testModifyNamespaceWithInvalidRegionCount() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testModifyNamespaceWithInvalidRegionCount").build(); final String nsKey = "hbase.namespace.quota.maxregions"; final String nsValue = "-1"; final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); createNamespaceForTesting(nsd); // Modify nsd.setConfiguration(nsKey, nsValue); long procId 
= procExec.submitProcedure( new ModifyNamespaceProcedure(procExec.getEnvironment(), nsd)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId); Procedure<?> result = procExec.getResult(procId); assertTrue(result.isFailed()); LOG.debug("Modify namespace failed with exception: " + result.getException()); assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } @Test(timeout=60000) public void testModifyNamespaceWithInvalidTableCount() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testModifyNamespaceWithInvalidTableCount").build(); final String nsKey = "hbase.namespace.quota.maxtables"; final String nsValue = "-1"; final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); createNamespaceForTesting(nsd); // Modify nsd.setConfiguration(nsKey, nsValue); long procId = procExec.submitProcedure( new ModifyNamespaceProcedure(procExec.getEnvironment(), nsd)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId); Procedure<?> result = procExec.getResult(procId); assertTrue(result.isFailed()); LOG.debug("Modify namespace failed with exception: " + result.getException()); assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } @Test(timeout = 60000) public void testRecoveryAndDoubleExecution() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testRecoveryAndDoubleExecution").build(); final String nsKey = "foo"; final String nsValue = "bar"; final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); createNamespaceForTesting(nsd); ProcedureTestingUtility.waitNoProcedureRunning(procExec); ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true); // Modify nsd.setConfiguration(nsKey, nsValue); // Start the Modify procedure && kill the executor long procId = procExec.submitProcedure( new 
ModifyNamespaceProcedure(procExec.getEnvironment(), nsd)); // Restart the executor and execute the step twice MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId); ProcedureTestingUtility.assertProcNotFailed(procExec, procId); // Validate NamespaceDescriptor currentNsDescriptor = UTIL.getAdmin().getNamespaceDescriptor(nsd.getName()); assertEquals(currentNsDescriptor.getConfigurationValue(nsKey), nsValue); } @Test(timeout = 60000) public void testRollbackAndDoubleExecution() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testRollbackAndDoubleExecution").build(); final String nsKey = "foo"; final String nsValue = "bar"; final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); createNamespaceForTesting(nsd); ProcedureTestingUtility.waitNoProcedureRunning(procExec); ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true); // Modify nsd.setConfiguration(nsKey, nsValue); // Start the Modify procedure && kill the executor long procId = procExec.submitProcedure( new ModifyNamespaceProcedure(procExec.getEnvironment(), nsd)); int numberOfSteps = 0; // failing at pre operation MasterProcedureTestingUtility.testRollbackAndDoubleExecution(procExec, procId, numberOfSteps); // Validate NamespaceDescriptor currentNsDescriptor = UTIL.getAdmin().getNamespaceDescriptor(nsd.getName()); assertNull(currentNsDescriptor.getConfigurationValue(nsKey)); } private ProcedureExecutor<MasterProcedureEnv> getMasterProcedureExecutor() { return UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor(); } private void createNamespaceForTesting(NamespaceDescriptor nsDescriptor) throws Exception { final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor(); long procId = procExec.submitProcedure( new CreateNamespaceProcedure(procExec.getEnvironment(), nsDescriptor)); // Wait the completion ProcedureTestingUtility.waitProcedure(procExec, procId); 
ProcedureTestingUtility.assertProcNotFailed(procExec, procId); } }
package com.youtube.vitess.client.grpc; import com.google.common.util.concurrent.AsyncFunction; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.youtube.vitess.client.Context; import com.youtube.vitess.client.RpcClient; import com.youtube.vitess.client.StreamIterator; import com.youtube.vitess.proto.Query.QueryResult; import com.youtube.vitess.proto.Vtgate.BeginRequest; import com.youtube.vitess.proto.Vtgate.BeginResponse; import com.youtube.vitess.proto.Vtgate.CommitRequest; import com.youtube.vitess.proto.Vtgate.CommitResponse; import com.youtube.vitess.proto.Vtgate.ExecuteBatchKeyspaceIdsRequest; import com.youtube.vitess.proto.Vtgate.ExecuteBatchKeyspaceIdsResponse; import com.youtube.vitess.proto.Vtgate.ExecuteBatchShardsRequest; import com.youtube.vitess.proto.Vtgate.ExecuteBatchShardsResponse; import com.youtube.vitess.proto.Vtgate.ExecuteEntityIdsRequest; import com.youtube.vitess.proto.Vtgate.ExecuteEntityIdsResponse; import com.youtube.vitess.proto.Vtgate.ExecuteKeyRangesRequest; import com.youtube.vitess.proto.Vtgate.ExecuteKeyRangesResponse; import com.youtube.vitess.proto.Vtgate.ExecuteKeyspaceIdsRequest; import com.youtube.vitess.proto.Vtgate.ExecuteKeyspaceIdsResponse; import com.youtube.vitess.proto.Vtgate.ExecuteRequest; import com.youtube.vitess.proto.Vtgate.ExecuteResponse; import com.youtube.vitess.proto.Vtgate.ExecuteShardsRequest; import com.youtube.vitess.proto.Vtgate.ExecuteShardsResponse; import com.youtube.vitess.proto.Vtgate.GetSrvKeyspaceRequest; import com.youtube.vitess.proto.Vtgate.GetSrvKeyspaceResponse; import com.youtube.vitess.proto.Vtgate.RollbackRequest; import com.youtube.vitess.proto.Vtgate.RollbackResponse; import com.youtube.vitess.proto.Vtgate.SplitQueryRequest; import com.youtube.vitess.proto.Vtgate.SplitQueryResponse; import com.youtube.vitess.proto.Vtgate.StreamExecuteKeyRangesRequest; import 
com.youtube.vitess.proto.Vtgate.StreamExecuteKeyRangesResponse; import com.youtube.vitess.proto.Vtgate.StreamExecuteKeyspaceIdsRequest; import com.youtube.vitess.proto.Vtgate.StreamExecuteKeyspaceIdsResponse; import com.youtube.vitess.proto.Vtgate.StreamExecuteRequest; import com.youtube.vitess.proto.Vtgate.StreamExecuteResponse; import com.youtube.vitess.proto.Vtgate.StreamExecuteShardsRequest; import com.youtube.vitess.proto.Vtgate.StreamExecuteShardsResponse; import com.youtube.vitess.proto.grpc.VitessGrpc; import com.youtube.vitess.proto.grpc.VitessGrpc.VitessFutureStub; import com.youtube.vitess.proto.grpc.VitessGrpc.VitessStub; import org.joda.time.Duration; import io.grpc.ManagedChannel; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.sql.SQLException; import java.sql.SQLIntegrityConstraintViolationException; import java.sql.SQLInvalidAuthorizationSpecException; import java.sql.SQLNonTransientException; import java.sql.SQLSyntaxErrorException; import java.sql.SQLTimeoutException; import java.sql.SQLTransientException; import java.util.concurrent.TimeUnit; /** * GrpcClient is a gRPC-based implementation of Vitess Rpcclient. 
*/ public class GrpcClient implements RpcClient { private final ManagedChannel channel; private final VitessStub asyncStub; private final VitessFutureStub futureStub; public GrpcClient(ManagedChannel channel) { this.channel = channel; asyncStub = VitessGrpc.newStub(channel); futureStub = VitessGrpc.newFutureStub(channel); } @Override public void close() throws IOException { channel.shutdown(); } @Override public ListenableFuture<ExecuteResponse> execute(Context ctx, ExecuteRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).execute(request), Exception.class, new ExceptionConverter<ExecuteResponse>()); } @Override public ListenableFuture<ExecuteShardsResponse> executeShards( Context ctx, ExecuteShardsRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).executeShards(request), Exception.class, new ExceptionConverter<ExecuteShardsResponse>()); } @Override public ListenableFuture<ExecuteKeyspaceIdsResponse> executeKeyspaceIds( Context ctx, ExecuteKeyspaceIdsRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).executeKeyspaceIds(request), Exception.class, new ExceptionConverter<ExecuteKeyspaceIdsResponse>()); } @Override public ListenableFuture<ExecuteKeyRangesResponse> executeKeyRanges( Context ctx, ExecuteKeyRangesRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).executeKeyRanges(request), Exception.class, new ExceptionConverter<ExecuteKeyRangesResponse>()); } @Override public ListenableFuture<ExecuteEntityIdsResponse> executeEntityIds( Context ctx, ExecuteEntityIdsRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).executeEntityIds(request), Exception.class, new ExceptionConverter<ExecuteEntityIdsResponse>()); } @Override public ListenableFuture<ExecuteBatchShardsResponse> executeBatchShards( Context ctx, ExecuteBatchShardsRequest request) throws SQLException { return Futures.catchingAsync( 
getFutureStub(ctx).executeBatchShards(request), Exception.class, new ExceptionConverter<ExecuteBatchShardsResponse>()); } @Override public ListenableFuture<ExecuteBatchKeyspaceIdsResponse> executeBatchKeyspaceIds( Context ctx, ExecuteBatchKeyspaceIdsRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).executeBatchKeyspaceIds(request), Exception.class, new ExceptionConverter<ExecuteBatchKeyspaceIdsResponse>()); } @Override public StreamIterator<QueryResult> streamExecute(Context ctx, StreamExecuteRequest request) throws SQLException { GrpcStreamAdapter<StreamExecuteResponse, QueryResult> adapter = new GrpcStreamAdapter<StreamExecuteResponse, QueryResult>() { @Override QueryResult getResult(StreamExecuteResponse response) throws SQLException { return response.getResult(); } }; getAsyncStub(ctx).streamExecute(request, adapter); return adapter; } @Override public StreamIterator<QueryResult> streamExecuteShards( Context ctx, StreamExecuteShardsRequest request) throws SQLException { GrpcStreamAdapter<StreamExecuteShardsResponse, QueryResult> adapter = new GrpcStreamAdapter<StreamExecuteShardsResponse, QueryResult>() { @Override QueryResult getResult(StreamExecuteShardsResponse response) throws SQLException { return response.getResult(); } }; getAsyncStub(ctx).streamExecuteShards(request, adapter); return adapter; } @Override public StreamIterator<QueryResult> streamExecuteKeyspaceIds( Context ctx, StreamExecuteKeyspaceIdsRequest request) throws SQLException { GrpcStreamAdapter<StreamExecuteKeyspaceIdsResponse, QueryResult> adapter = new GrpcStreamAdapter<StreamExecuteKeyspaceIdsResponse, QueryResult>() { @Override QueryResult getResult(StreamExecuteKeyspaceIdsResponse response) throws SQLException { return response.getResult(); } }; getAsyncStub(ctx).streamExecuteKeyspaceIds(request, adapter); return adapter; } @Override public StreamIterator<QueryResult> streamExecuteKeyRanges( Context ctx, StreamExecuteKeyRangesRequest request) throws 
SQLException { GrpcStreamAdapter<StreamExecuteKeyRangesResponse, QueryResult> adapter = new GrpcStreamAdapter<StreamExecuteKeyRangesResponse, QueryResult>() { @Override QueryResult getResult(StreamExecuteKeyRangesResponse response) throws SQLException { return response.getResult(); } }; getAsyncStub(ctx).streamExecuteKeyRanges(request, adapter); return adapter; } @Override public ListenableFuture<BeginResponse> begin(Context ctx, BeginRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).begin(request), Exception.class, new ExceptionConverter<BeginResponse>()); } @Override public ListenableFuture<CommitResponse> commit(Context ctx, CommitRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).commit(request), Exception.class, new ExceptionConverter<CommitResponse>()); } @Override public ListenableFuture<RollbackResponse> rollback(Context ctx, RollbackRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).rollback(request), Exception.class, new ExceptionConverter<RollbackResponse>()); } @Override public ListenableFuture<SplitQueryResponse> splitQuery(Context ctx, SplitQueryRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).splitQuery(request), Exception.class, new ExceptionConverter<SplitQueryResponse>()); } @Override public ListenableFuture<GetSrvKeyspaceResponse> getSrvKeyspace( Context ctx, GetSrvKeyspaceRequest request) throws SQLException { return Futures.catchingAsync( getFutureStub(ctx).getSrvKeyspace(request), Exception.class, new ExceptionConverter<GetSrvKeyspaceResponse>()); } /** * Converts an exception from the gRPC framework into the appropriate {@link SQLException}. 
*/ static SQLException convertGrpcError(Throwable e) { if (e instanceof StatusRuntimeException) { StatusRuntimeException sre = (StatusRuntimeException) e; switch (sre.getStatus().getCode()) { case INVALID_ARGUMENT: return new SQLSyntaxErrorException(sre.toString(), sre); case DEADLINE_EXCEEDED: return new SQLTimeoutException(sre.toString(), sre); case ALREADY_EXISTS: return new SQLIntegrityConstraintViolationException(sre.toString(), sre); case UNAUTHENTICATED: return new SQLInvalidAuthorizationSpecException(sre.toString(), sre); case UNAVAILABLE: return new SQLTransientException(sre.toString(), sre); default: // Covers e.g. UNKNOWN. String advice = ""; if (e.getCause() instanceof java.nio.channels.ClosedChannelException) { advice = "Failed to connect to vtgate. Make sure that vtgate is running and you are using the correct address. Details: "; } return new SQLNonTransientException( "gRPC StatusRuntimeException: " + advice + e.toString(), e); } } return new SQLNonTransientException("gRPC error: " + e.toString(), e); } static class ExceptionConverter<V> implements AsyncFunction<Exception, V> { @Override public ListenableFuture<V> apply(Exception e) throws Exception { throw convertGrpcError(e); } } private VitessStub getAsyncStub(Context ctx) { Duration timeout = ctx.getTimeout(); if (timeout == null) { return asyncStub; } return asyncStub.withDeadlineAfter(timeout.getMillis(), TimeUnit.MILLISECONDS); } private VitessFutureStub getFutureStub(Context ctx) { Duration timeout = ctx.getTimeout(); if (timeout == null) { return futureStub; } return futureStub.withDeadlineAfter(timeout.getMillis(), TimeUnit.MILLISECONDS); } }
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.diagnostic;

import com.intellij.application.options.RegistryManager;
import com.intellij.execution.process.OSProcessUtil;
import com.intellij.featureStatistics.fusCollectors.LifecycleUsageTriggerCollector;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.internal.statistic.utils.PluginInfo;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.*;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.IdeaLoggingEvent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionNotApplicableException;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.registry.RegistryValueListener;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.concurrency.AppScheduledExecutorService;
import com.intellij.util.containers.ContainerUtil;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.lang.management.ThreadInfo;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;

/**
 * Watches IDE responsiveness.
 *
 * <p>A background task on {@link #myExecutor} periodically samples its own scheduling
 * latency ({@link #samplePerformance}) and the EDT's latency, feeding both into
 * {@link ApdexData} accumulators. Each EDT event is bracketed by
 * {@link #edtEventStarted()} / {@link #edtEventFinished()}; a {@link FreezeCheckerTask}
 * scheduled per event fires if the event runs longer than the configured
 * "unresponsive" interval, at which point thread dumps are written under
 * {@code threadDumps-*} directories in the log directory. On startup the class also
 * reports JVM crash files left over from a previous run ({@link #reportCrashesIfAny}).
 *
 * <p>Thresholds are driven by registry values (sampling interval, max attempts,
 * unresponsive interval, max dump duration) and are re-read when they change.
 */
public final class PerformanceWatcher implements Disposable {
  // Cached singleton; reset by CachedSingletonsRegistry between application instances.
  private static @Nullable PerformanceWatcher ourInstance = CachedSingletonsRegistry.markCachedField(PerformanceWatcher.class);
  private static final Logger LOG = Logger.getInstance(PerformanceWatcher.class);
  // Latency (ms) below which an event does not count against the Apdex score.
  private static final int TOLERABLE_LATENCY = 100;
  private static final String THREAD_DUMPS_PREFIX = "threadDumps-";
  static final String DUMP_PREFIX = "threadDump-";
  // Marker file holding the freeze duration; its presence marks an unfinished freeze.
  private static final String DURATION_FILE_NAME = ".duration";
  private static final String PID_FILE_NAME = ".pid";

  private final File myLogDir = new File(PathManager.getLogPath());

  private volatile ApdexData mySwingApdex = ApdexData.EMPTY;
  private volatile ApdexData myGeneralApdex = ApdexData.EMPTY;
  private volatile long myLastSampling = System.nanoTime();
  // Depth of nested EDT events currently in progress; only touched on the EDT.
  private int myActiveEvents;

  private static final long ourIdeStart = System.currentTimeMillis();

  private final ScheduledExecutorService myExecutor =
    AppExecutorUtil.createBoundedScheduledExecutorService("EDT Performance Checker", 1);
  // Non-null iff periodic sampling is active; also used as the "watcher enabled" flag.
  private @Nullable ScheduledFuture<?> myThread;
  private @Nullable FreezeCheckerTask myCurrentEDTEventChecker;

  private final JitWatcher myJitWatcher = new JitWatcher();

  private final @NotNull RegistryValue mySamplingInterval;
  private final @NotNull RegistryValue myMaxAttemptsCount;
  private final @NotNull RegistryValue myUnresponsiveInterval;
  private final @NotNull RegistryValue myMaxDumpDuration;

  /**
   * Returns the instance without forcing service creation; may consult the service
   * container once the configuration store has been initialized.
   */
  @ApiStatus.Internal
  public static @Nullable PerformanceWatcher getInstanceOrNull() {
    PerformanceWatcher watcher = ourInstance;
    if (watcher == null && LoadingState.CONFIGURATION_STORE_INITIALIZED.isOccurred()) {
      Application app = ApplicationManager.getApplication();
      if (app != null) {
        watcher = app.getServiceIfCreated(PerformanceWatcher.class);
      }
    }
    return watcher;
  }

  public static @NotNull PerformanceWatcher getInstance() {
    LoadingState.CONFIGURATION_STORE_INITIALIZED.checkOccurred();
    return ourInstance != null ? ourInstance : ApplicationManager.getApplication().getService(PerformanceWatcher.class);
  }

  @SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
  private PerformanceWatcher() {
    Application application = ApplicationManager.getApplication();
    if (application == null) {
      throw ExtensionNotApplicableException.INSTANCE;
    }

    RegistryManager registryManager = application.getService(RegistryManager.class);
    mySamplingInterval = registryManager.get("performance.watcher.sampling.interval.ms");
    myMaxAttemptsCount = registryManager.get("performance.watcher.unresponsive.max.attempts.before.log");
    myUnresponsiveInterval = registryManager.get("performance.watcher.unresponsive.interval.ms");
    myMaxDumpDuration = registryManager.get("performance.watcher.dump.duration.s");

    if (application.isHeadlessEnvironment()) {
      // No EDT latency to watch in headless mode; registry fields are still initialized above.
      return;
    }

    // (Re)starts or cancels the sampling task whenever a relevant registry value changes.
    // A non-positive unresponsive interval or attempt count disables the watcher entirely.
    RegistryValueListener cancelingListener = new RegistryValueListener() {
      @Override
      public void afterValueChanged(@NotNull RegistryValue value) {
        int samplingIntervalMs = getUnresponsiveInterval() > 0 && getMaxAttemptsCount() > 0 ? getSamplingInterval() : 0;
        if (samplingIntervalMs <= 0) {
          cancelThread();
          myThread = null;
        }
        else if (mySamplingInterval == value) {
          // NOTE(review): only a change of the sampling interval itself (or the initial
          // call below) reschedules the task — confirm changes to the other two values
          // are intended to merely enable/disable, not reschedule.
          cancelThread();
          myThread = myExecutor.scheduleWithFixedDelay(() -> samplePerformance(samplingIntervalMs),
                                                       samplingIntervalMs, samplingIntervalMs, TimeUnit.MILLISECONDS);
        }
      }
    };
    for (RegistryValue value : List.of(mySamplingInterval, myMaxAttemptsCount, myUnresponsiveInterval)) {
      value.addListener(cancelingListener, this);
    }

    // Warn (on EAP builds) when the shared thread pool grows beyond the configured size.
    RegistryValue ourReasonableThreadPoolSize = registryManager.get("core.pooled.threads");
    AppScheduledExecutorService service = (AppScheduledExecutorService)AppExecutorUtil.getAppScheduledExecutorService();
    service.setNewThreadListener((thread, runnable) -> {
      if (service.getBackendPoolExecutorSize() > ourReasonableThreadPoolSize.asInteger()
          && ApplicationInfoImpl.getShadowInstance().isEAP()) {
        File file = dumpThreads("newPooledThread/", true);
        LOG.info("Not enough pooled threads" + (file != null ? "; dumped threads into file '" + file.getPath() + "'" : ""));
      }
    });

    reportCrashesIfAny();
    cleanOldFiles(myLogDir, 0);
    // Kick the listener once to start sampling with the current registry values.
    cancelingListener.afterValueChanged(mySamplingInterval);
    ourInstance = this;
  }

  /**
   * Looks for JVM crash logs (hs_err-style files in the user home) produced by the
   * previous IDE process (identified by the saved .pid file) and reports the newest
   * one, then refreshes the saved app info and pid for the current run.
   */
  private static void reportCrashesIfAny() {
    Path systemDir = Path.of(PathManager.getSystemPath());
    try {
      Path appInfoFile = systemDir.resolve(IdeaFreezeReporter.APPINFO_FILE_NAME);
      Path pidFile = systemDir.resolve(PID_FILE_NAME);
      // TODO: check jre in app info, not the current
      // Only report if on JetBrains jre
      if (SystemInfo.isJetBrainsJvm && Files.isRegularFile(appInfoFile) && Files.isRegularFile(pidFile)) {
        String pid = Files.readString(pidFile);
        File[] crashFiles = new File(SystemProperties.getUserHome()).listFiles(file -> {
          return file.getName().startsWith("java_error_in") && file.getName().endsWith(pid + ".log") && file.isFile();
        });
        if (crashFiles != null) {
          long appInfoFileLastModified = Files.getLastModifiedTime(appInfoFile).toMillis();
          for (File file : crashFiles) {
            // Only crashes newer than the last saved app info belong to the previous run.
            if (file.lastModified() > appInfoFileLastModified) {
              if (file.length() > 5 * FileUtilRt.MEGABYTE) {
                LOG.info("Crash file " + file + " is too big to report");
                break;
              }
              String content = FileUtil.loadFile(file);
              // TODO: maybe we need to notify the user
              if (content.contains("fuck_the_regulations")) {
                break;
              }
              Attachment attachment = new Attachment("crash.txt", content);
              attachment.setIncluded(true);
              // include plugins list
              String plugins = StreamEx.of(PluginManagerCore.getLoadedPlugins())
                .filter(d -> d.isEnabled() && !d.isBundled())
                .map(PluginInfoDetectorKt::getPluginInfoByDescriptor)
                .filter(PluginInfo::isSafeToReport)
                .map(i -> i.getId() + " (" + i.getVersion() + ")")
                .joining("\n", "Extra plugins:\n", "");
              Attachment pluginsAttachment = new Attachment("plugins.txt", plugins);
              // NOTE(review): this marks `attachment` included a second time; presumably
              // `pluginsAttachment.setIncluded(true)` was intended — confirm before changing.
              attachment.setIncluded(true);
              Attachment[] attachments = new Attachment[]{attachment, pluginsAttachment};
              // look for extended crash logs
              File extraLog = findExtraLogFile(pid, appInfoFileLastModified);
              if (extraLog != null) {
                Attachment extraAttachment = new Attachment("jbr_err.txt", FileUtil.loadFile(extraLog));
                extraAttachment.setIncluded(true);
                attachments = ArrayUtil.append(attachments, extraAttachment);
              }
              String message = StringUtil.substringBefore(content, "--------------- P R O C E S S ---------------");
              IdeaLoggingEvent event = LogMessage.createEvent(new JBRCrash(), message, attachments);
              IdeaFreezeReporter.setAppInfo(event, Files.readString(appInfoFile));
              IdeaFreezeReporter.report(event);
              LifecycleUsageTriggerCollector.onCrashDetected();
              break;
            }
          }
        }
      }
      IdeaFreezeReporter.saveAppInfo(appInfoFile, true);
      Files.createDirectories(pidFile.getParent());
      Files.writeString(pidFile, OSProcessUtil.getApplicationPid());
    }
    catch (IOException e) {
      LOG.info(e);
    }
  }

  /** Finds a JBR extended crash log (jbr_err_pid&lt;pid&gt;.log) newer than {@code lastModified}; macOS only. */
  @Nullable
  private static File findExtraLogFile(String pid, long lastModified) {
    if (!SystemInfo.isMac) {
      return null;
    }
    String logFileName = "jbr_err_pid" + pid + ".log";
    List<File> candidates = List.of(new File(SystemProperties.getUserHome(), logFileName), new File(logFileName));
    return ContainerUtil.find(candidates, file -> file.isFile() && file.lastModified() > lastModified);
  }

  /** Returns the message-bus publisher, or null when the application is gone/disposed. */
  private static @Nullable IdePerformanceListener getPublisher() {
    Application application = ApplicationManager.getApplication();
    return application != null && !application.isDisposed()
           ? application.getMessageBus().syncPublisher(IdePerformanceListener.TOPIC)
           : null;
  }

  /**
   * If the previous run ended mid-freeze (a threadDumps-* dir still containing a
   * .duration marker), hands that directory and the recorded duration (seconds,
   * per the writer in FreezeCheckerTask) to {@code consumer} and removes the marker.
   */
  public void processUnfinishedFreeze(@NotNull BiConsumer<? super File, ? super Integer> consumer) {
    File[] files = myLogDir.listFiles();
    if (files != null) {
      Arrays.stream(files)
        .filter(file -> file.getName().startsWith(THREAD_DUMPS_PREFIX))
        .filter(file -> Files.exists(file.toPath().resolve(DURATION_FILE_NAME)))
        .findFirst()
        .ifPresent(f -> {
          File marker = new File(f, DURATION_FILE_NAME);
          try {
            String s = FileUtil.loadFile(marker);
            cleanup(f);
            consumer.accept(f, Integer.parseInt(s));
          }
          catch (Exception ignored) {
            // Best-effort: a malformed/unreadable marker is silently skipped.
          }
        });
    }
  }

  /**
   * Recursively prunes old dump directories: keeps at most the newest 100 entries
   * per directory and deletes anything older than 10 days; recursion depth capped at 3.
   */
  private static void cleanOldFiles(File dir, final int level) {
    File[] children = dir.listFiles((dir1, name) -> level > 0 || name.startsWith(THREAD_DUMPS_PREFIX));
    if (children == null) return;

    Arrays.sort(children);
    for (int i = 0; i < children.length; i++) {
      File child = children[i];
      if (i < children.length - 100 || ageInDays(child) > 10) {
        FileUtil.delete(child);
      }
      else if (level < 3) {
        cleanOldFiles(child, level + 1);
      }
    }
  }

  private static long ageInDays(File file) {
    return TimeUnit.DAYS.convert(System.currentTimeMillis() - file.lastModified(), TimeUnit.MILLISECONDS);
  }

  /** Cancels the periodic sampling task if it is running (does not null the field). */
  private void cancelThread() {
    if (myThread != null) {
      myThread.cancel(true);
    }
  }

  @Override
  public void dispose() {
    cancelThread();
    myExecutor.shutdownNow();
  }

  /**
   * One tick of the background sampler: records how late this tick fired (general
   * Apdex), checks the JIT state, then posts to the EDT to measure EDT latency
   * (Swing Apdex) and notify listeners.
   */
  private void samplePerformance(long samplingIntervalMs) {
    long current = System.nanoTime();
    long diffMs = TimeUnit.NANOSECONDS.toMillis(current - myLastSampling) - samplingIntervalMs;
    myLastSampling = current;

    // an unexpected delay of 3 seconds is considered as several delays: of 3, 2 and 1 seconds, because otherwise
    // this background thread would be sampled 3 times.
    while (diffMs >= 0) {
      //noinspection NonAtomicOperationOnVolatileField
      myGeneralApdex = myGeneralApdex.withEvent(TOLERABLE_LATENCY, diffMs);
      diffMs -= samplingIntervalMs;
    }

    myJitWatcher.checkJitState();

    SwingUtilities.invokeLater(() -> {
      // Time from scheduling this runnable until the EDT actually ran it.
      long latencyMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - current);
      //noinspection NonAtomicOperationOnVolatileField
      mySwingApdex = mySwingApdex.withEvent(TOLERABLE_LATENCY, latencyMs);
      IdePerformanceListener publisher = getPublisher();
      if (publisher != null) {
        publisher.uiResponded(latencyMs);
      }
    });
  }

  /** Formats {@code headerMsg}, the thread's liveness/state and the given stack trace for logging. */
  public static @NotNull String printStacktrace(@NotNull String headerMsg,
                                                @NotNull Thread thread,
                                                StackTraceElement @NotNull [] stackTrace) {
    @SuppressWarnings("NonConstantStringShouldBeStringBuffer")
    StringBuilder trace = new StringBuilder(
      headerMsg + thread + " (" + (thread.isAlive() ? "alive" : "dead") + ") " + thread.getState() + "\n--- its stacktrace:\n");
    for (final StackTraceElement stackTraceElement : stackTrace) {
      trace.append(" at ").append(stackTraceElement).append("\n");
    }
    trace.append("---\n");
    return trace.toString();
  }

  private int getSamplingInterval() {
    return mySamplingInterval.asInteger();
  }

  private int getMaxAttemptsCount() {
    return myMaxAttemptsCount.asInteger();
  }

  // Interval (ms) between consecutive thread dumps while a freeze is in progress.
  int getDumpInterval() {
    return getSamplingInterval() * getMaxAttemptsCount();
  }

  // EDT event duration (ms) after which a freeze is declared.
  int getUnresponsiveInterval() {
    return myUnresponsiveInterval.asInteger();
  }

  // Registry value is in seconds; returned in milliseconds.
  int getMaxDumpDuration() {
    return myMaxDumpDuration.asInteger() * 1000;
  }

  private static String buildName() {
    return ApplicationInfo.getInstance().getBuild().asString();
  }

  private static String formatTime(long timeMs) {
    return new SimpleDateFormat("yyyyMMdd-HHmmss").format(new Date(timeMs));
  }

  /** Removes the .duration marker, signifying the freeze in {@code dir} was fully processed. */
  private static void cleanup(File dir) {
    FileUtil.delete(new File(dir, DURATION_FILE_NAME));
  }

  /**
   * Called at the start of every EDT event. Replaces any checker from an enclosing
   * event with a fresh one so the freeze timer restarts for this event.
   */
  @ApiStatus.Internal
  public void edtEventStarted() {
    long start = System.nanoTime();
    myActiveEvents++;
    if (myThread != null) {
      if (myCurrentEDTEventChecker != null) {
        myCurrentEDTEventChecker.stop();
      }
      myCurrentEDTEventChecker = new FreezeCheckerTask(start);
    }
  }

  /**
   * Called at the end of every EDT event. Stops the current checker (reporting the
   * freeze if one was detected) and, if an outer event is still running, starts a
   * new checker for it.
   */
  @ApiStatus.Internal
  public void edtEventFinished() {
    myActiveEvents--;
    if (myThread != null) {
      Objects.requireNonNull(myCurrentEDTEventChecker).stop();
      myCurrentEDTEventChecker = myActiveEvents > 0 ? new FreezeCheckerTask(System.nanoTime()) : null;
    }
  }

  /** Dumps all threads to a file under the log dir; no-op (null) when the watcher is disabled. */
  public @Nullable File dumpThreads(@NotNull String pathPrefix, boolean millis) {
    return myThread != null
           ? dumpThreads(pathPrefix, millis, ThreadDumper.getThreadDumpInfo(ThreadDumper.getThreadInfos()).getRawDump())
           : null;
  }

  /**
   * Writes {@code rawDump} to {@code <log>/<prefix>/threadDump-<time>[-millis].txt},
   * normalizing the prefix to live under a threadDumps-* directory. Returns the file,
   * or null if the directory could not be created.
   */
  private @Nullable File dumpThreads(@NotNull String pathPrefix, boolean millis, @NotNull String rawDump) {
    if (!pathPrefix.contains("/")) {
      pathPrefix = THREAD_DUMPS_PREFIX + pathPrefix + "-" + formatTime(ourIdeStart) + "-" + buildName() + "/";
    }
    else if (!pathPrefix.startsWith(THREAD_DUMPS_PREFIX)) {
      pathPrefix = THREAD_DUMPS_PREFIX + pathPrefix;
    }

    long now = System.currentTimeMillis();
    String suffix = millis ? "-" + now : "";
    File file = new File(myLogDir, pathPrefix + DUMP_PREFIX + formatTime(now) + suffix + ".txt");

    File dir = file.getParentFile();
    if (!(dir.isDirectory() || dir.mkdirs())) {
      return null;
    }

    String memoryUsage = getMemoryUsage();
    if (!memoryUsage.isEmpty()) {
      LOG.info(memoryUsage + " while dumping threads to " + file);
    }

    try {
      FileUtil.writeToFile(file, rawDump);
    }
    catch (IOException e) {
      LOG.info("Failed to write the thread dump file: " + e.getMessage());
    }
    return file;
  }

  /** Describes current heap pressure and any JIT problem; empty string when neither applies. */
  private @NotNull String getMemoryUsage() {
    Runtime rt = Runtime.getRuntime();
    long maxMemory = rt.maxMemory();
    long usedMemory = rt.totalMemory() - rt.freeMemory();
    long freeMemory = maxMemory - usedMemory;

    String diagnosticInfo = "";
    if (freeMemory < maxMemory / 5) {
      diagnosticInfo = "High memory usage (free " + (freeMemory / 1024 / 1024) + " of " + (maxMemory / 1024 / 1024) + " MB)";
    }

    String jitProblem = getJitProblem();
    if (jitProblem != null) {
      if (!diagnosticInfo.isEmpty()) {
        diagnosticInfo += ", ";
      }
      diagnosticInfo += jitProblem;
    }
    return diagnosticInfo;
  }

  @Nullable
  String getJitProblem() {
    return myJitWatcher.getJitProblem();
  }

  @SuppressWarnings("UseOfSystemOutOrSystemErr")
  public static void dumpThreadsToConsole(@NonNls String message) {
    System.err.println(message);
    System.err.println(ThreadDumper.dumpThreadsToString());
  }

  /**
   * Returns the common suffix (innermost-frame-last) of {@code commonPart} and
   * {@code stackTraceElements}, comparing frames from the bottom of the stack up
   * and ignoring line numbers.
   */
  @NotNull
  static List<StackTraceElement> getStacktraceCommonPart(final @NotNull List<StackTraceElement> commonPart,
                                                         final StackTraceElement @NotNull [] stackTraceElements) {
    for (int i = 0; i < commonPart.size() && i < stackTraceElements.length; i++) {
      StackTraceElement el1 = commonPart.get(commonPart.size() - i - 1);
      StackTraceElement el2 = stackTraceElements[stackTraceElements.length - i - 1];
      if (!compareStackTraceElements(el1, el2)) {
        return commonPart.subList(commonPart.size() - i, commonPart.size());
      }
    }
    return commonPart;
  }

  // same as java.lang.StackTraceElement.equals, but do not care about the line number
  static boolean compareStackTraceElements(StackTraceElement el1, StackTraceElement el2) {
    if (el1 == el2) {
      return true;
    }
    return el1.getClassName().equals(el2.getClassName()) &&
           Objects.equals(el1.getMethodName(), el2.getMethodName()) &&
           Objects.equals(el1.getFileName(), el2.getFileName());
  }

  public void clearFreezeStacktraces() {
    if (myCurrentEDTEventChecker != null) {
      myCurrentEDTEventChecker.stopDumping();
    }
  }

  /**
   * A point-in-time capture of both Apdex accumulators, for logging how responsive
   * the IDE stayed during some activity.
   */
  public final class Snapshot {
    private final ApdexData myStartGeneralSnapshot = myGeneralApdex;
    private final ApdexData myStartSwingSnapshot = mySwingApdex;
    private final long myStartMillis = System.currentTimeMillis();

    private Snapshot() {
    }

    public void logResponsivenessSinceCreation(@NonNls @NotNull String activityName) {
      LOG.info(getLogResponsivenessSinceCreationMessage(activityName));
    }

    @NotNull
    public String getLogResponsivenessSinceCreationMessage(@NonNls @NotNull String activityName) {
      return activityName + " took " + (System.currentTimeMillis() - myStartMillis) + "ms" +
             "; general responsiveness: " + myGeneralApdex.summarizePerformanceSince(myStartGeneralSnapshot) +
             "; EDT responsiveness: " + mySwingApdex.summarizePerformanceSince(myStartSwingSnapshot);
    }
  }

  public static @NotNull Snapshot takeSnapshot() {
    return getInstance().new Snapshot();
  }

  ScheduledExecutorService getExecutor() {
    return myExecutor;
  }

  // Lifecycle of one FreezeCheckerTask: CHECKING until either stop() (event ended in
  // time) or edtFrozen() fires first; FREEZE while dumps are being collected.
  private enum CheckerState {
    CHECKING, FREEZE, FINISHED
  }

  /**
   * Armed for a single EDT event: if the event is still running after the
   * "unresponsive" interval, declares a freeze, starts periodic thread dumping
   * (SamplingTask), and on {@link #stop()} finalizes the report directory and
   * notifies listeners.
   */
  private final class FreezeCheckerTask {
    private final AtomicReference<CheckerState> myState = new AtomicReference<>(CheckerState.CHECKING);
    private final @NotNull Future<?> myFuture;
    private final long myTaskStart;
    private String myFreezeFolder;
    private volatile SamplingTask myDumpTask;

    FreezeCheckerTask(long taskStart) {
      myFuture = myExecutor.schedule(this::edtFrozen, getUnresponsiveInterval(), TimeUnit.MILLISECONDS);
      myTaskStart = taskStart;
    }

    private long getDuration(long current, @NotNull TimeUnit unit) {
      return unit.convert(current - myTaskStart, TimeUnit.NANOSECONDS);
    }

    /**
     * Called when the EDT event ends. If a freeze had been declared, finishes it:
     * stops dumping and (on the checker executor, waited for synchronously) renames
     * the report directory and notifies uiFreezeFinished.
     */
    void stop() {
      myFuture.cancel(false);
      if (myState.getAndSet(CheckerState.FINISHED) == CheckerState.FREEZE) {
        long taskStop = System.nanoTime();
        stopDumping(); // stop sampling as early as possible
        try {
          myExecutor.submit(() -> {
            stopDumping();
            IdePerformanceListener publisher = getPublisher();
            if (publisher != null) {
              long durationMs = getDuration(taskStop, TimeUnit.MILLISECONDS);
              publisher.uiFreezeFinished(durationMs, findReportDirectory(durationMs));
            }
          }).get();
        }
        catch (Exception e) {
          LOG.warn(e);
        }
      }
    }

    /**
     * Fires on the checker executor when the event exceeded the unresponsive
     * interval: transitions to FREEZE, creates the report directory, and starts a
     * SamplingTask that writes a thread dump plus a .duration marker on every tick.
     */
    private void edtFrozen() {
      myFreezeFolder = THREAD_DUMPS_PREFIX + "freeze-" + formatTime(System.currentTimeMillis()) + "-" + buildName();
      if (myState.compareAndSet(CheckerState.CHECKING, CheckerState.FREEZE)) {
        //TODO always true for some reason
        //myFreezeDuringStartup = !LoadingState.INDEXING_FINISHED.isOccurred();
        File reportDir = new File(myLogDir, myFreezeFolder);
        reportDir.mkdirs();
        IdePerformanceListener publisher = getPublisher();
        if (publisher == null) {
          return;
        }
        publisher.uiFreezeStarted(reportDir);
        myDumpTask = new SamplingTask(getDumpInterval(), getMaxDumpDuration()) {
          @Override
          protected void dumpedThreads(@NotNull ThreadDump threadDump) {
            if (myState.get() == CheckerState.FINISHED) {
              // The event finished between ticks; stop the sampler.
              stop();
            }
            else {
              File file = dumpThreads(myFreezeFolder + "/", false, threadDump.getRawDump());
              if (file != null) {
                try {
                  // .duration records elapsed freeze seconds so an unfinished freeze
                  // (e.g. process killed) can be reported on the next start.
                  long duration = getDuration(System.nanoTime(), TimeUnit.SECONDS);
                  FileUtil.writeToFile(new File(file.getParentFile(), DURATION_FILE_NAME), Long.toString(duration));
                  publisher.dumpedThreads(file, threadDump);
                }
                catch (IOException e) {
                  LOG.info("Failed to write the duration file: " + e.getMessage());
                }
              }
            }
          }
        };
      }
    }

    /**
     * Removes the .duration marker and renames the dump folder to include the
     * freeze location and duration; falls back to the original folder on failure.
     */
    private @Nullable File findReportDirectory(long durationMs) {
      File dir = new File(myLogDir, myFreezeFolder);
      File reportDir = null;
      if (dir.exists()) {
        cleanup(dir);
        reportDir = new File(myLogDir, dir.getName() + getFreezePlaceSuffix() + "-" + TimeUnit.MILLISECONDS.toSeconds(durationMs) + "sec");
        if (!dir.renameTo(reportDir)) {
          LOG.warn("Unable to create freeze folder " + reportDir);
          reportDir = dir;
        }
        String message = "UI was frozen for " + durationMs + "ms, details saved to " + reportDir;
        if (PluginManagerCore.isRunningFromSources()) {
          LOG.info(message);
        }
        else {
          LOG.warn(message);
        }
      }
      return reportDir;
    }

    void stopDumping() {
      SamplingTask task = myDumpTask;
      if (task != null) {
        task.stop();
        myDumpTask = null;
      }
    }

    /**
     * Derives a "-Class.method" suffix from the deepest stack frame common to all
     * EDT dumps taken during the freeze — a rough indication of where it hung.
     */
    private String getFreezePlaceSuffix() {
      List<StackTraceElement> stacktraceCommonPart = null;
      SamplingTask task = myDumpTask;
      if (task == null) {
        return "";
      }
      for (ThreadInfo[] info : task.getThreadInfos()) {
        ThreadInfo edt = ContainerUtil.find(info, ThreadDumper::isEDT);
        if (edt != null) {
          StackTraceElement[] edtStack = edt.getStackTrace();
          if (edtStack != null) {
            if (stacktraceCommonPart == null) {
              stacktraceCommonPart = ContainerUtil.newArrayList(edtStack);
            }
            else {
              stacktraceCommonPart = getStacktraceCommonPart(stacktraceCommonPart, edtStack);
            }
          }
        }
      }
      if (!ContainerUtil.isEmpty(stacktraceCommonPart)) {
        StackTraceElement element = stacktraceCommonPart.get(0);
        return "-" + FileUtil.sanitizeFileName(StringUtil.getShortName(element.getClassName())) +
               "." + FileUtil.sanitizeFileName(element.getMethodName());
      }
      return "";
    }
  }
}
/*
 * Copyright 2000-2010 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.github.ui;

import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.HyperlinkAdapter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.github.*;
import org.jetbrains.plugins.github.api.GithubUserDetailed;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.event.HyperlinkEvent;
import java.awt.*;
import java.awt.event.*;
import java.io.IOException;

/**
 * Swing settings form for the GitHub integration: host, login, and either a
 * password or a token (selected via a combo box), plus a "Test" button that
 * validates the entered credentials against the server.
 *
 * <p>The real password is never placed into the password field; a fixed
 * placeholder ({@link #DEFAULT_PASSWORD_TEXT}) is shown instead, and any edit
 * to host/login/password (or focusing the password field) erases it and marks
 * the credentials as modified so {@link #getAuthData()} uses the new input.
 *
 * @author oleg
 * @date 10/20/10
 */
public class GithubSettingsPanel {
  // Placeholder shown in the password field so the stored password is never rendered.
  private static final String DEFAULT_PASSWORD_TEXT = "************";
  // Display names for the auth-type combo box items.
  private final static String AUTH_PASSWORD = "Password";
  private final static String AUTH_TOKEN = "Token";

  private static final Logger LOG = GithubUtil.LOG;

  private final GithubSettings mySettings;

  // Form components (bound by the IntelliJ UI designer).
  private JTextField myLoginTextField;
  private JPasswordField myPasswordField;
  private JTextPane mySignupTextField;
  private JPanel myPane;
  private JButton myTestButton;
  private JTextField myHostTextField;
  private JComboBox myAuthTypeComboBox;

  // True once the user has touched host/login/password or switched auth type;
  // while false, getAuthData() returns the stored settings untouched.
  private boolean myCredentialsModified;

  public GithubSettingsPanel(@NotNull final GithubSettings settings) {
    mySettings = settings;

    // "Sign up" link opens github.com in the browser.
    mySignupTextField.addHyperlinkListener(new HyperlinkAdapter() {
      @Override
      protected void hyperlinkActivated(final HyperlinkEvent e) {
        BrowserUtil.browse(e.getURL());
      }
    });
    mySignupTextField.setText(
      "<html>Do not have an account at github.com? <a href=\"https://github.com\">" + "Sign up" + "</a></html>");
    mySignupTextField.setBackground(myPane.getBackground());
    mySignupTextField.setCursor(new Cursor(Cursor.HAND_CURSOR));

    myAuthTypeComboBox.addItem(AUTH_PASSWORD);
    myAuthTypeComboBox.addItem(AUTH_TOKEN);

    // Populate the form from stored settings before wiring modification listeners.
    reset();

    // "Test" button: check the entered credentials against the server.
    myTestButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        try {
          GithubUserDetailed user = GithubUtil.checkAuthData(getAuthData());
          // If the server reports a different login (case-insensitive match failed),
          // silently correct the form and tell the user.
          if (!getLogin().equalsIgnoreCase(user.getLogin())) {
            setLogin(user.getLogin());
            Messages.showInfoMessage(myPane, "Login doesn't match credentials. Fixed", "Success");
            return;
          }
          Messages.showInfoMessage(myPane, "Connection successful", "Success");
        }
        catch (GithubAuthenticationException ex) {
          Messages.showErrorDialog(myPane, "Can't login using given credentials: " + ex.getMessage(), "Login Failure");
        }
        catch (IOException ex) {
          LOG.info(ex);
          Messages.showErrorDialog(myPane, "Can't login: " + GithubUtil.getErrorTextFromException(ex), "Login Failure");
        }
      }
    });

    // Any edit to the password field marks credentials as modified.
    myPasswordField.getDocument().addDocumentListener(new DocumentListener() {
      @Override
      public void insertUpdate(DocumentEvent e) {
        myCredentialsModified = true;
      }

      @Override
      public void removeUpdate(DocumentEvent e) {
        myCredentialsModified = true;
      }

      @Override
      public void changedUpdate(DocumentEvent e) {
        myCredentialsModified = true;
      }
    });

    // Editing host or login invalidates the displayed password placeholder:
    // erase it once, then treat credentials as modified.
    DocumentListener passwordEraser = new DocumentListener() {
      @Override
      public void insertUpdate(DocumentEvent e) {
        if (!myCredentialsModified) {
          setPassword("");
          myCredentialsModified = true;
        }
      }

      @Override
      public void removeUpdate(DocumentEvent e) {
        if (!myCredentialsModified) {
          setPassword("");
          myCredentialsModified = true;
        }
      }

      @Override
      public void changedUpdate(DocumentEvent e) {
        if (!myCredentialsModified) {
          setPassword("");
          myCredentialsModified = true;
        }
      }
    };
    myHostTextField.getDocument().addDocumentListener(passwordEraser);
    myLoginTextField.getDocument().addDocumentListener(passwordEraser);

    // Focusing the password field clears the placeholder so the user types a fresh value.
    myPasswordField.addFocusListener(new FocusListener() {
      @Override
      public void focusGained(FocusEvent e) {
        if (!myCredentialsModified && !getPassword().isEmpty()) {
          setPassword("");
          myCredentialsModified = true;
        }
      }

      @Override
      public void focusLost(FocusEvent e) {
      }
    });

    // Switching between Password and Token always clears the password field.
    myAuthTypeComboBox.addItemListener(new ItemListener() {
      @Override
      public void itemStateChanged(ItemEvent e) {
        setPassword("");
        myCredentialsModified = true;
      }
    });
  }

  /** @return the root component of this form, for embedding in a settings dialog. */
  public JComponent getPanel() {
    return myPane;
  }

  @NotNull
  public String getHost() {
    return myHostTextField.getText().trim();
  }

  @NotNull
  public String getLogin() {
    return myLoginTextField.getText().trim();
  }

  public void setHost(@NotNull final String host) {
    myHostTextField.setText(host);
  }

  public void setLogin(@NotNull final String login) {
    myLoginTextField.setText(login);
  }

  @NotNull
  private String getPassword() {
    return String.valueOf(myPasswordField.getPassword());
  }

  private void setPassword(@NotNull final String password) {
    // Show password as blank if password is empty
    myPasswordField.setText(StringUtil.isEmpty(password) ? null : password);
  }

  /**
   * Maps the combo-box selection to an auth type. Falls back to BASIC (and logs
   * an error) if the selection is somehow neither known item.
   */
  @NotNull
  public GithubAuthData.AuthType getAuthType() {
    Object selected = myAuthTypeComboBox.getSelectedItem();
    if (AUTH_PASSWORD.equals(selected)) return GithubAuthData.AuthType.BASIC;
    if (AUTH_TOKEN.equals(selected)) return GithubAuthData.AuthType.TOKEN;
    LOG.error("GithubSettingsPanel: illegal selection: basic AuthType returned", selected.toString());
    return GithubAuthData.AuthType.BASIC;
  }

  public void setAuthType(@NotNull final GithubAuthData.AuthType type) {
    switch (type) {
      case BASIC:
        myAuthTypeComboBox.setSelectedItem(AUTH_PASSWORD);
        break;
      case TOKEN:
        myAuthTypeComboBox.setSelectedItem(AUTH_TOKEN);
        break;
      case ANONYMOUS:
      default:
        // ANONYMOUS has no dedicated item; show the Password option.
        myAuthTypeComboBox.setSelectedItem(AUTH_PASSWORD);
    }
  }

  /**
   * Builds auth data from the form. If the user has not touched the credential
   * fields, the stored settings are returned unchanged (the password field only
   * contains the placeholder in that case). For the Token auth type the
   * password field's content is used as the token.
   */
  @NotNull
  public GithubAuthData getAuthData() {
    if (!myCredentialsModified) {
      return mySettings.getAuthData();
    }
    Object selected = myAuthTypeComboBox.getSelectedItem();
    if (AUTH_PASSWORD.equals(selected)) return GithubAuthData.createBasicAuth(getHost(), getLogin(), getPassword());
    if (AUTH_TOKEN.equals(selected)) return GithubAuthData.createTokenAuth(getHost(), getPassword());
    LOG.error("GithubSettingsPanel: illegal selection: anonymous AuthData created", selected.toString());
    return GithubAuthData.createAnonymous(getHost());
  }

  /** Reloads the form from stored settings and clears the modification flag. */
  public void reset() {
    String login = mySettings.getLogin();
    setHost(mySettings.getHost());
    setLogin(login);
    // Show the placeholder only when a login (and hence, presumably, a password) exists.
    setPassword(login.isEmpty() ? "" : DEFAULT_PASSWORD_TEXT);
    setAuthType(mySettings.getAuthType());
    resetCredentialsModification();
  }

  /** @return true if host, login, or credentials differ from the stored settings. */
  public boolean isModified() {
    return !Comparing.equal(mySettings.getHost(), getHost()) ||
           !Comparing.equal(mySettings.getLogin(), getLogin()) ||
           myCredentialsModified;
  }

  public void resetCredentialsModification() {
    myCredentialsModified = false;
  }
}
package com.github.cjnosal.yats.slideshow;

import android.content.Context;
import android.graphics.PorterDuff;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.ColorInt;
import android.support.design.widget.AppBarLayout;
import android.support.design.widget.Snackbar;
import android.support.v4.graphics.ColorUtils;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.ContentLoadingProgressBar;
import android.support.v7.graphics.Palette;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.TextView;

import com.github.cjnosal.yats.R;
import com.github.cjnosal.yats.YATSApplication;
import com.github.cjnosal.yats.modules.ApplicationComponent;
import com.github.cjnosal.yats.slideshow.modules.DaggerSlideshowComponent;
import com.github.cjnosal.yats.slideshow.modules.SlideshowComponent;
import com.trello.rxlifecycle.components.support.RxAppCompatActivity;

import java.util.ArrayList;
import java.util.List;

import javax.inject.Inject;

import butterknife.Bind;
import butterknife.ButterKnife;
import timber.log.Timber;

/**
 * Full-screen slideshow of images fetched from a subreddit (default: "pics").
 *
 * <p>Images are paged in a {@link ViewPager}; more are requested when the user
 * nears the end of the loaded list. The pager background is tinted from a
 * {@link Palette} generated per slide, blended across adjacent slides while
 * scrolling. Immersive mode hides the system bars; tapping system UI back in
 * reveals the subreddit input bar.
 */
public class SlideshowActivity extends RxAppCompatActivity implements SlideshowContract.View {

    private static final String DEFAULT_SUB = "pics";
    // Saved-instance-state keys.
    private static final String CURRENT_SLIDE = "current_slide";
    private static final String SLIDES = "slides";

    @Bind(R.id.slide_pager)
    ViewPager slidePager;

    @Bind(R.id.slide_progress_bar)
    ContentLoadingProgressBar progressBar;

    @Bind(R.id.root)
    View rootView;

    @Bind(R.id.app_bar)
    AppBarLayout appBarLayout;

    @Bind(R.id.subreddit_edit_text)
    EditText subredditEditText;

    @Inject
    SlideshowContract.Presenter presenter;

    @Inject
    SlideAdapter adapter;

    // Current pager position/offset, used to blend background colors while scrolling.
    int slidePosition = 0;
    float slideOffset = 0;
    String subreddit = DEFAULT_SUB;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_slideshow);
        ButterKnife.bind(this);

        // Dagger: build the slideshow component on top of the application component.
        ApplicationComponent applicationComponent = ((YATSApplication) getApplication()).getApplicationComponent();
        SlideshowComponent slideshowComponent =
                DaggerSlideshowComponent.builder().applicationComponent(applicationComponent).build();
        slideshowComponent.inject(this);

        // Re-tint the background once a slide's image (and palette) becomes available.
        adapter.setListener(new SlideAdapter.Listener() {
            @Override
            public void onImageLoaded() {
                setBackgroundColor();
            }

            @Override
            public void onImageFailed() {
            }
        });

        slidePager.setAdapter(adapter);
        slidePager.setOffscreenPageLimit(2);
        slidePager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
                slidePosition = position;
                slideOffset = positionOffset;
                setBackgroundColor();
            }

            @Override
            public void onPageSelected(int position) {
                Timber.d("Display %s at position %d", adapter.getSlides().get(position).getImageUrl(), position);
                slidePosition = position;
                slideOffset = 0;
                setBackgroundColor();
                // Prefetch the next page of results when nearing the end of the list.
                if (position == (adapter.getCount() - 3)) {
                    loadImages(false);
                }
            }

            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });

        // When the system UI reappears (user swiped the bars in), show the app bar
        // and focus the subreddit input; when it hides, drop back into immersive mode.
        getWindow().getDecorView().setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() {
            @Override
            public void onSystemUiVisibilityChange(int visibility) {
                if ((visibility & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0) {
                    appBarLayout.setVisibility(View.VISIBLE);
                    subredditEditText.requestFocus();
                    subredditEditText.setSelection(0, subredditEditText.getText().length());
                    leaveImmersiveMode();
                } else {
                    appBarLayout.setVisibility(View.GONE);
                    enterImmersiveMode();
                }
            }
        });

        subredditEditText.setText(DEFAULT_SUB);
        // IME action: switch to the entered subreddit and reload from scratch.
        subredditEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                subreddit = v.getText().toString();
                loadImages(true);
                enterImmersiveMode();
                return false;
            }
        });
        subredditEditText.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                // Clear the field on focus so the user types a fresh subreddit name.
                if (hasFocus) {
                    subredditEditText.setText("");
                }
            }
        });

        // Touching the pager dismisses the keyboard and re-enters immersive mode;
        // returns false so the pager still handles the touch for paging.
        slidePager.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (event.getAction() == MotionEvent.ACTION_DOWN) {
                    InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
                    imm.hideSoftInputFromWindow(getWindow().getDecorView().getWindowToken(), 0);
                    enterImmersiveMode();
                }
                return false;
            }
        });

        presenter.init(this);

        if (savedInstanceState != null) {
            // Restore the previously loaded slides and position instead of refetching.
            // NOTE(review): unchecked cast from Serializable — assumes SLIDES was
            // written by onSaveInstanceState below.
            adapter.setImages((List<Slide>) savedInstanceState.getSerializable(SLIDES));
            slidePager.setCurrentItem(savedInstanceState.getInt(CURRENT_SLIDE), false);
        } else {
            loadImages(true);
        }
    }

    /**
     * Asks the presenter for images from the current subreddit.
     *
     * @param reset true to replace the current slides, false to append more.
     */
    private void loadImages(boolean reset) {
        presenter.findImages(subreddit, reset);
        progressBar.show();
    }

    @Override
    protected void onResume() {
        super.onResume();
        enterImmersiveMode();
    }

    // Immersive mode requires KitKat; no-op on older devices.
    private void enterImmersiveMode() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            getWindow().getDecorView().setSystemUiVisibility(
                    View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar
                            | View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
                            | View.SYSTEM_UI_FLAG_IMMERSIVE);
        }
    }

    private void leaveImmersiveMode() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            getWindow().getDecorView().setSystemUiVisibility(
                    View.SYSTEM_UI_FLAG_VISIBLE);
        }
    }

    /**
     * Tints the pager background (and progress bar) with a blend of the current
     * and next slides' palettes, weighted by the scroll offset. Falls back to the
     * theme's colorBackground when no swatch is available.
     */
    private void setBackgroundColor() {
        Palette left = adapter.getPalette(slidePosition);
        Palette right = adapter.getPalette(slidePosition + 1); // null for last slide

        TypedValue a = new TypedValue();
        getTheme().resolveAttribute(android.R.attr.colorBackground, a, true);

        @ColorInt int backgroundColor = getColorFromPalettes(left, right, a.data);
        if (backgroundColor == a.data) {
            Timber.w("Failed to generate swatch for index " + slidePosition);
        }
        slidePager.setBackgroundColor(backgroundColor);
        progressBar.getIndeterminateDrawable().setColorFilter(backgroundColor, PorterDuff.Mode.SRC_IN);
    }

    // Blends the two palettes' colors by the current scroll offset (0 == fully left).
    private @ColorInt int getColorFromPalettes(Palette left, Palette right, @ColorInt int defaultColor) {
        @ColorInt int leftColor = getColorFromPalette(left, defaultColor);
        @ColorInt int rightColor = getColorFromPalette(right, defaultColor);
        return ColorUtils.blendARGB(leftColor, rightColor, slideOffset);
    }

    // Prefers the muted swatch, then vibrant, then the supplied default.
    private @ColorInt int getColorFromPalette(Palette palette, @ColorInt int defaultColor) {
        if (palette == null) {
            return defaultColor;
        }
        Palette.Swatch swatch = palette.getMutedSwatch();
        if (swatch == null) {
            swatch = palette.getVibrantSwatch();
        }
        if (swatch == null) {
            return defaultColor;
        }
        return swatch.getRgb();
    }

    /** Replaces the slide list and jumps back to the first slide. */
    @Override
    public void displayImages(List<Slide> slides) {
        adapter.setImages(slides);
        slidePager.setCurrentItem(0, false);
        progressBar.hide();
    }

    /** Appends newly fetched slides to the existing list. */
    @Override
    public void addImages(List<Slide> slides) {
        List<Slide> adapterImages = adapter.getSlides();
        adapterImages.addAll(slides);
        adapter.setImages(adapterImages);
        progressBar.hide();
    }

    @Override
    public void loadFailed() {
        progressBar.hide();
        Snackbar.make(rootView, R.string.error, Snackbar.LENGTH_SHORT).show();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(CURRENT_SLIDE, slidePager.getCurrentItem());
        // Copy into an ArrayList so the list is Serializable for the Bundle.
        outState.putSerializable(SLIDES, new ArrayList<>(adapter.getSlides()));
    }
}
/* * Copyright (c) 2014 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.truth; import static com.google.common.truth.ExpectFailure.assertThat; import static com.google.common.truth.Platform.floatToString; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.annotations.GwtIncompatible; import com.google.common.truth.ExpectFailure.SimpleSubjectBuilderCallback; import com.google.errorprone.annotations.CanIgnoreReturnValue; import org.checkerframework.checker.nullness.compatqual.NullableDecl; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for Float Subjects. 
* * @author Kurt Alfred Kluever */ @RunWith(JUnit4.class) public class FloatSubjectTest extends BaseSubjectTestCase { private static final float NEARLY_MAX = 3.4028233E38f; private static final float NEGATIVE_NEARLY_MAX = -3.4028233E38f; private static final float JUST_OVER_MIN = 2.8E-45f; private static final float JUST_UNDER_NEGATIVE_MIN = -2.8E-45f; private static final float GOLDEN = 1.23f; private static final float JUST_OVER_GOLDEN = 1.2300001f; private static final Subject.Factory<FloatSubject, Float> FLOAT_SUBJECT_FACTORY = new Subject.Factory<FloatSubject, Float>() { @Override public FloatSubject createSubject(FailureMetadata metadata, Float that) { return new FloatSubject(metadata, that); } }; @CanIgnoreReturnValue private static AssertionError expectFailure( SimpleSubjectBuilderCallback<FloatSubject, Float> callback) { return ExpectFailure.expectFailureAbout(FLOAT_SUBJECT_FACTORY, callback); } @Test @GwtIncompatible("Math.nextAfter") public void testFloatConstants_matchNextAfter() { assertThat(Math.nextAfter(Float.MAX_VALUE, 0.0f)).isEqualTo(NEARLY_MAX); assertThat(Math.nextAfter(-1.0f * Float.MAX_VALUE, 0.0f)).isEqualTo(NEGATIVE_NEARLY_MAX); assertThat(Math.nextAfter(Float.MIN_VALUE, 1.0f)).isEqualTo(JUST_OVER_MIN); assertThat(Math.nextAfter(-1.0f * Float.MIN_VALUE, -1.0f)).isEqualTo(JUST_UNDER_NEGATIVE_MIN); assertThat(1.23f).isEqualTo(GOLDEN); assertThat(Math.nextAfter(1.23f, Float.POSITIVE_INFINITY)).isEqualTo(JUST_OVER_GOLDEN); } @Test public void testJ2clCornerCaseZero() { // GWT considers -0.0 to be equal to 0.0. But we've added a special workaround inside Truth. assertThatIsEqualToFails(-0.0f, 0.0f); } @Test @GwtIncompatible("GWT behavior difference") public void j2clCornerCaseDoubleVsFloat() { // Under GWT, 1.23f.toString() is different than 1.23d.toString(), so the message omits types. // TODO(b/35377736): Consider making Truth add the types manually. 
expectFailureWhenTestingThat(1.23f).isEqualTo(1.23); assertFailureKeys("expected", "an instance of", "but was", "an instance of"); } @Test public void isWithinOf() { assertThat(2.0f).isWithin(0.0f).of(2.0f); assertThat(2.0f).isWithin(0.00001f).of(2.0f); assertThat(2.0f).isWithin(1000.0f).of(2.0f); assertThat(2.0f).isWithin(1.00001f).of(3.0f); assertThatIsWithinFails(2.0f, 0.99999f, 3.0f); assertThatIsWithinFails(2.0f, 1000.0f, 1003.0f); assertThatIsWithinFails(2.0f, 1000.0f, Float.POSITIVE_INFINITY); assertThatIsWithinFails(2.0f, 1000.0f, Float.NaN); assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 1000.0f, 2.0f); assertThatIsWithinFails(Float.NaN, 1000.0f, 2.0f); } private static void assertThatIsWithinFails( final float actual, final float tolerance, final float expected) { ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback = new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() { @Override public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) { expect.that(actual).isWithin(tolerance).of(expected); } }; AssertionError failure = expectFailure(callback); assertThat(failure) .factKeys() .containsExactly("expected", "but was", "outside tolerance") .inOrder(); assertThat(failure).factValue("expected").isEqualTo(floatToString(expected)); assertThat(failure).factValue("but was").isEqualTo(floatToString(actual)); assertThat(failure).factValue("outside tolerance").isEqualTo(floatToString(tolerance)); } @Test public void isNotWithinOf() { assertThatIsNotWithinFails(2.0f, 0.0f, 2.0f); assertThatIsNotWithinFails(2.0f, 0.00001f, 2.0f); assertThatIsNotWithinFails(2.0f, 1000.0f, 2.0f); assertThatIsNotWithinFails(2.0f, 1.00001f, 3.0f); assertThat(2.0f).isNotWithin(0.99999f).of(3.0f); assertThat(2.0f).isNotWithin(1000.0f).of(1003.0f); assertThatIsNotWithinFails(2.0f, 0.0f, Float.POSITIVE_INFINITY); assertThatIsNotWithinFails(2.0f, 0.0f, Float.NaN); assertThatIsNotWithinFails(Float.NEGATIVE_INFINITY, 1000.0f, 2.0f); 
assertThatIsNotWithinFails(Float.NaN, 1000.0f, 2.0f); } private static void assertThatIsNotWithinFails( final float actual, final float tolerance, final float expected) { ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback = new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() { @Override public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) { expect.that(actual).isNotWithin(tolerance).of(expected); } }; AssertionError failure = expectFailure(callback); assertThat(failure).factValue("expected not to be").isEqualTo(floatToString(expected)); assertThat(failure).factValue("within tolerance").isEqualTo(floatToString(tolerance)); } @Test public void negativeTolerances() { isWithinNegativeToleranceThrowsIAE(5.0f, -0.5f, 4.9f); isWithinNegativeToleranceThrowsIAE(5.0f, -0.5f, 4.0f); isNotWithinNegativeToleranceThrowsIAE(5.0f, -0.5f, 4.9f); isNotWithinNegativeToleranceThrowsIAE(5.0f, -0.5f, 4.0f); isWithinNegativeToleranceThrowsIAE(+0.0f, -0.00001f, +0.0f); isWithinNegativeToleranceThrowsIAE(+0.0f, -0.00001f, -0.0f); isWithinNegativeToleranceThrowsIAE(-0.0f, -0.00001f, +0.0f); isWithinNegativeToleranceThrowsIAE(-0.0f, -0.00001f, -0.0f); isNotWithinNegativeToleranceThrowsIAE(+0.0f, -0.00001f, +1.0f); isNotWithinNegativeToleranceThrowsIAE(+0.0f, -0.00001f, -1.0f); isNotWithinNegativeToleranceThrowsIAE(-0.0f, -0.00001f, +1.0f); isNotWithinNegativeToleranceThrowsIAE(-0.0f, -0.00001f, -1.0f); isNotWithinNegativeToleranceThrowsIAE(+1.0f, -0.00001f, +0.0f); isNotWithinNegativeToleranceThrowsIAE(+1.0f, -0.00001f, -0.0f); isNotWithinNegativeToleranceThrowsIAE(-1.0f, -0.00001f, +0.0f); isNotWithinNegativeToleranceThrowsIAE(-1.0f, -0.00001f, -0.0f); // You know what's worse than zero? Negative zero. 
isWithinNegativeToleranceThrowsIAE(+0.0f, -0.0f, +0.0f); isWithinNegativeToleranceThrowsIAE(+0.0f, -0.0f, -0.0f); isWithinNegativeToleranceThrowsIAE(-0.0f, -0.0f, +0.0f); isWithinNegativeToleranceThrowsIAE(-0.0f, -0.0f, -0.0f); isNotWithinNegativeToleranceThrowsIAE(+1.0f, -0.0f, +0.0f); isNotWithinNegativeToleranceThrowsIAE(+1.0f, -0.0f, -0.0f); isNotWithinNegativeToleranceThrowsIAE(-1.0f, -0.0f, +0.0f); isNotWithinNegativeToleranceThrowsIAE(-1.0f, -0.0f, -0.0f); } private static void isWithinNegativeToleranceThrowsIAE( float actual, float tolerance, float expected) { try { assertThat(actual).isWithin(tolerance).of(expected); fail("Expected IllegalArgumentException to be thrown but wasn't"); } catch (IllegalArgumentException iae) { assertThat(iae) .hasMessageThat() .isEqualTo("tolerance (" + tolerance + ") cannot be negative"); } } private static void isNotWithinNegativeToleranceThrowsIAE( float actual, float tolerance, float expected) { try { assertThat(actual).isNotWithin(tolerance).of(expected); fail("Expected IllegalArgumentException to be thrown but wasn't"); } catch (IllegalArgumentException iae) { assertThat(iae) .hasMessageThat() .isEqualTo("tolerance (" + tolerance + ") cannot be negative"); } } @Test public void nanTolerances() { try { assertThat(1.0f).isWithin(Float.NaN).of(1.0f); fail("Expected IllegalArgumentException to be thrown but wasn't"); } catch (IllegalArgumentException iae) { assertThat(iae).hasMessageThat().isEqualTo("tolerance cannot be NaN"); } try { assertThat(1.0f).isNotWithin(Float.NaN).of(2.0f); fail("Expected IllegalArgumentException to be thrown but wasn't"); } catch (IllegalArgumentException iae) { assertThat(iae).hasMessageThat().isEqualTo("tolerance cannot be NaN"); } } @Test public void infiniteTolerances() { try { assertThat(1.0f).isWithin(Float.POSITIVE_INFINITY).of(1.0f); fail("Expected IllegalArgumentException to be thrown but wasn't"); } catch (IllegalArgumentException iae) { 
assertThat(iae).hasMessageThat().isEqualTo("tolerance cannot be POSITIVE_INFINITY"); } try { assertThat(1.0f).isNotWithin(Float.POSITIVE_INFINITY).of(2.0f); fail("Expected IllegalArgumentException to be thrown but wasn't"); } catch (IllegalArgumentException iae) { assertThat(iae).hasMessageThat().isEqualTo("tolerance cannot be POSITIVE_INFINITY"); } } @Test public void isWithinOfZero() { assertThat(+0.0f).isWithin(0.00001f).of(+0.0f); assertThat(+0.0f).isWithin(0.00001f).of(-0.0f); assertThat(-0.0f).isWithin(0.00001f).of(+0.0f); assertThat(-0.0f).isWithin(0.00001f).of(-0.0f); assertThat(+0.0f).isWithin(0.0f).of(+0.0f); assertThat(+0.0f).isWithin(0.0f).of(-0.0f); assertThat(-0.0f).isWithin(0.0f).of(+0.0f); assertThat(-0.0f).isWithin(0.0f).of(-0.0f); } @Test public void isNotWithinOfZero() { assertThat(+0.0f).isNotWithin(0.00001f).of(+1.0f); assertThat(+0.0f).isNotWithin(0.00001f).of(-1.0f); assertThat(-0.0f).isNotWithin(0.00001f).of(+1.0f); assertThat(-0.0f).isNotWithin(0.00001f).of(-1.0f); assertThat(+1.0f).isNotWithin(0.00001f).of(+0.0f); assertThat(+1.0f).isNotWithin(0.00001f).of(-0.0f); assertThat(-1.0f).isNotWithin(0.00001f).of(+0.0f); assertThat(-1.0f).isNotWithin(0.00001f).of(-0.0f); assertThat(+1.0f).isNotWithin(0.0f).of(+0.0f); assertThat(+1.0f).isNotWithin(0.0f).of(-0.0f); assertThat(-1.0f).isNotWithin(0.0f).of(+0.0f); assertThat(-1.0f).isNotWithin(0.0f).of(-0.0f); assertThatIsNotWithinFails(-0.0f, 0.0f, 0.0f); } @Test public void isWithinZeroTolerance() { float max = Float.MAX_VALUE; assertThat(max).isWithin(0.0f).of(max); assertThat(NEARLY_MAX).isWithin(0.0f).of(NEARLY_MAX); assertThatIsWithinFails(max, 0.0f, NEARLY_MAX); assertThatIsWithinFails(NEARLY_MAX, 0.0f, max); float negativeMax = -1.0f * Float.MAX_VALUE; assertThat(negativeMax).isWithin(0.0f).of(negativeMax); assertThat(NEGATIVE_NEARLY_MAX).isWithin(0.0f).of(NEGATIVE_NEARLY_MAX); assertThatIsWithinFails(negativeMax, 0.0f, NEGATIVE_NEARLY_MAX); assertThatIsWithinFails(NEGATIVE_NEARLY_MAX, 0.0f, 
negativeMax); float min = Float.MIN_VALUE; assertThat(min).isWithin(0.0f).of(min); assertThat(JUST_OVER_MIN).isWithin(0.0f).of(JUST_OVER_MIN); assertThatIsWithinFails(min, 0.0f, JUST_OVER_MIN); assertThatIsWithinFails(JUST_OVER_MIN, 0.0f, min); float negativeMin = -1.0f * Float.MIN_VALUE; assertThat(negativeMin).isWithin(0.0f).of(negativeMin); assertThat(JUST_UNDER_NEGATIVE_MIN).isWithin(0.0f).of(JUST_UNDER_NEGATIVE_MIN); assertThatIsWithinFails(negativeMin, 0.0f, JUST_UNDER_NEGATIVE_MIN); assertThatIsWithinFails(JUST_UNDER_NEGATIVE_MIN, 0.0f, negativeMin); } @Test public void isNotWithinZeroTolerance() { float max = Float.MAX_VALUE; assertThatIsNotWithinFails(max, 0.0f, max); assertThatIsNotWithinFails(NEARLY_MAX, 0.0f, NEARLY_MAX); assertThat(max).isNotWithin(0.0f).of(NEARLY_MAX); assertThat(NEARLY_MAX).isNotWithin(0.0f).of(max); float min = Float.MIN_VALUE; assertThatIsNotWithinFails(min, 0.0f, min); assertThatIsNotWithinFails(JUST_OVER_MIN, 0.0f, JUST_OVER_MIN); assertThat(min).isNotWithin(0.0f).of(JUST_OVER_MIN); assertThat(JUST_OVER_MIN).isNotWithin(0.0f).of(min); } @Test public void isWithinNonFinite() { assertThatIsWithinFails(Float.NaN, 0.00001f, Float.NaN); assertThatIsWithinFails(Float.NaN, 0.00001f, Float.POSITIVE_INFINITY); assertThatIsWithinFails(Float.NaN, 0.00001f, Float.NEGATIVE_INFINITY); assertThatIsWithinFails(Float.NaN, 0.00001f, +0.0f); assertThatIsWithinFails(Float.NaN, 0.00001f, -0.0f); assertThatIsWithinFails(Float.NaN, 0.00001f, +1.0f); assertThatIsWithinFails(Float.NaN, 0.00001f, -0.0f); assertThatIsWithinFails(Float.POSITIVE_INFINITY, 0.00001f, Float.POSITIVE_INFINITY); assertThatIsWithinFails(Float.POSITIVE_INFINITY, 0.00001f, Float.NEGATIVE_INFINITY); assertThatIsWithinFails(Float.POSITIVE_INFINITY, 0.00001f, +0.0f); assertThatIsWithinFails(Float.POSITIVE_INFINITY, 0.00001f, -0.0f); assertThatIsWithinFails(Float.POSITIVE_INFINITY, 0.00001f, +1.0f); assertThatIsWithinFails(Float.POSITIVE_INFINITY, 0.00001f, -0.0f); 
assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, Float.NEGATIVE_INFINITY); assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, +0.0f); assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, -0.0f); assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, +1.0f); assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, -0.0f); assertThatIsWithinFails(+1.0f, 0.00001f, Float.NaN); assertThatIsWithinFails(+1.0f, 0.00001f, Float.POSITIVE_INFINITY); assertThatIsWithinFails(+1.0f, 0.00001f, Float.NEGATIVE_INFINITY); } @Test public void isNotWithinNonFinite() { assertThatIsNotWithinFails(Float.NaN, 0.00001f, Float.NaN); assertThatIsNotWithinFails(Float.NaN, 0.00001f, Float.POSITIVE_INFINITY); assertThatIsNotWithinFails(Float.NaN, 0.00001f, Float.NEGATIVE_INFINITY); assertThatIsNotWithinFails(Float.NaN, 0.00001f, +0.0f); assertThatIsNotWithinFails(Float.NaN, 0.00001f, -0.0f); assertThatIsNotWithinFails(Float.NaN, 0.00001f, +1.0f); assertThatIsNotWithinFails(Float.NaN, 0.00001f, -0.0f); assertThatIsNotWithinFails(Float.POSITIVE_INFINITY, 0.00001f, Float.POSITIVE_INFINITY); assertThatIsNotWithinFails(Float.POSITIVE_INFINITY, 0.00001f, Float.NEGATIVE_INFINITY); assertThatIsNotWithinFails(Float.POSITIVE_INFINITY, 0.00001f, +0.0f); assertThatIsNotWithinFails(Float.POSITIVE_INFINITY, 0.00001f, -0.0f); assertThatIsNotWithinFails(Float.POSITIVE_INFINITY, 0.00001f, +1.0f); assertThatIsNotWithinFails(Float.POSITIVE_INFINITY, 0.00001f, -0.0f); assertThatIsNotWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, Float.NEGATIVE_INFINITY); assertThatIsNotWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, +0.0f); assertThatIsNotWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, -0.0f); assertThatIsNotWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, +1.0f); assertThatIsNotWithinFails(Float.NEGATIVE_INFINITY, 0.00001f, -0.0f); assertThatIsNotWithinFails(+1.0f, 0.00001f, Float.NaN); assertThatIsNotWithinFails(+1.0f, 0.00001f, Float.POSITIVE_INFINITY); 
assertThatIsNotWithinFails(+1.0f, 0.00001f, Float.NEGATIVE_INFINITY); } @SuppressWarnings("TruthSelfEquals") @Test public void isEqualTo() { assertThat(GOLDEN).isEqualTo(GOLDEN); assertThatIsEqualToFails(GOLDEN, JUST_OVER_GOLDEN); assertThat(Float.POSITIVE_INFINITY).isEqualTo(Float.POSITIVE_INFINITY); assertThat(Float.NaN).isEqualTo(Float.NaN); assertThat((Float) null).isEqualTo(null); assertThat(1.0f).isEqualTo(1); } private static void assertThatIsEqualToFails(final float actual, final float expected) { ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback = new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() { @Override public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) { expect.that(actual).isEqualTo(expected); } }; expectFailure(callback); } @Test public void isNotEqualTo() { assertThatIsNotEqualToFails(GOLDEN); assertThat(GOLDEN).isNotEqualTo(JUST_OVER_GOLDEN); assertThatIsNotEqualToFails(Float.POSITIVE_INFINITY); assertThatIsNotEqualToFails(Float.NaN); assertThat(-0.0f).isNotEqualTo(0.0f); assertThatIsNotEqualToFails(null); assertThat(1.23f).isNotEqualTo(1.23); assertThat(1.0f).isNotEqualTo(2); } private static void assertThatIsNotEqualToFails(@NullableDecl final Float value) { ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback = new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() { @Override public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) { expect.that(value).isNotEqualTo(value); } }; expectFailure(callback); } @Test public void isZero() { assertThat(0.0f).isZero(); assertThat(-0.0f).isZero(); assertThatIsZeroFails(Float.MIN_VALUE); assertThatIsZeroFails(-1.23f); assertThatIsZeroFails(Float.POSITIVE_INFINITY); assertThatIsZeroFails(Float.NaN); assertThatIsZeroFails(null); } private static void assertThatIsZeroFails(@NullableDecl final Float value) { ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback = 
        // (continuation of assertThatIsZeroFails) Callback that applies isZero() to the
        // value under test; expectFailure() below captures the resulting AssertionError.
        new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() {
          @Override
          public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) {
            expect.that(value).isZero();
          }
        };
    AssertionError failure = expectFailure(callback);
    // isZero() failures always report the fixed fact key "expected zero".
    assertThat(failure).factKeys().containsExactly("expected zero", "but was").inOrder();
  }

  /** isNonZero() accepts any non-zero value, including infinities and NaN; both zeros fail. */
  @Test
  public void isNonZero() {
    assertThatIsNonZeroFails(0.0f, "expected not to be zero");
    assertThatIsNonZeroFails(-0.0f, "expected not to be zero");
    assertThat(Float.MIN_VALUE).isNonZero();
    assertThat(-1.23f).isNonZero();
    assertThat(Float.POSITIVE_INFINITY).isNonZero();
    assertThat(Float.NaN).isNonZero();
    // A null actual value fails with a different fact key than an actual zero does.
    assertThatIsNonZeroFails(null, "expected a float other than zero");
  }

  /**
   * Asserts that isNonZero() fails for {@code value} and that the failure's fact keys are
   * exactly {@code factKey} followed by "but was".
   */
  private static void assertThatIsNonZeroFails(@NullableDecl final Float value, String factKey) {
    ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback =
        new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() {
          @Override
          public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) {
            expect.that(value).isNonZero();
          }
        };
    AssertionError failure = expectFailure(callback);
    assertThat(failure).factKeys().containsExactly(factKey, "but was").inOrder();
  }

  /** Only POSITIVE_INFINITY passes isPositiveInfinity(); finite, -inf, NaN, and null fail. */
  @Test
  public void isPositiveInfinity() {
    assertThat(Float.POSITIVE_INFINITY).isPositiveInfinity();
    assertThatIsPositiveInfinityFails(1.23f);
    assertThatIsPositiveInfinityFails(Float.NEGATIVE_INFINITY);
    assertThatIsPositiveInfinityFails(Float.NaN);
    assertThatIsPositiveInfinityFails(null);
  }

  /** Asserts that isPositiveInfinity() fails for {@code value}; the message is not inspected. */
  private static void assertThatIsPositiveInfinityFails(@NullableDecl final Float value) {
    ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback =
        new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() {
          @Override
          public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) {
            expect.that(value).isPositiveInfinity();
          }
        };
    expectFailure(callback);
  }

  /** Only NEGATIVE_INFINITY passes isNegativeInfinity(); finite, +inf, NaN, and null fail. */
  @Test
  public void isNegativeInfinity() {
    assertThat(Float.NEGATIVE_INFINITY).isNegativeInfinity();
    assertThatIsNegativeInfinityFails(1.23f);
    assertThatIsNegativeInfinityFails(Float.POSITIVE_INFINITY);
    assertThatIsNegativeInfinityFails(Float.NaN);
    assertThatIsNegativeInfinityFails(null);
  }

  /** Asserts that isNegativeInfinity() fails for {@code value}; the message is not inspected. */
  private static void assertThatIsNegativeInfinityFails(@NullableDecl final Float value) {
    ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback =
        new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() {
          @Override
          public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) {
            expect.that(value).isNegativeInfinity();
          }
        };
    expectFailure(callback);
  }

  /** Only Float.NaN passes isNaN(); finite values, infinities, and null fail. */
  @Test
  public void isNaN() {
    assertThat(Float.NaN).isNaN();
    assertThatIsNaNFails(1.23f);
    assertThatIsNaNFails(Float.POSITIVE_INFINITY);
    assertThatIsNaNFails(Float.NEGATIVE_INFINITY);
    assertThatIsNaNFails(null);
  }

  /** Asserts that isNaN() fails for {@code value}; the message is not inspected. */
  private static void assertThatIsNaNFails(@NullableDecl final Float value) {
    ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback =
        new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() {
          @Override
          public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) {
            expect.that(value).isNaN();
          }
        };
    expectFailure(callback);
  }

  /** isFinite() accepts ordinary values; infinities, NaN, and null fail. */
  @Test
  public void isFinite() {
    assertThat(1.23f).isFinite();
    assertThat(Float.MAX_VALUE).isFinite();
    // NOTE(review): -1.0 is a double literal, so this expression is a double and
    // dispatches to assertThat(double) rather than assertThat(float) — confirm intended.
    assertThat(-1.0 * Float.MIN_VALUE).isFinite();
    assertThatIsFiniteFails(Float.POSITIVE_INFINITY);
    assertThatIsFiniteFails(Float.NEGATIVE_INFINITY);
    assertThatIsFiniteFails(Float.NaN);
    assertThatIsFiniteFails(null);
  }

  /**
   * Asserts that isFinite() fails for {@code value} and that the failure reports the
   * fixed fact keys "expected to be finite" / "but was".
   */
  private static void assertThatIsFiniteFails(@NullableDecl final Float value) {
    ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float> callback =
        new ExpectFailure.SimpleSubjectBuilderCallback<FloatSubject, Float>() {
          @Override
          public void invokeAssertion(SimpleSubjectBuilder<FloatSubject, Float> expect) {
            expect.that(value).isFinite();
          }
        };
    AssertionError failure = expectFailure(callback);
    assertThat(failure).factKeys().containsExactly("expected to be finite", "but was").inOrder();
  }

  /** isNotNaN() accepts every non-NaN value, including both infinities. */
  @Test
  public void isNotNaN() {
    assertThat(1.23f).isNotNaN();
    assertThat(Float.MAX_VALUE).isNotNaN();
    // NOTE(review): double expression again (see isFinite) — confirm intended.
    assertThat(-1.0 * Float.MIN_VALUE).isNotNaN();
    assertThat(Float.POSITIVE_INFINITY).isNotNaN();
    assertThat(Float.NEGATIVE_INFINITY).isNotNaN();
  }

  @Test
  public void isNotNaNIsNaN() {
    expectFailureWhenTestingThat(Float.NaN).isNotNaN();
  }

  /** A null actual value fails isNotNaN() with its own dedicated fact key. */
  @Test
  public void isNotNaNIsNull() {
    expectFailureWhenTestingThat(null).isNotNaN();
    assertFailureKeys("expected a float other than NaN", "but was");
  }

  // --- int-overload comparison tests: exercise the float-vs-int comparison paths,
  // --- including int values with no exact float representation (around 2^30).

  @Test
  public void isGreaterThan_int_strictly() {
    expectFailureWhenTestingThat(2.0f).isGreaterThan(3);
  }

  @Test
  public void isGreaterThan_int() {
    // The comparison is strict: 2.0f is not greater than 2.
    expectFailureWhenTestingThat(2.0f).isGreaterThan(2);
    assertThat(2.0f).isGreaterThan(1);
    // 0x1.0p30f == 2^30 exactly; (1 << 30) - 1 is not exactly representable as float.
    assertThat(0x1.0p30f).isGreaterThan((1 << 30) - 1);
  }

  @Test
  public void isLessThan_int_strictly() {
    expectFailureWhenTestingThat(2.0f).isLessThan(1);
  }

  @Test
  public void isLessThan_int() {
    // The comparison is strict: 2.0f is not less than 2.
    expectFailureWhenTestingThat(2.0f).isLessThan(2);
    assertThat(2.0f).isLessThan(3);
    // (1 << 30) + 1 is not exactly representable as float.
    assertThat(0x1.0p30f).isLessThan((1 << 30) + 1);
  }

  @Test
  public void isAtLeast_int() {
    expectFailureWhenTestingThat(2.0f).isAtLeast(3);
    assertThat(2.0f).isAtLeast(2);
    assertThat(2.0f).isAtLeast(1);
  }

  @Test
  public void isAtLeast_int_withNoExactFloatRepresentation() {
    // 2^30 as a float must not be considered >= 2^30 + 1.
    expectFailureWhenTestingThat(0x1.0p30f).isAtLeast((1 << 30) + 1);
  }

  @Test
  public void isAtMost_int() {
    expectFailureWhenTestingThat(2.0f).isAtMost(1);
    assertThat(2.0f).isAtMost(2);
    assertThat(2.0f).isAtMost(3);
  }

  @Test
  public void isAtMost_int_withNoExactFloatRepresentation() {
    // 2^30 as a float must not be considered <= 2^30 - 1.
    expectFailureWhenTestingThat(0x1.0p30f).isAtMost((1 << 30) - 1);
  }

  /** Starts a failure-expecting assertion chain on {@code actual} (may be null). */
  private FloatSubject expectFailureWhenTestingThat(Float actual) {
    return expectFailure.whenTesting().that(actual);
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.connector.thrift.integration;

import com.facebook.presto.Session;
import com.facebook.presto.connector.thrift.ThriftPlugin;
import com.facebook.presto.connector.thrift.location.HostList;
import com.facebook.presto.connector.thrift.server.ThriftTpchService;
import com.facebook.presto.cost.CostCalculator;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.QualifiedObjectName;
import com.facebook.presto.server.testing.TestingPrestoServer;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.Plugin;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.QueryRunner;
import com.facebook.presto.testing.TestingAccessControlManager;
import com.facebook.presto.tests.DistributedQueryRunner;
import com.facebook.presto.transaction.TransactionManager;
import com.facebook.swift.codec.ThriftCodecManager;
import com.facebook.swift.service.ThriftServer;
import com.facebook.swift.service.ThriftServiceProcessor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.log.Logger;
import io.airlift.testing.Closeables;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.Lock;

import static com.facebook.presto.testing.TestingSession.testSessionBuilder;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.Objects.requireNonNull;

/**
 * Test harness that boots a set of in-process Thrift TPC-H servers plus a distributed
 * Presto query runner configured with the thrift connector pointed at those servers.
 * Utility class — not instantiable.
 */
public final class ThriftQueryRunner
{
    private ThriftQueryRunner() {}

    /**
     * Starts {@code thriftServers} Thrift TPC-H servers and a {@code workers}-node
     * distributed query runner wired to them.
     *
     * @return a QueryRunner whose close() also shuts down the Thrift servers
     * @throws Exception if either the servers or the runner fail to start; anything
     *         already started is closed before rethrowing
     */
    public static QueryRunner createThriftQueryRunner(int thriftServers, int workers)
            throws Exception
    {
        List<ThriftServer> servers = null;
        DistributedQueryRunner runner = null;
        try {
            servers = startThriftServers(thriftServers);
            runner = createThriftQueryRunnerInternal(servers, workers);
            // The wrapper takes ownership of both the runner and the servers.
            return new ThriftQueryRunnerWithServers(runner, servers);
        }
        catch (Throwable t) {
            // closeQuietly tolerates null, so the runner can be closed unconditionally.
            Closeables.closeQuietly(runner);
            // runner might be null, so closing servers explicitly
            if (servers != null) {
                for (ThriftServer server : servers) {
                    Closeables.closeQuietly(server);
                }
            }
            throw t;
        }
    }

    /**
     * Manual entry point: starts 3 Thrift servers and 3 workers, then logs the
     * coordinator URL so the cluster can be queried interactively.
     */
    public static void main(String[] args)
            throws Exception
    {
        ThriftQueryRunnerWithServers queryRunner = (ThriftQueryRunnerWithServers) createThriftQueryRunner(3, 3);
        // brief pause before logging the banner; the runner itself is already started
        Thread.sleep(10);
        Logger log = Logger.get(ThriftQueryRunner.class);
        log.info("======== SERVER STARTED ========");
        log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
    }

    /**
     * Starts the requested number of Thrift servers, each serving ThriftTpchService
     * on an OS-assigned port.
     */
    private static List<ThriftServer> startThriftServers(int thriftServers)
    {
        List<ThriftServer> servers = new ArrayList<>(thriftServers);
        for (int i = 0; i < thriftServers; i++) {
            ThriftServiceProcessor processor = new ThriftServiceProcessor(new ThriftCodecManager(), ImmutableList.of(), new ThriftTpchService());
            // start() returns the server itself (fluent API)
            servers.add(new ThriftServer(processor).start());
        }
        return servers;
    }

    /**
     * Builds the distributed query runner and registers a "thrift" catalog whose
     * static host list points at the already-started servers.
     */
    private static DistributedQueryRunner createThriftQueryRunnerInternal(List<ThriftServer> servers, int workers)
            throws Exception
    {
        List<HostAddress> addresses = servers.stream()
                .map(server -> HostAddress.fromParts("localhost", server.getPort()))
                .collect(toImmutableList());
        HostList hosts = HostList.fromList(addresses);

        Session defaultSession = testSessionBuilder()
                .setCatalog("thrift")
                .setSchema("tiny")
                .build();
        DistributedQueryRunner queryRunner = new DistributedQueryRunner(defaultSession, workers);
        queryRunner.installPlugin(new ThriftPlugin());
        Map<String, String> connectorProperties = ImmutableMap.of(
                "static-location.hosts", hosts.stringValue(),
                "PrestoThriftService.thrift.client.connect-timeout", "30s"
        );
        queryRunner.createCatalog("thrift", "presto-thrift", connectorProperties);
        return queryRunner;
    }

    /**
     * Wraps QueryRunner and a list of ThriftServers to clean them up together.
     * All QueryRunner methods delegate to the wrapped DistributedQueryRunner;
     * close() additionally shuts down the Thrift servers.
     */
    private static class ThriftQueryRunnerWithServers
            implements QueryRunner
    {
        private final DistributedQueryRunner source;
        private final List<ThriftServer> thriftServers;

        private ThriftQueryRunnerWithServers(DistributedQueryRunner source, List<ThriftServer> thriftServers)
        {
            this.source = requireNonNull(source, "source is null");
            // defensive copy: the wrapper owns the server list after construction
            this.thriftServers = ImmutableList.copyOf(requireNonNull(thriftServers, "thriftServers is null"));
        }

        /** Exposes the coordinator for callers that need its URL (see main()). */
        public TestingPrestoServer getCoordinator()
        {
            return source.getCoordinator();
        }

        /** Closes the query runner first, then every Thrift server, all best-effort. */
        @Override
        public void close()
        {
            Closeables.closeQuietly(source);
            for (ThriftServer server : thriftServers) {
                Closeables.closeQuietly(server);
            }
        }

        @Override
        public int getNodeCount()
        {
            return source.getNodeCount();
        }

        @Override
        public Session getDefaultSession()
        {
            return source.getDefaultSession();
        }

        @Override
        public TransactionManager getTransactionManager()
        {
            return source.getTransactionManager();
        }

        @Override
        public Metadata getMetadata()
        {
            return source.getMetadata();
        }

        @Override
        public CostCalculator getCostCalculator()
        {
            return source.getCostCalculator();
        }

        @Override
        public TestingAccessControlManager getAccessControl()
        {
            return source.getAccessControl();
        }

        @Override
        public MaterializedResult execute(String sql)
        {
            return source.execute(sql);
        }

        @Override
        public MaterializedResult execute(Session session, String sql)
        {
            return source.execute(session, sql);
        }

        @Override
        public List<QualifiedObjectName> listTables(Session session, String catalog, String schema)
        {
            return source.listTables(session, catalog, schema);
        }

        @Override
        public boolean tableExists(Session session, String table)
        {
            return source.tableExists(session, table);
        }

        @Override
        public void installPlugin(Plugin plugin)
        {
            source.installPlugin(plugin);
        }

        @Override
        public void createCatalog(String catalogName, String connectorName, Map<String, String> properties)
        {
            source.createCatalog(catalogName, connectorName, properties);
        }

        @Override
        public Lock getExclusiveLock()
        {
            return source.getExclusiveLock();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.utils.obs; import java.util.Arrays; import java.io.Serializable; /** An "open" BitSet implementation that allows direct access to the array of words * storing the bits. * <p/> * Unlike java.util.bitset, the fact that bits are packed into an array of longs * is part of the interface. This allows efficient implementation of other algorithms * by someone other than the author. It also allows one to efficiently implement * alternate serialization or interchange formats. * <p/> * <code>OpenBitSet</code> is faster than <code>java.util.BitSet</code> in most operations * and *much* faster at calculating cardinality of sets and results of set operations. * It can also handle sets of larger cardinality (up to 64 * 2**32-1) * <p/> * The goals of <code>OpenBitSet</code> are the fastest implementation possible, and * maximum code reuse. Extra safety and encapsulation * may always be built on top, but if that's built in, the cost can never be removed (and * hence people re-implement their own version in order to get better performance). * If you want a "safe", totally encapsulated (and slower and limited) BitSet * class, use <code>java.util.BitSet</code>. 
 * <p/>
 * <h3>Performance Results</h3>
 * Test system: Pentium 4, Sun Java 1.5_06 -server -Xbatch -Xmx64M
 * <br/>BitSet size = 1,000,000
 * <br/>Results are java.util.BitSet time divided by OpenBitSet time.
 <table border="1">
 <tr>
  <th></th> <th>cardinality</th> <th>intersect_count</th> <th>union</th> <th>nextSetBit</th> <th>get</th> <th>iterator</th>
 </tr>
 <tr>
  <th>50% full</th> <td>3.36</td> <td>3.96</td> <td>1.44</td> <td>1.46</td> <td>1.99</td> <td>1.58</td>
 </tr>
 <tr>
  <th>1% full</th> <td>3.31</td> <td>3.90</td> <td>&nbsp;</td> <td>1.04</td> <td>&nbsp;</td> <td>0.99</td>
 </tr>
 </table>
 <br/>
 Test system: AMD Opteron, 64 bit linux, Sun Java 1.5_06 -server -Xbatch -Xmx64M
 <br/>BitSet size = 1,000,000
 <br/>Results are java.util.BitSet time divided by OpenBitSet time.
 <table border="1">
 <tr>
  <th></th> <th>cardinality</th> <th>intersect_count</th> <th>union</th> <th>nextSetBit</th> <th>get</th> <th>iterator</th>
 </tr>
 <tr>
  <th>50% full</th> <td>2.50</td> <td>3.50</td> <td>1.00</td> <td>1.03</td> <td>1.12</td> <td>1.25</td>
 </tr>
 <tr>
  <th>1% full</th> <td>2.51</td> <td>3.49</td> <td>&nbsp;</td> <td>1.00</td> <td>&nbsp;</td> <td>1.02</td>
 </tr>
 </table>
 */
public class OpenBitSet implements Cloneable, Serializable
{
  // Backing storage: bit i lives in bits[i>>6], at position (i & 0x3f) within that word.
  protected long[] bits;
  protected int wlen;   // number of words (elements) used in the array

  /** Constructs an OpenBitSet large enough to hold numBits.
   *
   * @param numBits number of bits the set must be able to hold without growing
   */
  public OpenBitSet(long numBits)
  {
    bits = new long[bits2words(numBits)];
    wlen = bits.length;
  }

  /** Constructs an OpenBitSet with a default capacity of 64 bits (one word). */
  public OpenBitSet()
  {
    this(64);
  }

  /** Constructs an OpenBitSet from an existing long[].
   * <br/>
   * The first 64 bits are in long[0],
   * with bit index 0 at the least significant bit, and bit index 63 at the most significant.
   * Given a bit index,
   * the word containing it is long[index/64], and it is at bit number index%64 within that word.
   * <p>
   * numWords are the number of elements in the array that contain
   * set bits (non-zero longs).
   * numWords should be &lt= bits.length, and
   * any existing words in the array at position &gt= numWords should be zero.
   * <p>
   * The array is NOT copied; this set takes ownership of it.
   */
  public OpenBitSet(long[] bits, int numWords)
  {
    this.bits = bits;
    this.wlen = numWords;
  }

  /** Returns the current capacity in bits (1 greater than the index of the last bit) */
  public long capacity() {
    // NOTE(review): bits.length << 6 is an *int* shift that is only widened to long
    // afterwards — for arrays of more than 2^25 words this overflows before widening.
    // Consider ((long) bits.length) << 6. The same pattern appears in length() below.
    return bits.length << 6;
  }

 /**
  * Returns the current capacity of this set.  Included for
  * compatibility.  This is *not* equal to {@link #cardinality}
  */
  public long size() {
      return capacity();
  }

  // @Override -- not until Java 1.6
  public int length() {
    return bits.length << 6;
  }

  /** Returns true if there are no set bits */
  public boolean isEmpty() { return cardinality()==0; }

  /** Expert: returns the long[] storing the bits */
  public long[] getBits() { return bits; }

  /** Expert: sets a new long[] to use as the bit storage */
  public void setBits(long[] bits) { this.bits = bits; }

  /** Expert: gets the number of longs in the array that are in use */
  public int getNumWords() { return wlen; }

  /** Expert: sets the number of longs in the array that are in use */
  public void setNumWords(int nWords) { this.wlen=nWords; }

  /** Returns true or false for the specified bit index.
   * Reading past the end of the array returns false rather than throwing. */
  public boolean get(int index) {
    int i = index >> 6;               // div 64
    // signed shift will keep a negative index and force an
    // array-index-out-of-bounds-exception, removing the need for an explicit check.
    if (i>=bits.length) return false;

    int bit = index & 0x3f;           // mod 64
    long bitmask = 1L << bit;
    return (bits[i] & bitmask) != 0;
  }

 /** Returns true or false for the specified bit index.
   * The index should be less than the OpenBitSet size
   */
  public boolean fastGet(int index) {
    int i = index >> 6;               // div 64
    // signed shift will keep a negative index and force an
    // array-index-out-of-bounds-exception, removing the need for an explicit check.
    int bit = index & 0x3f;           // mod 64
    long bitmask = 1L << bit;
    return (bits[i] & bitmask) != 0;
  }

  /** Returns true or false for the specified bit index.
   * Reading past the end of the array returns false rather than throwing. */
  public boolean get(long index) {
    int i = (int)(index >> 6);             // div 64
    if (i>=bits.length) return false;
    int bit = (int)index & 0x3f;           // mod 64
    long bitmask = 1L << bit;
    return (bits[i] & bitmask) != 0;
  }

  /** Returns true or false for the specified bit index.
   * The index should be less than the OpenBitSet size.
   */
  public boolean fastGet(long index) {
    int i = (int)(index >> 6);               // div 64
    int bit = (int)index & 0x3f;           // mod 64
    long bitmask = 1L << bit;
    return (bits[i] & bitmask) != 0;
  }

  /*
  // alternate implementation of get()
  public boolean get1(int index) {
    int i = index >> 6;                // div 64
    int bit = index & 0x3f;            // mod 64
    return ((bits[i]>>>bit) & 0x01) != 0;
    // this does a long shift and a bittest (on x86) vs
    // a long shift, and a long AND, (the test for zero is prob a no-op)
    // testing on a P4 indicates this is slower than (bits[i] & bitmask) != 0;
  }
  */


  /** returns 1 if the bit is set, 0 if not.
   * The index should be less than the OpenBitSet size
   */
  public int getBit(int index) {
    int i = index >> 6;                // div 64
    int bit = index & 0x3f;            // mod 64
    return ((int)(bits[i]>>>bit)) & 0x01;
  }


  /*
  public boolean get2(int index) {
    int word = index >> 6;            // div 64
    int bit = index & 0x0000003f;     // mod 64
    return (bits[word] << bit) < 0;   // hmmm, this would work if bit order were reversed
    // we could right shift and check for parity bit, if it was available to us.
  }
  */

  /** sets a bit, expanding the set size if necessary */
  public void set(long index) {
    int wordNum = expandingWordNum(index);
    int bit = (int)index & 0x3f;
    long bitmask = 1L << bit;
    bits[wordNum] |= bitmask;
  }


 /** Sets the bit at the specified index.
  * The index should be less than the OpenBitSet size.
  */
  public void fastSet(int index) {
    int wordNum = index >> 6;      // div 64
    int bit = index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] |= bitmask;
  }

 /** Sets the bit at the specified index.
  * The index should be less than the OpenBitSet size.
  */
  public void fastSet(long index) {
    int wordNum = (int)(index >> 6);
    int bit = (int)index & 0x3f;
    long bitmask = 1L << bit;
    bits[wordNum] |= bitmask;
  }

  /** Sets a range of bits, expanding the set size if necessary
   *
   * @param startIndex lower index
   * @param endIndex one-past the last bit to set
   */
  public void set(long startIndex, long endIndex) {
    if (endIndex <= startIndex) return;

    int startWord = (int)(startIndex>>6);

    // since endIndex is one past the end, this is index of the last
    // word to be changed.
    int endWord   = expandingWordNum(endIndex-1);

    // Java shift counts wrap mod 64, so "<< startIndex" really shifts by
    // (startIndex & 0x3f) — no explicit masking of the shift amount is needed.
    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap

    if (startWord == endWord) {
      bits[startWord] |= (startmask & endmask);
      return;
    }

    bits[startWord] |= startmask;
    Arrays.fill(bits, startWord+1, endWord, -1L);
    bits[endWord] |= endmask;
  }

  // Returns the word index for `index`, growing the array (and wlen) if the
  // bit lies beyond the current in-use range.
  protected int expandingWordNum(long index) {
    int wordNum = (int)(index >> 6);
    if (wordNum>=wlen) {
      ensureCapacity(index+1);
      wlen = wordNum+1;
    }
    return wordNum;
  }


  /** clears a bit.
   * The index should be less than the OpenBitSet size.
   */
  public void fastClear(int index) {
    int wordNum = index >> 6;
    int bit = index & 0x03f;
    long bitmask = 1L << bit;
    bits[wordNum] &= ~bitmask;
    // hmmm, it takes one more instruction to clear than it does to set... any
    // way to work around this?  If there were only 63 bits per word, we could
    // use a right shift of 10111111...111 in binary to position the 0 in the
    // correct place (using sign extension).
    // Could also use Long.rotateRight() or rotateLeft() *if* they were converted
    // by the JVM into a native instruction.
    // bits[word] &= Long.rotateLeft(0xfffffffe,bit);
  }

  /** clears a bit.
   * The index should be less than the OpenBitSet size.
   */
  public void fastClear(long index) {
    int wordNum = (int)(index >> 6); // div 64
    int bit = (int)index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] &= ~bitmask;
  }

  /** clears a bit, allowing access beyond the current set size without changing the size.*/
  public void clear(long index) {
    int wordNum = (int)(index >> 6); // div 64
    if (wordNum>=wlen) return;
    int bit = (int)index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] &= ~bitmask;
  }

  /** Clears a range of bits.  Clearing past the end does not change the size of the set.
   *
   * @param startIndex lower index
   * @param endIndex one-past the last bit to clear
   */
  public void clear(int startIndex, int endIndex) {
    if (endIndex <= startIndex) return;

    int startWord = (startIndex>>6);
    if (startWord >= wlen) return;

    // since endIndex is one past the end, this is index of the last
    // word to be changed.
    int endWord   = ((endIndex-1)>>6);

    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap

    // invert masks since we are clearing
    startmask = ~startmask;
    endmask = ~endmask;

    if (startWord == endWord) {
      bits[startWord] &= (startmask | endmask);
      return;
    }

    bits[startWord] &= startmask;

    // only clear up to wlen; words beyond it are implicitly zero already
    int middle = Math.min(wlen, endWord);
    Arrays.fill(bits, startWord+1, middle, 0L);
    if (endWord < wlen) {
      bits[endWord] &= endmask;
    }
  }


  /** Clears a range of bits.  Clearing past the end does not change the size of the set.
   *
   * @param startIndex lower index
   * @param endIndex one-past the last bit to clear
   */
  public void clear(long startIndex, long endIndex) {
    if (endIndex <= startIndex) return;

    int startWord = (int)(startIndex>>6);
    if (startWord >= wlen) return;

    // since endIndex is one past the end, this is index of the last
    // word to be changed.
    int endWord   = (int)((endIndex-1)>>6);

    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap

    // invert masks since we are clearing
    startmask = ~startmask;
    endmask = ~endmask;

    if (startWord == endWord) {
      bits[startWord] &= (startmask | endmask);
      return;
    }

    bits[startWord] &= startmask;

    // only clear up to wlen; words beyond it are implicitly zero already
    int middle = Math.min(wlen, endWord);
    Arrays.fill(bits, startWord+1, middle, 0L);
    if (endWord < wlen) {
      bits[endWord] &= endmask;
    }
  }



  /** Sets a bit and returns the previous value.
   * The index should be less than the OpenBitSet size.
   */
  public boolean getAndSet(int index) {
    int wordNum = index >> 6;      // div 64
    int bit = index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    boolean val = (bits[wordNum] & bitmask) != 0;
    bits[wordNum] |= bitmask;
    return val;
  }

  /** Sets a bit and returns the previous value.
   * The index should be less than the OpenBitSet size.
   */
  public boolean getAndSet(long index) {
    int wordNum = (int)(index >> 6);      // div 64
    int bit = (int)index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    boolean val = (bits[wordNum] & bitmask) != 0;
    bits[wordNum] |= bitmask;
    return val;
  }

  /** flips a bit.
   * The index should be less than the OpenBitSet size.
   */
  public void fastFlip(int index) {
    int wordNum = index >> 6;      // div 64
    int bit = index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] ^= bitmask;
  }

  /** flips a bit.
   * The index should be less than the OpenBitSet size.
   */
  public void fastFlip(long index) {
    int wordNum = (int)(index >> 6);   // div 64
    int bit = (int)index & 0x3f;       // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] ^= bitmask;
  }

  /** flips a bit, expanding the set size if necessary */
  public void flip(long index) {
    int wordNum = expandingWordNum(index);
    int bit = (int)index & 0x3f;       // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] ^= bitmask;
  }

  /** flips a bit and returns the resulting bit value.
   * The index should be less than the OpenBitSet size.
   */
  public boolean flipAndGet(int index) {
    int wordNum = index >> 6;      // div 64
    int bit = index & 0x3f;     // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] ^= bitmask;
    return (bits[wordNum] & bitmask) != 0;
  }

  /** flips a bit and returns the resulting bit value.
   * The index should be less than the OpenBitSet size.
   */
  public boolean flipAndGet(long index) {
    int wordNum = (int)(index >> 6);   // div 64
    int bit = (int)index & 0x3f;       // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] ^= bitmask;
    return (bits[wordNum] & bitmask) != 0;
  }

  /** Flips a range of bits, expanding the set size if necessary
   *
   * @param startIndex lower index
   * @param endIndex one-past the last bit to flip
   */
  public void flip(long startIndex, long endIndex) {
    if (endIndex <= startIndex) return;
    int startWord = (int)(startIndex>>6);

    // since endIndex is one past the end, this is index of the last
    // word to be changed.
    int endWord   = expandingWordNum(endIndex-1);

    /*** Grrr, java shifting wraps around so -1L>>>64 == -1
     * for that reason, make sure not to use endmask if the bits to flip will
     * be zero in the last word (redefine endWord to be the last changed...)
    long startmask = -1L << (startIndex & 0x3f);     // example: 11111...111000
    long endmask = -1L >>> (64-(endIndex & 0x3f));   // example: 00111...111111
    ***/

    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap

    if (startWord == endWord) {
      bits[startWord] ^= (startmask & endmask);
      return;
    }

    bits[startWord] ^= startmask;

    for (int i=startWord+1; i<endWord; i++) {
      bits[i] = ~bits[i];
    }

    bits[endWord] ^= endmask;
  }


  /*
  public static int pop(long v0, long v1, long v2, long v3) {
    // derived from pop_array by setting last four elems to 0.
    // exchanges one pop() call for 10 elementary operations
    // saving about 7 instructions... is there a better way?
      long twosA=v0 & v1;
      long ones=v0^v1;

      long u2=ones^v2;
      long twosB =(ones&v2)|(u2&v3);
      ones=u2^v3;

      long fours=(twosA&twosB);
      long twos=twosA^twosB;

      return (pop(fours)<<2)
             + (pop(twos)<<1)
             + pop(ones);

  }
  */


  /** @return the number of set bits */
  public long cardinality() {
    return BitUtil.pop_array(bits,0,wlen);
  }

 /** Returns the popcount or cardinality of the intersection of the two sets.
   * Neither set is modified.
   */
  public static long intersectionCount(OpenBitSet a, OpenBitSet b) {
    return BitUtil.pop_intersect(a.bits, b.bits, 0, Math.min(a.wlen, b.wlen));
  }

  /** Returns the popcount or cardinality of the union of the two sets.
    * Neither set is modified.
    */
  public static long unionCount(OpenBitSet a, OpenBitSet b) {
    long tot = BitUtil.pop_union(a.bits, b.bits, 0, Math.min(a.wlen, b.wlen));
    // the longer set's tail past the shorter one contributes its own popcount
    if (a.wlen < b.wlen) {
      tot += BitUtil.pop_array(b.bits, a.wlen, b.wlen-a.wlen);
    } else if (a.wlen > b.wlen) {
      tot += BitUtil.pop_array(a.bits, b.wlen, a.wlen-b.wlen);
    }
    return tot;
  }

  /** Returns the popcount or cardinality of "a and not b"
   * or "intersection(a, not(b))".
   * Neither set is modified.
   */
  public static long andNotCount(OpenBitSet a, OpenBitSet b) {
    long tot = BitUtil.pop_andnot(a.bits, b.bits, 0, Math.min(a.wlen, b.wlen));
    if (a.wlen > b.wlen) {
      tot += BitUtil.pop_array(a.bits, b.wlen, a.wlen-b.wlen);
    }
    return tot;
  }

  /** Returns the popcount or cardinality of the exclusive-or of the two sets.
   * Neither set is modified.
   */
  public static long xorCount(OpenBitSet a, OpenBitSet b) {
    long tot = BitUtil.pop_xor(a.bits, b.bits, 0, Math.min(a.wlen, b.wlen));
    if (a.wlen < b.wlen) {
      tot += BitUtil.pop_array(b.bits, a.wlen, b.wlen-a.wlen);
    } else if (a.wlen > b.wlen) {
      tot += BitUtil.pop_array(a.bits, b.wlen, a.wlen-b.wlen);
    }
    return tot;
  }


  /** Returns the index of the first set bit starting at the index specified.
   *  -1 is returned if there are no more set bits.
   */
  public int nextSetBit(int index) {
    int i = index>>6;
    if (i>=wlen) return -1;
    int subIndex = index & 0x3f;      // index within the word
    long word = bits[i] >> subIndex;  // skip all the bits to the right of index

    if (word!=0) {
      return (i<<6) + subIndex + BitUtil.ntz(word);
    }

    while(++i < wlen) {
      word = bits[i];
      if (word!=0) return (i<<6) + BitUtil.ntz(word);
    }

    return -1;
  }

  /** Returns the index of the first set bit starting at the index specified.
   *  -1 is returned if there are no more set bits.
   */
  public long nextSetBit(long index) {
    int i = (int)(index>>>6);
    if (i>=wlen) return -1;
    int subIndex = (int)index & 0x3f; // index within the word
    long word = bits[i] >>> subIndex;  // skip all the bits to the right of index

    if (word!=0) {
      return (((long)i)<<6) + (subIndex + BitUtil.ntz(word));
    }

    while(++i < wlen) {
      word = bits[i];
      if (word!=0) return (((long)i)<<6) + BitUtil.ntz(word);
    }

    return -1;
  }


  /** Deep copy: clones the object and its backing word array. */
  @Override
  public Object clone() {
    try {
      OpenBitSet obs = (OpenBitSet)super.clone();
      obs.bits = obs.bits.clone();  // hopefully an array clone is as fast(er) than arraycopy
      return obs;
    } catch (CloneNotSupportedException e) {
      throw new RuntimeException(e);
    }
  }

  /** this = this AND other */
  public void intersect(OpenBitSet other) {
    int newLen= Math.min(this.wlen,other.wlen);
    long[] thisArr = this.bits;
    long[] otherArr = other.bits;
    // testing against zero can be more efficient
    int pos=newLen;
    while(--pos>=0) {
      thisArr[pos] &= otherArr[pos];
    }
    if (this.wlen > newLen) {
      // fill zeros from the new shorter length to the old length
      Arrays.fill(bits,newLen,this.wlen,0);
    }
    this.wlen = newLen;
  }

  /** this = this OR other */
  public void union(OpenBitSet other) {
    int newLen = Math.max(wlen,other.wlen);
    // grow first: thisArr is captured only after any reallocation
    ensureCapacityWords(newLen);

    long[] thisArr = this.bits;
    long[] otherArr = other.bits;
    int pos=Math.min(wlen,other.wlen);
    while(--pos>=0) {
      thisArr[pos] |= otherArr[pos];
    }
    if (this.wlen < newLen) {
      System.arraycopy(otherArr, this.wlen, thisArr, this.wlen, newLen-this.wlen);
    }
    this.wlen = newLen;
  }


  /** Remove all elements set in other. this = this AND_NOT other */
  public void remove(OpenBitSet other) {
    int idx = Math.min(wlen,other.wlen);
    long[] thisArr = this.bits;
    long[] otherArr = other.bits;
    while(--idx>=0) {
      thisArr[idx] &= ~otherArr[idx];
    }
  }

  /** this = this XOR other */
  public void xor(OpenBitSet other) {
    int newLen = Math.max(wlen,other.wlen);
    // grow first: thisArr is captured only after any reallocation
    ensureCapacityWords(newLen);

    long[] thisArr = this.bits;
    long[] otherArr = other.bits;
    int pos=Math.min(wlen,other.wlen);
    while(--pos>=0) {
      thisArr[pos] ^= otherArr[pos];
    }
    if (this.wlen < newLen) {
      System.arraycopy(otherArr, this.wlen, thisArr, this.wlen, newLen-this.wlen);
    }
    this.wlen = newLen;
  }


  // some BitSet compatability methods

  //** see {@link intersect} */
  public void and(OpenBitSet other) {
    intersect(other);
  }

  //** see {@link union} */
  public void or(OpenBitSet other) {
    union(other);
  }

  //** see {@link andNot} */
  public void andNot(OpenBitSet other) {
    remove(other);
  }

  /** returns true if the sets have any elements in common */
  public boolean intersects(OpenBitSet other) {
    int pos = Math.min(this.wlen, other.wlen);
    long[] thisArr = this.bits;
    long[] otherArr = other.bits;
    while (--pos>=0) {
      if ((thisArr[pos] & otherArr[pos])!=0) return true;
    }
    return false;
  }



  /** Expand the long[] with the size given as a number of words (64 bit longs).
   * getNumWords() is unchanged by this call.
   */
  public void ensureCapacityWords(int numWords) {
    if (bits.length < numWords) {
      bits = ArrayUtil.grow(bits, numWords);
    }
  }

  /** Ensure that the long[] is big enough to hold numBits, expanding it if necessary.
   * getNumWords() is unchanged by this call.
   */
  public void ensureCapacity(long numBits) {
    ensureCapacityWords(bits2words(numBits));
  }

  /** Lowers numWords, the number of words in use,
   * by checking for trailing zero words.
   */
  public void trimTrailingZeros() {
    int idx = wlen-1;
    while (idx>=0 && bits[idx]==0) idx--;
    wlen = idx+1;
  }

  /** returns the number of 64 bit words it would take to hold numBits */
  public static int bits2words(long numBits) {
    return (int)(((numBits-1)>>>6)+1);
  }


  /** returns true if both sets have the same bits set */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof OpenBitSet)) return false;
    OpenBitSet a;
    OpenBitSet b = (OpenBitSet)o;
    // make a the larger set.
    if (b.wlen > this.wlen) {
      a = b; b=this;
    } else {
      a=this;
    }

    // check for any set bits out of the range of b
    for (int i=a.wlen-1; i>=b.wlen; i--) {
      if (a.bits[i]!=0) return false;
    }

    for (int i=b.wlen-1; i>=0; i--) {
      if (a.bits[i] != b.bits[i]) return false;
    }

    return true;
  }


  @Override
  public int hashCode() {
    // Start with a zero hash and use a mix that results in zero if the input is zero.
    // This effectively truncates trailing zeros without an explicit check.
    long h = 0;
    for (int i = bits.length; --i>=0;) {
      h ^= bits[i];
      h = (h << 1) | (h >>> 63); // rotate left
    }
    // fold leftmost bits into right and add a constant to prevent
    // empty sets from returning 0, which is too common.
    return (int)((h>>32) ^ h) + 0x98761234;
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.managers; import java.util.UUID; import java.util.concurrent.CountDownLatch; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.managers.communication.GridIoUserMessage; import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.plugin.AbstractTestPluginProvider; import org.apache.ignite.plugin.ExtensionRegistry; import org.apache.ignite.plugin.PluginContext; import org.apache.ignite.plugin.extensions.communication.IgniteMessageFactory; import org.apache.ignite.plugin.extensions.communication.MessageFactory; import org.apache.ignite.plugin.extensions.communication.MessageFactoryProvider; import org.apache.ignite.spi.IgniteSpi; import org.apache.ignite.spi.IgniteSpiAdapter; import org.apache.ignite.spi.IgniteSpiContext; import org.apache.ignite.spi.IgniteSpiException; import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.jetbrains.annotations.Nullable; import org.junit.Test; import static 
java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * Test Managers to add and remove local message listener.
 */
public class GridManagerLocalMessageListenerSelfTest extends GridCommonAbstractTest {
    /** Direct message type id used to register {@link GridIoUserMessage} with the message factory. */
    private static final short DIRECT_TYPE = 210;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        // Plugin registers the message factory for DIRECT_TYPE (see TestPluginProvider below).
        c.setPluginProviders(new TestPluginProvider());

        TcpCommunicationSpi commSpi = new TcpCommunicationSpi();

        c.setCommunicationSpi(commSpi);

        return c;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /**
     * Sends a message from node 0 to node 1 and verifies the local listener
     * registered on node 1 receives it within the timeout.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testSendMessage() throws Exception {
        startGridsMultiThreaded(2);

        IgniteSpiContext ctx0 = ((IgniteSpiAdapter)grid(0).context().io().getSpi()).getSpiContext();
        IgniteSpiContext ctx1 = ((IgniteSpiAdapter)grid(1).context().io().getSpi()).getSpiContext();

        String topic = "test-topic";

        final CountDownLatch latch = new CountDownLatch(1);

        ctx1.addLocalMessageListener(topic, new IgniteBiPredicate<UUID, Object>() {
            @Override public boolean apply(UUID nodeId, Object msg) {
                assertEquals("Message", msg);

                latch.countDown();

                return true;
            }
        });

        long time = System.nanoTime();

        ctx0.send(grid(1).localNode(), "Message", topic);

        assert latch.await(3, SECONDS);

        time = System.nanoTime() - time;

        info(">>>");
        info(">>> send() time (ms): " + MILLISECONDS.convert(time, NANOSECONDS));
        info(">>>");
    }

    /**
     * Starts a manager over the test SPI and checks it reports enabled
     * after start/onKernalStart (the SPI registers a local listener in
     * onContextInitialized0).
     *
     * @throws Exception If failed.
     */
    @Test
    public void testAddLocalMessageListener() throws Exception {
        startGrid();

        Manager mgr = new Manager(grid().context(), new Spi());

        mgr.start();

        mgr.onKernalStart(true);

        assertTrue(mgr.enabled());
    }

    /**
     * Runs the manager through a full start/stop cycle; onContextDestroyed0
     * removes the local listener registered on initialization.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testRemoveLocalMessageListener() throws Exception {
        startGrid();

        Manager mgr = new Manager(grid().context(), new Spi());

        assertTrue(mgr.enabled());

        mgr.onKernalStart(true);

        mgr.onKernalStop(false);

        mgr.stop(false);

        // NOTE(review): enabled() is asserted true even after stop — presumably
        // enabled() reflects SPI presence rather than lifecycle state; confirm
        // against GridManagerAdapter.
        assertTrue(mgr.enabled());
    }

    /** Minimal manager over the test SPI; start/stop are no-ops. */
    private static class Manager extends GridManagerAdapter<IgniteSpi> {
        /**
         * @param ctx Kernal context.
         * @param spis Specific SPI instance.
         */
        protected Manager(GridKernalContext ctx, IgniteSpi... spis) {
            super(ctx, spis);
        }

        /** {@inheritDoc} */
        @Override public void start() throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void stop(boolean cancel) throws IgniteCheckedException {
            // No-op.
        }
    }

    /**
     * Test Spi.
     */
    private static interface TestSpi extends IgniteSpi {
        // No-op.
    }

    /**
     * Spi that registers a local message listener on context initialization
     * and removes it on context destruction.
     */
    private static class Spi extends IgniteSpiAdapter implements TestSpi {
        /** Ignite Spi Context. **/
        private IgniteSpiContext spiCtx;

        /** Test message topic. **/
        private static final String TEST_TOPIC = "test_topic";

        /** {@inheritDoc} */
        @Override public void spiStart(@Nullable String igniteInstanceName) throws IgniteSpiException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void spiStop() throws IgniteSpiException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void onContextInitialized0(IgniteSpiContext spiCtx) throws IgniteSpiException {
            this.spiCtx = spiCtx;

            spiCtx.addLocalMessageListener(TEST_TOPIC, new IgniteBiPredicate<UUID, Object>() {
                @Override public boolean apply(UUID uuid, Object o) {
                    return true;
                }
            });
        }

        /** {@inheritDoc} */
        @Override public void onContextDestroyed0() {
            // NOTE(review): a *new* predicate instance is passed here, not the one
            // registered above — this only works if removeLocalMessageListener
            // matches by topic rather than by listener identity; verify.
            spiCtx.removeLocalMessageListener(TEST_TOPIC, new IgniteBiPredicate<UUID, Object>() {
                @Override public boolean apply(UUID uuid, Object o) {
                    return true;
                }
            });
        }
    }

    /** Plugin provider registering the GridIoUserMessage factory for DIRECT_TYPE. */
    public static class TestPluginProvider extends AbstractTestPluginProvider {
        /** {@inheritDoc} */
        @Override public String name() {
            return "TEST_PLUGIN";
        }

        /** {@inheritDoc} */
        @Override public void initExtensions(PluginContext ctx, ExtensionRegistry registry) {
            registry.registerExtension(MessageFactory.class, new MessageFactoryProvider() {
                @Override public void registerAll(IgniteMessageFactory factory) {
                    factory.register(DIRECT_TYPE, GridIoUserMessage::new);
                }
            });
        }
    }
}
package us.kbase.nextgen.dapi; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.StringReader; import java.io.Writer; import java.util.List; import java.util.Map; import java.util.Vector; import org.apache.commons.io.FileUtils; import org.apache.commons.io.output.WriterOutputStream; import us.kbase.jkidl.StaticIncludeProvider; import us.kbase.kidl.KbFuncdef; import us.kbase.kidl.KbList; import us.kbase.kidl.KbMapping; import us.kbase.kidl.KbModule; import us.kbase.kidl.KbModuleComp; import us.kbase.kidl.KbParameter; import us.kbase.kidl.KbScalar; import us.kbase.kidl.KbService; import us.kbase.kidl.KbStruct; import us.kbase.kidl.KbStructItem; import us.kbase.kidl.KbType; import us.kbase.kidl.KbTypedef; import us.kbase.kidl.KidlParser; import com.tinkerpop.blueprints.Graph; import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.impls.tg.TinkerGraph; import com.tinkerpop.blueprints.util.GraphHelper; import com.tinkerpop.blueprints.util.io.graphml.GraphMLWriter; /** * Class to generate type-type and type-method graph for a given specfile or collection of specfiles. 
 * This is a version of TypeMethodGraphGenerator that uses Tinker implementation of graphs, edges, nodes, and graphml writer
 *
 * @version 1.0
 * @author Pavel Novichkov
 *
 */
public class TypeMethodGraphGenerator2{

    // Node type tags stored in PROPERTY_NODE_TYPE: data type vs. method.
    private static final String NODE_TYPE_DATA = "D";
    private static final String NODE_TYPE_METHOD = "M";

    // Edge labels describing how two nodes are related in the graph.
    private static final String EDGE_TYPE_LIST_OF = "LIST_OF";
    private static final String EDGE_TYPE_HASH_KEY = "HASH_KEY";
    private static final String EDGE_TYPE_HASH_VALUE = "HASH_VALUE";
    private static final String EDGE_TYPE_SUBTYPE = "SUBTYPE";
    private static final String EDGE_TYPE_METHOD_PARAM = "METHOD_PARAM";
    private static final String EDGE_TYPE_METHOD_RETURN = "METHOD_RETURN";

    // Vertex property keys.
    // NOTE(review): declared as Object although every value is a String;
    // consider narrowing the declared type to String.
    private static final Object PROPERTY_NAME = "name";
    private static final Object PROPERTY_MODULE_NAME = "moduleName";
    private static final Object PROPERTY_TYPE_NAME = "typeName";
    private static final Object PROPERTY_NODE_TYPE = "nodeType";
    private static final Object PROPERTY_NODE_COMMENT = "comment";

    /**
     * Whether the type-type edges should be taken into account
     */
    private static boolean useType2TypeEdges;

    /**
     * Whether the method-type edges should be taken into account
     */
    private static boolean useType2MethodEdges;

    /**
     * Whether graphs for each individual specs should be built
     */
    private static boolean flProcessIndividualFiles;

    /**
     * Whether the combined graph for all specs should be built
     */
    private static boolean flBuildSingleGraph;

    /**
     * Include provider that provides specDocuments to be included in a given spec via "include" statement
     */
    private StaticIncludeProvider sip;

    /**
     * Run TypeMethodGraphGenerator: builds the include provider, then the
     * per-file graphs and/or the combined graph depending on the flags.
     * @param graphFileDir
     * @param specFileDir
     * @throws Exception
     */
    private void run(File specFileDir, File graphFileDir) throws Exception{
        buildIncludeProvider(specFileDir);
        if(flProcessIndividualFiles){
            processIndividualFiles(specFileDir, graphFileDir);
        }
        if(flBuildSingleGraph){
            buildSingleGraph(specFileDir, graphFileDir);
        }
    }

    /**
     * Builds include provider by registering every *.spec file in the
     * directory under its module name (file name up to the first dot).
     * @param cleanFileDir
     *
     * @throws IOException
     */
    private void buildIncludeProvider(File cleanFileDir) throws IOException {
        sip = new StaticIncludeProvider();
        for(File specFile: cleanFileDir.listFiles()){
            if(specFile.getName().endsWith(".spec")){
                String fileName = specFile.getName();
                String moduleName = fileName.substring(0, fileName.indexOf(".") );
                String specDocument = FileUtils.readFileToString(specFile);
                sip.addSpecFile(moduleName, specDocument);
            }
        }
    }

    /**
     * Builds individual graphs for spec files
     * @param specFileDir
     * @param graphFileDir
     */
    private void processIndividualFiles(File specFileDir, File graphFileDir){
        for(File specFile: specFileDir.listFiles()){
            if(specFile.getName().endsWith(".spec")){
                processSpecFile(specFile, graphFileDir);
            }
        }
    }

    /**
     * Builds a combined graph for all spec files and exports it to
     * "_combined.graphml". Per-file parse errors are printed and skipped.
     * @param specFileDir
     * @param graphFileDir
     */
    private void buildSingleGraph(File specFileDir, File graphFileDir){
        Graph graph = new TinkerGraph();

        // Build graph
        for(File specFile: specFileDir.listFiles()){
            if(!specFile.getName().endsWith(".spec")) continue;
            System.out.print("Doing spec file: " + specFile.getName() + "...");
            try{
                String specDocument = FileUtils.readFileToString(specFile);
                populateGraph(graph, specDocument);
                System.out.println(" Done!");
            } catch(Exception e){
                System.out.println(e.getMessage());
            }
        }

        // Export graph
        try{
            File graphFile = new File(graphFileDir, "_combined.graphml");
            FileWriter fw = new FileWriter(graphFile);
            exportGraphML(graph, fw);
            fw.flush();
            fw.close();
        }catch(Exception e){
            System.out.println(e.getMessage());
        }
    }

    /**
     * Processes one spec file: builds its graph and writes
     * "<specName>.graphml" into the export directory.
     * @param specFile
     * @param exportDir
     */
    private void processSpecFile(File specFile, File exportDir){
        try{
            System.out.print("Doing spec file: " + specFile.getName() + "...");
            File graphFile = new File(exportDir, specFile.getName() + ".graphml");
            Graph graph = new TinkerGraph();

            String specDocument = FileUtils.readFileToString(specFile);
            populateGraph(graph, specDocument);

            FileWriter fw = new FileWriter(graphFile);
            exportGraphML(graph, fw);
            fw.flush();
            fw.close();
            System.out.println(" Done!");
        } catch(Exception e){
            System.out.println(e.getMessage());
        }
    }

    // Gets or lazily creates the data-type vertex for a typedef.
    private Vertex buildNode(Graph graph, KbTypedef typedef){
        String id = nodeId(typedef);
        Vertex v = graph.getVertex(id);
        if(v == null){
            String moduleName = typedef.getModule();
            String typeName = typedef.getName();
            v = GraphHelper.addVertex(graph, id
                    , PROPERTY_NAME, id
                    , PROPERTY_MODULE_NAME, moduleName
                    , PROPERTY_TYPE_NAME, typeName
                    , PROPERTY_NODE_TYPE, NODE_TYPE_DATA
                    , PROPERTY_NODE_COMMENT, typedef.getComment()
                    );
        }
        return v;
    }

    // Gets or lazily creates the method vertex for a funcdef within a module.
    private Vertex buildNode(Graph graph, KbModule module, KbFuncdef funcdef){
        String id = nodeId(module, funcdef);
        Vertex v = graph.getVertex(id);
        if(v == null){
            String moduleName = module.getModuleName();
            String typeName = funcdef.getName();
            v = GraphHelper.addVertex(graph, id
                    , PROPERTY_NAME, id
                    , PROPERTY_MODULE_NAME, moduleName
                    , PROPERTY_TYPE_NAME, typeName
                    , PROPERTY_NODE_TYPE, NODE_TYPE_METHOD
                    , PROPERTY_NODE_COMMENT, funcdef.getComment()
                    );
        }
        return v;
    }

    /**
     * Builds the unique vertex id "D.<module>.<type>" for a typedef.
     * @param typedef
     * @return
     */
    private String nodeId(KbTypedef typedef){
        return NODE_TYPE_DATA + "." + typedef.getModule() + "." + typedef.getName();
    }

    // Builds the unique vertex id "M.<module>.<func>" for a funcdef.
    private String nodeId(KbModule module, KbFuncdef funcdef){
        return NODE_TYPE_METHOD + "." + module.getModuleName() + "." + funcdef.getName();
    }

    /**
     * Process a given type and all subtypes recursively, and add corresponding edges to the graph if needed
     *
     * @param graph
     * @param rootNode
     * @param edge
     * @param type
     * @throws Exception
     */
    private void processTypedef(Graph graph, KbModule module, Vertex rootNode, String edgeType, KbType type) throws Exception{

        // Do not create type-type edges if not needed
        if(!useType2TypeEdges) return;

        KbTypedef typedef = null;

        // If the type is either "typedef" or "typedef structure"
        if(type instanceof KbTypedef){
            typedef = (KbTypedef) type;
            type = typedef.getAliasType();

            // Check whether typedef was processed before
            boolean wasProcessed = graph.getVertex(nodeId(typedef)) != null;

            // Create a new node for this typedef and add edge between this node and the rootNode
            Vertex node = buildNode(graph, typedef);
            if(rootNode != null && node != null){
                // NOTE(review): literal "moduleName" used here instead of
                // PROPERTY_MODULE_NAME — same value, but inconsistent with
                // the rest of the class.
                GraphHelper.addEdge(graph, nextEdgeId(), node, rootNode, edgeType
                        , "moduleName", module.getModuleName());
            }

            // Do not process typedef further if it was processed before
            if(wasProcessed){
                return;
            }

            // Now the new node is the root
            rootNode = node;
        }

        if(type instanceof KbScalar){
            // nothing to be done
        }
        else if(type instanceof KbList){
            KbList kbList = (KbList) type;
            processTypedef(graph, module, rootNode, EDGE_TYPE_LIST_OF, kbList.getElementType());
        }
        else if (type instanceof KbMapping){
            KbMapping kbMapping = (KbMapping) type;
            processTypedef(graph, module, rootNode, EDGE_TYPE_HASH_KEY, kbMapping.getKeyType());
            processTypedef(graph, module, rootNode, EDGE_TYPE_HASH_VALUE, kbMapping.getValueType());
        }
        else if (type instanceof KbStruct){
            KbStruct kbStruct = (KbStruct) type;
            if(typedef == null){
                throw new Exception("If the type is KbStruct, then typedef should exist");
            }
            // Try add edges for all subtypes
            for(KbStructItem item: kbStruct.getItems()){
                processTypedef(graph, module, rootNode, EDGE_TYPE_SUBTYPE, item.getItemType());
            }
        }
    }

    // Recursively collects every KbTypedef reachable from the given type
    // (through lists, mappings and struct items) into the typedefs list.
    private void aggregateTypedefs(KbType type, List<KbTypedef> typedefs){
        if(type instanceof KbTypedef){
            typedefs.add( (KbTypedef) type);
        }
        else if(type instanceof KbScalar){
            // nothing to be done
        }
        else if(type instanceof KbList){
            KbList kbList = (KbList) type;
            aggregateTypedefs(kbList.getElementType(), typedefs);
        }
        else if (type instanceof KbMapping){
            KbMapping kbMapping = (KbMapping) type;
            aggregateTypedefs(kbMapping.getKeyType(), typedefs);
            aggregateTypedefs(kbMapping.getValueType(), typedefs);
        }
        else if (type instanceof KbStruct){
            KbStruct kbStruct = (KbStruct) type;
            for(KbStructItem item: kbStruct.getItems()){
                aggregateTypedefs(item.getItemType(), typedefs);
            }
        }
    }

    // Monotonic edge-id counter shared across all graphs built in this JVM.
    private static int _edgeId = 0;
    private Integer nextEdgeId() {
        return _edgeId++;
    }

    /**
     * Populate a graph for a given spec file: type-type edges for typedefs,
     * and (optionally) method-param / method-return edges for funcdefs.
     * @param graph
     * @param specDocument
     * @throws Exception
     */
    private void populateGraph(Graph graph, String specDocument) throws Exception {
        StringReader r = new StringReader(specDocument);

        List<KbTypedef> mtTypedefs = new Vector<KbTypedef>();

        Map<String, Map<String, String>> moduleToTypeToSchema = null;
        Map<?, ?> parseMap = KidlParser.parseSpecInt(r, moduleToTypeToSchema,sip);
        List<KbService> services = KidlParser.parseSpec(parseMap);

        for (KbService service : services) {
            for (KbModule module : service.getModules()) {
                for (KbModuleComp comp : module.getModuleComponents()) {
                    if (comp instanceof KbTypedef) {
                        KbTypedef typedef = (KbTypedef) comp;
                        processTypedef(graph, module, null, null, typedef);
                    }
                    else if (comp instanceof KbFuncdef) {
                        KbFuncdef func = (KbFuncdef) comp;
                        Vertex funcNode = buildNode(graph, module, func);

                        for (KbParameter param : func.getParameters()) {
                            mtTypedefs.clear();
                            aggregateTypedefs(param.getType(), mtTypedefs);
                            for(KbTypedef paramTypedef: mtTypedefs){
                                // try to add edges for subtypes
                                processTypedef(graph, module, null, null, paramTypedef);

                                // Add type-method edge only if it was requested
                                if(useType2MethodEdges)
                                {
                                    // add edge for datatype-method connection
                                    Vertex paramNode = buildNode(graph, paramTypedef);
                                    GraphHelper.addEdge(graph, nextEdgeId(), paramNode, funcNode, EDGE_TYPE_METHOD_PARAM
                                            , PROPERTY_MODULE_NAME, module.getModuleName());
                                }
                            }
                        }
                        for (KbParameter param : func.getReturnType()) {
                            mtTypedefs.clear();
                            aggregateTypedefs(param.getType(), mtTypedefs);
                            for(KbTypedef returnTypedef: mtTypedefs){
                                // try to add edges for subtypes
                                processTypedef(graph, module, null, null, returnTypedef);

                                // Add type-method edge only if it was requested
                                if(useType2MethodEdges)
                                {
                                    // add edge for datatype-method connection
                                    Vertex returnNode = buildNode(graph, returnTypedef);
                                    GraphHelper.addEdge(graph, nextEdgeId(), funcNode, returnNode, EDGE_TYPE_METHOD_RETURN
                                            , PROPERTY_MODULE_NAME, module.getModuleName());
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Export a graph in the GraphML format
     * @param graph
     * @param writer
     * @throws IOException
     */
    private void exportGraphML(Graph graph, Writer writer) throws IOException{

        GraphMLWriter graphWirter = new GraphMLWriter(graph);
        graphWirter.outputGraph(new WriterOutputStream(writer));

        /* Example of a node in yEd
        <data key="d0">
        <y:ShapeNode>
          <y:Geometry x="165.0" y="178.0" width="30.0" height="30.0"/>
          <y:Fill color="#CCCCFF" transparent="false"/>
          <y:BorderStyle type="line" width="1.0" color="#000000"/>
          <y:NodeLabel x="9.5" y="5.6494140625" width="11.0" height="18.701171875" visible="true" alignment="center" fontFamily="Dialog" fontSize="12" fontStyle="plain" textColor="#000000" modelName="internal" modelPosition="c" autoSizePolicy="content">1</y:NodeLabel>
          <y:Shape type="rectangle"/>
        </y:ShapeNode>
        </data>
        */

        /*
        GraphMLWriter<Node,Edge> graphWriter = new GraphMLWriter<Node,Edge>();

        graphWriter.addVertexData("d0", null, null,
            new Transformer<Node, String>() {
                public String transform(Node node) {
                    return
                    "<y:ShapeNode>"
                    + "<y:Shape type='rectangle'/>"
                    + "<y:Fill color='"
                    + (node.getType().equals(Node.TYPE_METHOD) ? "#FF5555" : "#CCCCFF")
                    +"' transparent='false'/>"
                    +"<y:NodeLabel>" + node.getName() +"</y:NodeLabel>"
                    +"</y:ShapeNode>";
                }
            }
        );
        */

        /* Example of an edge in yEd
        <edge id="e2" source="n1" target="n0">
        <data key="d2">
            <y:PolyLineEdge>
              <y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
              <y:LineStyle type="line" width="1.0" color="#000000"/>
              <y:Arrows source="none" target="none"/>
              <y:BendStyle smoothed="false"/>
            </y:PolyLineEdge>
        </data>
        <data key="d3">222</data>
        </edge>
        */

        /*
        graphWriter.addEdgeData("d2", null, null,
            new Transformer<Edge, String>() {
                public String transform(Edge node) {
                    return
                    "<y:PolyLineEdge>"
                    + "<y:Arrows source='none' target='delta'/>"
                    +"</y:PolyLineEdge>";
                }
            }
        );

        StringWriter tmpWriter = new StringWriter();
        graphWriter.save(graph, tmpWriter);
        adoptForYEd(tmpWriter.toString(), writer);
        */
    }

    /**
     * Hack, to adopt graphML to be visualized in yEd: prepends yEd-specific
     * keys/namespaces and copies everything from the first "&lt;graph " line on.
     * @param draphDoc
     * @param writer
     * @throws IOException
     */
    private void adoptForYEd(String draphDoc, Writer writer) throws IOException {
        BufferedReader br = new BufferedReader(new StringReader(draphDoc));
        BufferedWriter bw = new BufferedWriter(writer);

        bw.write(
            "<?xml version='1.0' encoding='UTF-8'?>"
            + "\n<graphml xmlns='http://graphml.graphdrawing.org/xmlns/graphml' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'"
            + "\nxsi:schemaLocation='http://graphml.graphdrawing.org/xmlns/graphml http://www.yworks.com/xml/schema/graphml/1.0/ygraphml.xsd'"
            + "\nxmlns:y='http://www.yworks.com/xml/graphml'>"
            + "\n<key id='d0' for='node' yfiles.type='nodegraphics'/>"
            + "\n<key id='d2' for='edge' yfiles.type='edgegraphics'/>"
        );

        boolean startFound = false;
        for(String line = br.readLine(); line != null; line = br.readLine()){
            if(!startFound){
                startFound = line.startsWith("<graph ");
            }
            if(startFound){
                bw.append("\n");
                bw.append(line);
            }
        }
        bw.flush();
    }

    /**
     * Main method
     * @param args
     * @throws Exception
     */
    public static void main(String[] args) throws Exception {
        flProcessIndividualFiles = false;
        flBuildSingleGraph = true;
        useType2TypeEdges = true;
        useType2MethodEdges = true;

        File specFileDir = new File("/kb/dev_container/modules/nextgen/diagrams/typespecs/specs_clean/");
        File graphFileDir = new File("/kb/dev_container/modules/nextgen/diagrams/typespecs/graphs_tinker/");

        new TypeMethodGraphGenerator2().run(specFileDir, graphFileDir);
    }
}
/*
 * CheckItemFieldsPage.java
 * Created on 2013/06/28
 *
 * Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package tubame.knowhow.plugin.ui.editor.multi.checkitem;

import tubame.common.util.CmnStringUtil;

import org.eclipse.swt.layout.GridLayout;
import org.eclipse.ui.forms.IManagedForm;
import org.eclipse.ui.forms.editor.FormEditor;
import org.eclipse.ui.forms.editor.FormPage;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import tubame.knowhow.plugin.model.view.PortabilityKnowhowListViewOperation;
import tubame.knowhow.plugin.ui.editor.multi.EditorDirty;
import tubame.knowhow.util.resource.ResourceUtil;

/**
 * Check Item Details tab configuration class know-how of the editor.<br/>
 * Configuration list page from ({@link CheckItemFieldsBlock}).<br/>
 * NOTE(review): several member/method names ("saerch", "Brock") are misspelled
 * but are part of the existing API and are kept as-is.
 */
public class CheckItemFieldsPage extends FormPage {

    /** Logger */
    private static final Logger LOGGER = LoggerFactory
            .getLogger(CheckItemFieldsPage.class);
    /** Check items section */
    private CheckItemFieldsBlock checkItemFieldsBlock;
    /** Search information sect */
    private SearchInfoFieldsBlock saerchInfoFieldsBlock;
    /** PortabilityKnowhowListViewOperation. */
    private PortabilityKnowhowListViewOperation tempKnowhowListViewData;
    /** Presence or absence of storage */
    private boolean dirty;

    /**
     * Constructor.<br/>
     *
     * @param editor
     *            Know-how editor
     * @param id
     *            ID
     * @param title
     *            Title
     */
    public CheckItemFieldsPage(FormEditor editor, String id, String title) {
        super(editor, id, title);
        checkItemFieldsBlock = new CheckItemFieldsBlock(this);
        saerchInfoFieldsBlock = new SearchInfoFieldsBlock(this);
    }

    /**
     * {@inheritDoc}
     * Lays out two sections (check item info, search info) in a two-column
     * grid and delegates their content to the respective blocks.
     */
    @Override
    protected void createFormContent(IManagedForm managedForm) {
        ScrolledForm form = managedForm.getForm();
        FormToolkit toolkit = managedForm.getToolkit();
        form.setText(ResourceUtil.checkItem);
        form.getBody().setLayout(new GridLayout(2, false));

        Section checkItemSection = toolkit.createSection(form.getBody(),
                Section.TITLE_BAR | Section.DESCRIPTION);
        checkItemSection.setText(ResourceUtil.checkItemInfo);
        checkItemSection.setDescription(ResourceUtil.checkItemInfoDescription);
        checkItemSection.setLayout(new GridLayout(2, false));
        checkItemFieldsBlock.createSection(toolkit, checkItemSection,
                managedForm);

        Section searchInfoSection = toolkit.createSection(form.getBody(),
                Section.TITLE_BAR | Section.DESCRIPTION);
        searchInfoSection.setText(ResourceUtil.searchInfo);
        searchInfoSection.setDescription(ResourceUtil.searchInfoDescription);
        searchInfoSection.setLayout(new GridLayout(2, false));
        saerchInfoFieldsBlock.createSection(toolkit, searchInfoSection,
                managedForm);

        form.reflow(true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isDirty() {
        CheckItemFieldsPage.LOGGER.debug(CmnStringUtil.EMPTY);
        return dirty;
    }

    /**
     * Set dirty.<br/>
     *
     * @param dirty
     *            Change information
     */
    public void setDirty(boolean dirty) {
        CheckItemFieldsPage.LOGGER.debug("[dirty]" + dirty);
        this.dirty = dirty;
        // Send change notification
        // NOTE(review): setActive(dirty) is an unusual way to fire a change
        // notification — confirm this is the intended FormPage mechanism.
        super.setActive(dirty);
    }

    /**
     * Get Editor.<br/>
     *
     * @return Editor
     */
    public EditorDirty getEditorEitry() {
        CheckItemFieldsPage.LOGGER.debug(CmnStringUtil.EMPTY);
        return (EditorDirty) super.getEditor();
    }

    /**
     * Set to check items block the data registered by the know-how entry view.<br/>
     *
     * @param knowhowListViewData
     *            PortabilityKnowhowListViewData
     */
    public void updateCheckItemBrock(
            PortabilityKnowhowListViewOperation knowhowListViewData) {
        CheckItemFieldsPage.LOGGER.debug("[knowhowListViewData]"
                + knowhowListViewData);
        checkItemFieldsBlock.setRegisterItemData(knowhowListViewData
                .getKnowhowViewType());
        checkItemFieldsBlock.setEnableGroup(true);
        // Set the data in the child hierarchy
        updateSearchInfoBrock(knowhowListViewData);
        // Remember the item so temporaryStorage() can write changes back.
        this.tempKnowhowListViewData = knowhowListViewData;
    }

    /**
     * Set to search information block search information data of the check
     * items under.<br/>
     * Enables the search-info group only when the item has children.
     *
     * @param knowhowListViewData
     *            PortabilityKnowhowListViewData
     */
    private void updateSearchInfoBrock(
            PortabilityKnowhowListViewOperation knowhowListViewData) {
        if (!knowhowListViewData.getChildList().isEmpty()) {
            saerchInfoFieldsBlock.setEnableGroup(true);
            for (PortabilityKnowhowListViewOperation childData : knowhowListViewData
                    .getChildList()) {
                saerchInfoFieldsBlock.setRegisterItemData(childData
                        .getKnowhowViewType());
            }
        } else {
            saerchInfoFieldsBlock.setEnableGroup(false);
        }
    }

    /**
     * Clear data of the check item information tab.<br/>
     *
     */
    public void clear() {
        CheckItemFieldsPage.LOGGER.debug(CmnStringUtil.EMPTY);
        if (checkItemFieldsBlock.isClear() || saerchInfoFieldsBlock.isClear()) {
            checkItemFieldsBlock.clear();
            checkItemFieldsBlock.setEnableGroup(false);
            saerchInfoFieldsBlock.clear();
            saerchInfoFieldsBlock.setEnableGroup(false);
        }
    }

    /**
     * Get checkItemFieldsBlock.<br/>
     *
     * @return checkItemFieldsBlock
     */
    public CheckItemFieldsBlock getCheckItemFieldsBlock() {
        CheckItemFieldsPage.LOGGER.debug(CmnStringUtil.EMPTY);
        return checkItemFieldsBlock;
    }

    /**
     * Get saerchInfoFieldsBlock.<br/>
     *
     * @return saerchInfoFieldsBlock
     */
    public SearchInfoFieldsBlock getSaerchInfoFieldsBlock() {
        CheckItemFieldsPage.LOGGER.debug(CmnStringUtil.EMPTY);
        return saerchInfoFieldsBlock;
    }

    /**
     * Make the process of rewriting the data that has changed.<br/>
     * Writes pending edits back into the item remembered by
     * updateCheckItemBrock(); only the first child receives search-info data.
     */
    public void temporaryStorage() {
        CheckItemFieldsPage.LOGGER.debug(CmnStringUtil.EMPTY);
        // Data update process
        if (this.tempKnowhowListViewData != null) {
            checkItemFieldsBlock.updateData(tempKnowhowListViewData);
            if (!tempKnowhowListViewData.getChildList().isEmpty()) {
                saerchInfoFieldsBlock.updateData(tempKnowhowListViewData
                        .getChildList().get(0));
            }
        }
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.fileEditor.impl;

import com.intellij.mock.Mock;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.FileEditorState;
import com.intellij.openapi.fileEditor.FileEditorStateLevel;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.testFramework.PlatformTestCase;
import org.jetbrains.annotations.NotNull;

/**
 * Tests back/forward navigation history (IdeDocumentHistoryImpl) against a
 * mock editor whose state is held in {@code myEditorState}.
 */
public class IdeDocumentHistoryTest extends PlatformTestCase {
  private IdeDocumentHistoryImpl myHistory;

  private Mock.MyFileEditor  mySelectedEditor;
  private Mock.MyVirtualFile mySelectedFile;
  private FileEditorState myEditorState;
  private FileEditorProvider myProvider;

  private MyState myState1;
  private MyState myState2;
  private MyState myState3;

  @Override
  protected void setUp() throws Exception {
    super.setUp();

    // History subclass is wired to the mock editor/provider and drives
    // selection-change events from executeCommand.
    myHistory = new IdeDocumentHistoryImpl(getProject(), EditorFactory.getInstance(), new EditorManager(),
                                           VirtualFileManager.getInstance(), CommandProcessor.getInstance(),
                                           new Mock.MyToolWindowManager()) {
      @Override
      protected Pair<FileEditor,FileEditorProvider> getSelectedEditor() {
        return Pair.create ((FileEditor)mySelectedEditor, myProvider);
      }

      @Override
      protected void executeCommand(Runnable runnable, String name, Object groupId) {
        myHistory.onCommandStarted();
        runnable.run();
        myHistory.onSelectionChanged();
        myHistory.onCommandFinished(groupId);
      }
    };

    // Mock editor reads/writes the shared myEditorState field.
    mySelectedEditor = new Mock.MyFileEditor() {
      @Override
      @NotNull
      public FileEditorState getState(@NotNull FileEditorStateLevel level) {
        return myEditorState;
      }

      @Override
      public void setState(@NotNull FileEditorState state) {
        myEditorState = state;
      }
    };

    mySelectedFile = new Mock.MyVirtualFile();
    myEditorState = new MyState(false, "start");
    myProvider = new Mock.MyFileEditorProvider() {
      @Override
      @NotNull
      public String getEditorTypeId() {
        return "EditorType";
      }
    };
  }

  // A command with no selection change must not record history.
  public void testNoHistoryRecording() throws Throwable {
    myHistory.onCommandStarted();
    myHistory.onCommandFinished(null);

    assertFalse(myHistory.isBackAvailable());
    assertFalse(myHistory.isForwardAvailable());
  }

  public void testNavigationRecording() throws Throwable {
    makeNavigationChange(myState2);

    assertTrue(myHistory.isBackAvailable());
    assertFalse(myHistory.isForwardAvailable());
    assertEquals(1, myHistory.getBackPlaces().size());
  }

  // Mergeable states must collapse into a single back place.
  public void testMergingForwardPlaces() throws Throwable {
    myEditorState = new MyState(true, "state1");
    makeNavigationChange(new MyState(true, "state2"));

    assertTrue(myHistory.isBackAvailable());
    assertFalse(myHistory.isForwardAvailable());
    assertEquals(1, myHistory.getBackPlaces().size());
  }

  public void testSimpleNavigation() throws Throwable {
    pushTwoStates();

    assertFalse(myHistory.isForwardAvailable());
    assertTrue(myHistory.isBackAvailable());

    myHistory.back();
    assertTrue(myHistory.isBackAvailable());
    assertTrue(myHistory.isForwardAvailable());
    assertSame(myState2, myEditorState);

    myHistory.back();
    assertFalse(myHistory.isBackAvailable());
    assertTrue(myHistory.isForwardAvailable());
    assertSame(myState1, myEditorState);

    myHistory.forward();
    assertTrue(myHistory.isBackAvailable());
    assertTrue(myHistory.isForwardAvailable());
    assertSame(myState2, myEditorState);

    myHistory.forward();
    assertTrue(myHistory.isBackAvailable());
    assertFalse(myHistory.isForwardAvailable());
    assertSame(myState3, myEditorState);
  }

  // Navigating after going back must drop the forward branch.
  public void testQueueCutOff() throws Throwable {
    pushTwoStates();

    myHistory.back();
    assertTrue(myHistory.isBackAvailable());
    assertTrue(myHistory.isForwardAvailable());

    MyState newState = new MyState(false, "newState");
    makeNavigationChange(newState);

    assertTrue(myHistory.isBackAvailable());
    assertFalse(myHistory.isForwardAvailable());

    myHistory.back();
    assertSame(myState2, myEditorState);

    myHistory.back();
    assertSame(myState1, myEditorState);
    assertFalse(myHistory.isBackAvailable());
  }

  // Deleting the file behind every place must empty both directions.
  public void testRemoveInvalid() throws Throwable {
    pushTwoStates();

    assertTrue(myHistory.isBackAvailable());

    mySelectedFile.myValid = false;
    myHistory.onFileDeleted();

    assertFalse(myHistory.isBackAvailable());
    assertFalse(myHistory.isForwardAvailable());
  }

  // Records state1 -> state2 -> state3 as two navigation changes.
  private void pushTwoStates() {
    myState1 = new MyState(false, "state1");
    myState2 = new MyState(false, "state2");
    myState3 = new MyState(false, "state3");

    myEditorState = myState1;

    makeNavigationChange(myState2);
    makeNavigationChange(myState3);
  }

  // Simulates a command that changed the selection, then moves the mock
  // editor to newState.
  private void makeNavigationChange(MyState newState) {
    myHistory.onCommandStarted();
    myHistory.onSelectionChanged();
    myHistory.onCommandFinished(null);
    myEditorState = newState;
  }

  /** Mock editor manager that always resolves to the single mock editor/file. */
  private class EditorManager extends Mock.MyFileEditorManager {
    @Override
    public VirtualFile getFile(@NotNull FileEditor editor) {
      return mySelectedFile;
    }

    @Override
    @NotNull
    public Pair<FileEditor[],FileEditorProvider[]> openFileWithProviders(@NotNull VirtualFile file,
                                                                         boolean focusEditor,
                                                                         boolean searchForSplitter) {
      return Pair.create (new FileEditor[] {mySelectedEditor}, new FileEditorProvider[] {myProvider});
    }

    @Override
    public FileEditorProvider getProvider(FileEditor editor) {
      return myProvider;
    }
  }

  /** Named editor state whose mergeability is fixed at construction. */
  private static class MyState implements FileEditorState {
    private final boolean myCanBeMerged;
    private final String myName;

    public MyState(boolean canBeMerged, String name) {
      myCanBeMerged = canBeMerged;
      myName = name;
    }

    @Override
    public boolean canBeMergedWith(FileEditorState otherState, FileEditorStateLevel level) {
      return myCanBeMerged;
    }

    public String toString() {
      return myName;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.api.java.typeutils.runtime;

import org.apache.flink.api.common.typeutils.ComparatorTestBase;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

/**
 * Base test for GenericTypeComparator: subclasses supply the serializer via
 * createSerializer(Class); each test feeds ascending-sorted sample data to a
 * ComparatorTestBase instance.
 */
public abstract class AbstractGenericTypeComparatorTest {

	@Test
	public void testString() {
		// Inputs must be listed in ascending order — ComparatorTestBase
		// relies on the ordering of the supplied data.
		runTests(
				"",
				"Lorem Ipsum Dolor Omit Longer",
				"aaaa",
				"abcd",
				"abce",
				"abdd",
				"accd",
				"bbcd");
	}

	@Test
	public void testSimpleTypesObjects() {
		runTests(
				new SimpleTypes(0, 1, (byte) 2, "", (short) 3, 4.0),
				new SimpleTypes(1, 1, (byte) 2, "", (short) 3, 4.0),
				new SimpleTypes(1, 2, (byte) 2, "", (short) 3, 4.0),
				new SimpleTypes(1, 2, (byte) 3, "", (short) 3, 4.0),
				new SimpleTypes(1, 2, (byte) 3, "a", (short) 3, 4.0),
				new SimpleTypes(1, 2, (byte) 3, "b", (short) 3, 4.0),
				new SimpleTypes(1, 2, (byte) 3, "b", (short) 4, 4.0),
				new SimpleTypes(1, 2, (byte) 3, "b", (short) 4, 6.0));
	}

	@Test
	public void testCompositeObject() {
		ComplexNestedObject1 o1 = new ComplexNestedObject1(-1100);
		ComplexNestedObject1 o2 = new ComplexNestedObject1(0);
		ComplexNestedObject1 o3 = new ComplexNestedObject1(44);
		ComplexNestedObject1 o4 = new ComplexNestedObject1(76923, "A");
		ComplexNestedObject1 o5 = new ComplexNestedObject1(5626435, "A somewhat random collection");

		runTests(o1, o2, o3, o4, o5);
	}

	@Test
	public void testBeanStyleObjects() {
		{
			Book b111 = new Book(-1L, "A Low level interfaces", 0xC);
			Book b122 = new Book(-1L, "Low level interfaces", 0xC);
			Book b123 = new Book(-1L, "Low level interfaces", 0xC0FFEE);

			Book b2 = new Book(0L, "Debugging byte streams", 1337);
			Book b3 = new Book(976243875L, "The Serialization Odysse", 42);

			runTests(b111, b122, b123, b2, b3);
		}

		{
			BookAuthor b1 = new BookAuthor(976243875L, new ArrayList<String>(), "Arno Nym");

			ArrayList<String> list = new ArrayList<String>();
			list.add("A");
			list.add("B");
			list.add("C");
			list.add("D");
			list.add("E");

			BookAuthor b2 = new BookAuthor(976243875L, list, "The Saurus");

			runTests(b1, b2);
		}
	}

	// ------------------------------------------------------------------------

	// Runs the full ComparatorTestBase suite over the given ascending data.
	private <T> void runTests(T... sortedTestData) {
		ComparatorTestInstance<T> testBase = new ComparatorTestInstance<T>(sortedTestData);
		testBase.testAll();
	}

	protected abstract <T> TypeSerializer<T> createSerializer(Class<T> type);

	// ------------------------------------------------------------------------
	// test instance
	// ------------------------------------------------------------------------

	// Non-static inner class on purpose: it calls the outer (abstract)
	// createSerializer(Class) of the concrete test subclass.
	private class ComparatorTestInstance<T> extends ComparatorTestBase<T> {

		private final T[] testData;

		private final Class<T> type;

		@SuppressWarnings("unchecked")
		public ComparatorTestInstance(T[] testData) {
			if (testData == null || testData.length == 0) {
				throw new IllegalArgumentException();
			}

			this.testData = testData;
			// Element type inferred from the first sample.
			this.type = (Class<T>) testData[0].getClass();
		}

		@Override
		@SuppressWarnings({"unchecked", "rawtypes"})
		protected TypeComparator<T> createComparator(boolean ascending) {
			return new GenericTypeComparator(
					ascending,
					AbstractGenericTypeComparatorTest.this.createSerializer(this.type),
					this.type);
		}

		@Override
		protected TypeSerializer<T> createSerializer() {
			return AbstractGenericTypeComparatorTest.this.createSerializer(this.type);
		}

		@Override
		protected T[] getSortedTestData() {
			return this.testData;
		}

		// Executes every inherited comparator check in sequence.
		public void testAll() {
			testDuplicate();
			testEquality();
			testEqualityWithReference();
			testInequality();
			testInequalityWithReference();
			testNormalizedKeysEqualsFullLength();
			testNormalizedKeysEqualsHalfLength();
			testNormalizedKeysGreatSmallFullLength();
			testNormalizedKeysGreatSmallAscDescHalfLength();
			testNormalizedKeyReadWriter();
		}
	}

	// ------------------------------------------------------------------------
	// test objects
	// ------------------------------------------------------------------------

	/** POJO whose compareTo orders lexicographically over all six fields. */
	public static final class SimpleTypes implements Comparable<SimpleTypes> {

		private final int iVal;
		private final long lVal;
		private final byte bVal;
		private final String sVal;
		private final short rVal;
		private final double dVal;

		public SimpleTypes() {
			this(0, 0, (byte) 0, "", (short) 0, 0);
		}

		public SimpleTypes(int iVal, long lVal, byte bVal, String sVal, short rVal, double dVal) {
			this.iVal = iVal;
			this.lVal = lVal;
			this.bVal = bVal;
			this.sVal = sVal;
			this.rVal = rVal;
			this.dVal = dVal;
		}

		@Override
		public String toString() {
			return String.format("(%d, %d, %d, %s, %d, %f)", iVal, lVal, bVal, sVal, rVal, dVal);
		}

		@Override
		public boolean equals(Object obj) {
			// NOTE(review): throws NPE for null obj and has no matching
			// hashCode override — acceptable for test data, not for production.
			if (obj.getClass() == SimpleTypes.class) {
				SimpleTypes other = (SimpleTypes) obj;

				return other.iVal == this.iVal &&
						other.lVal == this.lVal &&
						other.bVal == this.bVal &&
						other.sVal.equals(this.sVal) &&
						other.rVal == this.rVal &&
						other.dVal == this.dVal;

			} else {
				return false;
			}
		}

		@Override
		public int compareTo(SimpleTypes o) {
			// Field-by-field comparison; first non-zero result wins.
			int cmp = (this.iVal < o.iVal ? -1 : (this.iVal == o.iVal ? 0 : 1));
			if (cmp != 0) {
				return cmp;
			}

			cmp = (this.lVal < o.lVal ? -1 : (this.lVal == o.lVal ? 0 : 1));
			if (cmp != 0) {
				return cmp;
			}

			cmp = (this.bVal < o.bVal ? -1 : (this.bVal == o.bVal ?
0 : 1)); if (cmp != 0) { return cmp; } cmp = this.sVal.compareTo(o.sVal); if (cmp != 0) { return cmp; } cmp = (this.rVal < o.rVal ? -1 : (this.rVal == o.rVal ? 0 : 1)); if (cmp != 0) { return cmp; } return (this.dVal < o.dVal ? -1 : (this.dVal == o.dVal ? 0 : 1)); } } public static class ComplexNestedObject1 implements Comparable<ComplexNestedObject1> { private double doubleValue; private List<String> stringList; public ComplexNestedObject1() {} public ComplexNestedObject1(double value, String... listElements) { this.doubleValue = value; this.stringList = new ArrayList<String>(); for (String str : listElements) { this.stringList.add(str); } } @Override public boolean equals(Object obj) { if (obj.getClass() == ComplexNestedObject1.class) { ComplexNestedObject1 other = (ComplexNestedObject1) obj; return other.doubleValue == this.doubleValue && this.stringList.equals(other.stringList); } else { return false; } } @Override public int compareTo(ComplexNestedObject1 o) { int cmp = (this.doubleValue < o.doubleValue ? -1 : (this.doubleValue == o.doubleValue ? 0 : 1)); if (cmp != 0) { return cmp; } int size = this.stringList.size(); int otherSize = o.stringList.size(); cmp = (size < otherSize ? -1 : (size == otherSize ? 0 : 1)); if (cmp != 0) { return cmp; } for (int i = 0; i < size; i++) { cmp = this.stringList.get(i).compareTo(o.stringList.get(i)); if (cmp != 0) { return cmp; } } return 0; } } public static class Book implements Comparable<Book> { private long bookId; private String title; private long authorId; public Book() {} public Book(long bookId, String title, long authorId) { this.bookId = bookId; this.title = title; this.authorId = authorId; } @Override public boolean equals(Object obj) { if (obj.getClass() == Book.class) { Book other = (Book) obj; return other.bookId == this.bookId && other.authorId == this.authorId && this.title.equals(other.title); } else { return false; } } @Override public int compareTo(Book o) { int cmp = (this.bookId < o.bookId ? 
-1 : (this.bookId == o.bookId ? 0 : 1)); if (cmp != 0) { return cmp; } cmp = title.compareTo(o.title); if (cmp != 0) { return cmp; } return (this.authorId < o.authorId ? -1 : (this.authorId == o.authorId ? 0 : 1)); } } public static class BookAuthor implements Comparable<BookAuthor> { private long authorId; private List<String> bookTitles; private String authorName; public BookAuthor() {} public BookAuthor(long authorId, List<String> bookTitles, String authorName) { this.authorId = authorId; this.bookTitles = bookTitles; this.authorName = authorName; } @Override public boolean equals(Object obj) { if (obj.getClass() == BookAuthor.class) { BookAuthor other = (BookAuthor) obj; return other.authorName.equals(this.authorName) && other.authorId == this.authorId && other.bookTitles.equals(this.bookTitles); } else { return false; } } @Override public int compareTo(BookAuthor o) { int cmp = (this.authorId < o.authorId ? -1 : (this.authorId == o.authorId ? 0 : 1)); if (cmp != 0) return cmp; int size = this.bookTitles.size(); int oSize = o.bookTitles.size(); cmp = (size < oSize ? -1 : (size == oSize ? 0 : 1)); if (cmp != 0) return cmp; for (int i = 0; i < size; i++) { cmp = this.bookTitles.get(i).compareTo(o.bookTitles.get(i)); if (cmp != 0) return cmp; } return this.authorName.compareTo(o.authorName); } } }
package eu.synectique.verveine.extractor.visitors.ref;

import org.eclipse.cdt.core.dom.ast.IASTBinaryExpression;
import org.eclipse.cdt.core.dom.ast.IASTCastExpression;
import org.eclipse.cdt.core.dom.ast.IASTFieldReference;
import org.eclipse.cdt.core.dom.ast.IASTFunctionCallExpression;
import org.eclipse.cdt.core.dom.ast.IASTFunctionDeclarator;
import org.eclipse.cdt.core.dom.ast.IASTIdExpression;
import org.eclipse.cdt.core.dom.ast.IASTImplicitName;
import org.eclipse.cdt.core.dom.ast.IASTImplicitNameOwner;
import org.eclipse.cdt.core.dom.ast.IASTInitializerClause;
import org.eclipse.cdt.core.dom.ast.IASTLiteralExpression;
import org.eclipse.cdt.core.dom.ast.IASTName;
import org.eclipse.cdt.core.dom.ast.IASTNode;
import org.eclipse.cdt.core.dom.ast.IASTSimpleDeclaration;
import org.eclipse.cdt.core.dom.ast.IASTUnaryExpression;
import org.eclipse.cdt.core.dom.ast.IBinding;
import org.eclipse.cdt.core.dom.ast.cpp.ICPPASTConstructorChainInitializer;
import org.eclipse.cdt.core.dom.ast.cpp.ICPPASTConstructorInitializer;
import org.eclipse.cdt.core.dom.ast.cpp.ICPPASTLiteralExpression;
import org.eclipse.cdt.core.dom.ast.cpp.ICPPASTUnaryExpression;
import org.eclipse.cdt.core.index.IIndex;
import org.eclipse.cdt.core.model.ITranslationUnit;

import eu.synectique.verveine.core.Dictionary;
import eu.synectique.verveine.core.gen.famix.Access;
import eu.synectique.verveine.core.gen.famix.Association;
import eu.synectique.verveine.core.gen.famix.Attribute;
import eu.synectique.verveine.core.gen.famix.BehaviouralEntity;
import eu.synectique.verveine.core.gen.famix.DereferencedInvocation;
import eu.synectique.verveine.core.gen.famix.Function;
import eu.synectique.verveine.core.gen.famix.Invocation;
import eu.synectique.verveine.core.gen.famix.Method;
import eu.synectique.verveine.core.gen.famix.NamedEntity;
import eu.synectique.verveine.core.gen.famix.StructuralEntity;
import eu.synectique.verveine.core.gen.famix.Type;
import eu.synectique.verveine.core.gen.famix.UnknownVariable;
import eu.synectique.verveine.extractor.plugin.CDictionary;
import eu.synectique.verveine.extractor.utils.QualifiedName;

/**
 * CDT AST visitor that records FAMIX {@link Access}es to variables and
 * {@link Invocation}s of methods/functions.
 *
 * <p>State between callbacks is threaded through inherited fields ({@code nodeBnd},
 * {@code nodeName}, {@code returnedEntity} — presumably declared in
 * {@code AbstractRefVisitor}; confirm there) plus the two boolean flags below.
 */
public class InvocationAccessRefVisitor extends AbstractRefVisitor {

	/** Placeholder name used for a fake access when an argument cannot be resolved. */
	protected static final String EMPTY_ARGUMENT_NAME = "__Empty_Argument__";

	/**
	 * Set in visit(IASTUnaryExpression) to be used when visiting the operand:
	 * true while inside a {@code &expr} expression (taking the address of something).
	 */
	private boolean inAmpersandUnaryExpression;

	/** True while visiting the type-id part of a cast expression. */
	protected boolean inCastExpression;

	public InvocationAccessRefVisitor(CDictionary dico, IIndex index, String rootFolder) {
		super(dico, index, rootFolder);
	}

	@Override
	protected String msgTrace() {
		return "recording accesses to variables and invocations to methods/functions";
	}

	/**
	 * Overriden to initialize {@link #inAmpersandUnaryExpression} and
	 * {@link #inCastExpression} to <code>false</code>
	 * (e.g. at the begining of a .c file)
	 */
	@Override
	public void visit(ITranslationUnit elt) {
		super.visit(elt);
		inAmpersandUnaryExpression = false;
		inCastExpression = false;
	}

	@Override
	protected int visit(IASTSimpleDeclaration node) {
		// typedefs declare no runtime accesses/invocations, so skip their subtree
		if (declarationIsTypedef(node)) {
			return PROCESS_SKIP;
		}
		else {
			return super.visit(node);
		}
	}

	@Override
	protected int visit(IASTFunctionDeclarator node) {
		if (! inCastExpression) {
			super.visit(node);
		}
		// else this is something like a cast to a function pointer type: do not handle it
		return PROCESS_SKIP;
	}

	@Override
	public int visit(IASTFunctionCallExpression node) {
		// reset the inter-callback state before resolving this call
		nodeBnd = null;
		nodeName = null;
		returnedEntity = null;

		IASTNode[] children = node.getFunctionNameExpression().getChildren();
		// for all children except the last one (presumably the called function's name)
		for (int i=0; i < children.length - 1; i++) {
			children[i].accept(this);
		}

		// try to identify (or create if a stub) the Behavioural being invoked
		// sometimes there is no function name in the node therefore, no fmx to invok
		// this happens when the result of the call is casted, this creates 2 IASTFunctionCallExpression
		// The parent holds the cast and has empty function name
		Invocation invok = resolveInvokFromName(node, children[children.length - 1]);

		visitInvocationArguments(node.getArguments(), invok);

		return PROCESS_SKIP;
	}

	/**
	 * Visits each argument and attaches the resulting Association (if any) to
	 * {@code invok}, preserving argument positions.
	 */
	protected void visitInvocationArguments(IASTInitializerClause[] args, Invocation invok) {
		for (IASTInitializerClause icl : args) {
			icl.accept(this);
			if (returnedEntity instanceof Association) {
				if (invok != null) {
					invok.addArguments((Association) returnedEntity);
				}
			}
			else {
				// so that the position of arguments match exactly their corresponding parameters
				// we create fake associations for arguments that we could not resolve
				IBinding fakeBnd = resolver.mkStubKey(EMPTY_ARGUMENT_NAME, UnknownVariable.class);
				UnknownVariable fake = dico.ensureFamixUniqEntity(UnknownVariable.class, fakeBnd, EMPTY_ARGUMENT_NAME);
				Access acc = dico.addFamixAccess(getContext().topBehaviouralEntity(), fake, /*isWrite*/false, /*prev*/null);
				if (invok != null) {
					invok.addArguments(acc);
				}
				dico.addSourceAnchor(acc, filename, icl.getFileLocation());
			}
		}
	}

	/**
	 * Resolves the called entity behind {@code invokNode} and creates the Invocation.
	 * Resolution cascade: index binding, then name lookup, then a stub Function;
	 * a Class result is converted to a stub constructor; a StructuralEntity result is
	 * treated as a call through a function pointer.
	 *
	 * @return the created Invocation, or null when nothing could be resolved
	 */
	protected Invocation resolveInvokFromName(IASTFunctionCallExpression node, IASTNode invokNode) {
		Invocation invok = null;
		NamedEntity fmx = null;

		if (invokNode instanceof IASTName) {
			nodeName = (IASTName)invokNode;
			nodeBnd = resolver.getBinding( nodeName );

			if (nodeBnd != null) {
				fmx = dico.getEntityByKey(nodeBnd);
			}

			if ( (fmx == null) && (nodeName != null) ) {
				fmx = resolver.resolveOrCreate(nodeName.toString(), /*mayBeNull*/true, /*mustBeClass*/false);
			}

			if ( (fmx == null) && (nodeName != null) ) {
				fmx = makeStubBehavioural(nodeName.toString(), node.getArguments().length, /*isMethod*/false);
			}

			if (fmx instanceof eu.synectique.verveine.core.gen.famix.Class) {
				// found a class instead of a behavioral. May happen, for example in the case of a "throw ClassName(...)"
				fmx = makeStubBehavioural(fmx.getName(), node.getArguments().length, /*isMethod*/true);
			}

			// now create the invocation
			if (fmx != null) {
				if (fmx instanceof BehaviouralEntity) {
					invok = invocationOfBehavioural((BehaviouralEntity) fmx);
					dico.addSourceAnchor(invok, filename, node.getFileLocation());
				}
				else if (fmx instanceof StructuralEntity) {
					// fmx is probably a pointer to a BehavioralEntity
					String stubSig = resolver.mkStubSig(fmx.getName(), node.getArguments().length);
					invok = (DereferencedInvocation) dereferencedInvocation( (StructuralEntity)fmx, stubSig);
					dico.addSourceAnchor(invok, filename, node.getFileLocation());
				}
			}
		}

		return invok;
	}

	/**
	 * Other entry point for this visitor: a member initializer in a constructor's
	 * initializer list (e.g. {@code Ctor() : member(...)}). Leaves the resolved member
	 * in {@code returnedEntity} for visit(ICPPASTConstructorInitializer).
	 */
	@Override
	protected int visit(ICPPASTConstructorChainInitializer node) {
		IASTName memberName = node.getMemberInitializerId();
		nodeBnd = resolver.getBinding(memberName);
		returnedEntity = dico.getEntityByKey(nodeBnd);

		if (returnedEntity == null) {
			Type parent = null;
			// top of context stack should be the constructor method that is ChainInitialized
			if (resolver.getContext().topMethod() != null) {
				parent = resolver.getContext().topMethod().getParentType();
			}
			// just in case, we look if the class of the constructor is not in the context stack ...
			else if (resolver.getContext().topType() != null) {
				parent = resolver.getContext().topType();
			}
			if (parent != null) {
				returnedEntity = resolver.findInParent(memberName.toString(), parent, /*recursive*/true);
			}
		}

		node.getInitializer().accept(this);

		return PROCESS_SKIP;
	}

	/**
	 * A constructor-style initializer {@code (...)}: records an implicit constructor
	 * invocation, creating a stub constructor when the binding cannot be found.
	 */
	@Override
	protected int visit(ICPPASTConstructorInitializer node) {
		IASTImplicitNameOwner parent = (IASTImplicitNameOwner)node.getParent() ;
		NamedEntity fmx = null;
		Invocation invok = null;

		// if this is an implicit call to a constructor
		for (IASTImplicitName candidate : parent.getImplicitNames()) {
			IBinding constBnd = null;

			constBnd = resolver.getBinding( candidate );

			if (constBnd != null) {
				fmx = dico.getEntityByKey(constBnd);

				if (fmx instanceof BehaviouralEntity) {
					break;  // we found one method matching the implicit constructor. We are happy for now.
				}
			}
		}

		// if we could not get it, try to create a meaningful stub
		if (fmx == null) {
			// get the name of the called constructor (or attribute initialized)
			String mthName = null;
			if (parent.getImplicitNames().length > 0) {
				mthName = parent.getImplicitNames()[0].toString();
			}
			else if (parent instanceof ICPPASTConstructorChainInitializer) {
				// FIXME what if returnedType == null but should be Attribute ... ?
				if ( returnedEntity instanceof Attribute ) {
					// hopefully set in visit(ICPPASTConstructorChainInitializer)
					mthName = ((Attribute)returnedEntity).getDeclaredType().getName();
				}
				else {
					// Constructor name is the name of its class (possibly fully qualified) + name of the class (unqualified)
					QualifiedName qualName = new QualifiedName( ((ICPPASTConstructorChainInitializer)parent).getMemberInitializerId().toString() );
					mthName = qualName.toString() + QualifiedName.CPP_NAME_SEPARATOR + qualName.unqualifiedName();
				}
			}
			// create the constructor
			if (mthName != null) {
				fmx = makeStubBehavioural(mthName, node.getArguments().length, /*isMethod*/true);
			}
		}

		if (fmx != null) {
			invok = invocationOfBehavioural((BehaviouralEntity) fmx);
			returnedEntity = invok;
			dico.addSourceAnchor(returnedEntity, filename, node.getFileLocation());
		}

		visitInvocationArguments(node.getArguments(), invok);

		return PROCESS_SKIP;
	}

	@Override
	protected int visit(IASTIdExpression node) {
		returnedEntity = associationToName(((IASTIdExpression) node).getName(), node.getParent());
		return PROCESS_SKIP;
	}

	@Override
	protected int visit(IASTFieldReference node) {
		node.getFieldOwner().accept(this);   // to detect some field accesses
		returnedEntity = associationToName(node.getFieldName(), node.getParent());
		return PROCESS_SKIP;
	}

	@Override
	protected int visit(IASTLiteralExpression node) {
		returnedEntity = null;

		// only the "this" literal produces an entity: an access to an implicit self variable
		if ( node.getKind() == ICPPASTLiteralExpression.lk_this ) {
			if (getContext().topType() != null) {
				returnedEntity = accessToVar(dico.ensureFamixImplicitVariable(Dictionary.SELF_NAME, /*type*/getContext().topType(), /*owner*/getContext().topBehaviouralEntity()));
			}
			else if (getContext().topMethod() != null) {
				returnedEntity = accessToVar(dico.ensureFamixImplicitVariable(Dictionary.SELF_NAME, /*type*/getContext().topMethod().getParentType(), /*owner*/getContext().topBehaviouralEntity()));
			}
			if (returnedEntity != null) {
				dico.addSourceAnchor(returnedEntity, filename, node.getFileLocation());
			}
		}

		return PROCESS_SKIP;
	}

	@Override
	protected int visit(IASTUnaryExpression node) {
		// flag is only meaningful while visiting the operand, then cleared
		inAmpersandUnaryExpression = (node.getOperator() == ICPPASTUnaryExpression.op_amper);
		node.getOperand().accept(this);
		inAmpersandUnaryExpression = false;

		return PROCESS_SKIP;
	}

	@Override
	public int visit(IASTBinaryExpression node) {
		node.getOperand1().accept(this);
		switch (node.getOperator()) {
		// any compound/plain assignment makes the left-hand-side access a write
		case IASTBinaryExpression.op_assign:
		case IASTBinaryExpression.op_binaryAndAssign:
		case IASTBinaryExpression.op_binaryOrAssign:
		case IASTBinaryExpression.op_binaryXorAssign:
		case IASTBinaryExpression.op_divideAssign:
		case IASTBinaryExpression.op_minusAssign:
		case IASTBinaryExpression.op_moduloAssign:
		case IASTBinaryExpression.op_multiplyAssign:
		case IASTBinaryExpression.op_plusAssign:
		case IASTBinaryExpression.op_shiftLeftAssign:
		case IASTBinaryExpression.op_shiftRightAssign:
			if (this.returnedEntity() instanceof Access) {
				((Access) this.returnedEntity()).setIsWrite(true);
			}
		}
		node.getOperand2().accept(this);

		return PROCESS_SKIP;
	}

	@Override
	protected int visit(IASTCastExpression node) {
		// inCastExpression guards visit(IASTFunctionDeclarator) against
		// casts to function pointer types
		inCastExpression = true;
		node.getTypeId().accept(this);
		inCastExpression = false;

		node.getOperand().accept(this);

		return PROCESS_SKIP;
	}

	/**
	 * Resolves {@code nodeName} and records the matching Association:
	 * an Access for a StructuralEntity, an Invocation (or a behavioural pointer when
	 * inside {@code &expr}) for a BehaviouralEntity.
	 *
	 * @return the Association created, or null when the name resolved to neither kind
	 */
	protected Association associationToName(IASTName nodeName, IASTNode nodeParent) {
		NamedEntity fmx = null;
		Association assoc = null;

		nodeBnd = resolver.getBinding(nodeName);

		if (nodeBnd != null) {
			fmx = dico.getEntityByKey(nodeBnd);
		}
		else {
			fmx = resolver.findInParent(nodeName.toString(), getContext().top(), /*recursive*/true);
		}

		if (fmx instanceof StructuralEntity) {
			assoc = accessToVar((StructuralEntity) fmx);
		}
		else if (fmx instanceof BehaviouralEntity) {
			if (inAmpersandUnaryExpression) {
				// "&function" takes the function's address rather than calling it;
				// note: no source anchor is added on this early-return path
				return behaviouralPointer((BehaviouralEntity) fmx);
			}
			else {
				assoc = invocationOfBehavioural((BehaviouralEntity) fmx);
			}
		}

		if (assoc != null) {
			dico.addSourceAnchor(assoc, filename, nodeParent.getFileLocation());
		}

		return assoc;
	}

	/**
	 * Records an Access to a StructuralEntity and sets lastAccess attribute.
	 * Assumes the context is correctly set (i.e. top contains a BehaviouralEntity that makes the s)
	 * @param fmx -- Accessed StructuralEntity
	 * @return the Access created
	 */
	protected Access accessToVar(StructuralEntity fmx) {
		BehaviouralEntity accessor;
		// put false to isWrite by default, will be corrected in the visitor
		accessor = this.getContext().topBehaviouralEntity();
		Access acc = dico.addFamixAccess(accessor, fmx, /*isWrite*/false, getContext().getLastAccess());
		getContext().setLastAccess(acc);
		return acc;
	}

	/**
	 * Creates (or retrieves) a stub Method or Function named {@code name} with
	 * {@code nbArgs} parameters, keyed by name + arity.
	 */
	protected BehaviouralEntity makeStubBehavioural(String name, int nbArgs, boolean isMethod) {
		BehaviouralEntity fmx;
		String stubSig =  resolver.mkStubSig(name, nbArgs);
		if (isMethod) {
			fmx = dico.ensureFamixMethod(/*key*/resolver.mkStubKey(name+"__"+nbArgs, Method.class), name, stubSig, /*container*/null);
		}
		else {
			fmx = dico.ensureFamixFunction(/*key*/resolver.mkStubKey(name+"__"+nbArgs, Function.class), name, stubSig, /*container*/null);
		}
		fmx.setNumberOfParameters(nbArgs);
		// there are 2 ways to get the number of parameters of a BehaviouralEntity: getNumberOfParameters() and numberOfParameters()
		// the first returns the attribute numberOfParameters (set here), the second computes the size of parameters
		return fmx;
	}

}
package net.scapeemulator.game.model.player.skills.construction;

import net.scapeemulator.game.GameServer;
import net.scapeemulator.game.dialogue.Dialogue;
import net.scapeemulator.game.dialogue.DialogueContext;
import net.scapeemulator.game.dialogue.DialogueOption;
import net.scapeemulator.game.dispatcher.button.ButtonDispatcher;
import net.scapeemulator.game.dispatcher.button.WindowHandler;
import net.scapeemulator.game.model.ExtendedOption;
import net.scapeemulator.game.model.World;
import net.scapeemulator.game.model.player.Item;
import net.scapeemulator.game.model.player.Player;
import net.scapeemulator.game.model.player.PlayerVariables.Variable;
import net.scapeemulator.game.model.player.RegionPalette.Tile.Rotation;
import net.scapeemulator.game.model.player.ScriptInputListenerAdapter;
import net.scapeemulator.game.model.player.requirement.ItemRequirement;
import net.scapeemulator.game.model.player.skills.construction.House.BuildingSession;
import net.scapeemulator.game.model.mob.Animation;
import net.scapeemulator.game.model.player.skills.construction.furniture.Material;
import net.scapeemulator.game.model.player.skills.construction.room.RoomType;

/**
 * Entry point for the player-owned-house Construction skill: shared constants,
 * the static dialogue state machines (portal entry, furniture preview,
 * room/furniture deletion), and one-time handler registration.
 *
 * @author David Insley
 */
public class Construction {

    // master switch: when false, initialize() registers nothing
    private static final boolean ENABLED = true;

    // interface ids
    public static final int POH_LOADING_INTERFACE = 399;
    public static final int ROOM_CREATE_INTERFACE = 402;

    // animations played while building / removing furniture
    public static final Animation BUILD_ANIM = new Animation(3676);
    public static final Animation REMOVE_ANIM = new Animation(3685);

    // dialogue state machines, wired up in the static initializer below
    public static final Dialogue<Integer> PREVIEW_DIALOGUE;
    public static final Dialogue<Integer> ROOM_DELETION_DIALOGUE;
    public static final Dialogue<Integer> FURNITURE_DELETION_DIALOGUE;
    public static final Dialogue<Integer> ENTER_PORTAL_DIALOGUE;

    // item ids 8794 = saw, 2347 = hammer; both required to build furniture
    public static final ItemRequirement SAW_REQ = new ItemRequirement(8794, false, "You need a hammer and saw to make furniture.");
    public static final ItemRequirement HAMMER_REQ = new ItemRequirement(2347, false, "You need a hammer and saw to make furniture.");

    static {
        // House portal: enter own house (normal or building mode) or a friend's
        ENTER_PORTAL_DIALOGUE = new Dialogue<Integer>() {

            @Override
            public void initialize(DialogueContext<Integer> context) {
                context.openOptionDialogue("Go to your house", "Go to your house (building mode)", "Go to a friend's house", "Nevermind");
            }

            @Override
            public void handleOption(final DialogueContext<Integer> context, DialogueOption option) {
                switch (option) {
                case OPTION_1:
                case OPTION_2:
                    context.stop();
                    // OPTION_2 means "building mode"
                    context.getPlayer().getHouse().ownerEnterPortal(option == DialogueOption.OPTION_2);
                    break;
                case OPTION_3:
                    context.stop();
                    // ask for the friend's name, then join their house (or our own if we typed ourselves)
                    context.getPlayer().getScriptInput().showUsernameScriptInput("Enter name:", new ScriptInputListenerAdapter() {

                        @Override
                        public void usernameInputReceived(long value) {
                            Player friend = World.getWorld().getPlayerByLongName(value);
                            if (friend != null) {
                                if (friend == context.getPlayer()) {
                                    context.getPlayer().getHouse().ownerEnterPortal(false);
                                } else {
                                    friend.getHouse().otherEnterPortal(context.getPlayer());
                                }
                            } else {
                                context.getPlayer().sendMessage("No online user found by that name.");
                            }
                        }
                    });
                    break;
                case OPTION_4:
                default:
                    context.stop();
                    break;
                }
            }
        };

        // Furniture/room placement preview: rotate until satisfied, then commit or cancel
        PREVIEW_DIALOGUE = new Dialogue<Integer>() {

            @Override
            public void initialize(DialogueContext<Integer> context) {
                context.openOptionDialogue("Rotate clockwise", "Rotate counter-clockwise", "Finish", "Cancel");
            }

            @Override
            public void handleOption(DialogueContext<Integer> context, DialogueOption option) {
                BuildingSession session = context.getPlayer().getHouse().getBuildingSession();
                if (session == null) {
                    // session ended underneath us (e.g. player left build mode)
                    context.stop();
                    return;
                }
                switch (option) {
                case OPTION_1:
                    session.rotatePreview(Rotation.CW_90);
                    initialize(context);  // re-open the same options after rotating
                    break;
                case OPTION_2:
                    session.rotatePreview(Rotation.CW_270);
                    initialize(context);
                    break;
                case OPTION_3:
                    context.stop();
                    session.finishPreview();
                    break;
                case OPTION_4:
                default:
                    context.stop();
                    session.cancelPreview();
                    break;
                }
            }
        };

        // Two-stage confirmation before deleting a room (stage 1: warning, stage 2: yes/no)
        ROOM_DELETION_DIALOGUE = new Dialogue<Integer>() {

            @Override
            public void initialize(DialogueContext<Integer> context) {
                context.openTextDialogue("Are you sure you want to delete this room? Any furniture inside will be lost.", true);
                context.setStage(1);
            }

            @Override
            public void handleOption(DialogueContext<Integer> context, DialogueOption option) {
                switch (context.getStage()) {
                case 1:
                    context.openOptionDialogue("Yes, remove it.", "No thanks, I've changed my mind.");
                    context.setStage(2);
                    break;
                case 2:
                    BuildingSession session = context.getPlayer().getHouse().getBuildingSession();
                    if (session == null) {
                        context.stop();
                        return;
                    }
                    if (option == DialogueOption.OPTION_1) {
                        session.finishRoomDeletion(true);
                    } else {
                        session.finishRoomDeletion(false);
                    }
                    context.stop();
                    break;
                }
            }
        };

        // Two-stage confirmation before removing furniture; option 2 also suppresses
        // the prompt for the rest of the session via the CON_FURN_REMOVE variable
        FURNITURE_DELETION_DIALOGUE = new Dialogue<Integer>() {

            @Override
            public void initialize(DialogueContext<Integer> context) {
                context.openTextDialogue("Are you sure you want to remove this?", true);
                context.setStage(1);
            }

            @Override
            public void handleOption(DialogueContext<Integer> context, DialogueOption option) {
                switch (context.getStage()) {
                case 1:
                    context.openOptionDialogue("Yes, remove it.", "Yes, and don't ask again this session.", "No, I'll keep it.");
                    context.setStage(2);
                    break;
                case 2:
                    BuildingSession session = context.getPlayer().getHouse().getBuildingSession();
                    if (session == null) {
                        context.stop();
                        return;
                    }
                    if (option == DialogueOption.OPTION_1) {
                        session.finishFurnitureRemove(true);
                    } else if (option == DialogueOption.OPTION_2) {
                        session.getBuilder().getVariables().setVar(Variable.CON_FURN_REMOVE, 1);
                        session.finishFurnitureRemove(true);
                    } else {
                        session.finishFurnitureRemove(false);
                    }
                    context.stop();
                    break;
                }
            }
        };
    }

    /**
     * Debug/test helper: gives a player tools (saw, hammer) and a sample of each
     * building material.
     */
    public static void giveMats(Player player) {
        player.getInventory().add(new Item(8794));
        player.getInventory().add(new Item(2347));
        player.getInventory().add(new Item(Material.OAK.getItemId(), 5));
        player.getInventory().add(new Item(Material.TEAK.getItemId(), 5));
        player.getInventory().add(new Item(Material.MAHOGANY.getItemId(), 5));
        player.getInventory().add(new Item(Material.CLOTH.getItemId(), 7));
        player.getInventory().add(new Item(Material.GOLD_LEAF.getItemId(), 4));
    }

    /**
     * One-time server startup hook: loads the house palette map region and registers
     * the room-creation interface and object build handlers. No-op when {@link #ENABLED}
     * is false.
     */
    public static void initialize() {
        if (!ENABLED) {
            return;
        }

        // Load the room palette regions
        GameServer.getInstance().getMapLoader().load(29, 79);

        ButtonDispatcher.getInstance().bind(new WindowHandler(ROOM_CREATE_INTERFACE) {

            @Override
            public boolean handle(Player player, int windowId, int child, ExtendedOption option, int dyn) {
                if (player.getHouse().getBuildingSession() != null) {
                    player.getHouse().getBuildingSession().handleSelectRoomInterface(child);
                }
                return true;
            }
        });
        GameServer.getInstance().getMessageDispatcher().getObjectDispatcher().bind(new ObjectBuildHandler());
    }

    /**
     * The room type that fills unbuilt space at the given height level:
     * empty dungeon below ground, grass at ground level, nothing above.
     */
    static RoomType defaultRoom(int height) {
        switch (height) {
        case 0:
            return RoomType.DUNGEON_CLEAR;
        case 1:
            return RoomType.GRASS;
        default:
            return RoomType.NONE;
        }
    }

}
package com.example.tzachi.music; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.BroadcastReceiver; import android.content.Context; import android.content.IntentFilter; import android.graphics.Color; import android.media.MediaPlayer; import android.os.AsyncTask; import android.os.SystemClock; import android.support.v7.app.ActionBarActivity; import android.support.v7.app.ActionBar; import android.support.v4.app.Fragment; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.os.Build; import android.widget.Button; import android.content.Intent; import android.view.View.OnClickListener; import android.net.Uri; import android.os.SystemClock; import android.util.Log; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import android.media.Ringtone; import android.media.RingtoneManager; import java.util.Date; class SntpClient { private static final String TAG = "SntpClient"; private static final int REFERENCE_TIME_OFFSET = 16; private static final int ORIGINATE_TIME_OFFSET = 24; private static final int RECEIVE_TIME_OFFSET = 32; private static final int TRANSMIT_TIME_OFFSET = 40; private static final int NTP_PACKET_SIZE = 48; private static final int NTP_PORT = 123; private static final int NTP_MODE_CLIENT = 3; private static final int NTP_VERSION = 3; // Number of seconds between Jan 1, 1900 and Jan 1, 1970 // 70 years plus 17 leap days private static final long OFFSET_1900_TO_1970 = ((365L * 70L) + 17L) * 24L * 60L * 60L; // system time computed from NTP server response private long mNtpTime; // value of SystemClock.elapsedRealtime() corresponding to mNtpTime private long mNtpTimeReference; // round trip time in milliseconds private long mRoundTripTime; /** * Sends an SNTP request to the given host and processes the response. 
* * @param host host name of the server. * @param timeout network timeout in milliseconds. * @return true if the transaction was successful. */ public boolean requestTime(String host, int timeout) { DatagramSocket socket = null; try { socket = new DatagramSocket(); socket.setSoTimeout(timeout); InetAddress address = InetAddress.getByName(host); byte[] buffer = new byte[NTP_PACKET_SIZE]; DatagramPacket request = new DatagramPacket(buffer, buffer.length, address, NTP_PORT); // set mode = 3 (client) and version = 3 // mode is in low 3 bits of first byte // version is in bits 3-5 of first byte buffer[0] = NTP_MODE_CLIENT | (NTP_VERSION << 3); // get current time and write it to the request packet long requestTime = System.currentTimeMillis(); long requestTicks = SystemClock.elapsedRealtime(); writeTimeStamp(buffer, TRANSMIT_TIME_OFFSET, requestTime); socket.send(request); // read the response DatagramPacket response = new DatagramPacket(buffer, buffer.length); socket.receive(response); long responseTicks = SystemClock.elapsedRealtime(); long responseTime = requestTime + (responseTicks - requestTicks); // extract the results long originateTime = readTimeStamp(buffer, ORIGINATE_TIME_OFFSET); long receiveTime = readTimeStamp(buffer, RECEIVE_TIME_OFFSET); long transmitTime = readTimeStamp(buffer, TRANSMIT_TIME_OFFSET); long roundTripTime = responseTicks - requestTicks - (transmitTime - receiveTime); // receiveTime = originateTime + transit + skew // responseTime = transmitTime + transit - skew // clockOffset = ((receiveTime - originateTime) + (transmitTime - responseTime))/2 // = ((originateTime + transit + skew - originateTime) + // (transmitTime - (transmitTime + transit - skew)))/2 // = ((transit + skew) + (transmitTime - transmitTime - transit + skew))/2 // = (transit + skew - transit + skew)/2 // = (2 * skew)/2 = skew long clockOffset = ((receiveTime - originateTime) + (transmitTime - responseTime))/2; // if (false) Log.d(TAG, "round trip: " + roundTripTime + " ms"); 
// if (false) Log.d(TAG, "clock offset: " + clockOffset + " ms"); // save our results - use the times on this side of the network latency // (response rather than request time) mNtpTime = responseTime + clockOffset; mNtpTimeReference = responseTicks; mRoundTripTime = roundTripTime; } catch (Exception e) { Log.d(TAG, "request time failed: " + e); return false; } finally { if (socket != null) { socket.close(); } } return true; } /** * Returns the time computed from the NTP transaction. * * @return time value computed from NTP server response. */ public long getNtpTime() { return mNtpTime; } /** * Returns the reference clock value (value of SystemClock.elapsedRealtime()) * corresponding to the NTP time. * * @return reference clock corresponding to the NTP time. */ public long getNtpTimeReference() { return mNtpTimeReference; } /** * Returns the round trip time of the NTP transaction * * @return round trip time in milliseconds. */ public long getRoundTripTime() { return mRoundTripTime; } /** * Reads an unsigned 32 bit big endian number from the given offset in the buffer. */ private long read32(byte[] buffer, int offset) { byte b0 = buffer[offset]; byte b1 = buffer[offset+1]; byte b2 = buffer[offset+2]; byte b3 = buffer[offset+3]; // convert signed bytes to unsigned values int i0 = ((b0 & 0x80) == 0x80 ? (b0 & 0x7F) + 0x80 : b0); int i1 = ((b1 & 0x80) == 0x80 ? (b1 & 0x7F) + 0x80 : b1); int i2 = ((b2 & 0x80) == 0x80 ? (b2 & 0x7F) + 0x80 : b2); int i3 = ((b3 & 0x80) == 0x80 ? (b3 & 0x7F) + 0x80 : b3); return ((long)i0 << 24) + ((long)i1 << 16) + ((long)i2 << 8) + (long)i3; } /** * Reads the NTP time stamp at the given offset in the buffer and returns * it as a system time (milliseconds since January 1, 1970). 
*/ private long readTimeStamp(byte[] buffer, int offset) { long seconds = read32(buffer, offset); long fraction = read32(buffer, offset + 4); return ((seconds - OFFSET_1900_TO_1970) * 1000) + ((fraction * 1000L) / 0x100000000L); } /** * Writes system time (milliseconds since January 1, 1970) as an NTP time stamp * at the given offset in the buffer. */ private void writeTimeStamp(byte[] buffer, int offset, long time) { long seconds = time / 1000L; long milliseconds = time - seconds * 1000L; seconds += OFFSET_1900_TO_1970; // write seconds in big endian format buffer[offset++] = (byte)(seconds >> 24); buffer[offset++] = (byte)(seconds >> 16); buffer[offset++] = (byte)(seconds >> 8); buffer[offset++] = (byte)(seconds >> 0); long fraction = milliseconds * 0x100000000L / 1000L; // write fraction in big endian format buffer[offset++] = (byte)(fraction >> 24); buffer[offset++] = (byte)(fraction >> 16); buffer[offset++] = (byte)(fraction >> 8); // low order bits should be random data buffer[offset++] = (byte)(Math.random() * 255.0); } } class MyAsyncTask extends android.os.AsyncTask { @Override protected Object doInBackground(Object[] objects) { Log.i("NTP tag","startingrrrrrrrrrrrrrrrrrrrresulr" ); //do something asynchronously SntpClient client = new SntpClient(); if (client.requestTime("time.nist.gov", 10000)) { long now = client.getNtpTime() + SystemClock.elapsedRealtime() - client.getNtpTimeReference(); Date current = new Date(now); Log.i("NTP tag","rrrrrrrrrrrrrrrrrrrresulr" + current.toString()+" now= "+ now + " sysetm time" + new Date().getTime()); }else { Log.i("NTP tag","rrrrrrrrrrrrrrrrrrrresulr errro"); } return null; } } // should allow 2 functions play, stop. the caller sholdnot care for it's state... 
// Lazily-created singleton wrapper around a MediaPlayer.
// NOTE(review): not thread-safe — two threads racing through get_player()
// can each construct an instance; acceptable if only ever used from the UI
// thread — confirm.
class PlayFile {
    static PlayFile singletone;
    static PlayFile get_player() {
        if(singletone == null) {
            Log.e("tag","Creating a new PlayFile");
            singletone = new PlayFile();
        } else {
            Log.e("tag","Using existing PlayFile");
        }
        return singletone;
    }
    private MediaPlayer mediaPlayer;
    // not playing, playing, finished
    // NOTE(review): 'state' is never read or written in the visible code.
    int state;
    // Starts playback of the bundled track.
    // NOTE(review): calling this twice without stopMusic() overwrites
    // mediaPlayer and leaks the previous (still playing) instance.
    void startMusic(Context ctx) {
        Log.e("tag", "start called");
        mediaPlayer = MediaPlayer.create(ctx, R.raw.a_neel_kamal_5702); //(this, R.raw.a_neel_kamal_5702);
        mediaPlayer.start();
    }
    // Stops playback and releases the player; safe to call when not playing.
    void stopMusic() {
        Log.e("tag", "stop called");
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
        }
    }
}

public class MainActivity extends ActionBarActivity {
    Button button;

    // Schedules a broadcast alarm ~15s from now.
    // NOTE(review): 'receiver' is constructed but never registered in the
    // active code path (registration only exists in the commented-out block
    // below), so onReceive — and the button color change — will never fire
    // unless a receiver for "de.vogella.android.mybroadcast" is registered
    // elsewhere. Confirm intent.
    public void SetAlarm() {
        // final Button button = buttons[2]; // replace with a button from your own UI
        BroadcastReceiver receiver = new BroadcastReceiver() {
            @Override
            public void onReceive( Context context, Intent I_ ) {
                Log.e("tag", "On recieve called");
                button.setBackgroundColor( Color.RED );
                context.unregisterReceiver( this ); // this == BroadcastReceiver, not Activity
            }
        };
        Intent intent = new Intent();
        intent.setAction("de.vogella.android.mybroadcast");
        // sendBroadcast(intent);
        AlarmManager alarmMgr = (AlarmManager)this.getSystemService(Context.ALARM_SERVICE);
        // Intent intent = new Intent(context, AlarmReceiver.class);
        // NOTE(review): flags == 0 yields a mutable PendingIntent; API 31+
        // requires FLAG_IMMUTABLE or FLAG_MUTABLE to be explicit — verify
        // target SDK.
        PendingIntent alarmIntent = PendingIntent.getBroadcast(this /* ???
context */, 0, intent, 0);
        // ELAPSED_REALTIME_WAKEUP: fires relative to the monotonic clock and
        // wakes the device if asleep.
        alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 15 * 1000, alarmIntent);
        /* old code did not work, when app died
        this.registerReceiver( receiver, new IntentFilter("com.blah.blah.somemessage") );
        PendingIntent pintent = PendingIntent.getBroadcast(this, 0, new Intent("com.blah.blah.somemessage"), 0);
        AlarmManager manager = (AlarmManager)(this.getSystemService( Context.ALARM_SERVICE ));
        // set alarm to fire 5 sec (1000*5) from now (SystemClock.elapsedRealtime())
        manager.set( AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 1000*15, pintent );
        */
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        /* if (savedInstanceState == null) {
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, new PlaceholderFragment())
                    .commit();
        }*/
        addListenerOnButton();
    }

    // Wires click handlers: button1 schedules the alarm, button2 stops music.
    // NOTE(review): the single 'button' field ends up referencing button2
    // after this method runs, so the receiver in SetAlarm would recolor
    // button2, not button1 — confirm that is intended.
    public void addListenerOnButton() {
        button = (Button) findViewById(R.id.button1);
        button.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                //(new MyAsyncTask()).execute();
                /* Uri alert = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM);
                if(alert == null){
                    // alert is null, using backup
                    alert = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
                    // I can't see this ever being null (as always have a default notification)
                    // but just incase
                    if(alert == null) {
                        // alert backup is null, using 2nd backup
                        alert = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE);
                    }
                }
                r = RingtoneManager.getRingtone(getApplicationContext(), alert);
                r.play(); */
                SetAlarm();
                // PlayFile player = PlayFile.get_player();
                // player.startMusic(getApplicationContext());
            }
        });
        button = (Button) findViewById(R.id.button2);
        button.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                //r.stop();
                PlayFile player = PlayFile.get_player();
                player.stopMusic();
            }
        });
    }

    @Override
public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } /** * A placeholder fragment containing a simple view. */ public static class PlaceholderFragment extends Fragment { public PlaceholderFragment() { } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_main, container, false); return rootView; } } }
package org.wso2.carbon.apimgt.rest.api.store.impl; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wso2.carbon.apimgt.core.api.APIStore; import org.wso2.carbon.apimgt.core.exception.APIManagementException; import org.wso2.carbon.apimgt.core.exception.ErrorHandler; import org.wso2.carbon.apimgt.core.exception.ExceptionCodes; import org.wso2.carbon.apimgt.core.models.API; import org.wso2.carbon.apimgt.core.models.Comment; import org.wso2.carbon.apimgt.core.models.DocumentContent; import org.wso2.carbon.apimgt.core.models.DocumentInfo; import org.wso2.carbon.apimgt.core.models.Rating; import org.wso2.carbon.apimgt.core.util.APIMgtConstants; import org.wso2.carbon.apimgt.core.util.ETagUtils; import org.wso2.carbon.apimgt.rest.api.common.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.common.dto.ErrorDTO; import org.wso2.carbon.apimgt.rest.api.common.util.RestApiUtil; import org.wso2.carbon.apimgt.rest.api.store.ApisApiService; import org.wso2.carbon.apimgt.rest.api.store.NotFoundException; import org.wso2.carbon.apimgt.rest.api.store.dto.APIDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.APIListDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.CommentDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.CommentListDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.DocumentDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.DocumentListDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.RatingDTO; import org.wso2.carbon.apimgt.rest.api.store.dto.RatingListDTO; import org.wso2.carbon.apimgt.rest.api.store.mappings.APIMappingUtil; import org.wso2.carbon.apimgt.rest.api.store.mappings.CommentMappingUtil; import 
org.wso2.carbon.apimgt.rest.api.store.mappings.DocumentationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.store.mappings.RatingMappingUtil;
import org.wso2.msf4j.Request;

@javax.annotation.Generated(value = "class org.wso2.maven.plugins.JavaMSF4JServerCodegen", date = "2016-11-01T13:48:55.078+05:30")
public class ApisApiServiceImpl extends ApisApiService {

    private static final Logger log = LoggerFactory.getLogger(ApisApiServiceImpl.class);

    /**
     * Deletes a comment
     *
     * @param commentId         Comment ID
     * @param apiId             API ID
     * @param ifMatch           If-Match header value
     * @param ifUnmodifiedSince If-Unmodified-Since header value
     * @param request           msf4j request object
     * @return 200 response if the deletion was successful
     * @throws NotFoundException if this method is not defined in ApisApiServiceImpl
     */
    @Override
    public Response apisApiIdCommentsCommentIdDelete(String commentId, String apiId, String ifMatch,
            String ifUnmodifiedSince, Request request) throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            // Optimistic concurrency: reject the delete when the client's
            // If-Match ETag no longer matches the stored fingerprint.
            String existingFingerprint = apisApiIdCommentsCommentIdDeleteFingerprint(commentId, apiId, ifMatch,
                    ifUnmodifiedSince, request);
            if (!StringUtils.isEmpty(ifMatch) && !StringUtils.isEmpty(existingFingerprint) && !ifMatch
                    .contains(existingFingerprint)) {
                return Response.status(Response.Status.PRECONDITION_FAILED).build();
            }
            apiStore.deleteComment(commentId, apiId, username);
        } catch (APIManagementException e) {
            String errorMessage = "Error while deleting comment with commentId: " + commentId + " of apiID :" + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            paramList.put(APIMgtConstants.ExceptionsConstants.COMMENT_ID, commentId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
        return Response.ok().build();
    }

    /**
     * Retrieves a comment for a given API ID and comment ID.
     *
     * @param commentId       Comment ID
     * @param apiId           API ID
     * @param accept          accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return CommentDTO object
     * @throws NotFoundException if this method is not defined in ApisApiServiceImpl
     */
    @Override
    public Response apisApiIdCommentsCommentIdGet(String commentId, String apiId, String accept, String ifNoneMatch,
            String ifModifiedSince, Request request) throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            // Conditional GET: return 304 when the client's If-None-Match
            // ETag still matches the stored fingerprint.
            String existingFingerprint = apisApiIdCommentsCommentIdGetFingerprint(commentId, apiId, accept,
                    ifNoneMatch, ifModifiedSince, request);
            if (!StringUtils.isEmpty(ifNoneMatch) && !StringUtils.isEmpty(existingFingerprint) && ifNoneMatch
                    .contains(existingFingerprint)) {
                return Response.notModified().build();
            }
            Comment comment = apiStore.getCommentByUUID(commentId, apiId);
            CommentDTO commentDTO = CommentMappingUtil.fromCommentToDTO(comment);
            return Response.ok().header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"").entity(commentDTO)
                    .build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving comment with commentId: " + commentId + " of apiID :"
                    + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            paramList.put(APIMgtConstants.ExceptionsConstants.COMMENT_ID, commentId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Retrieves the fingerprint of a comment for commentGet
     *
     * @param commentId       Comment ID
     * @param apiId           API ID
     * @param accept          accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return Fingerprint of the comment
     */
    public String apisApiIdCommentsCommentIdGetFingerprint(String commentId, String apiId, String accept,
            String ifNoneMatch, String ifModifiedSince, Request request) {
        return getEtag(commentId);
    }

    /**
     * Retrieves the fingerprint of a comment for commentPut
     *
     * @param commentId         Comment ID
     * @param apiId             API ID
     * @param body              body of the request
     * @param contentType       Content-Type header value
     * @param ifMatch           If-Match header value
     * @param ifUnmodifiedSince If-Unmodified-Since header value
     * @param request           msf4j request object
     * @return Fingerprint of the comment
     */
    public String apisApiIdCommentsCommentIdPutFingerprint(String commentId, String apiId, CommentDTO body,
            String contentType, String ifMatch, String ifUnmodifiedSince, Request request) {
        return getEtag(commentId);
    }

    /**
     * Retrieves the fingerprint of a comment for commentDelete
     *
     * @param commentId         Comment ID
     * @param apiId             API ID
     * @param ifMatch           If-Match header value
     * @param ifUnmodifiedSince If-Unmodified-Since header value
     * @param request           msf4j request object
     * @return Fingerprint of the comment
     */
    public String apisApiIdCommentsCommentIdDeleteFingerprint(String commentId, String apiId, String ifMatch,
            String ifUnmodifiedSince, Request request) {
        return getEtag(commentId);
    }

    /**
     * Retrieves an ETag built from the comment's last-updated time.
     *
     * @param commentId Comment ID
     * @return ETag string, or null if the lookup fails (callers treat a null
     *         fingerprint as "no precondition check possible")
     */
    private String getEtag(String commentId){
        String username = RestApiUtil.getLoggedInUsername();
        try {
            String lastUpdatedTime = RestApiUtil.getConsumer(username).getLastUpdatedTimeOfComment(commentId);
            return ETagUtils.generateETag(lastUpdatedTime);
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving last updated time of comment " + commentId;
            log.error(errorMessage, e);
            return null;
        }
    }

    /**
     * Retrieves a list of comments for a given API ID.
     *
     * NOTE(review): unlike apisApiIdDocumentsGet, limit/offset are passed to
     * the mapper without being defaulted when null, and the store call is not
     * paginated — confirm whether defaults should be applied here too.
     *
     * @param apiId   API ID
     * @param limit   Max number of comments to return
     * @param offset  Starting point of pagination
     * @param accept  accept header value
     * @param request msf4j request object
     * @return CommentListDTO object
     * @throws NotFoundException if this method is not defined in ApisApiServiceImpl
     */
    @Override
    public Response apisApiIdCommentsGet(String apiId, Integer limit, Integer offset, String accept,
            Request request) throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            List<Comment> commentList = apiStore.getCommentsForApi(apiId);
            CommentListDTO commentListDTO = CommentMappingUtil.fromCommentListToDTO(commentList, limit, offset);
            return Response.ok().entity(commentListDTO).build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving comments for api : " + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Adds a comment to an API (the code path calls apiStore.addComment and
     * returns 201 Created with a Location header).
     *
     * NOTE(review): the ETag is derived from comment.getUuid() (the
     * pre-persist object) rather than createdCommentId — confirm the mapper
     * assigns the UUID before persistence. Also, the error paramList uses
     * body.getApiId() while the error message uses the path apiId — confirm
     * these are always the same value.
     *
     * @param apiId       API ID
     * @param body        comment body
     * @param contentType content-type header
     * @param request     msf4j request object
     * @return comment create response
     * @throws NotFoundException if this method is not defined in ApisApiServiceImpl
     */
    @Override
    public Response apisApiIdCommentsPost(String apiId, CommentDTO body, String contentType, Request request)
            throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            Comment comment = CommentMappingUtil.fromDTOToComment(body, username);
            String createdCommentId = apiStore.addComment(comment, apiId);

            // Re-read the persisted comment so the response reflects stored state.
            Comment createdComment = apiStore.getCommentByUUID(createdCommentId, apiId);
            CommentDTO createdCommentDTO = CommentMappingUtil.fromCommentToDTO(createdComment);

            URI location = new URI(
                    RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId + RestApiConstants.SUBRESOURCE_PATH_COMMENTS
                            + "/" + createdCommentId);
            String fingerprint = getEtag(comment.getUuid());
            return Response.status(Response.Status.CREATED).header(RestApiConstants.LOCATION_HEADER, location)
                    .header(HttpHeaders.ETAG, "\"" + fingerprint + "\"").entity(createdCommentDTO)
                    .build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while adding comment to api : " + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, body.getApiId());
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        } catch (URISyntaxException e) {
            String errorMessage = "Error while adding location header in response for comment";
            ErrorHandler errorHandler = ExceptionCodes.LOCATION_HEADER_INCORRECT;
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(errorHandler);
            log.error(errorMessage, e);
            return Response.status(errorHandler.getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Updates a comment.
     *
     * @param commentId         Comment ID
     * @param apiId             API ID
     * @param body              comment body
     * @param contentType       content-type header
     * @param ifMatch           If-Match header value
     * @param ifUnmodifiedSince If-Unmodified-Since header value
     * @param request           msf4j request object
     * @return comment update response
     * @throws NotFoundException if this method is not defined in ApisApiServiceImpl
     */
    @Override
    public Response apisApiIdCommentsCommentIdPut(String commentId, String apiId, CommentDTO body,
            String contentType, String ifMatch, String ifUnmodifiedSince, Request request) throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            // Optimistic concurrency: reject the update when the client's
            // If-Match ETag no longer matches the stored fingerprint.
            String existingFingerprint = apisApiIdCommentsCommentIdPutFingerprint(commentId, apiId, body,
                    contentType, ifMatch, ifUnmodifiedSince, request);
            if (!StringUtils.isEmpty(ifMatch) && !StringUtils.isEmpty(existingFingerprint) && !ifMatch
                    .contains(existingFingerprint)) {
                return Response.status(Response.Status.PRECONDITION_FAILED).build();
            }
            Comment comment = CommentMappingUtil.fromDTOToComment(body, username);
            apiStore.updateComment(comment, commentId, apiId, username);

            // Re-read the persisted comment and issue a fresh ETag.
            Comment updatedComment = apiStore.getCommentByUUID(commentId, apiId);
            CommentDTO updatedCommentDTO = CommentMappingUtil.fromCommentToDTO(updatedComment);
            String newFingerprint = getEtag(commentId);
            return Response.ok().header(HttpHeaders.ETAG, "\"" + newFingerprint + "\"").entity(updatedCommentDTO)
                    .build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while updating comment : " + commentId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, body.getApiId());
            paramList.put(APIMgtConstants.ExceptionsConstants.COMMENT_ID, commentId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Retrieves the content of the document
     *
     * @param apiId           API ID
     * @param documentId      Document ID
     * @param accept          Accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return content of the document
     * @throws NotFoundException When the particular resource does not exist in the system
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdContentGet(String apiId, String documentId, String accept,
            String ifNoneMatch, String ifModifiedSince, Request request) throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            // Conditional GET: return 304 when the client's If-None-Match
            // ETag still matches the stored fingerprint.
            String existingFingerprint = apisApiIdDocumentsDocumentIdContentGetFingerprint(apiId, documentId,
                    accept, ifNoneMatch, ifModifiedSince, request);
            if (!StringUtils.isEmpty(ifNoneMatch) && !StringUtils.isEmpty(existingFingerprint) && ifNoneMatch
                    .contains(existingFingerprint)) {
                return Response.notModified().build();
            }
            DocumentContent documentationContent = apiStore.getDocumentationContent(documentId);
            DocumentInfo documentInfo = documentationContent.getDocumentInfo();
            // Response shape depends on how the document is stored:
            // FILE -> octet-stream attachment, INLINE -> plain text,
            // URL -> 303 redirect to the external source.
            if (DocumentInfo.SourceType.FILE.equals(documentInfo.getSourceType())) {
                String filename = documentInfo.getFileName();
                return Response.ok(documentationContent.getFileContent())
                        .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM_TYPE)
                        .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
                        .header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"")
                        .build();
            } else if (DocumentInfo.SourceType.INLINE.equals(documentInfo.getSourceType())) {
                String content = documentationContent.getInlineContent();
                return Response.ok(content)
                        .header(RestApiConstants.HEADER_CONTENT_TYPE, MediaType.TEXT_PLAIN)
                        .header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"")
                        .build();
            } else if (DocumentInfo.SourceType.URL.equals(documentInfo.getSourceType())) {
                String sourceUrl = documentInfo.getSourceURL();
                return Response.seeOther(new URI(sourceUrl))
                        .header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"")
                        .build();
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving document " + documentId + " of the API " + apiId;
            HashMap<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving source URI location of " + documentId;
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(errorMessage, 900313L, errorMessage);
            log.error(errorMessage, e);
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(errorDTO).build();
        }
        // NOTE(review): reached only for an unrecognized SourceType; returning
        // null from a JAX-RS resource typically yields 204 — confirm intended.
        return null;
    }

    /**
     * Retrieves the fingerprint of a document content given its UUID
     *
     * @param apiId           API ID
     * @param documentId      Document ID
     * @param accept          Accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return Fingerprint of the document content, or null on lookup failure
     */
    public String apisApiIdDocumentsDocumentIdContentGetFingerprint(String apiId, String documentId, String accept,
            String ifNoneMatch, String ifModifiedSince, Request request) {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            String lastUpdatedTime = RestApiUtil.getConsumer(username)
                    .getLastUpdatedTimeOfDocumentContent(apiId, documentId);
            return ETagUtils.generateETag(lastUpdatedTime);
        } catch (APIManagementException e) {
            //gives a warning and let it continue the execution
            String errorMessage = "Error while retrieving last updated time of content of document " + documentId
                    + " of API " + apiId;
            log.error(errorMessage, e);
            return null;
        }
    }

    /**
     * Retrieves the document identified by the API's ID and the document's ID.
     *
     * @param apiId           UUID of API
     * @param documentId      UUID of the document
     * @param accept          Accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         minor version header
     * @return the document qualifying for the provided IDs
     * @throws NotFoundException When the particular resource does not exist in the system
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdGet(String apiId, String documentId, String accept,
            String ifNoneMatch, String ifModifiedSince, Request request) throws NotFoundException {
        DocumentDTO documentDTO = null;
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            // Conditional GET: return 304 when the client's ETag still matches.
            String existingFingerprint = apisApiIdDocumentsDocumentIdGetFingerprint(apiId, documentId, accept,
                    ifNoneMatch, ifModifiedSince, request);
            if (!StringUtils.isEmpty(ifNoneMatch) && !StringUtils.isEmpty(existingFingerprint) && ifNoneMatch
                    .contains(existingFingerprint)) {
                return Response.notModified().build();
            }
            DocumentInfo documentInfo = apiStore.getDocumentationSummary(documentId);
            documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentInfo);
            return Response.ok().entity(documentDTO)
                    .header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"").build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving documentation for given apiId " + apiId + "with docId "
                    + documentId;
            HashMap<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            paramList.put(APIMgtConstants.ExceptionsConstants.DOC_ID, documentId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Retrieves the fingerprint of the document given its UUID
     *
     * @param apiId           API ID
     * @param documentId      Document ID
     * @param accept          Accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return Fingerprint of the document, or null on lookup failure
     */
    public String apisApiIdDocumentsDocumentIdGetFingerprint(String apiId, String documentId, String accept,
            String ifNoneMatch, String ifModifiedSince, Request request) {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            String lastUpdatedTime = RestApiUtil.getConsumer(username)
                    .getLastUpdatedTimeOfDocument(documentId);
            return ETagUtils.generateETag(lastUpdatedTime);
        } catch (APIManagementException e) {
            //gives a warning and let it continue the execution
            String errorMessage = "Error while retrieving last updated time of document " + documentId + " of API "
                    + apiId;
            log.error(errorMessage, e);
            return null;
        }
    }

    /**
     * Retrieves a list of documents of an API
     *
     * @param apiId       UUID of API
     * @param limit       maximum documents to return
     * @param offset      starting position of the pagination
     * @param accept      Accept header value
     * @param ifNoneMatch If-None-Match header value
     * @param request     minor version header
     * @return a list of document DTOs
     * @throws NotFoundException When the particular resource does not exist in the system
     */
    @Override
    public Response apisApiIdDocumentsGet(String apiId, Integer limit, Integer offset, String accept,
            String ifNoneMatch, Request request) throws NotFoundException {
        DocumentListDTO documentListDTO = null;
        // Null pagination params fall back to the API-wide defaults.
        limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
        offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            List<DocumentInfo> documentInfoResults = apiStore.getAllDocumentation(apiId, offset, limit);
            documentListDTO = DocumentationMappingUtil
                    .fromDocumentationListToDTO(documentInfoResults, offset, limit);
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving documentation for given apiId " + apiId;
            HashMap<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
        return Response.ok().entity(documentListDTO).build();
    }

    /**
     * Get API of given ID
     *
     * @param apiId           API ID
     * @param accept          accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return API of the given ID
     * @throws NotFoundException If failed to get the API
     */
    @Override
    public Response apisApiIdGet(String apiId, String accept, String ifNoneMatch, String ifModifiedSince,
            Request request) throws NotFoundException {
        APIDTO apiToReturn = null;
        try {
            String username = RestApiUtil.getLoggedInUsername();
            APIStore apiStore = RestApiUtil.getConsumer(username);
            // Conditional GET: return 304 when the client's ETag still matches.
            String existingFingerprint = apisApiIdGetFingerprint(apiId, accept, ifNoneMatch, ifModifiedSince,
                    request);
            if (!StringUtils.isEmpty(ifNoneMatch) && !StringUtils.isEmpty(existingFingerprint) && ifNoneMatch
                    .contains(existingFingerprint)) {
                return Response.notModified().build();
            }
            API api = apiStore.getAPIbyUUID(apiId);
            apiToReturn = APIMappingUtil.toAPIDTO(api);
            return Response.ok().entity(apiToReturn)
                    .header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"")
                    .build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving API : " + apiId;
            HashMap<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Retrieves a list of ratings for given API ID
     *
     * @param apiId   API ID
     * @param limit   response limit
     * @param offset  response offset
     * @param accept  accept header value
     * @param request msf4j request object
     * @return List of Ratings for API
     * @throws NotFoundException if failed to find method implementation
     */
    @Override
    public Response apisApiIdRatingsGet(String apiId, Integer limit, Integer offset, String accept,
            Request request) throws NotFoundException {
        double avgRating;
        String username = RestApiUtil.getLoggedInUsername();
        // 0 means "the calling user has not rated this API".
        int userRatingValue = 0;
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            Rating userRating = apiStore.getRatingForApiFromUser(apiId, username);
            if(userRating != null) {
                userRatingValue = userRating.getRating();
            }
            avgRating = apiStore.getAvgRating(apiId);
            List<Rating> ratingListForApi = apiStore.getRatingsListForApi(apiId);
            List<RatingDTO> ratingDTOList = RatingMappingUtil.fromRatingListToDTOList(ratingListForApi);
            RatingListDTO ratingListDTO = RatingMappingUtil.fromRatingDTOListToRatingListDTO(avgRating,
                    userRatingValue, offset, limit, ratingDTOList);
            return Response.ok().entity(ratingListDTO).build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving rating for given API " + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Retrieves a single rating of an API by its rating UUID.
     *
     * @param apiId           API ID
     * @param ratingId        Rating ID
     * @param accept          accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return RatingDTO for the given rating ID
     * @throws NotFoundException if failed to find method implementation
     */
    @Override
    public Response apisApiIdRatingsRatingIdGet(String apiId, String ratingId, String accept, String ifNoneMatch,
            String ifModifiedSince, Request request) throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            Rating rating = apiStore.getRatingByUUID(apiId, ratingId);
            RatingDTO ratingDTO = RatingMappingUtil.fromRatingToDTO(rating);
            return Response.ok().entity(ratingDTO).build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving rating for given API " + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            paramList.put(APIMgtConstants.ExceptionsConstants.RATING_ID, ratingId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Add or update rating to an API: updates the caller's existing rating in
     * place (200) or creates a new one (201 with Location header).
     *
     * NOTE(review): the URISyntaxException error message says "for comment" —
     * looks copy-pasted from the comments endpoint; confirm intended wording.
     *
     * @param apiId       APIID
     * @param body        RatingDTO object
     * @param contentType content-type header
     * @param request     msf4j request
     * @return 200/201 response if successful
     * @throws NotFoundException if failed to find method implementation
     */
    @Override
    public Response apisApiIdUserRatingPut(String apiId, RatingDTO body, String contentType, Request request)
            throws NotFoundException {
        String username = RestApiUtil.getLoggedInUsername();
        String ratingId;
        try {
            APIStore apiStore = RestApiUtil.getConsumer(username);
            Rating ratingFromPayload = RatingMappingUtil.fromDTOToRating(username, apiId, body);
            Rating existingRating = apiStore.getRatingForApiFromUser(apiId, username);
            if (existingRating != null) {
                // The user already rated this API: overwrite their rating.
                String existingRatingUUID = existingRating.getUuid();
                apiStore.updateRating(apiId, existingRatingUUID, ratingFromPayload);
                Rating updatedRating = apiStore.getRatingByUUID(apiId, existingRatingUUID);
                RatingDTO updatedRatingDTO = RatingMappingUtil.fromRatingToDTO(updatedRating);
                return Response.ok().entity(updatedRatingDTO)
                        .build();
            } else {
                // First rating from this user: create and point at it.
                ratingId = apiStore.addRating(apiId, ratingFromPayload);
                Rating createdRating = apiStore.getRatingByUUID(apiId, ratingId);
                RatingDTO createdRatingDTO = RatingMappingUtil.fromRatingToDTO(createdRating);
                URI location = new URI(
                        RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId + RestApiConstants.SUBRESOURCE_PATH_RATINGS
                                + "/" + ratingId);
                return Response.status(Response.Status.CREATED).header(RestApiConstants.LOCATION_HEADER, location)
                        .entity(createdRatingDTO).build();
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while adding/updating rating for user " + username + " for given API "
                    + apiId;
            Map<String, String> paramList = new HashMap<String, String>();
            paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId);
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList);
            log.error(errorMessage, e);
            return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build();
        } catch (URISyntaxException e) {
            String errorMessage = "Error while adding location header in response for comment";
            ErrorHandler errorHandler = ExceptionCodes.LOCATION_HEADER_INCORRECT;
            ErrorDTO errorDTO = RestApiUtil.getErrorDTO(errorHandler);
            log.error(errorMessage, e);
            return Response.status(errorHandler.getHttpStatusCode()).entity(errorDTO).build();
        }
    }

    /**
     * Retrieves the fingerprint of the API given its ID
     *
     * @param apiId           API ID
     * @param accept          Accept header value
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @param request         msf4j request object
     * @return Fingerprint of the API
     */
    public String apisApiIdGetFingerprint(String apiId, String accept, String ifNoneMatch, String ifModifiedSince,
            Request request) {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            String lastUpdatedTime = RestApiUtil.getConsumer(username).getLastUpdatedTimeOfAPI(apiId);
return ETagUtils.generateETag(lastUpdatedTime); } catch (APIManagementException e) { //gives a warning and let it continue the execution String errorMessage = "Error while retrieving last updated time of API " + apiId; log.error(errorMessage, e); return null; } } /** * Retrieves the swagger definition of an API * * @param apiId UUID of API * @param accept Accept header value * @param ifNoneMatch If-None-Match header value * @param ifModifiedSince If-Modified-Since header value * @param request minor version header * @return swagger definition of an API * @throws NotFoundException When the particular resource does not exist in the system */ @Override public Response apisApiIdSwaggerGet(String apiId, String accept, String ifNoneMatch, String ifModifiedSince, Request request) throws NotFoundException { String username = RestApiUtil.getLoggedInUsername(); try { APIStore apiStore = RestApiUtil.getConsumer(username); String existingFingerprint = apisApiIdSwaggerGetFingerprint(apiId, accept, ifNoneMatch, ifModifiedSince, request); if (!StringUtils.isEmpty(ifNoneMatch) && !StringUtils.isEmpty(existingFingerprint) && ifNoneMatch .contains(existingFingerprint)) { return Response.notModified().build(); } String swagger = apiStore.getApiSwaggerDefinition(apiId); return Response.ok().header(HttpHeaders.ETAG, "\"" + existingFingerprint + "\"").entity(swagger).build(); } catch (APIManagementException e) { String errorMessage = "Error while retrieving swagger definition of API : " + apiId; HashMap<String, String> paramList = new HashMap<String, String>(); paramList.put(APIMgtConstants.ExceptionsConstants.API_ID, apiId); ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList); log.error(errorMessage, e); return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build(); } } /** * Retrieves the fingerprint of the swagger given its API's ID * * @param apiId API ID * @param accept Accept header value * @param ifNoneMatch If-None-Match header 
value * @param ifModifiedSince If-Modified-Since header value * @param request msf4j request object * @return Retrieves the fingerprint String of the swagger */ public String apisApiIdSwaggerGetFingerprint(String apiId, String accept, String ifNoneMatch, String ifModifiedSince, Request request) { String username = RestApiUtil.getLoggedInUsername(); try { String lastUpdatedTime = RestApiUtil.getConsumer(username).getLastUpdatedTimeOfAPI(apiId); return ETagUtils.generateETag(lastUpdatedTime); } catch (APIManagementException e) { //gives a warning and let it continue the execution String errorMessage = "Error while retrieving last updated time of Swagger definition of API :" + apiId; log.error(errorMessage, e); return null; } } /** * Retrieves APIs qualifying under given search condition * * @param limit maximum number of APIs returns * @param offset starting index * @param query search condition * @param accept Accept header value * @param ifNoneMatch If-None-Match header value * @param request msf4j request object * @return matched APIs for the given search condition */ @Override public Response apisGet(Integer limit, Integer offset, String query, String accept, String ifNoneMatch, Request request) throws NotFoundException { List<API> apisResult = null; APIListDTO apiListDTO = null; try { String username = RestApiUtil.getLoggedInUsername(); APIStore apiStore = RestApiUtil.getConsumer(username); apisResult = apiStore.searchAPIs(query, offset, limit); // convert API apiListDTO = APIMappingUtil.toAPIListDTO(apisResult); } catch (APIManagementException e) { String errorMessage = "Error while retrieving APIs "; HashMap<String, String> paramList = new HashMap<String, String>(); paramList.put(APIMgtConstants.ExceptionsConstants.API_NAME, query); ErrorDTO errorDTO = RestApiUtil.getErrorDTO(e.getErrorHandler(), paramList); log.error(errorMessage, e); return Response.status(e.getErrorHandler().getHttpStatusCode()).entity(errorDTO).build(); } return 
Response.ok().entity(apiListDTO).build(); } }
package com.compositesw.services.system.util.common;

import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;

/**
 * This object contains factory methods for each Java content interface and
 * Java element interface generated in the
 * com.compositesw.services.system.util.common package.
 *
 * <p>An ObjectFactory allows you to programatically construct new instances of
 * the Java representation for XML content. The Java representation of XML
 * content can consist of schema derived interfaces and classes representing
 * the binding of schema type definitions, element declarations and model
 * groups. Factory methods for each of these are provided in this class.
 *
 * <p>NOTE(review): this class follows the standard JAXB-generated
 * ObjectFactory pattern — presumably generated from the service schema, so it
 * should be regenerated rather than hand-edited. TODO confirm with the build.
 */
@XmlRegistry
public class ObjectFactory {

    // Qualified names of the root elements declared by the schema; used by the
    // JAXBElement factory methods below.
    private final static QName _CancelServerTask_QNAME = new QName("http://www.compositesw.com/services/system/util/common", "cancelServerTask");
    private final static QName _CancelServerTaskResponse_QNAME = new QName("http://www.compositesw.com/services/system/util/common", "cancelServerTaskResponse");
    private final static QName _Fault_QNAME = new QName("http://www.compositesw.com/services/system/util/common", "fault");

    /**
     * Create a new ObjectFactory that can be used to create new instances of
     * schema derived classes for package: com.compositesw.services.system.util.common
     */
    public ObjectFactory() {
    }

    /** Create an instance of {@link DataType.XmlType } */
    public DataType.XmlType createDataTypeXmlType() {
        return new DataType.XmlType();
    }

    /** Create an instance of {@link BaseResponse } */
    public BaseResponse createBaseResponse() {
        return new BaseResponse();
    }

    /** Create an instance of {@link MessageEntryList } */
    public MessageEntryList createMessageEntryList() {
        return new MessageEntryList();
    }

    /** Create an instance of {@link AttributeComplexValue } */
    public AttributeComplexValue createAttributeComplexValue() {
        return new AttributeComplexValue();
    }

    /** Create an instance of {@link CancelServerTaskResponse } */
    public CancelServerTaskResponse createCancelServerTaskResponse() {
        return new CancelServerTaskResponse();
    }

    /** Create an instance of {@link CancelServerTaskRequest } */
    public CancelServerTaskRequest createCancelServerTaskRequest() {
        return new CancelServerTaskRequest();
    }

    /** Create an instance of {@link NameList } */
    public NameList createNameList() {
        return new NameList();
    }

    /** Create an instance of {@link ServerTaskResultResponse } */
    public ServerTaskResultResponse createServerTaskResultResponse() {
        return new ServerTaskResultResponse();
    }

    /** Create an instance of {@link AttributeTypeValueList } */
    public AttributeTypeValueList createAttributeTypeValueList() {
        return new AttributeTypeValueList();
    }

    /** Create an instance of {@link MessageEntry } */
    public MessageEntry createMessageEntry() {
        return new MessageEntry();
    }

    /** Create an instance of {@link Page } */
    public Page createPage() {
        return new Page();
    }

    /** Create an instance of {@link Attribute } */
    public Attribute createAttribute() {
        return new Attribute();
    }

    /** Create an instance of {@link AttributeTypeValueMap } */
    public AttributeTypeValueMap createAttributeTypeValueMap() {
        return new AttributeTypeValueMap();
    }

    /** Create an instance of {@link MessageList } */
    public MessageList createMessageList() {
        return new MessageList();
    }

    /** Create an instance of {@link ServerTaskRequest } */
    public ServerTaskRequest createServerTaskRequest() {
        return new ServerTaskRequest();
    }

    /** Create an instance of {@link AttributeList } */
    public AttributeList createAttributeList() {
        return new AttributeList();
    }

    /** Create an instance of {@link Marker } */
    public Marker createMarker() {
        return new Marker();
    }

    /** Create an instance of {@link AttributeDef } */
    public AttributeDef createAttributeDef() {
        return new AttributeDef();
    }

    /** Create an instance of {@link AttributeTypeValueMap.Entry } */
    public AttributeTypeValueMap.Entry createAttributeTypeValueMapEntry() {
        return new AttributeTypeValueMap.Entry();
    }

    /** Create an instance of {@link DataType } */
    public DataType createDataType() {
        return new DataType();
    }

    /** Create an instance of {@link DataType.PseudoType } */
    public DataType.PseudoType createDataTypePseudoType() {
        return new DataType.PseudoType();
    }

    /** Create an instance of {@link AttributeSimpleValueList } */
    public AttributeSimpleValueList createAttributeSimpleValueList() {
        return new AttributeSimpleValueList();
    }

    /** Create an instance of {@link DataType.SqlType } */
    public DataType.SqlType createDataTypeSqlType() {
        return new DataType.SqlType();
    }

    /** Create an instance of {@link BaseRequest } */
    public BaseRequest createBaseRequest() {
        return new BaseRequest();
    }

    /** Create an instance of {@link AttributeDefList } */
    public AttributeDefList createAttributeDefList() {
        return new AttributeDefList();
    }

    /** Create an instance of {@link AttributeTypeValue } */
    public AttributeTypeValue createAttributeTypeValue() {
        return new AttributeTypeValue();
    }

    /** Create an instance of {@link Fault } */
    public Fault createFault() {
        return new Fault();
    }

    /** Create an instance of {@link ServerTaskResultRequest } */
    public ServerTaskResultRequest createServerTaskResultRequest() {
        return new ServerTaskResultRequest();
    }

    /** Create an instance of {@link ServerTaskResponse } */
    public ServerTaskResponse createServerTaskResponse() {
        return new ServerTaskResponse();
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link CancelServerTaskRequest }{@code >} */
    @XmlElementDecl(namespace = "http://www.compositesw.com/services/system/util/common", name = "cancelServerTask")
    public JAXBElement<CancelServerTaskRequest> createCancelServerTask(CancelServerTaskRequest value) {
        return new JAXBElement<CancelServerTaskRequest>(_CancelServerTask_QNAME, CancelServerTaskRequest.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link CancelServerTaskResponse }{@code >} */
    @XmlElementDecl(namespace = "http://www.compositesw.com/services/system/util/common", name = "cancelServerTaskResponse")
    public JAXBElement<CancelServerTaskResponse> createCancelServerTaskResponse(CancelServerTaskResponse value) {
        return new JAXBElement<CancelServerTaskResponse>(_CancelServerTaskResponse_QNAME, CancelServerTaskResponse.class, null, value);
    }

    /** Create an instance of {@link JAXBElement }{@code <}{@link Fault }{@code >} */
    @XmlElementDecl(namespace = "http://www.compositesw.com/services/system/util/common", name = "fault")
    public JAXBElement<Fault> createFault(Fault value) {
        return new JAXBElement<Fault>(_Fault_QNAME, Fault.class, null, value);
    }

}
/**
 * Copyright (C) 2014 Kno.e.sis
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.knoesis.lucene.indexer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;

/**
 * Immutable description of how a single Lucene field should be analyzed and
 * indexed: its analyzer class, {@link Store}, {@link Index},
 * {@link TermVector}, and {@link FieldType}.
 *
 * <p>Per-field settings are read from {@link Properties} via
 * {@link #parseProperties(Properties)}; any setting that is absent or
 * unparseable falls back to the class-wide defaults below.
 *
 * <p>NOTE(review): the static defaults are mutable and unsynchronized, so this
 * class is not thread-safe during configuration — presumably configuration
 * happens once at startup; confirm before using from multiple threads.
 *
 * @author Alan Smith
 */
class FieldParams {

    // Class-wide fallbacks, overridable via setDefault* or parseProperties.
    private static Store DEFAULT_STORE = Store.YES;
    private static Index DEFAULT_INDEX = Index.NOT_ANALYZED_NO_NORMS;
    private static TermVector DEFAULT_TERM_VECTOR = TermVector.NO;
    private static FieldType DEFAULT_FIELD_TYPE = FieldType.TEXT;
    private static Class<? extends Analyzer> DEFAULT_ANALYZER_CLASS = KeywordAnalyzer.class;

    /**
     * Returns a FieldParams for {@code fieldName} built entirely from the
     * current class-wide defaults.
     */
    public static FieldParams defaults(String fieldName) {
        return new FieldParams(fieldName, DEFAULT_ANALYZER_CLASS, DEFAULT_STORE,
                DEFAULT_INDEX, DEFAULT_TERM_VECTOR, DEFAULT_FIELD_TYPE);
    }

    /** Replaces every class-wide default in one call. */
    public static void setDefaults(Class<? extends Analyzer> analyzerClass, Store store, Index index, TermVector tv, FieldType type) {
        DEFAULT_ANALYZER_CLASS = analyzerClass;
        DEFAULT_STORE = store;
        DEFAULT_INDEX = index;
        DEFAULT_TERM_VECTOR = tv;
        DEFAULT_FIELD_TYPE = type;
    }

    /** Sets the default analyzer class used when a field declares none. */
    public static void setDefaultAnalyzerClass(Class<? extends Analyzer> analyzerClass) {
        DEFAULT_ANALYZER_CLASS = analyzerClass;
    }

    /** Sets the default {@link Store} policy. */
    public static void setDefaultStore(Store store) {
        DEFAULT_STORE = store;
    }

    /** Sets the default {@link Index} policy. */
    public static void setDefaultIndex(Index index) {
        DEFAULT_INDEX = index;
    }

    /** Sets the default {@link TermVector} policy. */
    public static void setDefaultTermVector(TermVector tv) {
        DEFAULT_TERM_VECTOR = tv;
    }

    /** Sets the default {@link FieldType}. */
    public static void setDefaultFieldType(FieldType type) {
        DEFAULT_FIELD_TYPE = type;
    }

    /** Returns the current default analyzer class. */
    @SuppressWarnings("unchecked") // narrowing Class<? extends Analyzer> to the declared return type
    public static Class<Analyzer> getDefaultAnalyzerClass() {
        return (Class<Analyzer>) DEFAULT_ANALYZER_CLASS;
    }

    /** Returns the current default {@link Store} policy. */
    public static Store getDefaultStore() {
        return DEFAULT_STORE;
    }

    /** Returns the current default {@link Index} policy. */
    public static Index getDefaultIndex() {
        return DEFAULT_INDEX;
    }

    /** Returns the current default {@link TermVector} policy. */
    public static TermVector getDefaultTermVector() {
        return DEFAULT_TERM_VECTOR;
    }

    /** Returns the current default {@link FieldType}. */
    public static FieldType getDefaultFieldType() {
        return DEFAULT_FIELD_TYPE;
    }

    /**
     * Builds a map of field name to FieldParams from {@code properties}.
     *
     * <p>Global defaults ({@code Constant.PROP_DEFAULT_*}) are applied first,
     * then each key starting with {@code Constant.PROP_PREFIX_FIELD} supplies
     * per-field overrides. All parse/lookup failures are deliberately
     * swallowed so that a bad setting degrades to the defaults rather than
     * aborting configuration (best-effort semantics of the original code).
     *
     * @param properties configuration source; never modified
     * @return map from field name to its parameters (possibly empty)
     */
    @SuppressWarnings("unchecked") // Class.forName result narrowed to Class<Analyzer>
    public static Map<String, FieldParams> parseProperties(Properties properties) {
        // Apply global defaults; each independently falls back on failure.
        try {
            setDefaultAnalyzerClass((Class<Analyzer>) Class.forName(properties.getProperty(Constant.PROP_DEFAULT_ANALYZER, DEFAULT_ANALYZER_CLASS.getName())));
        } catch (Exception ignored) { }
        try {
            setDefaultStore(Store.valueOf(properties.getProperty(Constant.PROP_DEFAULT_STORE, DEFAULT_STORE.toString())));
        } catch (Exception ignored) { }
        try {
            setDefaultIndex(Index.valueOf(properties.getProperty(Constant.PROP_DEFAULT_INDEX, DEFAULT_INDEX.toString())));
        } catch (Exception ignored) { }
        try {
            setDefaultTermVector(TermVector.valueOf(properties.getProperty(Constant.PROP_DEFAULT_TERMVECTOR, DEFAULT_TERM_VECTOR.toString())));
        } catch (Exception ignored) { }
        try {
            setDefaultFieldType(FieldType.valueOf(properties.getProperty(Constant.PROP_DEFAULT_FIELDTYPE, DEFAULT_FIELD_TYPE.toString())));
        } catch (Exception ignored) { }

        Map<String, FieldParams> params = new HashMap<String, FieldParams>();
        for (String property : properties.stringPropertyNames()) {
            if (!property.startsWith(Constant.PROP_PREFIX_FIELD)) {
                continue;
            }
            // BUGFIX: a key with the field prefix but no trailing ".suffix"
            // previously made indexOf return -1 and substring throw
            // StringIndexOutOfBoundsException; skip such malformed keys.
            int dot = property.indexOf(".", Constant.PROP_PREFIX_FIELD.length());
            if (dot < 0) {
                continue;
            }
            String fieldName = property.substring(Constant.PROP_PREFIX_FIELD.length(), dot);
            if (params.containsKey(fieldName)) {
                continue; // all suffixes for this field were read on first sight
            }
            FieldParams.Builder builder = new FieldParams.Builder(fieldName);
            // Each per-field setting is optional; failures keep the default.
            try {
                builder.setAnalyzerClass((Class<Analyzer>) Class.forName(properties.getProperty(Constant.PROP_PREFIX_FIELD + fieldName + Constant.PROP_SUFFIX_ANALYZER)));
            } catch (Exception ignored) { }
            try {
                builder.setStore(Store.valueOf(properties.getProperty(Constant.PROP_PREFIX_FIELD + fieldName + Constant.PROP_SUFFIX_STORE)));
            } catch (Exception ignored) { }
            try {
                builder.setIndex(Index.valueOf(properties.getProperty(Constant.PROP_PREFIX_FIELD + fieldName + Constant.PROP_SUFFIX_INDEX)));
            } catch (Exception ignored) { }
            try {
                builder.setTermVector(TermVector.valueOf(properties.getProperty(Constant.PROP_PREFIX_FIELD + fieldName + Constant.PROP_SUFFIX_TERMVECTOR)));
            } catch (Exception ignored) { }
            try {
                builder.setType(FieldType.valueOf(properties.getProperty(Constant.PROP_PREFIX_FIELD + fieldName + Constant.PROP_SUFFIX_FIELDTYPE)));
            } catch (Exception ignored) { }
            params.put(fieldName, builder.build());
        }
        return params;
    }

    private final String fieldName;
    private final Class<Analyzer> analyzerClass;
    private final Store store;
    private final Index index;
    private final TermVector tv;
    private final FieldType type;

    /**
     * Creates a fully-specified FieldParams.
     *
     * @param fieldName     Lucene field name
     * @param analyzerClass analyzer used for this field
     * @param store         storage policy
     * @param index         indexing policy
     * @param tv            term-vector policy
     * @param type          project-specific field type
     */
    @SuppressWarnings("unchecked") // narrowing the wildcard to the stored type
    public FieldParams(String fieldName, Class<? extends Analyzer> analyzerClass, Store store, Index index, TermVector tv, FieldType type) {
        this.fieldName = fieldName;
        this.analyzerClass = (Class<Analyzer>) analyzerClass;
        this.store = store;
        this.index = index;
        this.tv = tv;
        this.type = type;
    }

    /** @return the Lucene field name */
    public String getFieldName() {
        return fieldName;
    }

    /** @return the analyzer class for this field */
    public Class<Analyzer> getAnalyzerClass() {
        return analyzerClass;
    }

    /** @return the storage policy */
    public Store getStore() {
        return store;
    }

    /** @return the indexing policy */
    public Index getIndex() {
        return index;
    }

    /** @return the term-vector policy */
    public TermVector getTermVector() {
        return tv;
    }

    /** @return the project-specific field type */
    public FieldType getType() {
        return type;
    }

    /**
     * Builder for {@link FieldParams}; any setting left unset falls back to
     * the class-wide default at {@link #build()} time.
     */
    public static final class Builder {

        private final String fieldname;
        private Class<? extends Analyzer> analyzerClass = null;
        private Store store = null;
        private Index index = null;
        private TermVector tv = null;
        private FieldType type = null;

        /** @param fieldName Lucene field name (required) */
        public Builder(String fieldName) {
            this.fieldname = fieldName;
        }

        public Builder setAnalyzerClass(Class<? extends Analyzer> analyzerClass) {
            this.analyzerClass = analyzerClass;
            return this;
        }

        public Builder setStore(Store store) {
            this.store = store;
            return this;
        }

        public Builder setIndex(Index index) {
            this.index = index;
            return this;
        }

        public Builder setTermVector(TermVector tv) {
            this.tv = tv;
            return this;
        }

        public Builder setType(FieldType type) {
            this.type = type;
            return this;
        }

        /** Builds the FieldParams, substituting defaults for unset values. */
        public FieldParams build() {
            return new FieldParams(fieldname,
                    analyzerClass != null ? analyzerClass : DEFAULT_ANALYZER_CLASS,
                    store != null ? store : DEFAULT_STORE,
                    index != null ? index : DEFAULT_INDEX,
                    tv != null ? tv : DEFAULT_TERM_VECTOR,
                    type != null ? type : DEFAULT_FIELD_TYPE);
        }
    }
}
package sedgewick;

/*************************************************************************
 *  Compilation:  javac StdRandom.java
 *  Execution:    java StdRandom
 *
 *  A library of static methods to generate pseudo-random numbers from
 *  different distributions (bernoulli, uniform, gaussian, discrete,
 *  and exponential). Also includes methods for shuffling an array.
 *
 *  Remark
 *  ------
 *    - Relies on randomness of nextDouble() method in java.util.Random
 *      to generate pseudorandom numbers in [0, 1).
 *    - This library allows you to set and get the pseudorandom number seed.
 *    - NOT thread-safe: the shared Random instance and seed field are
 *      accessed without synchronization.
 *    - See http://www.honeylocust.com/RngPack/ for an industrial
 *      strength random number generator in Java.
 *************************************************************************/

import java.util.Random;

/**
 * <i>Standard random</i>. This class provides static methods for generating
 * random numbers from various distributions.
 * <p>
 * For additional documentation, see
 * <a href="http://introcs.cs.princeton.edu/22library">Section 2.2</a> of
 * <i>Introduction to Programming in Java: An Interdisciplinary Approach</i>
 * by Robert Sedgewick and Kevin Wayne.
 */
public final class StdRandom {

    private static Random random;    // pseudo-random number generator
    private static long seed;        // seed used to construct the generator

    // Seed from the clock once at class-load time (Java 1.4 style).
    static {
        seed = System.currentTimeMillis();
        random = new Random(seed);
    }

    // Singleton pattern — cannot be instantiated.
    private StdRandom() { }

    /**
     * Sets the seed of the pseudorandom number generator, replacing the
     * underlying {@link Random} so subsequent draws are reproducible.
     */
    public static void setSeed(long s) {
        seed = s;
        random = new Random(seed);
    }

    /** Returns the seed of the pseudorandom number generator. */
    public static long getSeed() {
        return seed;
    }

    /** Returns a real number uniformly in [0, 1). */
    public static double uniform() {
        return random.nextDouble();
    }

    /**
     * Returns an integer uniformly between 0 and N-1.
     * @throws IllegalArgumentException if {@code N <= 0} (from {@link Random#nextInt(int)})
     */
    public static int uniform(int N) {
        return random.nextInt(N);
    }

    ///////////////////////////////////////////////////////////////////////////
    //  STATIC METHODS BELOW RELY ON JAVA.UTIL.RANDOM ONLY INDIRECTLY VIA
    //  THE STATIC METHODS ABOVE.
    ///////////////////////////////////////////////////////////////////////////

    /** Returns a real number uniformly in [0, 1). */
    public static double random() {
        return uniform();
    }

    /**
     * Returns an int uniformly in [a, b).
     * Precondition: {@code a < b} and {@code b - a} does not overflow an int.
     */
    public static int uniform(int a, int b) {
        return a + uniform(b - a);
    }

    /** Returns a real number uniformly in [a, b). */
    public static double uniform(double a, double b) {
        return a + uniform() * (b - a);
    }

    /** Returns true with probability p, false otherwise. */
    public static boolean bernoulli(double p) {
        return uniform() < p;
    }

    /** Returns true with probability 1/2, false otherwise. */
    public static boolean bernoulli() {
        return bernoulli(0.5);
    }

    /** Returns a real number with a standard Gaussian distribution. */
    public static double gaussian() {
        // Polar form of the Box-Muller transform: rejection-sample a point
        // in the unit disk, then scale.
        double r, x, y;
        do {
            x = uniform(-1.0, 1.0);
            y = uniform(-1.0, 1.0);
            r = x * x + y * y;
        } while (r >= 1 || r == 0);
        return x * Math.sqrt(-2 * Math.log(r) / r);
        // Remark:  y * Math.sqrt(-2 * Math.log(r) / r)
        // is an independent random gaussian
    }

    /** Returns a real number from a Gaussian distribution with given mean and stddev. */
    public static double gaussian(double mean, double stddev) {
        return mean + stddev * gaussian();
    }

    /**
     * Returns an integer with a geometric distribution with mean 1/p.
     * Uses the inversion algorithm given by Knuth.
     */
    public static int geometric(double p) {
        return (int) Math.ceil(Math.log(uniform()) / Math.log(1.0 - p));
    }

    /**
     * Returns an integer with a Poisson distribution with mean lambda.
     * Uses the product-of-uniforms algorithm given by Knuth
     * (see http://en.wikipedia.org/wiki/Poisson_distribution);
     * runtime and numeric accuracy degrade for large lambda.
     */
    public static int poisson(double lambda) {
        int k = 0;
        double p = 1.0;
        double L = Math.exp(-lambda);
        do {
            k++;
            p *= uniform();
        } while (p >= L);
        return k - 1;
    }

    /** Returns a real number with a Pareto distribution with parameter alpha. */
    public static double pareto(double alpha) {
        return Math.pow(1 - uniform(), -1.0 / alpha) - 1.0;
    }

    /** Returns a real number with a Cauchy distribution. */
    public static double cauchy() {
        return Math.tan(Math.PI * (uniform() - 0.5));
    }

    /**
     * Returns i with probability a[i].
     * Precondition: the entries of {@code a} sum to 1; if floating-point
     * rounding leaves the cumulative sum below the drawn value, -1 is
     * returned (preserved from the original implementation).
     */
    public static int discrete(double[] a) {
        double r = uniform();
        double sum = 0.0;
        for (int i = 0; i < a.length; i++) {
            sum = sum + a[i];
            if (sum >= r) return i;
        }
        assert false;  // unreachable if the precondition holds exactly
        return -1;
    }

    /** Returns a real number from an exponential distribution with rate lambda. */
    public static double exp(double lambda) {
        return -Math.log(1 - uniform()) / lambda;
    }

    /** Rearranges the elements of an array in random order (Fisher-Yates). */
    public static void shuffle(Object[] a) {
        int N = a.length;
        for (int i = 0; i < N; i++) {
            int r = i + uniform(N - i);     // between i and N-1
            Object temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /** Rearranges the elements of a double array in random order. */
    public static void shuffle(double[] a) {
        int N = a.length;
        for (int i = 0; i < N; i++) {
            int r = i + uniform(N - i);     // between i and N-1
            double temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /** Rearranges the elements of an int array in random order. */
    public static void shuffle(int[] a) {
        int N = a.length;
        for (int i = 0; i < N; i++) {
            int r = i + uniform(N - i);     // between i and N-1
            int temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of the subarray a[lo..hi] in random order.
     * @throws IllegalArgumentException if the subarray range is invalid
     *         (was a raw RuntimeException; IllegalArgumentException is the
     *         idiomatic subtype, so existing catch clauses still match)
     */
    public static void shuffle(Object[] a, int lo, int hi) {
        if (lo < 0 || lo > hi || hi >= a.length)
            throw new IllegalArgumentException("Illegal subarray range");
        for (int i = lo; i <= hi; i++) {
            int r = i + uniform(hi - i + 1);     // between i and hi
            Object temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of the subarray a[lo..hi] in random order.
     * @throws IllegalArgumentException if the subarray range is invalid
     */
    public static void shuffle(double[] a, int lo, int hi) {
        if (lo < 0 || lo > hi || hi >= a.length)
            throw new IllegalArgumentException("Illegal subarray range");
        for (int i = lo; i <= hi; i++) {
            int r = i + uniform(hi - i + 1);     // between i and hi
            double temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Rearranges the elements of the subarray a[lo..hi] in random order.
     * @throws IllegalArgumentException if the subarray range is invalid
     */
    public static void shuffle(int[] a, int lo, int hi) {
        if (lo < 0 || lo > hi || hi >= a.length)
            throw new IllegalArgumentException("Illegal subarray range");
        for (int i = lo; i <= hi; i++) {
            int r = i + uniform(hi - i + 1);     // between i and hi
            int temp = a[i];
            a[i] = a[r];
            a[r] = temp;
        }
    }

    /**
     * Unit test: prints N rows of samples from several distributions,
     * optionally seeded by args[1] for reproducibility.
     */
    public static void main(String[] args) {
        int N = Integer.parseInt(args[0]);
        if (args.length == 2) StdRandom.setSeed(Long.parseLong(args[1]));
        double[] t = { .5, .3, .1, .1 };

        StdOut.println("seed = " + StdRandom.getSeed());
        for (int i = 0; i < N; i++) {
            StdOut.printf("%2d "  , uniform(100));
            StdOut.printf("%8.5f ", uniform(10.0, 99.0));
            StdOut.printf("%5b "  , bernoulli(.5));
            StdOut.printf("%7.5f ", gaussian(9.0, .2));
            StdOut.printf("%2d "  , discrete(t));
            StdOut.println();
        }
    }

}
/*
 * Copyright 2015-2019 The twitlatte authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.github.moko256.twitlatte;

import android.content.DialogInterface;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.os.Bundle;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.TextUtils;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleEventObserver;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.ViewModelProviders;

import com.bumptech.glide.Glide;
import com.bumptech.glide.RequestManager;
import com.bumptech.glide.load.resource.drawable.DrawableTransitionOptions;
import com.github.moko256.latte.client.base.MediaUrlConverter;
import com.github.moko256.latte.client.base.entity.Emoji;
import com.github.moko256.latte.client.base.entity.Media;
import com.github.moko256.latte.client.base.entity.User;
import com.github.moko256.twitlatte.entity.Client;
import com.github.moko256.twitlatte.intent.AppCustomTabsKt;
import com.github.moko256.twitlatte.text.TwitterStringUtils;
import com.github.moko256.twitlatte.text.style.ClickableNoLineSpan;
import com.github.moko256.twitlatte.view.DpToPxKt;
import com.github.moko256.twitlatte.view.EmojiToTextViewSetter;
import com.github.moko256.twitlatte.viewmodel.UserInfoViewModel;
import com.github.moko256.twitlatte.widget.UserHeaderImageView;

import java.text.DateFormat;

import static com.github.moko256.latte.client.base.ApiClientKt.CLIENT_TYPE_NOTHING;
import static com.github.moko256.latte.client.twitter.TwitterApiClientImplKt.CLIENT_TYPE_TWITTER;

/**
 * Fragment that renders a user's profile page: banner, icon, display name,
 * screen name, bio, location, URL, creation date, tweet/friend/follower counts,
 * and follow/unfollow controls.
 *
 * <p>User and friendship data are supplied reactively by the activity-scoped
 * {@link UserInfoViewModel}; this fragment only binds views and forwards
 * follow/unfollow requests back to the view model.
 *
 * Created by moko256 on 2017/01/15.
 *
 * @author moko256
 */
public class UserInfoFragment extends Fragment implements ToolbarTitleInterface {

    // Activity-scoped view model shared with sibling fragments of the user page.
    private UserInfoViewModel viewModel;

    // API client for the currently selected account.
    private Client client;
    // Glide request manager tied to this fragment's lifecycle; nulled in onDestroyView.
    private RequestManager glideRequests;

    // Profile banner image (aspect ratio depends on the client type, see onCreateView).
    private UserHeaderImageView header;
    // Profile icon, drawn over an oval background matching the window background color.
    private ImageView icon;

    private TextView userNameText;
    private TextView userIdText;
    private TextView userBioText;
    private TextView userLocation;
    private TextView userUrl;
    private TextView userCreatedAt;
    private TextView userCounts;
    // Shows either "you" (own profile) or "follows you" (see onViewCreated / setShowUserInfo).
    private TextView userIsYouOrFollowedYou;

    private Button userFollowButton;
    private Button userUnfollowButton;

    /**
     * Grabs the account client and the activity-scoped view model.
     */
    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        client = GlobalApplicationKt.getClient(requireActivity());
        // Scoped to the activity so the toolbar/other fragments observe the same data.
        viewModel = ViewModelProviders.of(requireActivity()).get(UserInfoViewModel.class);
    }

    /**
     * Inflates the profile layout and binds all child views.
     * No data is populated here; that happens via the observers in onViewCreated.
     */
    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.content_show_user, container, false);

        glideRequests = Glide.with(this);

        header = view.findViewById(R.id.show_user_bgimage);
        // Twitter banners are 3:1; other services use a 2:1 header.
        header.setWidthPerHeight((client.getAccessToken().getClientType() == CLIENT_TYPE_TWITTER) ? 3 : 2);
        icon = view.findViewById(R.id.show_user_image);
        ShapeDrawable iconBackground = new ShapeDrawable();
        // Tint the oval backing plate with the window background so the circular
        // icon blends with the page while it loads.
        iconBackground.setColorFilter(
                new PorterDuffColorFilter(
                        ((ColorDrawable) requireActivity().getWindow().getDecorView().getBackground()).getColor(),
                        PorterDuff.Mode.SRC_ATOP
                )
        );
        iconBackground.setShape(new OvalShape());
        icon.setBackground(iconBackground);

        userNameText = view.findViewById(R.id.show_user_name);
        userIdText = view.findViewById(R.id.show_user_id);
        userBioText = view.findViewById(R.id.show_user_bio);
        // Bio may contain clickable link spans.
        userBioText.setMovementMethod(LinkMovementMethod.getInstance());
        userLocation = view.findViewById(R.id.show_user_location);
        userUrl = view.findViewById(R.id.show_user_url);
        userCreatedAt = view.findViewById(R.id.show_user_created_at);
        userCounts = view.findViewById(R.id.show_user_counts);
        userIsYouOrFollowedYou = view.findViewById(R.id.followed_or_you);
        userFollowButton = view.findViewById(R.id.follow);
        userUnfollowButton = view.findViewById(R.id.unfollow);

        return view;
    }

    /**
     * Wires up the reactive bindings: user info, friendship state, and the
     * follow/unfollow buttons (each guarded by a confirmation dialog).
     */
    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        viewModel.getUser().observe(this, this::setShowUserInfo);
        viewModel.getFriendship().observe(
                this,
                friendship -> {
                    // "Follows you" badge.
                    if (friendship.getFollowedBy()) {
                        userIsYouOrFollowedYou.setVisibility(View.VISIBLE);
                    } else {
                        userIsYouOrFollowedYou.setVisibility(View.GONE);
                    }
                    userIsYouOrFollowedYou.setText(R.string.follows_you);
                    // Exactly one of follow/unfollow is visible at a time.
                    if (friendship.getFollowing()) {
                        userFollowButton.setVisibility(View.GONE);
                        userUnfollowButton.setVisibility(View.VISIBLE);
                    } else {
                        userFollowButton.setVisibility(View.VISIBLE);
                        userUnfollowButton.setVisibility(View.GONE);
                    }
                }
        );

        userFollowButton.setOnClickListener(
                v -> confirmDialog(
                        R.string.confirm_follow,
                        (dialog, which) -> viewModel.requestCreateFollow(getString(R.string.did_follow))
                )
        );
        userUnfollowButton.setOnClickListener(
                v -> confirmDialog(
                        R.string.confirm_unfollow,
                        (dialog, which) -> viewModel.requestDestroyFollow(getString(R.string.did_unfollow))
                )
        );
    }

    /**
     * Shows a cancelable OK/Cancel dialog and invokes {@code callback} on OK.
     *
     * @param message  string resource id for the dialog message
     * @param callback invoked when the positive button is tapped
     */
    private void confirmDialog(int message, DialogInterface.OnClickListener callback) {
        new AlertDialog.Builder(requireContext())
                .setMessage(message)
                .setCancelable(true)
                .setPositiveButton(android.R.string.ok, callback)
                .setNegativeButton(android.R.string.cancel, (dialog, which) -> dialog.cancel())
                .show();
    }

    /**
     * Cancels pending Glide loads for the banner and icon and drops the
     * request manager so no image targets outlive the view hierarchy.
     */
    @Override
    public void onDestroyView() {
        glideRequests.clear(icon);
        glideRequests.clear(header);
        glideRequests = null;
        super.onDestroyView();
    }

    @Override
    public int getTitleResourceId() {
        return R.string.account;
    }

    /**
     * Populates every profile view from {@code user}. Invoked whenever the
     * view model's user LiveData emits.
     *
     * @param user the user whose profile is being displayed
     */
    private void setShowUserInfo(User user) {
        // Own profile: show the "you" badge instead of waiting for friendship data.
        if (user.getId() == client.getAccessToken().getUserId()) {
            userIsYouOrFollowedYou.setVisibility(View.VISIBLE);
            userIsYouOrFollowedYou.setText(R.string.you);
        }

        MediaUrlConverter mediaUrlConverter = client.getMediaUrlConverter();

        String headerUrl = mediaUrlConverter.convertProfileBannerLargeUrl(user);
        if (headerUrl != null) {
            glideRequests
                    .load(headerUrl)
                    .transition(DrawableTransitionOptions.withCrossFade())
                    .into(header);
            // Tapping the banner opens it in the full-screen media viewer.
            header.setOnClickListener(v -> startActivity(
                    ShowMediasActivity.Companion.getIntent(
                            v.getContext(),
                            new Media[]{
                                    new Media(
                                            null,
                                            headerUrl,
                                            null,
                                            Media.MediaType.PICTURE.getValue()
                                    )
                            },
                            CLIENT_TYPE_NOTHING,
                            0
                    )
            ));
        }
        glideRequests
                .load(
                        mediaUrlConverter.convertProfileIconUriBySize(
                                user,
                                // 68dp matches the icon view's layout size.
                                DpToPxKt.dpToPx(this, 68)
                        )
                )
                .circleCrop()
                .transition(DrawableTransitionOptions.withCrossFade())
                .into(icon);
        // Tapping the icon opens the original-resolution image in the media viewer.
        icon.setOnClickListener(v -> startActivity(
                ShowMediasActivity.Companion.getIntent(
                        v.getContext(),
                        new Media[]{
                                new Media(
                                        null,
                                        mediaUrlConverter.convertProfileIconOriginalUrl(user),
                                        null,
                                        Media.MediaType.PICTURE.getValue()
                                )
                        },
                        CLIENT_TYPE_NOTHING,
                        0
                )
        ));

        // Display name decorated with protected/verified marks.
        CharSequence userName = TwitterStringUtils.plusUserMarks(
                user.getName(),
                userNameText,
                user.isProtected(),
                user.isVerified()
        );
        // Bio with mention/hashtag/URL spans resolved for the current account.
        CharSequence userBio = TwitterStringUtils.getLinkedSequence(
                client.getAccessToken(),
                user.getDescription(),
                user.getDescriptionLinks()
        );
        userNameText.setText(userName);
        userBioText.setText(userBio);

        Emoji[] userNameEmojis = user.getEmojis();
        if (userNameEmojis != null) {
            // Custom emoji images are loaded asynchronously into the spans; the
            // setters must be disposed when the lifecycle ends to stop loads.
            // NOTE(review): both setters receive the same emoji array; presumably
            // name and bio share the user's emoji set — confirm against the model.
            EmojiToTextViewSetter nameSetter = new EmojiToTextViewSetter(glideRequests, userNameText, userName, userNameEmojis);
            EmojiToTextViewSetter bioSetter = new EmojiToTextViewSetter(glideRequests, userBioText, userBio, userNameEmojis);
            getLifecycle().addObserver(new LifecycleEventObserver() {
                @Override
                public void onStateChanged(@NonNull LifecycleOwner source, @NonNull Lifecycle.Event event) {
                    if (event == Lifecycle.Event.ON_DESTROY) {
                        nameSetter.dispose();
                        bioSetter.dispose();
                        // One-shot observer: remove after handling destruction.
                        getLifecycle().removeObserver(this);
                    }
                }
            });
        }

        userIdText.setText(TwitterStringUtils.plusAtMark(user.getScreenName()));

        if (!TextUtils.isEmpty(user.getLocation())) {
            userLocation.setText(getString(R.string.location_is, user.getLocation()));
        } else {
            userLocation.setVisibility(View.GONE);
        }

        final String url = user.getUrl();
        if (!TextUtils.isEmpty(url)) {
            String text = getString(R.string.url_is, url);
            SpannableString spannableString = new SpannableString(text);
            // Make only the URL portion of the formatted string clickable.
            int start = text.indexOf(url);
            spannableString.setSpan(new ClickableNoLineSpan() {
                @Override
                public void onClick(@NonNull View widget) {
                    AppCustomTabsKt.launchChromeCustomTabs(widget.getContext(), url, false);
                }
            }, start, start + url.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            userUrl.setText(spannableString);
            userUrl.setMovementMethod(LinkMovementMethod.getInstance());
        } else {
            userUrl.setVisibility(View.GONE);
        }

        userCreatedAt.setText(DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.FULL).format(user.getCreatedAt()));

        userCounts.setText(getString(R.string.user_counts_is, user.getStatusesCount(), user.getFriendsCount(), user.getFollowersCount()));
    }
}
// (c) Copyright 2015 Cloudera, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.cloudera.director.aws;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.ec2.model.StateReason;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceAsyncClient;
import com.amazonaws.services.securitytoken.model.DecodeAuthorizationMessageRequest;
import com.cloudera.director.aws.ec2.EC2InstanceTemplate;
import com.cloudera.director.spi.v2.model.exception.InvalidCredentialsException;
import com.cloudera.director.spi.v2.model.exception.PluginExceptionConditionAccumulator;
import com.cloudera.director.spi.v2.model.exception.PluginExceptionDetails;
import com.cloudera.director.spi.v2.model.exception.TransientProviderException;
import com.cloudera.director.spi.v2.model.exception.UnrecoverableProviderException;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Provides utilities for dealing with AWS exceptions.
 *
 * @see <a href="http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html">AWS Errors</a>
 */
public class AWSExceptions {

  private static final Logger LOG = LoggerFactory.getLogger(AWSExceptions.class);

  /**
   * Error code suffix indicating that a resource was not found.
   */
  private static final String NOT_FOUND_ERROR_CODE = ".NotFound";

  private static final String RESOURCE_NOT_FOUND =
      "Resource not found, might be a transient error";

  /**
   * Returns whether a {@code Throwable} indicates that an AWS resource cannot be found.
   *
   * <p>A match is logged: at debug level with the full stack trace, or at info
   * level without it.
   *
   * @param throwable a throwable
   * @return whether a {@code Throwable} indicates that an AWS resource cannot be found
   */
  public static boolean isNotFound(Throwable throwable) {
    boolean isNotFound = AmazonServiceException.class.isInstance(throwable)
        && ((AmazonServiceException) throwable).getErrorCode().endsWith(NOT_FOUND_ERROR_CODE);
    if (isNotFound) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(RESOURCE_NOT_FOUND, throwable);
      } else {
        LOG.info(RESOURCE_NOT_FOUND);
      }
    }
    return isNotFound;
  }

  /**
   * Insufficient instance capacity error code.
   */
  public static final String INSUFFICIENT_INSTANCE_CAPACITY = "InsufficientInstanceCapacity";

  /**
   * Instance limit exceeded error code.
   */
  public static final String INSTANCE_LIMIT_EXCEEDED = "InstanceLimitExceeded";

  /**
   * Request limit exceeded error code.
   */
  public static final String REQUEST_LIMIT_EXCEEDED = "RequestLimitExceeded";

  /**
   * Volume limit exceeded error code.
   */
  public static final String VOLUME_LIMIT_EXCEEDED = "VolumeLimitExceeded";

  /**
   * Volume limit exceeded state reason.
   */
  public static final String VOLUME_LIMIT_EXCEEDED_STATE_REASON = "Client.VolumeLimitExceeded";

  /**
   * Internal error error code.
   */
  public static final String INTERNAL_ERROR = "InternalError";

  /**
   * The set of error codes representing authorization failures.
   */
  public static final Set<String> AUTHORIZATION_ERROR_CODES = ImmutableSet.of(
      "AuthFailure",
      "UnauthorizedOperation"
  );

  /**
   * Marker string preceding an STS-encoded authorization failure message.
   */
  public static final String ENCODED_MESSAGE_STRING = "Encoded authorization failure message:";

  /**
   * Parses a set of exceptions and a set of failed state reasons and throws
   * an appropriate plugin exception with appropriate plugin exception details.
   * This will throw an {@link UnrecoverableProviderException} if the exception
   * set contains any exceptions that are unrecoverable. An UnrecoverableProviderException
   * will also be thrown if the set of failed state reason is not empty. In other cases a
   * {@link TransientProviderException} is thrown.
   *
   * @param stsClient the AWS STS client for decoding authorization messages
   * @param message the plugin exception message to set
   * @param exceptions a set of exceptions
   * @param failedStateReasons state reasons for instances that transitioned to terminated
   * @param template the EC2 instance template
   */
  public static void propagate(AWSSecurityTokenServiceAsyncClient stsClient, String message,
      Set<Exception> exceptions, Set<StateReason> failedStateReasons,
      EC2InstanceTemplate template) {
    PluginExceptionConditionAccumulator accumulator = new PluginExceptionConditionAccumulator();

    // Decode STS-encoded authorization messages before reporting, so the
    // accumulated conditions carry readable error text.
    Set<Exception> decodedExceptions = exceptions.stream()
        .map(e -> {
          if (e instanceof AmazonServiceException) {
            return decodeAuthorizationMessageIfPossible(stsClient, (AmazonServiceException) e);
          }
          return e;
        })
        .collect(Collectors.toSet());

    boolean isUnrecoverable = addErrors(template, decodedExceptions, accumulator);

    // Instances that terminated on their own always make the failure unrecoverable.
    if (!failedStateReasons.isEmpty()) {
      isUnrecoverable = true;
      addStateReasonErrors(template, failedStateReasons, accumulator);
    }

    PluginExceptionDetails pluginExceptionDetails =
        new PluginExceptionDetails(accumulator.getConditionsByKey());
    if (isUnrecoverable) {
      throw new UnrecoverableProviderException(message, pluginExceptionDetails);
    }
    throw new TransientProviderException(message, pluginExceptionDetails);
  }

  /**
   * Returns an appropriate SPI exception in response to the specified AWS exception.
   *
   * <p>This method always throws; the declared return type only lets callers
   * write {@code throw AWSExceptions.propagate(...)} for flow analysis.
   *
   * @param stsClient the AWS STS client for decoding authorization messages
   * @param e the AWS exception
   * @return the corresponding SPI exception
   */
  public static RuntimeException propagate(AWSSecurityTokenServiceAsyncClient stsClient,
      AmazonClientException e) {
    propagateIfUnrecoverable(stsClient, e);

    // otherwise assume this is a transient error
    throw new TransientProviderException(e.getMessage(), e);
  }

  /**
   * Propagates exception as an unrecoverable error when the relevant
   * indicators are present.
   *
   * @param stsClient the AWS STS client for decoding authorization messages
   * @param e the Amazon client exception
   */
  public static void propagateIfUnrecoverable(AWSSecurityTokenServiceAsyncClient stsClient,
      AmazonClientException e) {
    if (isUnrecoverable(e)) {
      if (e instanceof AmazonServiceException) {
        AmazonServiceException ase = (AmazonServiceException) e;
        if (AUTHORIZATION_ERROR_CODES.contains(ase.getErrorCode())) {
          // Authorization failures become credentials errors, with the STS-decoded
          // message when one is available.
          AmazonServiceException decodedException =
              decodeAuthorizationMessageIfPossible(stsClient, ase);
          throw new InvalidCredentialsException(decodedException.getErrorMessage(),
              decodedException);
        }
      }
      throw new UnrecoverableProviderException(e.getMessage(), e);
    }
  }

  /**
   * Returns whether the specified throwable is unrecoverable, treating any throwable that is
   * not an Amazon client exception to be unrecoverable.
   *
   * @param t the throwable
   * @return whether the specified throwable is unrecoverable
   */
  public static boolean isUnrecoverable(Throwable t) {
    return !(t instanceof AmazonClientException) || isUnrecoverable((AmazonClientException) t);
  }

  /**
   * Returns whether the specified Amazon client exception is unrecoverable, considering a few
   * categories of exception as unrecoverable in addition to those which are flagged as not
   * retryable by Amazon.
   *
   * @param e the Amazon client exception
   * @return whether the specified Amazon client exception is unrecoverable
   */
  public static boolean isUnrecoverable(AmazonClientException e) {
    if (e instanceof AmazonServiceException) {
      AmazonServiceException ase = (AmazonServiceException) e;

      if (AUTHORIZATION_ERROR_CODES.contains(ase.getErrorCode())) {
        return true;
      }

      // All exceptions that represent client errors are unrecoverable because the request itself is wrong
      // See {@see AmazonServiceException#ErrorType}
      // * OperationNotPermitted exception is unrecoverable. This can happen when terminating an
      //   instance that has termination protection enabled, or trying to detach the primary
      //   network interface (eth0) from an instance.
      // * Unsupported exception is also unrecoverable, since it represents an unsupported request.
      //   See docs at http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html
      // * InvalidParameterValue is unrecoverable, as one of the parameters supplied by the user is invalid.
      // * PendingVerification is unrecoverable since it can take up to 2 hours for verification to complete.
      if (ase.getErrorType() == AmazonServiceException.ErrorType.Client ||
          "OperationNotPermitted".equals(ase.getErrorCode()) ||
          "Unsupported".equals(ase.getErrorCode()) ||
          "InvalidParameterValue".equals(ase.getErrorCode()) ||
          "PendingVerification".equals(ase.getErrorCode())) {
        return true;
      }

      // Consider instance limits, insufficient capacity, volume limits, and internal error as unrecoverable
      if (INSTANCE_LIMIT_EXCEEDED.equals(ase.getErrorCode()) ||
          INSUFFICIENT_INSTANCE_CAPACITY.equals(ase.getErrorCode()) ||
          VOLUME_LIMIT_EXCEEDED.equals(ase.getErrorCode()) ||
          INTERNAL_ERROR.equals(ase.getErrorCode())) {
        return true;
      }
    }

    return !e.isRetryable();
  }

  /**
   * Decodes authorization messages if possible. Will not fail if the messages cannot be decoded.
   *
   * @param stsClient the client to use to decode the authorization messages
   * @param e the exception to decode the message from
   * @return the exception with a decoded message, or as is if the message could not be decoded
   */
  public static AmazonServiceException decodeAuthorizationMessageIfPossible(
      AWSSecurityTokenServiceAsyncClient stsClient, AmazonServiceException e) {
    String errorMessage = e.getErrorMessage();
    if (stsClient != null &&
        AUTHORIZATION_ERROR_CODES.contains(e.getErrorCode()) &&
        errorMessage.contains(ENCODED_MESSAGE_STRING)) {
      try {
        int indexOfEncodedMessage = errorMessage.indexOf(ENCODED_MESSAGE_STRING);
        // FIX: reuse the index computed above instead of scanning the message again.
        String encodedErrorMessage = errorMessage.substring(
            indexOfEncodedMessage + ENCODED_MESSAGE_STRING.length());

        DecodeAuthorizationMessageRequest request = new DecodeAuthorizationMessageRequest();
        request.setEncodedMessage(encodedErrorMessage);
        // Rewrite the error message in place, replacing the encoded blob with
        // the decoded authorization details.
        e.setErrorMessage(errorMessage.substring(0, indexOfEncodedMessage) +
            "Decoded authorization message: " +
            stsClient.decodeAuthorizationMessage(request).getDecodedMessage());
        LOG.debug("Successfully decoded authorization message.");
      } catch (AmazonClientException clientException) {
        LOG.warn("Unable to decode authorization message.");
        // FIX: log the decode failure itself (clientException), not the original
        // exception being decoded — previously the cause of the failure was dropped.
        LOG.debug("Unable to decode authorization message.", clientException);
      }
    }
    return e;
  }

  /**
   * Returns whether the specified throwable is an {@code AmazonServiceException} with
   * the specified error code.
   *
   * @param throwable the throwable
   * @param errorCode the error code
   * @return whether the specified throwable is an {@code AmazonServiceException} with
   * the specified error code
   */
  public static boolean isAmazonServiceException(Throwable throwable, String errorCode) {
    return (throwable instanceof AmazonServiceException) &&
        errorCode.equals(((AmazonServiceException) throwable).getErrorCode());
  }

  /**
   * Converts a set of exceptions to plugin error conditions. These conditions
   * will be added to the provided accumulator. Returns whether an unrecoverable
   * error is present in provided exceptions.
   *
   * @param template the EC2 instance template
   * @param exceptions the list of encountered exceptions
   * @param accumulator the exception condition accumulator to add to
   * @return whether an unrecoverable exception is present
   */
  private static boolean addErrors(EC2InstanceTemplate template,
      Set<Exception> exceptions,
      PluginExceptionConditionAccumulator accumulator) {
    boolean hasUnrecoverableExceptions = false;

    // Only report each error code once
    Map<String, AmazonServiceException> awsExceptions = Maps.newHashMap();

    for (Exception e : exceptions) {
      hasUnrecoverableExceptions = hasUnrecoverableExceptions || isUnrecoverable(e);
      if (e instanceof AmazonServiceException) {
        AmazonServiceException awsEx = (AmazonServiceException) e;
        awsExceptions.put(awsEx.getErrorCode(), awsEx);
      } else {
        // Non-AWS exceptions carry no error code/message pair.
        accumulator.addError(toExceptionInfoMap(e.getMessage(), "N/A", "N/A"));
      }
    }

    for (AmazonServiceException awsException : awsExceptions.values()) {
      accumulator.addError(toExceptionInfoMap(template, awsException));
    }

    return hasUnrecoverableExceptions;
  }

  /**
   * Converts failed instance state reasons to plugin error conditions and adds
   * them to the accumulator.
   *
   * @param template the EC2 instance template (used for volume-limit messages)
   * @param stateReasons state reasons for instances that transitioned to terminated
   * @param accumulator the exception condition accumulator to add to
   */
  private static void addStateReasonErrors(EC2InstanceTemplate template,
      Set<StateReason> stateReasons,
      PluginExceptionConditionAccumulator accumulator) {
    for (StateReason stateReason : stateReasons) {
      String stateReasonCode = stateReason.getCode();
      String stateReasonMessage = stateReason.getMessage();

      String message = "Instance(s) were unexpectedly terminated";
      if (stateReasonCode.equals(VOLUME_LIMIT_EXCEEDED_STATE_REASON)) {
        message = String.format("Instance(s) were terminated due to volume limits for %s volume type",
            template.getEbsVolumeType());
      }

      accumulator.addError(toExceptionInfoMap(message, stateReasonCode, stateReasonMessage));
    }
  }

  /**
   * Builds a plugin error-condition map for an AWS service exception, choosing
   * a clearer message for the most common capacity/limit error codes.
   *
   * @param template the EC2 instance template
   * @param ex the AWS service exception
   * @return an immutable map describing the error
   */
  private static Map<String, String> toExceptionInfoMap(EC2InstanceTemplate template,
      AmazonServiceException ex) {
    String awsErrorCode = ex.getErrorCode();

    String message = "Encountered AWS exception";
    // give a clearer message on more common AWS exceptions
    switch (awsErrorCode) {
      case INSUFFICIENT_INSTANCE_CAPACITY:
        message = String.format("AWS does not have available capacity for instance type %s",
            template.getType());
        break;
      case INSTANCE_LIMIT_EXCEEDED:
        message = String.format("Exceeded instance limit for instance type %s",
            template.getType());
        break;
      case REQUEST_LIMIT_EXCEEDED:
        message = "API request limit exceeded";
        break;
      default:
        break;
    }

    // FIX: reuse the already-fetched error code rather than calling getErrorCode() again.
    return toExceptionInfoMap(message, awsErrorCode, ex.getErrorMessage());
  }

  /**
   * Builds the canonical plugin error-condition map.
   *
   * @param message human-readable summary
   * @param awsErrorCode the AWS error code (or "N/A")
   * @param awsErrorMessage the AWS error message (or "N/A")
   * @return an immutable map with message, awsErrorCode, and awsErrorMessage keys
   */
  private static Map<String, String> toExceptionInfoMap(String message,
      String awsErrorCode,
      String awsErrorMessage) {
    return ImmutableMap.of(
        "message", message,
        "awsErrorCode", awsErrorCode,
        "awsErrorMessage", awsErrorMessage
    );
  }

  /**
   * Private constructor to prevent instantiation.
   */
  private AWSExceptions() {
  }
}
/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is part of dcm4che, an implementation of DICOM(TM) in * Java(TM), available at http://sourceforge.net/projects/dcm4che. * * The Initial Developer of the Original Code is * TIANI Medgraph AG. * Portions created by the Initial Developer are Copyright (C) 2003-2005 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Gunter Zeilinger <gunter.zeilinger@tiani.com> * Franz Willer <franz.willer@gwi-ag.com> * Fuad Ibrahimov <fuad@ibrahimov.de> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ package org.dcm4chex.archive.dcm.storescp; import java.io.BufferedInputStream; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.rmi.RemoteException; import java.security.DigestInputStream; import java.security.DigestOutputStream; import java.security.MessageDigest; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import javax.ejb.CreateException; import javax.ejb.FinderException; import javax.ejb.ObjectNotFoundException; import javax.management.ObjectName; import javax.security.auth.Subject; import org.dcm4che.data.Command; import org.dcm4che.data.Dataset; import org.dcm4che.data.DcmDecodeParam; import org.dcm4che.data.DcmElement; import org.dcm4che.data.DcmEncodeParam; import org.dcm4che.data.DcmObjectFactory; import org.dcm4che.data.DcmParseException; import org.dcm4che.data.DcmParser; import org.dcm4che.data.DcmParserFactory; import org.dcm4che.data.PersonName; import org.dcm4che.dict.Status; import org.dcm4che.dict.Tags; import org.dcm4che.dict.UIDs; import org.dcm4che.dict.VRs; import org.dcm4che.net.AAssociateAC; import org.dcm4che.net.AAssociateRQ; import org.dcm4che.net.ActiveAssociation; import org.dcm4che.net.Association; import org.dcm4che.net.AssociationListener; import org.dcm4che.net.DcmServiceBase; import org.dcm4che.net.DcmServiceException; import org.dcm4che.net.Dimse; import org.dcm4che.net.PDU; import org.dcm4che.util.BufferedOutputStream; import org.dcm4che2.audit.message.AuditMessage; import org.dcm4che2.audit.message.ParticipantObject; import org.dcm4che2.audit.message.PatientRecordMessage; import org.dcm4cheri.util.StringUtils; import 
org.dcm4chex.archive.codec.CompressCmd; import org.dcm4chex.archive.common.Availability; import org.dcm4chex.archive.common.PrivateTags; import org.dcm4chex.archive.common.SeriesStored; import org.dcm4chex.archive.config.CompressionRules; import org.dcm4chex.archive.ejb.conf.AttributeFilter; import org.dcm4chex.archive.ejb.interfaces.FileDTO; import org.dcm4chex.archive.ejb.interfaces.FileSystemDTO; import org.dcm4chex.archive.ejb.interfaces.FileSystemMgt2; import org.dcm4chex.archive.ejb.interfaces.FileSystemMgt2Home; import org.dcm4chex.archive.ejb.interfaces.MPPSManager; import org.dcm4chex.archive.ejb.interfaces.MPPSManagerHome; import org.dcm4chex.archive.ejb.interfaces.Storage; import org.dcm4chex.archive.ejb.interfaces.StorageHome; import org.dcm4chex.archive.ejb.interfaces.StudyPermissionDTO; import org.dcm4chex.archive.ejb.jdbc.QueryFilesCmd; import org.dcm4chex.archive.exceptions.NonUniquePatientIDException; import org.dcm4chex.archive.mbean.HttpUserInfo; import org.dcm4chex.archive.perf.PerfCounterEnum; import org.dcm4chex.archive.perf.PerfMonDelegate; import org.dcm4chex.archive.perf.PerfPropertyEnum; import org.dcm4chex.archive.util.EJBHomeFactory; import org.dcm4chex.archive.util.FileUtils; import org.dcm4chex.archive.util.HomeFactoryException; import org.jboss.logging.Logger; /** * @author Gunter.Zeilinger@tiani.com * @version $Revision: 17038 $ * @since 03.08.2003 */ public class StoreScp extends DcmServiceBase implements AssociationListener { private static final String ALL = "ALL"; private static final int MISSING_USER_ID_ERR_STATUS = 0xCE10; private static final int NO_APPEND_PERMISSION_ERR_STATUS = 0xCE24; private static final String MISSING_USER_ID_ERR_MSG = "Missing user identification for appending existing Study"; private static final String NO_APPEND_PERMISSION_ERR_MSG = "No permission to append existing Study"; private static final String STORE_XSL = "cstorerq.xsl"; private static final String STORE_XML = "-cstorerq.xml"; private static 
final String MWL2STORE_XSL = "mwl-cfindrsp2cstorerq.xsl"; private static final String STORE2MWL_XSL = "cstorerq2mwl-cfindrq.xsl"; private static final String RECEIVE_BUFFER = "RECEIVE_BUFFER"; private static final String SERIES_STORED = "SERIES_STORED"; // private static final String SOP_IUIDS = "SOP_IUIDS"; protected final StoreScpService service; private final Logger log; private boolean studyDateInFilePath = false; private boolean sourceAETInFilePath = false; private boolean yearInFilePath = true; private boolean monthInFilePath = true; private boolean dayInFilePath = true; private boolean hourInFilePath = false; private boolean acceptMissingPatientID = true; private boolean acceptMissingPatientName = true; private boolean serializeDBUpdate = false; private int updateDatabaseMaxRetries = 2; private int maxCountUpdateDatabaseRetries = 0; private boolean storeDuplicateIfDiffMD5 = true; private boolean storeDuplicateIfDiffHost = true; private long updateDatabaseRetryInterval = 0L; private CompressionRules compressionRules = new CompressionRules(""); private String[] coerceWarnCallingAETs = {}; private String[] acceptMismatchIUIDCallingAETs = {}; private String[] onlyWellKnownInstancesCallingAETs = {}; private boolean checkIncorrectWorklistEntry = true; private String[] referencedDirectoryPath; private String[] referencedDirectoryURI; private String refFileSystemGroupID; private boolean readReferencedFile = true; private boolean md5sumReferencedFile = true; private boolean coerceBeforeWrite = false; protected PerfMonDelegate perfMon; private volatile Executor syncFileExecutor; public StoreScp(StoreScpService service) { this.service = service; this.log = service.getLog(); perfMon = new PerfMonDelegate(this.service); } public final ObjectName getPerfMonServiceName() { return perfMon.getPerfMonServiceName(); } public final void setPerfMonServiceName(ObjectName perfMonServiceName) { perfMon.setPerfMonServiceName(perfMonServiceName); } public final boolean 
isAcceptMissingPatientID() { return acceptMissingPatientID; } public final void setAcceptMissingPatientID(boolean accept) { this.acceptMissingPatientID = accept; } public final boolean isAcceptMissingPatientName() { return acceptMissingPatientName; } public final void setAcceptMissingPatientName(boolean accept) { this.acceptMissingPatientName = accept; } public final boolean isSerializeDBUpdate() { return serializeDBUpdate; } public final void setSerializeDBUpdate(boolean serialize) { this.serializeDBUpdate = serialize; } public final String getCoerceWarnCallingAETs() { return StringUtils.toString(coerceWarnCallingAETs, '\\'); } public final void setCoerceWarnCallingAETs(String aets) { coerceWarnCallingAETs = StringUtils.split(aets, '\\'); } public final String getAcceptMismatchIUIDCallingAETs() { return StringUtils.toString(acceptMismatchIUIDCallingAETs, '\\'); } public final void setAcceptMismatchIUIDCallingAETs(String aets) { acceptMismatchIUIDCallingAETs = StringUtils.split(aets, '\\'); } public final String getOnlyWellKnownInstancesCallingAETs() { return StringUtils.toString(onlyWellKnownInstancesCallingAETs, '\\'); } public final void setOnlyWellKnownInstancesCallingAETs(String aets) { onlyWellKnownInstancesCallingAETs = StringUtils.split(aets, '\\'); } public final boolean isStudyDateInFilePath() { return studyDateInFilePath; } public final void setStudyDateInFilePath(boolean studyDateInFilePath) { this.studyDateInFilePath = studyDateInFilePath; } public final boolean isSourceAETInFilePath() { return sourceAETInFilePath; } public final void setSourceAETInFilePath(boolean sourceAETInFilePath) { this.sourceAETInFilePath = sourceAETInFilePath; } public final boolean isYearInFilePath() { return yearInFilePath; } public final void setYearInFilePath(boolean yearInFilePath) { this.yearInFilePath = yearInFilePath; } public final boolean isMonthInFilePath() { return monthInFilePath; } public final void setMonthInFilePath(boolean monthInFilePath) { 
this.monthInFilePath = monthInFilePath; } public final boolean isDayInFilePath() { return dayInFilePath; } public final void setDayInFilePath(boolean dayInFilePath) { this.dayInFilePath = dayInFilePath; } public final boolean isHourInFilePath() { return hourInFilePath; } public final void setHourInFilePath(boolean hourInFilePath) { this.hourInFilePath = hourInFilePath; } public final String getReferencedDirectoryPath() { if (referencedDirectoryPath == null) return ALL; StringBuffer sb = new StringBuffer(); String nl = System.getProperty("line.separator", "\n"); for ( String s : referencedDirectoryPath) { sb.append(s).append(nl); } return sb.toString(); } public final void setReferencedDirectoryPath(String pathOrURI) { if ( ALL.equals(pathOrURI.trim())) { referencedDirectoryURI = null; referencedDirectoryPath = null; } else { StringTokenizer st = new StringTokenizer(pathOrURI, " \t\r\n;"); int len = st.countTokens(); referencedDirectoryURI = new String[len]; referencedDirectoryPath = new String[len]; String trimmed; for ( int i = 0 ; st.hasMoreElements() ; i++ ) { trimmed = st.nextToken().trim(); referencedDirectoryURI[i] = isURI(trimmed) ? 
(trimmed + '/') : FileUtils.toFile(trimmed).toURI().toString(); referencedDirectoryPath[i] = trimmed; } } } public void setReferencedFileSystemGroupID(String groupID) { this.refFileSystemGroupID = groupID; } public String getReferencedFileSystemGroupID() { return refFileSystemGroupID; } private static boolean isURI(String pathOrURI) { return pathOrURI.indexOf(':') > 1 ; } public final boolean isMd5sumReferencedFile() { return md5sumReferencedFile; } public final void setMd5sumReferencedFile(boolean md5ReferencedFile) { this.md5sumReferencedFile = md5ReferencedFile; } public final boolean isCoerceBeforeWrite() { return this.coerceBeforeWrite; } public final void setCoerceBeforeWrite(boolean coerceBeforeWrite) { this.coerceBeforeWrite = coerceBeforeWrite; } public final boolean isReadReferencedFile() { return readReferencedFile; } public final void setReadReferencedFile(boolean readReferencedFile) { this.readReferencedFile = readReferencedFile; } public final boolean isStoreDuplicateIfDiffHost() { return storeDuplicateIfDiffHost; } public final void setStoreDuplicateIfDiffHost(boolean storeDuplicate) { this.storeDuplicateIfDiffHost = storeDuplicate; } public final boolean isStoreDuplicateIfDiffMD5() { return storeDuplicateIfDiffMD5; } public final void setStoreDuplicateIfDiffMD5(boolean storeDuplicate) { this.storeDuplicateIfDiffMD5 = storeDuplicate; } public final CompressionRules getCompressionRules() { return compressionRules; } public final void setCompressionRules(CompressionRules compressionRules) { this.compressionRules = compressionRules; } public final int getUpdateDatabaseMaxRetries() { return updateDatabaseMaxRetries; } public final void setUpdateDatabaseMaxRetries(int updateDatabaseMaxRetries) { this.updateDatabaseMaxRetries = updateDatabaseMaxRetries; } public final int getMaxCountUpdateDatabaseRetries() { return maxCountUpdateDatabaseRetries; } public final void setMaxCountUpdateDatabaseRetries(int count) { this.maxCountUpdateDatabaseRetries = count; } 
    /** @return pause in milliseconds between database update retries. */
    public final long getUpdateDatabaseRetryInterval() {
        return updateDatabaseRetryInterval;
    }

    public final void setUpdateDatabaseRetryInterval(long interval) {
        this.updateDatabaseRetryInterval = interval;
    }

    /**
     * @return Returns the checkIncorrectWorklistEntry.
     */
    public boolean isCheckIncorrectWorklistEntry() {
        return checkIncorrectWorklistEntry;
    }

    /**
     * @param checkIncorrectWorklistEntry
     *            The checkIncorrectWorklistEntry to set.
     */
    public void setCheckIncorrectWorklistEntry(
            boolean checkIncorrectWorklistEntry) {
        this.checkIncorrectWorklistEntry = checkIncorrectWorklistEntry;
    }

    /**
     * Handles a C-STORE request: parses the dataset from the DIMSE input
     * stream up to (but not including) the pixel data and delegates to
     * {@link #doActualCStore} for storage and database registration.
     *
     * @param activeAssoc the active association
     * @param rq the C-STORE request
     * @param rspCmd the response command to populate
     * @throws IOException on stream errors
     * @throws DcmServiceException if parsing or storage fails
     */
    protected void doCStore(ActiveAssociation activeAssoc, Dimse rq,
            Command rspCmd) throws IOException, DcmServiceException {
        InputStream in = rq.getDataAsStream();
        perfMon.start(activeAssoc, rq, PerfCounterEnum.C_STORE_SCP_OBJ_IN);
        perfMon.setProperty(activeAssoc, rq, PerfPropertyEnum.REQ_DIMSE, rq);
        DcmDecodeParam decParam = DcmDecodeParam.valueOf(rq
                .getTransferSyntaxUID());
        Dataset ds = objFact.newDataset();
        DcmParser parser = DcmParserFactory.getInstance().newDcmParser(in);
        try {
            parser.setMaxValueLength(service.getMaxValueLength());
            parser.setDcmHandler(ds.getDcmHandler());
            // Parse up to the pixel data element.
            parser.parseDataset(decParam, Tags.PixelData);
            if (!parser.hasSeenEOF() && parser.getReadTag() != Tags.PixelData) {
                // Stopped on a non-pixel-data tag: push the header back and
                // parse the remainder of the dataset.
                parser.unreadHeader();
                parser.parseDataset(decParam, -1);
            }
            doActualCStore(activeAssoc, rq, rspCmd, ds, parser);
        } catch (DcmParseException x) {
            throw new DcmServiceException(Status.ProcessingFailure,
                    x.getMessage(), x);
        }
        perfMon.stop(activeAssoc, rq, PerfCounterEnum.C_STORE_SCP_OBJ_IN);
    }

    /**
     * Actual CStore request handling. Allows for subclasses to do some
     * preliminary work with the rq Dataset before reading and handling the
     * pixel data.
     *
     * This method expects that the Dataset has already been parsed from the
     * Dimse InputStream, and the DcmParser is initialized already with the
     * Dataset.
     *
     * @param activeAssoc
     *            The ActiveAssociation
     * @param rq
     *            The Dimse request
     * @param rspCmd
     *            The response Command
     * @param ds
     *            The parsed Dataset from the Dimse rq
     * @param parser
     *            The DcmParser initialized with the InputStream from the
     *            Dimse request
     */
    protected void doActualCStore(ActiveAssociation activeAssoc, Dimse rq,
            Command rspCmd, Dataset ds, DcmParser parser) throws IOException,
            DcmServiceException {
        File file = null;
        // Dcm4che URI-referenced transfer syntax: the object's bulk data
        // already lives in a referenced file; nothing is written locally.
        boolean dcm4cheeURIReferenced = rq.getTransferSyntaxUID().equals(
                UIDs.Dcm4cheURIReferenced);
        try {
            Command rqCmd = rq.getCommand();
            Association assoc = activeAssoc.getAssociation();
            String callingAET = assoc.getCallingAET();
            String calledAET = assoc.getCalledAET();
            // Validate UIDs in the dataset against the DIMSE command.
            String iuid = checkSOPInstanceUID(rqCmd, ds, callingAET);
            checkAppendPermission(assoc, ds);
            if (!checkOnlyWellKnownInstances(assoc, iuid, callingAET)) {
                log.info("StoreSCP only accepts well known instances from AE "
                        + callingAET + " ! Ignored Instance:" + iuid);
                return;
            }
            // Early duplicate check based on already registered files.
            List duplicates = new QueryFilesCmd(iuid).getFileDTOs();
            if (!(duplicates.isEmpty() || storeDuplicateIfDiffMD5
                    || storeDuplicateIfDiffHost && !containsLocal(duplicates))) {
                log.info("Received Instance[uid=" + iuid
                        + "] already exists - ignored");
                return;
            }
            service.preProcess(ds);
            if (log.isDebugEnabled()) {
                log.debug("Dataset:\n");
                log.debug(ds);
            }
            // Set original dataset
            perfMon.setProperty(activeAssoc, rq, PerfPropertyEnum.REQ_DATASET,
                    ds);
            service.logDIMSE(assoc, STORE_XML, ds);
            if (isCheckIncorrectWorklistEntry()
                    && checkIncorrectWorklistEntry(ds)) {
                log.info("Received Instance[uid=" + iuid
                        + "] ignored! Reason: Incorrect Worklist entry selected!");
                return;
            }
            String retrieveAET;
            String availability;
            FileSystemDTO fsDTO = null;
            String filePath = null;
            byte[] md5sum = null;
            Dataset coerced = service.getCoercionAttributesFor(callingAET,
                    STORE_XSL, ds, assoc);
            if ( coerceBeforeWrite ) {
                // Stamp calling/called AET as private attributes and apply
                // coercion before the object is written to disk.
                ds.setPrivateCreatorID(PrivateTags.CreatorID);
                ds.putAE(PrivateTags.CallingAET, callingAET);
                ds.putAE(PrivateTags.CalledAET, calledAET);
                ds.setPrivateCreatorID(null);
                if (coerced != null) {
                    service.coerceAttributes(ds, coerced);
                }
                service.postCoercionProcessing(ds);
            }
            if (dcm4cheeURIReferenced) {
                // Object references an external file instead of carrying
                // pixel data; resolve the file system it lives on.
                String uri = ds.getString(Tags.RetrieveURI);
                if (uri == null) {
                    retrieveAET = ds.getString(Tags.RetrieveAET);
                    availability = ds.getString(Tags.InstanceAvailability);
                    if (retrieveAET == null || availability == null ) {
                        throw new DcmServiceException(
                                Status.DataSetDoesNotMatchSOPClassError,
                                "Missing (0040,E010) Retrieve URI - required for Dcm4che Retrieve URI Transfer Syntax");
                    }
                } else {
                    String[] selected = selectReferencedDirectoryURI(uri);
                    if (selected == null) {
                        throw new DcmServiceException(
                                Status.DataSetDoesNotMatchSOPClassError,
                                "(0040,E010) Retrieve URI: " + uri
                                        + " does not match with configured Referenced Directory Path: "
                                        + getReferencedDirectoryPath());
                    }
                    filePath = uri.substring(selected[1].length());
                    if (uri.startsWith("file:/")) {
                        file = new File(new URI(uri));
                        if (!file.isFile()) {
                            throw new DcmServiceException(
                                    Status.ProcessingFailure,
                                    "File referenced by (0040,E010) Retrieve URI: "
                                            + uri + " not found!");
                        }
                    }
                    fsDTO = getFileSystemMgt().getFileSystemOfGroup(
                            refFileSystemGroupID,
                            selected[0].startsWith("file:")
                                    ? new URI(selected[0]).getPath()
                                    : selected[0]);
                    retrieveAET = fsDTO.getRetrieveAET();
                    availability = Availability.toString(
                            fsDTO.getAvailability());
                    if (file != null && readReferencedFile) {
                        // Read the referenced file's attributes (and
                        // optionally its MD5) and merge the received dataset
                        // on top of them.
                        log.info("M-READ " + file);
                        Dataset fileDS = objFact.newDataset();
                        FileInputStream fis = new FileInputStream(file);
                        try {
                            if (md5sumReferencedFile) {
                                MessageDigest digest = MessageDigest
                                        .getInstance("MD5");
                                DigestInputStream dis = new DigestInputStream(
                                        fis, digest);
                                BufferedInputStream bis = new BufferedInputStream(
                                        dis);
                                fileDS.readFile(bis, null, Tags.PixelData);
                                byte[] buf = getByteBuffer(assoc);
                                // Drain the rest of the file so the digest
                                // covers all bytes.
                                while (bis.read(buf) != -1) ;
                                md5sum = digest.digest();
                            } else {
                                BufferedInputStream bis = new BufferedInputStream(
                                        fis);
                                fileDS.readFile(bis, null, Tags.PixelData);
                            }
                        } finally {
                            fis.close();
                        }
                        fileDS.putAll(ds, Dataset.REPLACE_ITEMS);
                        ds = fileDS;
                    }
                }
                if (ds.getFileMetaInfo() == null) {
                    ds.setPrivateCreatorID(PrivateTags.CreatorID);
                    String tsuid = ds.getString(
                            PrivateTags.Dcm4cheURIReferencedTransferSyntaxUID,
                            UIDs.ImplicitVRLittleEndian);
                    ds.setPrivateCreatorID(null);
                    ds.setFileMetaInfo(objFact.newFileMetaInfo(rqCmd
                            .getAffectedSOPClassUID(), rqCmd
                            .getAffectedSOPInstanceUID(), tsuid));
                }
            } else {
                // Regular store: pick a storage file system, optionally
                // compress, and write the object to disk.
                String fsgrpid = service.selectFileSystemGroup(callingAET,
                        calledAET, ds);
                fsDTO = service.selectStorageFileSystem(fsgrpid);
                retrieveAET = fsDTO.getRetrieveAET();
                availability = Availability.toString(fsDTO.getAvailability());
                File baseDir = FileUtils.toFile(fsDTO.getDirectoryPath());
                file = makeFile(baseDir, ds, callingAET);
                filePath = file.getPath().substring(
                        baseDir.getPath().length() + 1).replace(
                        File.separatorChar, '/');
                CompressCmd compressCmd = null;
                if (parser.getReadTag() == Tags.PixelData
                        && parser.getReadLength() != -1) {
                    compressCmd = compressionRules.getCompressFor(assoc, ds);
                    if (compressCmd != null)
                        compressCmd.coerceDataset(ds);
                }
                ds.setFileMetaInfo(objFact.newFileMetaInfo(ds,
                        compressCmd != null
                                ? compressCmd.getTransferSyntaxUID()
                                : rq.getTransferSyntaxUID()));
                perfMon.start(activeAssoc, rq,
                        PerfCounterEnum.C_STORE_SCP_OBJ_STORE);
                perfMon.setProperty(activeAssoc, rq,
                        PerfPropertyEnum.DICOM_FILE, file);
                md5sum = storeToFile(parser, ds, file, compressCmd,
                        getByteBuffer(assoc));
                perfMon.stop(activeAssoc, rq,
                        PerfCounterEnum.C_STORE_SCP_OBJ_STORE);
            }
            if (md5sum != null && ignoreDuplicate(duplicates, md5sum)) {
                log.info("Received Instance[uid=" + iuid
                        + "] already exists - ignored");
                if (!dcm4cheeURIReferenced) {
                    deleteFailedStorage(file);
                }
                return;
            }
            ds.putAE(Tags.RetrieveAET, retrieveAET);
            if ( ! coerceBeforeWrite ) {
                // Coercion was deferred: apply it now, after the file was
                // written with the original attributes.
                ds.setPrivateCreatorID(PrivateTags.CreatorID);
                ds.putAE(PrivateTags.CallingAET, callingAET);
                ds.putAE(PrivateTags.CalledAET, calledAET);
                ds.setPrivateCreatorID(null);
                if (coerced != null) {
                    service.coerceAttributes(ds, coerced);
                }
                service.postCoercionProcessing(ds);
            }
            checkPatientIdAndName(ds, callingAET);
            Storage store = getStorage(assoc);
            // Null means a new series started on this association.
            SeriesStored seriesStored = handleSeriesStored(assoc, store, ds);
            boolean newSeries = seriesStored == null;
            boolean newStudy = false;
            String seriuid = ds.getString(Tags.SeriesInstanceUID);
            if (newSeries) {
                Dataset mwlFilter = service.getCoercionAttributesFor(
                        callingAET, STORE2MWL_XSL, ds, assoc);
                if (mwlFilter != null) {
                    coerced = merge(coerced, mergeMatchingMWLItem(assoc, ds,
                            seriuid, mwlFilter));
                }
                if (!callingAET.equals(calledAET)) {
                    // Supplement missing identifying data for objects
                    // received from remote AEs.
                    service.ignorePatientIDForUnscheduled(ds,
                            Tags.RequestAttributesSeq, callingAET);
                    service.supplementIssuerOfPatientID(ds, assoc, callingAET,
                            false);
                    service.supplementIssuerOfAccessionNumber(ds, assoc,
                            callingAET, false);
                    service.supplementInstitutionalData(ds, assoc, callingAET);
                    service.generatePatientID(ds, ds, calledAET);
                }
                newStudy = !store.studyExists(
                        ds.getString(Tags.StudyInstanceUID));
            }
            perfMon.start(activeAssoc, rq,
                    PerfCounterEnum.C_STORE_SCP_OBJ_REGISTER_DB);
            long fileLength = file != null ? file.length() : 0L;
            long fspk = fsDTO != null ? fsDTO.getPk() : -1L;
            boolean clearExternalRetrieveAET = hasDifferentMd5(duplicates,
                    md5sum);
            Dataset coercedElements;
            try {
                coercedElements = updateDB(store, ds, fspk, filePath,
                        fileLength, md5sum, 0, newSeries,
                        clearExternalRetrieveAET);
            } catch (NonUniquePatientIDException e) {
                // Patient ID clash: coerce a new patient ID and retry once.
                service.coercePatientID(ds);
                coerced.putLO(Tags.PatientID, ds.getString(Tags.PatientID));
                coerced.putLO(Tags.IssuerOfPatientID,
                        ds.getString(Tags.IssuerOfPatientID));
                coercedElements = updateDB(store, ds, fspk, filePath,
                        fileLength, md5sum, 0, newSeries,
                        clearExternalRetrieveAET);
            }
            if (newSeries) {
                seriesStored = initSeriesStored(ds, callingAET, retrieveAET);
                assoc.putProperty(SERIES_STORED, seriesStored);
                if (newStudy) {
                    service.sendNewStudyNotification(ds);
                }
            }
            appendInstanceToSeriesStored(seriesStored, ds, retrieveAET,
                    availability);
            coerced = merge(coerced, coercedElements);
            try {
                logCoercion(ds, coerced);
            } catch (Exception e) {
                log.warn("Failed to generate audit log for attribute coercion:",
                        e);
            }
            ds.putAll(coercedElements, Dataset.MERGE_ITEMS);
            perfMon.setProperty(activeAssoc, rq, PerfPropertyEnum.REQ_DATASET,
                    ds);
            perfMon.stop(activeAssoc, rq,
                    PerfCounterEnum.C_STORE_SCP_OBJ_REGISTER_DB);
            // Warn configured AEs about coerced elements via the DIMSE status.
            if (coerced.isEmpty()
                    || !contains(coerceWarnCallingAETs, callingAET)) {
                rspCmd.putUS(Tags.Status, Status.Success);
            } else {
                int[] coercedTags = new int[coerced.size()];
                Iterator it = coerced.iterator();
                for (int i = 0; i < coercedTags.length; i++) {
                    coercedTags[i] = ((DcmElement) it.next()).tag();
                }
                rspCmd.putAT(Tags.OffendingElement, coercedTags);
                rspCmd.putUS(Tags.Status, Status.CoercionOfDataElements);
            }
            service.postProcess(ds);
        } catch (DcmServiceException e) {
            log.warn(e.getMessage(), e);
            if (!dcm4cheeURIReferenced) {
                deleteFailedStorage(file);
            }
            throw e;
        } catch (Throwable e) {
            log.error(e.getMessage(), e);
            if (!dcm4cheeURIReferenced) {
                deleteFailedStorage(file);
            }
            throw new DcmServiceException(Status.ProcessingFailure, e);
        }
    }

    protected SeriesStored handleSeriesStored(
            Storage store, Dataset ds) throws FinderException,
            RemoteException, Exception {
        String seriuid = ds.getString(Tags.SeriesInstanceUID);
        SeriesStored seriesStored =
                (SeriesStored) assoc.getProperty(SERIES_STORED);
        if (seriesStored != null
                && !seriuid.equals(seriesStored.getSeriesInstanceUID())) {
            // A different series starts on this association: flush the
            // pending one and signal "new series" by returning null.
            service.logInstancesStoredAndUpdateDerivedFields(store,
                    assoc.getSocket(), seriesStored);
            doAfterSeriesIsStored(store, assoc, seriesStored);
            seriesStored = null;
        }
        return seriesStored;
    }

    /**
     * Writes an audit log entry listing every attribute whose value was
     * changed by coercion during storage (original vs. coerced value).
     * Does nothing if no value actually changed.
     */
    private void logCoercion(Dataset ds, Dataset coerced) {
        if (coerced.isEmpty())
            return;
        StringBuilder sb = new StringBuilder();
        for (Iterator<DcmElement> i = coerced.iterator(); i.hasNext();) {
            DcmElement coercedElement = i.next();
            DcmElement originalElement = ds.get(coercedElement.tag());
            if (originalElement != null) {
                String originalValue = originalElement.toString();
                String coercedValue = coercedElement.toString();
                if (!originalValue.equals(coercedValue)) {
                    if (sb.length() > 0)
                        sb.append(";");
                    sb.append(" [");
                    sb.append(originalValue);
                    sb.append("->");
                    sb.append(coercedValue);
                    sb.append("]");
                }
            }
        }
        if (sb.length() > 0) {
            sb.insert(0,
                    "The following elements were coerced during storage: ");
            HttpUserInfo userInfo = new HttpUserInfo(AuditMessage
                    .isEnableDNSLookups());
            PatientRecordMessage msg = new PatientRecordMessage(
                    PatientRecordMessage.UPDATE);
            msg.addUserPerson("CSTORECOERCION", null, null, userInfo
                    .getHostName(), true);
            PersonName pn = ds.getPersonName(Tags.PatientName);
            String pname = pn != null ? pn.format() : null;
            ParticipantObject patient = msg.addPatient(
                    ds.getString(Tags.PatientID, "NO_PATIENT_ID"), pname);
            patient.addParticipantObjectDetail("Description", sb.toString());
            msg.validate();
            Logger.getLogger("auditlog").info(msg);
        }
    }

    /**
     * Matches {@code uri} against the configured referenced directory paths
     * (or, if none are configured, against every file system of the
     * referenced file system group) and returns {directoryPath, directoryURI}
     * of the first match, or {@code null} if nothing matches.
     */
    private String[] selectReferencedDirectoryURI(String uri) {
        if ( referencedDirectoryPath == null ) {
            log.debug("ReferencedDirectoryPath is set to ALL! uri:"+uri);
            try {
                FileSystemDTO[] fsDTOs = getFileSystemMgt()
                        .getFileSystemsOfGroup(this.refFileSystemGroupID);
                String dir, fsUri;
                for ( FileSystemDTO dto : fsDTOs ) {
                    dir = dto.getDirectoryPath();
                    fsUri = isURI(dir) ? dir+"/"
                            : FileUtils.toFile(dir).toURI().toString();
                    log.debug("Filesystem URI:"+fsUri);
                    if (uri.startsWith(fsUri)) {
                        return new String[]{ dir, fsUri};
                    }
                }
            } catch (Exception x ) {
                log.error("Can't get FilesystemMgt Bean!",x);
            }
        } else {
            for ( int i = 0; i< referencedDirectoryURI.length ; i++) {
                if ( uri.startsWith(referencedDirectoryURI[i]) ) {
                    return new String[]{referencedDirectoryPath[i],
                            referencedDirectoryURI[i]};
                }
            }
        }
        return null;
    }

    /**
     * Creates the {@link SeriesStored} for the first instance of a series:
     * filtered patient/study/series attributes plus a skeleton IAN (Instance
     * Availability Notification) dataset to which stored instances are
     * appended.
     */
    private SeriesStored initSeriesStored(Dataset ds, String callingAET,
            String retrieveAET) {
        Dataset patAttrs =
                AttributeFilter.getPatientAttributeFilter().filter(ds);
        Dataset studyAttrs =
                AttributeFilter.getStudyAttributeFilter().filter(ds);
        Dataset seriesAttrs =
                AttributeFilter.getSeriesAttributeFilter().filter(ds);
        Dataset ian = DcmObjectFactory.getInstance().newDataset();
        ian.putUI(Tags.StudyInstanceUID, ds.getString(Tags.StudyInstanceUID));
        Dataset refSeries = ian.putSQ(Tags.RefSeriesSeq).addNewItem();
        refSeries.putUI(Tags.SeriesInstanceUID,
                ds.getString(Tags.SeriesInstanceUID));
        refSeries.putSQ(Tags.RefSOPSeq);
        Dataset pps = seriesAttrs.getItem(Tags.RefPPSSeq);
        DcmElement refPPSSeq = ian.putSQ(Tags.RefPPSSeq);
        if (pps != null) {
            if (!pps.contains(Tags.PerformedWorkitemCodeSeq)) {
                pps.putSQ(Tags.PerformedWorkitemCodeSeq);
            }
            refPPSSeq.addItem(pps);
        }
        return new SeriesStored(callingAET, retrieveAET, patAttrs, studyAttrs,
                seriesAttrs, ian);
    }

    /** Appends one stored instance to the IAN of the pending series. */
    private void appendInstanceToSeriesStored(SeriesStored seriesStored,
            Dataset ds, String retrieveAET, String availability) {
        Dataset refSOP = seriesStored.getIAN()
                .get(Tags.RefSeriesSeq).getItem()
                .get(Tags.RefSOPSeq).addNewItem();
        refSOP.putUI(Tags.RefSOPClassUID, ds.getString(Tags.SOPClassUID));
        refSOP.putUI(Tags.RefSOPInstanceUID,
                ds.getString(Tags.SOPInstanceUID));
        refSOP.putAE(Tags.RetrieveAET, retrieveAET);
        refSOP.putCS(Tags.InstanceAvailability, availability);
    }

    /**
     * Verifies that the calling AE may append instances to an already
     * existing study: skipped for AEs with unrestricted append permission and
     * for follow-up instances of a series already accepted on this
     * association; otherwise requires an authenticated subject with the
     * APPEND study permission.
     */
    private void checkAppendPermission(Association a, Dataset ds)
            throws Exception {
        if (service.hasUnrestrictedAppendPermissions(a.getCallingAET())) {
            return;
        }
        // only check on first instance of a series received in the same
        // association
        String seriuid = ds.getString(Tags.SeriesInstanceUID);
        SeriesStored seriesStored =
                (SeriesStored) a.getProperty(SERIES_STORED);
        if (seriesStored != null
                && seriuid.equals(seriesStored.getSeriesInstanceUID())) {
            return;
        }
        String suid = ds.getString(Tags.StudyInstanceUID);
        if (!getStorage(a).studyExists(suid)) {
            return;
        }
        Subject subject = (Subject) a.getProperty("user");
        if (subject == null) {
            throw new DcmServiceException(MISSING_USER_ID_ERR_STATUS,
                    MISSING_USER_ID_ERR_MSG);
        }
        if (!service.getStudyPermissionManager(a).hasPermission(suid,
                StudyPermissionDTO.APPEND_ACTION, subject)) {
            throw new DcmServiceException(NO_APPEND_PERMISSION_ERR_STATUS,
                    NO_APPEND_PERMISSION_ERR_MSG);
        }
    }

    // For configured calling AETs only instances already known to the archive
    // are accepted; for all other AEs everything is accepted.
    private boolean checkOnlyWellKnownInstances(Association assoc, String iuid,
            String callingAET) throws Exception {
        if (contains(onlyWellKnownInstancesCallingAETs, callingAET)) {
            return getStorage(assoc).instanceExists(iuid);
        }
        return true;
    }

    // Null-tolerant merge: items of merge are added into ds (MERGE_ITEMS).
    private Dataset merge(Dataset ds, Dataset merge) {
        if (ds == null) {
            return merge;
        }
        if (merge == null) {
            return ds;
        }
        ds.putAll(merge, Dataset.MERGE_ITEMS);
        return ds;
    }

    /**
     * Queries the modality worklist for entries matching the received series
     * and, if a consistent match is found, coerces object attributes with the
     * request information.
     *
     * @return the coercion dataset, or {@code null} if no (consistent) match
     *         was found or the stylesheet could not be loaded
     */
    private Dataset mergeMatchingMWLItem(Association assoc, Dataset ds,
            String seriuid, Dataset mwlFilter) {
        List mwlItems;
        log.info("Query for matching worklist entries for received Series["
                + seriuid + "]");
        try {
            mwlItems = service.findMWLEntries(mwlFilter);
        } catch (Exception e) {
            log.error(
                    "Query for matching worklist entries for received Series["
                            + seriuid + "] failed:", e);
            return null;
        }
        int size = mwlItems.size();
        log.info("" + size
                + " matching worklist entries found for received Series[ "
                + seriuid + "]");
        if (size == 0) {
            return null;
        }
        String callingAET = assoc.getCallingAET();
        Dataset coerce = service.getCoercionAttributesFor(callingAET,
                MWL2STORE_XSL, (Dataset) mwlItems.get(0), assoc);
        if (coerce == null) {
            log.error("Failed to find or load stylesheet " + MWL2STORE_XSL
                    + " for " + callingAET
                    + ". Cannot coerce object attributes with request information.");
            return null;
        }
        if (size > 1) {
            // Multiple matches: only coerce when all entries agree on every
            // attribute outside the Request Attributes Sequence; the items of
            // that sequence are accumulated from all matches.
            DcmElement rqAttrsSq = coerce.get(Tags.RequestAttributesSeq);
            Dataset coerce0 = coerce
                    .exclude(new int[] { Tags.RequestAttributesSeq });
            for (int i = 1; i < size; i++) {
                Dataset coerce1 = service.getCoercionAttributesFor(callingAET,
                        MWL2STORE_XSL, (Dataset) mwlItems.get(i), assoc);
                if (!coerce1.match(coerce0, true, true)) {
                    log.warn("Several (" + size
                            + ") matching worklist entries "
                            + "found for received Series[ " + seriuid
                            + "], which differs also in attributes NOT mapped to the Request Attribute Sequence item "
                            + "- Do not coerce object attributes with request information.");
                    return null;
                }
                if (rqAttrsSq != null) {
                    Dataset item = coerce1.getItem(Tags.RequestAttributesSeq);
                    if (item != null) {
                        rqAttrsSq.addItem(item);
                    }
                }
            }
        }
        service.coerceAttributes(ds, coerce);
        return coerce;
    }

    /**
     * Returns true if the MPPS referenced by the received object was
     * discontinued with reason "Incorrect worklist entry selected"
     * (code 110514, scheme DCM).
     */
    private boolean checkIncorrectWorklistEntry(Dataset ds) throws Exception {
        Dataset refPPS = ds.getItem(Tags.RefPPSSeq);
        if (refPPS == null) {
            return false;
        }
        String ppsUID = refPPS.getString(Tags.RefSOPInstanceUID);
        if (ppsUID == null) {
            return false;
        }
        Dataset mpps;
        try {
            mpps = getMPPSManager().getMPPS(ppsUID);
        } catch (ObjectNotFoundException e) {
            // No MPPS registered for this UID: nothing to check.
            return false;
        }
        Dataset item = mpps.getItem(Tags.PPSDiscontinuationReasonCodeSeq);
        return item != null
                && "110514".equals(item.getString(Tags.CodeValue))
                && "DCM".equals(item.getString(Tags.CodingSchemeDesignator));
    }

    // EJB home lookup for the MPPS manager bean.
    private MPPSManager getMPPSManager() throws CreateException,
            RemoteException, HomeFactoryException {
        return ((MPPSManagerHome) EJBHomeFactory.getFactory().lookup(
                MPPSManagerHome.class, MPPSManagerHome.JNDI_NAME)).create();
    }

    // EJB home lookup for the file system management bean.
    private FileSystemMgt2 getFileSystemMgt() throws
            RemoteException, CreateException, HomeFactoryException {
        return ((FileSystemMgt2Home) EJBHomeFactory.getFactory().lookup(
                FileSystemMgt2Home.class,
                FileSystemMgt2Home.JNDI_NAME)).create();
    }

    // Returns the per-association receive buffer, creating and caching it on
    // first use.
    private byte[] getByteBuffer(Association assoc) {
        byte[] buf = (byte[]) assoc.getProperty(RECEIVE_BUFFER);
        if (buf == null) {
            buf = new byte[service.getBufferSize()];
            assoc.putProperty(RECEIVE_BUFFER, buf);
        }
        return buf;
    }

    // True if any duplicate file resides in a locally accessible file system
    // group.
    private boolean containsLocal(List duplicates) {
        for (int i = 0, n = duplicates.size(); i < n; ++i) {
            FileDTO dto = (FileDTO) duplicates.get(i);
            if (service.isFileSystemGroupLocalAccessable(
                    dto.getFileSystemGroupID()))
                return true;
        }
        return false;
    }

    /**
     * Decides whether the received instance shall be ignored as a duplicate
     * of an already stored file, honoring the storeDuplicateIfDiffMD5 and
     * storeDuplicateIfDiffHost settings.
     */
    private boolean ignoreDuplicate(List duplicates, byte[] md5sum) {
        for (int i = 0, n = duplicates.size(); i < n; ++i) {
            FileDTO dto = (FileDTO) duplicates.get(i);
            // Keep the new copy if MD5 differs and that is configured.
            if (storeDuplicateIfDiffMD5
                    && !Arrays.equals(md5sum, dto.getFileMd5()))
                continue;
            // Keep the new copy if the existing one is not locally
            // accessible and that is configured.
            if (storeDuplicateIfDiffHost
                    && !service.isFileSystemGroupLocalAccessable(
                            dto.getFileSystemGroupID()))
                continue;
            return true;
        }
        return false;
    }

    /**
     * Returns false only if storeDuplicateIfDiffHost is enabled and one
     * duplicate has the same MD5 checksum; the result is used as
     * clearExternalRetrieveAET for the database update.
     */
    private boolean hasDifferentMd5(List duplicates, byte[] md5sum) {
        if (storeDuplicateIfDiffHost) {
            for (int i = 0, n = duplicates.size(); i < n; ++i) {
                FileDTO dto = (FileDTO) duplicates.get(i);
                if (Arrays.equals(md5sum, dto.getFileMd5()))
                    return false;
            }
        }
        return true;
    }

    // Deletes a partially stored file and prunes now-empty series/study
    // directories above it.
    private void deleteFailedStorage(File file) {
        if (file == null) {
            return;
        }
        log.info("M-DELETE file:" + file);
        file.delete();
        // purge empty series and study directory
        File seriesDir = file.getParentFile();
        if (seriesDir.delete()) {
            seriesDir.getParentFile().delete();
        }
    }

    /**
     * Registers the received instance in the database, retrying failed
     * updates up to updateDatabaseMaxRetries times with
     * updateDatabaseRetryInterval ms pause. NonUniquePatientIDException is
     * propagated immediately (handled by the caller).
     *
     * @return dataset with elements coerced by the database layer
     */
    protected Dataset updateDB(Storage storage, Dataset ds, long fspk,
            String filePath, long fileLength, byte[] md5, int fileStatus,
            boolean updateStudyAccessTime, boolean clearExternalRetrieveAET)
            throws DcmServiceException, NonUniquePatientIDException {
        int retry = 0;
        for (;;) {
            try {
                if (serializeDBUpdate) {
                    // Serialize concurrent updates on the same Storage bean.
                    synchronized (storage) {
                        return storage.store(ds, fspk, filePath, fileLength,
                                md5, fileStatus, updateStudyAccessTime,
                                clearExternalRetrieveAET,
                                service.patientMatching());
                    }
                } else {
                    return storage.store(ds, fspk, filePath, fileLength, md5,
                            fileStatus, updateStudyAccessTime,
                            clearExternalRetrieveAET,
                            service.patientMatching());
                }
            } catch (NonUniquePatientIDException e) {
                throw e;
            } catch (Exception e) {
                ++retry;
                if (retry > updateDatabaseMaxRetries) {
                    service.getLog().error(
                            "failed to update DB with entries for received "
                                    + filePath, e);
                    throw new DcmServiceException(Status.ProcessingFailure, e);
                }
                // Track the highest retry count seen (statistics counter).
                maxCountUpdateDatabaseRetries = Math.max(retry,
                        maxCountUpdateDatabaseRetries);
                service.getLog().warn(
                        "failed to update DB with entries for received "
                                + filePath + " - retry", e);
                try {
                    Thread.sleep(updateDatabaseRetryInterval);
                } catch (InterruptedException e1) {
                    log.warn("update Database Retry Interval interrupted:",
                            e1);
                }
            }
        }
    }

    // Returns the cached Storage bean of this association, creating it on
    // first use.
    Storage getStorage(Association assoc) throws RemoteException,
            CreateException, HomeFactoryException {
        Storage store = (Storage) assoc.getProperty(StorageHome.JNDI_NAME);
        if (store == null) {
            store = service.getStorage();
            assoc.putProperty(StorageHome.JNDI_NAME, store);
        }
        return store;
    }

    /**
     * Builds the target file for a received object below {@code basedir}:
     * optional calling-AET and date components followed by hex hashes of the
     * Study and Series Instance UIDs; the file name itself is derived from
     * the SOP Instance UID hash.
     */
    File makeFile(File basedir, Dataset ds, String callingAET)
            throws Exception {
        Calendar date = Calendar.getInstance();
        StringBuffer filePath = new StringBuffer();
        if( sourceAETInFilePath && callingAET != null) {
            filePath.append(callingAET);
            filePath.append(File.separatorChar);
        }
        if (studyDateInFilePath) {
            // Use the study date/time for the directory layout if present.
            Date studyDate = ds.getDateTime(Tags.StudyDate, Tags.StudyTime);
            if (studyDate != null)
                date.setTime(studyDate);
        }
        if (yearInFilePath) {
            filePath.append(String.valueOf(date.get(Calendar.YEAR)));
            filePath.append(File.separatorChar);
        }
        if (monthInFilePath) {
            filePath.append(String.valueOf(date.get(Calendar.MONTH) + 1));
            filePath.append(File.separatorChar);
        }
        if (dayInFilePath) {
            filePath.append(String.valueOf(date.get(Calendar.DAY_OF_MONTH)));
            filePath.append(File.separatorChar);
        }
        if (hourInFilePath) {
filePath.append(String.valueOf(date.get(Calendar.HOUR_OF_DAY))); filePath.append(File.separatorChar); } filePath.append(FileUtils.toHex(ds.getString(Tags.StudyInstanceUID) .hashCode())); filePath.append(File.separatorChar); filePath.append(FileUtils.toHex(ds.getString(Tags.SeriesInstanceUID) .hashCode())); File dir = new File(basedir, filePath.toString()); return FileUtils.createNewFile(dir, ds.getString(Tags.SOPInstanceUID) .hashCode()); } private byte[] storeToFile(DcmParser parser, Dataset ds, final File file, CompressCmd compressCmd, byte[] buffer) throws Exception { log.info("M-WRITE file:" + file); MessageDigest md = null; BufferedOutputStream bos = null; FileOutputStream fos = new FileOutputStream(file); if (service.isMd5sum()) { md = MessageDigest.getInstance("MD5"); DigestOutputStream dos = new DigestOutputStream(fos, md); bos = new BufferedOutputStream(dos, buffer); } else { bos = new BufferedOutputStream(fos, buffer); } try { DcmDecodeParam decParam = parser.getDcmDecodeParam(); String tsuid = ds.getFileMetaInfo().getTransferSyntaxUID(); DcmEncodeParam encParam = DcmEncodeParam.valueOf(tsuid); ds.writeFile(bos, encParam); if (parser.getReadTag() == Tags.PixelData) { int len = parser.getReadLength(); InputStream in = parser.getInputStream(); if (encParam.encapsulated) { ds.writeHeader(bos, encParam, Tags.PixelData, VRs.OB, -1); if (decParam.encapsulated) { parser.parseHeader(); while (parser.getReadTag() == Tags.Item) { len = parser.getReadLength(); ds.writeHeader(bos, encParam, Tags.Item, VRs.NONE, len); bos.copyFrom(in, len); parser.parseHeader(); } } else { int read = compressCmd.compress(decParam.byteOrder, parser.getInputStream(), bos, null); skipFully(in, parser.getReadLength() - read); } ds.writeHeader(bos, encParam, Tags.SeqDelimitationItem, VRs.NONE, 0); } else { ds.writeHeader(bos, encParam, Tags.PixelData, parser .getReadVR(), len); bos.copyFrom(in, len); } parser.parseDataset(decParam, -1); ds.subSet(Tags.PixelData, -1).writeDataset(bos, 
encParam); } bos.flush(); if (service.isSyncFileBeforeCStoreRSP()) { fos.getFD().sync(); } else if (service.isSyncFileAfterCStoreRSP()) { final FileOutputStream fos2 = fos; syncFileExecutor().execute(new Runnable() { public void run() { try { fos2.getFD().sync(); } catch (Exception e) { log.error("sync of " + file + " failed:", e); } finally { try { fos2.close(); } catch (Exception ignore) {} } }}); fos = null; } } finally { if (fos != null) try { fos.close(); } catch (Exception ignore) {} } return md != null ? md.digest() : null; } private Executor syncFileExecutor() { Executor result = syncFileExecutor; if (result == null) { synchronized (this) { result = syncFileExecutor; if (result == null) { syncFileExecutor = result = Executors.newSingleThreadExecutor(); } } } return result; } private static void skipFully(InputStream in, int n) throws IOException { int remaining = n; int skipped = 0; while (remaining > 0) { if ((skipped = (int) in.skip(remaining)) == 0) { throw new EOFException(); } remaining -= skipped; } } private String checkSOPInstanceUID(Command rqCmd, Dataset ds, String aet) throws DcmServiceException { String cuid = checkNotNull(ds.getString(Tags.SOPClassUID), "Missing SOP Class UID (0008,0016)"); String iuid = checkNotNull(ds.getString(Tags.SOPInstanceUID), "Missing SOP Instance UID (0008,0018)"); checkNotNull(ds.getString(Tags.StudyInstanceUID), "Missing Study Instance UID (0020,000D)"); checkNotNull(ds.getString(Tags.SeriesInstanceUID), "Missing Series Instance UID (0020,000E)"); if (!rqCmd.getAffectedSOPInstanceUID().equals(iuid)) { String prompt = "SOP Instance UID in Dataset [" + iuid + "] differs from Affected SOP Instance UID[" + rqCmd.getAffectedSOPInstanceUID() + "]"; log.warn(prompt); if (!contains(acceptMismatchIUIDCallingAETs, aet)) { throw new DcmServiceException( Status.DataSetDoesNotMatchSOPClassError, prompt); } } if (!rqCmd.getAffectedSOPClassUID().equals(cuid)) { throw new DcmServiceException( 
                    Status.DataSetDoesNotMatchSOPClassError,
                    "SOP Class UID in Dataset differs from Affected SOP Class UID");
        }
        return iuid;
    }

    // Throws a DcmServiceException with the given message if val is null.
    private static String checkNotNull(String val, String msg)
            throws DcmServiceException {
        if (val == null) {
            throw new DcmServiceException(
                    Status.DataSetDoesNotMatchSOPClassError, msg);
        }
        return val;
    }

    /**
     * Rejects objects without Patient ID / Patient Name unless acceptance of
     * such objects is explicitly enabled.
     */
    private void checkPatientIdAndName(Dataset ds, String aet)
            throws DcmServiceException, HomeFactoryException, RemoteException,
            CreateException, FinderException {
        String pid = ds.getString(Tags.PatientID);
        String pname = ds.getString(Tags.PatientName);
        if (pid == null && !acceptMissingPatientID) {
            throw new DcmServiceException(
                    Status.DataSetDoesNotMatchSOPClassError,
                    "Acceptance of objects without Patient ID is disabled");
        }
        if (pname == null && !acceptMissingPatientName) {
            throw new DcmServiceException(
                    Status.DataSetDoesNotMatchSOPClassError,
                    "Acceptance of objects without Patient Name is disabled");
        }
    }

    // Linear search. NOTE(review): assumes array elements are non-null;
    // a null element would throw NPE here.
    private boolean contains(Object[] a, Object e) {
        for (int i = 0; i < a.length; i++) {
            if (a[i].equals(e)) {
                return true;
            }
        }
        return false;
    }

    // Implementation of AssociationListener

    /** Marks the end of association establishment when the A-ASSOCIATE-AC is sent. */
    public void write(Association src, PDU pdu) {
        if (pdu instanceof AAssociateAC)
            perfMon.assocEstEnd(src, Command.C_STORE_RQ);
    }

    /** Marks the start of association establishment when an A-ASSOCIATE-RQ arrives. */
    public void received(Association src, PDU pdu) {
        if (pdu instanceof AAssociateRQ)
            perfMon.assocEstStart(src, Command.C_STORE_RQ);
    }

    public void write(Association src, Dimse dimse) {
    }

    public void received(Association src, Dimse dimse) {
    }

    public void error(Association src, IOException ioe) {
    }

    /**
     * On association release: flushes the pending SeriesStored (audit log,
     * derived field update, subclass hook) before the association closes.
     */
    public void closing(Association assoc) {
        if (assoc.getAAssociateAC() != null)
            perfMon.assocRelStart(assoc, Command.C_STORE_RQ);
        SeriesStored seriesStored =
                (SeriesStored) assoc.getProperty(SERIES_STORED);
        if (seriesStored != null) {
            try {
                Storage store = getStorage(assoc);
                service.logInstancesStoredAndUpdateDerivedFields(
                        store, assoc.getSocket(), seriesStored);
                doAfterSeriesIsStored(store, assoc, seriesStored);
            } catch (Exception e) {
                handleClosingFailed(e);
            }
        }
    }

    // extension point for handling failure of closing actions
    protected void handleClosingFailed(Exception e) {
        // default no-op behaviour, just log
        log.error("Clean up on Association close failed:", e);
    }

    /** Marks the end of association release for performance monitoring. */
    public void closed(Association assoc) {
        if (assoc.getAAssociateAC() != null)
            perfMon.assocRelEnd(assoc, Command.C_STORE_RQ);
    }

    // extension point for specialized implementations of StoreScp
    protected void doAfterSeriesIsStored(Storage store, Association assoc,
            SeriesStored seriesStored) throws Exception {
        return;
    }
}