repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
orioncode/orionplatform
orion_math/orion_math_core/src/main/java/com/orionplatform/math/geometry/point/tasks/distance/GetClosestPairOfPointsTask.java
1267
package com.orionplatform.math.geometry.point.tasks.distance;

import com.orionplatform.core.abstraction.Orion;
import com.orionplatform.core.tuple.Pair;
import com.orionplatform.math.geometry.point.Point;
import com.orionplatform.math.geometry.point.PointRules;
import com.orionplatform.math.number.ANumber;
import com.orionplatform.math.number.precision.Precision;

/**
 * Task that finds the closest pair among a set of points using the brute-force
 * O(n^2) all-pairs distance scan.
 */
public class GetClosestPairOfPointsTask extends Orion
{
    /**
     * Returns the pair of points in {@code points} separated by the smallest
     * distance, measured at the given precision.
     *
     * <p>Review note: the {@code synchronized} modifier was removed — this method
     * touches no shared state (all state is local), and a {@code static synchronized}
     * method locks the entire class object, needlessly serialising unrelated callers.</p>
     *
     * @param points    the points to scan; validated up front by {@link PointRules#areValid}
     * @param precision requested precision, normalised via {@link Precision#getValidPrecision}
     * @return the closest pair; if fewer than two points survive validation the pair's
     *         elements are {@code null} — presumably {@code PointRules.areValid} enforces
     *         a minimum of two points, TODO confirm
     */
    public static Pair<Point, Point> run(Point[] points, int precision)
    {
        PointRules.areValid(points);

        // Normalise into a local instead of reassigning the parameter.
        int validPrecision = Precision.getValidPrecision(precision);

        Point closest1 = null;
        Point closest2 = null;
        ANumber minimumDistance = ANumber.ofMax();

        for(int i = 0; i < points.length - 1; i++)
        {
            for(int j = i + 1; j < points.length; j++)
            {
                ANumber distance = points[i].getDistanceFromPoint(points[j], validPrecision);

                if(distance.isLessThan(minimumDistance))
                {
                    minimumDistance = distance;
                    closest1 = points[i];
                    closest2 = points[j];
                }
            }
        }

        return Pair.of(closest1, closest2);
    }
}
apache-2.0
Tecyang/libConnect
Linux/libconnect_java/src/com/libconnect/jnitest/jni_test.java
740
package com.libconnect.jnitest;

// JNI smoke test.
// Build steps (run from src):
//   sudo javac com/libconnect/jnitest/jni_test.java
//   sudo javah -classpath ../bin/ -d ../bin/ -jni com.libconnect.jnitest.libconnect
//   sudo sudo cp ../bin/com_libconnect_jnitest_libconnect.h ../../libconnect_jni/
public class jni_test {

    /**
     * Entry point: prints the native library search path, loads the "connect"
     * library, and echoes the result of a round trip through the native test call.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        System.out.println(System.getProperty("java.library.path"));

        System.loadLibrary("connect");

        libconnect bridge = new libconnect();
        String reply = bridge.libconnect_test("test jni function!");
        System.out.println(reply);
    }
}

// Thin wrapper declaring the native entry point implemented by the connect library.
class libconnect {
    public native String libconnect_test(String s);
}
apache-2.0
sundrio/sundrio
annotations/builder/src/main/java/io/sundr/builder/annotations/IgnoreDescendants.java
963
/* * Copyright 2016 The original authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sundr.builder.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target({ ElementType.FIELD }) @Retention(RetentionPolicy.SOURCE) public @interface IgnoreDescendants { }
apache-2.0
orangelynx/Trident
src/main/java/net/tridentsdk/server/player/TridentPlayer.java
22475
// NOTE(review): documentation-only pass. The code below is reproduced byte-for-byte
// from the original dump (which collapses the file onto a handful of very long lines;
// the line breaks fall between tokens, which is legal Java whitespace). Only this
// leading comment block was added.
//
// TridentPlayer — server-side player entity + connection glue. Observations grounded
// in the code below, flagged for follow-up:
//  * spawnPlayer() hard-codes GameMode.CREATIVE; the NBT-based gamemode line is
//    commented out ("DEBUG" region) — confirm before release.
//  * resumeLogin()'s entity-registration loop has an empty body (the "Register mob"
//    packet send is an unimplemented TODO).
//  * texture() can return null when the Mojang session-server lookup fails: the
//    catch block only prints the stack trace and falls through — callers of
//    listData() then split(null) and NPE; needs hardening.
//  * sendChunks() carries an author-written WARNING about frame drops and update
//    clutter; the alternative threaded implementation is left commented out.
//  * setSlot() range check rejects slots outside 0-8 via TridentLogger.error and
//    returns silently — callers get no failure signal.
/* * Trident - A Multithreaded Server Alternative * Copyright 2014 The TridentSDK Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.tridentsdk.server.player; import com.google.common.collect.Queues; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import net.tridentsdk.Trident; import net.tridentsdk.base.Block; import net.tridentsdk.base.Position; import net.tridentsdk.base.Substance; import net.tridentsdk.docs.InternalUseOnly; import net.tridentsdk.entity.Entity; import net.tridentsdk.entity.living.Player; import net.tridentsdk.entity.types.EntityType; import net.tridentsdk.event.player.PlayerJoinEvent; import net.tridentsdk.meta.ChatColor; import net.tridentsdk.meta.MessageBuilder; import net.tridentsdk.meta.nbt.CompoundTag; import net.tridentsdk.registry.Factory; import net.tridentsdk.registry.Registered; import net.tridentsdk.server.TridentServer; import net.tridentsdk.server.concurrent.ThreadsHandler; import net.tridentsdk.server.data.MetadataType; import net.tridentsdk.server.data.ProtocolMetadata; import net.tridentsdk.server.netty.ClientConnection; import net.tridentsdk.server.netty.packet.Packet; import net.tridentsdk.server.packets.play.out.*; import net.tridentsdk.server.world.TridentChunk; import net.tridentsdk.server.world.TridentWorld; import net.tridentsdk.util.TridentLogger; import net.tridentsdk.util.Vector; import net.tridentsdk.world.ChunkLocation; import 
net.tridentsdk.world.settings.GameMode; import net.tridentsdk.world.settings.LevelType; import javax.annotation.concurrent.ThreadSafe; import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.URL; import java.net.URLConnection; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Predicate; import java.util.stream.Stream; import static net.tridentsdk.server.packets.play.out.PacketPlayOutPlayerListItem.PlayerListDataBuilder; @ThreadSafe public class TridentPlayer extends OfflinePlayer { private static final Map<UUID, Player> ONLINE_PLAYERS = new ConcurrentHashMap<>(); private static final int MAX_VIEW = Trident.config().getInt("view-distance", 15); private static final int MAX_CHUNKS = (int) Trident.config().getConfigSection("performance") .getInt("max-chunks-player", 441); private final PlayerConnection connection; private final Set<ChunkLocation> knownChunks = Factory.newSet(); private final Queue<PacketPlayOutMapChunkBulk> chunkQueue = Queues.newConcurrentLinkedQueue(); private volatile boolean loggingIn = true; private volatile boolean sprinting; private volatile boolean crouching; private volatile boolean flying; private volatile byte skinFlags; private volatile Locale locale; private volatile int viewDistance = MAX_VIEW; private TridentPlayer(UUID uuid, CompoundTag tag, TridentWorld world, ClientConnection connection) { super(uuid, tag, world); this.connection = PlayerConnection.createPlayerConnection(connection, this); } public static void sendAll(Packet packet) { players().stream().forEach((p) -> ((TridentPlayer) p).connection.sendPacket(packet)); } public static void sendFiltered(Packet packet, Predicate<Player> predicate) { players().stream() .filter(predicate) .forEach((p) -> ((TridentPlayer) p).connection.sendPacket(packet)); } public static TridentPlayer spawnPlayer(ClientConnection connection, UUID id, String name) { // determine if this player has logged in before CompoundTag playerTag = 
(OfflinePlayer.getOfflinePlayer( id) == null) ? null : OfflinePlayer.getOfflinePlayer(id).asNbt(); // if this player is new if (playerTag == null) { playerTag = OfflinePlayer.generatePlayer(id); } final TridentPlayer p = new TridentPlayer(id, playerTag, TridentServer.WORLD, connection); p.executor = ThreadsHandler.playerExecutor(); OfflinePlayer.OFFLINE_PLAYERS.put(id, p); ONLINE_PLAYERS.put(id, p); p.name = name; p.gameMode = GameMode.CREATIVE;//GameMode.of(((IntTag) playerTag.getTag("playerGameType")).value()); p.executor.execute(() -> { p.connection.sendPacket(new PacketPlayOutJoinGame().set("entityId", p.entityId()) .set("gamemode", p.gameMode) .set("dimension", p.world().dimension()) .set("difficulty", p.world().difficulty()) .set("maxPlayers", (short) Trident.config().getInt("max-players")) .set("levelType", LevelType.DEFAULT)); p.abilities.creative = 1; p.abilities.flySpeed = 0.135F; p.abilities.canFly = 1; // DEBUG ===== // TODO: Use chunk HeightMap to get the spawn block more efficiently Block block = p.world().blockAt(Position.create(p.world(), 0, 255, 0)); Vector below = new Vector(0, -1, 0); while(block.substance() == Substance.AIR && block.relativeBlock(below).substance() == Substance.AIR) { block = block.relativeBlock(below); } block = block.relativeBlock(new Vector(0, 5, 0)); //Position pos = Position.create(p.world(), 0, 255, 0); p.setPosition(block.position()); p.spawnPosition = block.position(); // ===== p.connection.sendPacket(PacketPlayOutPluginMessage.VANILLA_CHANNEL); p.connection.sendPacket(new PacketPlayOutServerDifficulty().set("difficulty", p.world().difficulty())); p.connection.sendPacket(new PacketPlayOutSpawnPosition().set("location", p.spawnLocation())); p.connection.sendPacket(p.abilities.asPacket()); p.connection.sendPacket(new PacketPlayOutPlayerCompleteMove().set("location", p.spawnLocation()).set("flags", (byte) 0)); sendAll(new PacketPlayOutPlayerListItem() .set("action", 0) .set("playerListData", new 
PlayerListDataBuilder[]{p.listData()})); List<PlayerListDataBuilder> builders = new ArrayList<>(); players().stream().filter((player) -> !player.equals(p)) .forEach((player) -> builders.add(((TridentPlayer) player).listData())); TridentLogger.log(p.name + " has joined the server"); p.connection.sendPacket(new PacketPlayOutPlayerListItem() .set("action", 0) .set("playerListData", builders.stream().toArray(PlayerListDataBuilder[]::new))); }); p.spawn(); return p; } public static Player getPlayer(UUID id) { return ONLINE_PLAYERS.get(id); } public static Collection<Player> players() { return ONLINE_PLAYERS.values(); } @Override protected void doEncodeMeta(ProtocolMetadata protocolMeta) { protocolMeta.setMeta(0, MetadataType.BYTE, (byte) (((fireTicks.intValue() == 0) ? 1 : 0) | (isCrouching() ? 2 : 0) | (isSprinting() ? 8 : 0))); // TODO invisibility & blocking/eating protocolMeta.setMeta(10, MetadataType.BYTE, skinFlags); protocolMeta.setMeta(16, MetadataType.BYTE, (byte) 0); // hide cape, might need changing protocolMeta.setMeta(17, MetadataType.FLOAT, 0F); // absorption hearts TODO protocolMeta.setMeta(18, MetadataType.INT, 0); // TODO scoreboard system (this value is the player's score) } public boolean isLoggingIn() { return loggingIn; } @InternalUseOnly public void resumeLogin() { if (!loggingIn) return; connection.sendPacket(PacketPlayOutStatistics.DEFAULT_STATISTIC); sendChunks(viewDistance()); //TridentInventory inventory = TridentInventory.create("Inventory", 18, InventoryType.CHEST); //inventory.setSlot(0, new Item(Substance.DIAMOND_PICKAXE)); //inventory.sendTo(this); // Wait for response for (Entity entity : world().entities()) { // Register mob, packet sent to new player } loggingIn = false; connection.sendPacket(new PacketPlayOutEntityVelocity() .set("entityId", entityId()) .set("velocity", new Vector(0, -0.07, 0))); connection.sendPacket(new PacketPlayOutGameStateChange().set("reason", 3).set("value", (float) gameMode().asByte())); 
TridentServer.WORLD.addEntity(this); // TODO Registered.events().fire(new PlayerJoinEvent(this)); for (Player player : players()) { TridentPlayer p = (TridentPlayer) player; new MessageBuilder(name + " has joined the server").color(ChatColor.YELLOW).build().sendTo(player); if (!p.equals(this)) { ProtocolMetadata metadata = new ProtocolMetadata(); encodeMetadata(metadata); p.connection.sendPacket(new PacketPlayOutSpawnPlayer() .set("entityId", id) .set("player", this) .set("metadata", metadata)); metadata = new ProtocolMetadata(); p.encodeMetadata(metadata); connection.sendPacket(new PacketPlayOutSpawnPlayer() .set("entityId", p.id) .set("player", p) .set("metadata", metadata)); } } } @Override protected void doTick() { int distance = viewDistance(); if (!isLoggingIn()) sendChunks(distance); if (!chunkQueue.isEmpty()) connection.sendPacket(chunkQueue.poll()); cleanChunks(distance); connection.tick(); ticksExisted.incrementAndGet(); } public void cleanChunks(int viewDist) { int toClean = knownChunks.size() - MAX_CHUNKS; if (toClean > 0) { Position pos = position(); int x = (int) pos.x() / 16; int z = (int) pos.z() / 16; for (ChunkLocation location : knownChunks) { int cx = location.x(); int cz = location.z(); int abs = Math.abs(cx - x); int abs1 = Math.abs(cz - z); if (abs > viewDist || abs1 > viewDist) { ((TridentWorld) world()).loadedChunks.tryRemove(location); connection.sendPacket(new PacketPlayOutChunkData(new byte[0], location, true, (short) 0)); knownChunks.remove(location); } } } } @Override protected void doRemove() { ONLINE_PLAYERS.remove(this.uniqueId()); cleanChunks(0); PacketPlayOutPlayerListItem item = new PacketPlayOutPlayerListItem(); item.set("action", 4).set("playerListData", new PlayerListDataBuilder[]{ new PacketPlayOutPlayerListItem.PlayerListDataBuilder().id(uniqueId).values(new Object[0])}); sendAll(item); players().forEach(p -> new MessageBuilder(name + " has left the server").color(ChatColor.YELLOW).build().sendTo(p)); TridentLogger.log(name 
+ " has left the server"); } @Override public void setPosition(Position loc) { double dX = loc.x() - position().x(); double dY = loc.y() - position().y(); double dZ = loc.z() - position().z(); if (dX == 0 && dY == 0 && dZ == 0) { sendFiltered(new PacketPlayOutEntityLook().set("entityId", entityId()) .set("location", loc).set("onGround", onGround), player -> !player.equals(this) ); return; } if ((dX > 4 || dY > 4 || dZ > 4) || (ticksExisted.get() & 1) == 0) { sendFiltered(new PacketPlayOutEntityTeleport() .set("entityId", entityId()) .set("location", loc) .set("onGround", onGround), player -> !player.equals(this)); } else { for (Player player : players()) { if (player.equals(this)) continue; Packet packet = new PacketPlayOutEntityRelativeMove() .set("entityId", entityId()) .set("difference", new Vector(dX, dY, dZ)) .set("onGround", onGround); ((TridentPlayer) player).connection.sendPacket(packet); } } super.setPosition(loc); } /* * @NotJavaDoc * TODO: Create Message API and utilize it */ public void kickPlayer(String reason) { connection.sendPacket(new PacketPlayOutDisconnect().set("reason", new MessageBuilder(reason).build().asJson())); TridentLogger.log(name + " was kicked for " + reason); } private static final Map<UUID, String> textures = new ConcurrentHashMap<>(); public PlayerListDataBuilder listData() { String[] texture = texture().split("#"); return new PacketPlayOutPlayerListItem.PlayerListDataBuilder() .id(uniqueId) .values(name, 1, new Object[]{"textures", texture[0], true, texture[1]}, (int) gameMode.asByte(), 0, displayName != null, displayName); } // TODO move to login private String texture() { String tex = textures.get(uniqueId()); if (tex == null) { try { URL mojang = new URL("https://sessionserver.mojang.com/session/minecraft/profile/" + uniqueId.toString().replace("-", "") + "?unsigned=false"); StringBuilder builder = new StringBuilder(); URLConnection connection = mojang.openConnection(); BufferedReader reader = new BufferedReader(new 
InputStreamReader(connection.getInputStream())); String line; while ((line = reader.readLine()) != null) { builder.append(line).append("\n"); } JsonElement object = new JsonParser().parse(builder.toString()); JsonArray properties = object.getAsJsonObject().get("properties").getAsJsonArray(); for (int i = 0; i < properties.size(); i++) { JsonObject element = properties.get(i).getAsJsonObject(); if (element.get("name").getAsString().equals("textures")) { String value = element.get("value").getAsString(); String sig = element.get("signature").getAsString(); tex = value + "#" + sig; textures.put(uniqueId(), tex); } } } catch (Exception e) { e.printStackTrace(); } } return tex; } public PlayerConnection connection() { return this.connection; } public static final int SLOT_OFFSET = 35; public void setSlot(final short slot) { if ((int) slot > 8 || (int) slot < 0) { TridentLogger.error(new IllegalArgumentException("Slot must be within the ranges of 0-8")); return; } TridentPlayer.super.selectedSlot = slot; setSelectedSlot(slot); setHeldItem(heldItem()); // Updates inventory } @Override public void sendMessage(String message) { // fixme new MessageBuilder(message) .build() .sendTo(this); } @Override public void sendRaw(final String... 
messages) { Stream.of(messages) .filter((m) -> m != null) .forEach((message) -> connection.sendPacket(new PacketPlayOutChat() .set("jsonMessage", message) .set("position", PacketPlayOutChat.ChatPosition.CHAT))); } public void sendChunks(int viewDistance) { int centX = ((int) Math.floor(loc.x())) >> 4; int centZ = ((int) Math.floor(loc.z())) >> 4; PacketPlayOutMapChunkBulk bulk = new PacketPlayOutMapChunkBulk(); int length = 0; for (int x = (centX - viewDistance / 2); x <= (centX + viewDistance / 2); x += 1) { for (int z = (centZ - viewDistance / 2); z <= (centZ + viewDistance / 2); z += 1) { ChunkLocation location = ChunkLocation.create(x, z); if (!knownChunks.add(location)) continue; TridentChunk chunk = (TridentChunk) world().chunkAt(x, z, true); PacketPlayOutChunkData data = chunk.asPacket(); bulk.addEntry(data); length += (10 + data.data().length); if (length >= 1845152) { // send the packet if the length is close to the protocol maximum connection.sendPacket(bulk); bulk = new PacketPlayOutMapChunkBulk(); length = 0; } } } if (bulk.hasEntries()) { connection.sendPacket(bulk); } // WARNING: The stability of this method is very, very poor // there are two bugs to fix should you choose to implement this // One: Spread out the chunk packet sending // the client usually drops about 50 frames at regular intervals // using this method // Two: Update clutter // There are more updates than chunks sent // perhaps consider a local collection that is pushed at the end // as the knownChunks is simply not enough to deal with the // incredibly fast chunk serialization via this method, or the // algorithm is completely incorrect in the first place // This method is so quick that the most likely bottleneck is the // client or the network /* int centX = ((int) Math.floor(loc.x())) >> 4; int centZ = ((int) Math.floor(loc.z())) >> 4; ThreadLocal<PacketPlayOutMapChunkBulk> bulk = new ThreadLocal<PacketPlayOutMapChunkBulk>() { @Override protected PacketPlayOutMapChunkBulk 
initialValue() { return new PacketPlayOutMapChunkBulk(); } }; // Don't include this loop because that would result in micro sending // if the size isn't close to the protocol max // keep it in the inside loop to reduce buffer pollution int minX = centX - viewDistance / 2; int maxX = centX + viewDistance / 2; for (int x = minX; x <= maxX; x += 1) { final int finalX = x; ThreadsHandler.worldExecutor().execute(() -> { int minZ = centZ - viewDistance / 2; int maxZ = centZ + viewDistance / 2; for (int z = minZ; z <= maxZ; z += 1) { ChunkLocation location = ChunkLocation.create(finalX, z); if (!knownChunks.add(location)) continue; TridentChunk chunk = (TridentChunk) world().chunkAt(location, true); PacketPlayOutChunkData data = chunk.asPacket(); PacketPlayOutMapChunkBulk b = bulk.get(); b.addEntry(data); if (b.size() >= 1845152) { // send the packet if the length is close to the protocol maximum connection.sendPacket(bulk.get()); bulk.set(new PacketPlayOutMapChunkBulk()); } if (bulk.get().hasEntries()) { connection.sendPacket(bulk.get()); } } }); } */ } @Override public void setGameMode(GameMode mode) { super.setGameMode(mode); this.connection.sendPacket(this.abilities.asPacket()); } public boolean isFlying() { return flying; } public void setFlying(boolean flying) { this.flying = flying; abilities.flying = (flying) ? (byte) 1 : (byte) 0; connection.sendPacket(abilities.asPacket()); } public boolean isFlyMode() { return abilities.canFly(); } public void setFlyMode(boolean flying) { abilities.canFly = (flying) ? 
(byte) 1 : (byte) 0; } public boolean isSprinting() { return sprinting; } public void setSprinting(boolean sprinting) { this.sprinting = sprinting; ProtocolMetadata meta = new ProtocolMetadata(); encodeMetadata(meta); sendFiltered(new PacketPlayOutEntityMetadata().set("entityId", entityId()).set("metadata", meta), p -> !p.equals(this)); } public boolean isCrouching() { return crouching; } @InternalUseOnly public void setCrouching(boolean crouching) { this.crouching = crouching; ProtocolMetadata meta = new ProtocolMetadata(); encodeMetadata(meta); sendFiltered(new PacketPlayOutEntityMetadata().set("entityId", entityId()).set("metadata", meta), p -> !p.equals(this)); } public void setLocale(Locale locale) { this.locale = locale; } public void setSkinFlags(byte flags) { skinFlags = flags; } public void setViewDistance(int viewDistance) { this.viewDistance = viewDistance; } public int viewDistance() { return Math.min(viewDistance, MAX_VIEW); } @Override public boolean connected() { return true; } @Override public Player asPlayer() { return this; } @Override public EntityType type() { return EntityType.PLAYER; } }
apache-2.0
cmoulliard/apiman
common/util/src/main/java/io/apiman/common/util/ServiceRegistryUtil.java
2751
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apiman.common.util; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.ServiceConfigurationError; import java.util.ServiceLoader; import java.util.Set; /** * Provides simple access to services. * * @author eric.wittmann@redhat.com */ public class ServiceRegistryUtil { private static Map<Class<?>, Set<?>> servicesCache = new HashMap<>(); private ServiceRegistryUtil() { } /** * Gets a single service by its interface. * * @param serviceInterface the service interface * @throws IllegalStateException method has been invoked at an illegal or inappropriate time */ @SuppressWarnings("javadoc") public static <T> T getSingleService(Class<T> serviceInterface) throws IllegalStateException { // Cached single service values are derived from the values cached when checking // for multiple services T rval = null; Set<T> services = getServices(serviceInterface); if (services.size() > 1) { throw new IllegalStateException( "Multiple implementations found of " + serviceInterface); //$NON-NLS-1$ } else if (!services.isEmpty()) { rval = services.iterator().next(); } return rval; } /** * Get a set of service implementations for a given interface. 
* * @param serviceInterface the service interface * @return the set of services */ @SuppressWarnings("unchecked") public static <T> Set<T> getServices(Class<T> serviceInterface) { synchronized (servicesCache) { if (servicesCache.containsKey(serviceInterface)) { return (Set<T>) servicesCache.get(serviceInterface); } Set<T> services = new LinkedHashSet<>(); try { for (T service : ServiceLoader.load(serviceInterface)) { services.add(service); } } catch (ServiceConfigurationError sce) { // No services found - don't check again. } return services; } } }
apache-2.0
kebernet/invoker
runtime/src/test/java/net/kebernet/invoker/runtime/annotations/GET.java
872
/* * Copyright (c) 2016 Robert Cooper * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.kebernet.invoker.runtime.annotations; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; /** * Created by rcooper on 10/15/16. */ @Retention(RetentionPolicy.RUNTIME) public @interface GET { }
apache-2.0
galaxynut/aws-sdk-java
aws-java-sdk-codecommit/src/main/java/com/amazonaws/service/codecommit/AWSCodeCommit.java
24050
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.service.codecommit; import com.amazonaws.*; import com.amazonaws.regions.*; import com.amazonaws.service.codecommit.model.*; /** * Interface for accessing CodeCommit. * <p> * <fullname>AWS CodeCommit</fullname> * <p> * This is the <i>AWS CodeCommit API Reference</i>. This reference provides * descriptions of the AWS CodeCommit API. * </p> * <p> * You can use the AWS CodeCommit API to work with the following objects: * </p> * <ul> * <li>Repositories</li> * <li>Branches</li> * <li>Commits</li> * </ul> * <p> * For information about how to use AWS CodeCommit, see the <i>AWS CodeCommit * User Guide</i>. * </p> */ public interface AWSCodeCommit { /** * Overrides the default endpoint for this client * ("https://codecommit.us-east-1.amazonaws.com"). Callers can use this * method to control which AWS region they want to work with. * <p> * Callers can pass in just the endpoint (ex: * "codecommit.us-east-1.amazonaws.com") or a full URL, including the * protocol (ex: "https://codecommit.us-east-1.amazonaws.com"). If the * protocol is not specified here, the default protocol from this client's * {@link ClientConfiguration} will be used, which by default is HTTPS. 
* <p> * For more information on using AWS regions with the AWS SDK for Java, and * a complete list of all available endpoints for all AWS services, see: <a * href= * "http://developer.amazonwebservices.com/connect/entry.jspa?externalID=3912" * > http://developer.amazonwebservices.com/connect/entry.jspa?externalID= * 3912</a> * <p> * <b>This method is not threadsafe. An endpoint should be configured when * the client is created and before any service requests are made. Changing * it afterwards creates inevitable race conditions for any service requests * in transit or retrying.</b> * * @param endpoint * The endpoint (ex: "codecommit.us-east-1.amazonaws.com") or a full * URL, including the protocol (ex: * "https://codecommit.us-east-1.amazonaws.com") of the region * specific AWS endpoint this client will communicate with. */ void setEndpoint(String endpoint); /** * An alternative to {@link AWSCodeCommit#setEndpoint(String)}, sets the * regional endpoint for this client's service calls. Callers can use this * method to control which AWS region they want to work with. * <p> * By default, all service endpoints in all regions use the https protocol. * To use http instead, specify it in the {@link ClientConfiguration} * supplied at construction. * <p> * <b>This method is not threadsafe. A region should be configured when the * client is created and before any service requests are made. Changing it * afterwards creates inevitable race conditions for any service requests in * transit or retrying.</b> * * @param region * The region this client will communicate with. See * {@link Region#getRegion(com.amazonaws.regions.Regions)} for * accessing a given region. Must not be null and must be a region * where the service is available. 
* * @see Region#getRegion(com.amazonaws.regions.Regions) * @see Region#createClient(Class, * com.amazonaws.auth.AWSCredentialsProvider, ClientConfiguration) * @see Region#isServiceSupported(String) */ void setRegion(Region region); /** * <p> * Gets information about one or more repositories. * </p> * <note> * <p> * The description field for a repository accepts all HTML characters and * all valid Unicode characters. Applications that do not HTML-encode the * description and display it in a web page could expose users to * potentially malicious code. Make sure that you HTML-encode the * description field in any application that uses this API to display the * repository description on a web page. * </p> * </note> * * @param batchGetRepositoriesRequest * Represents the input of a batch get repositories operation. * @return Result of the BatchGetRepositories operation returned by the * service. * @throws RepositoryNamesRequiredException * A repository names object is required but was not specified. * @throws MaximumRepositoryNamesExceededException * The maximum number of allowed repository names was exceeded. * Currently, this number is 25. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. 
*/ BatchGetRepositoriesResult batchGetRepositories( BatchGetRepositoriesRequest batchGetRepositoriesRequest); /** * <p> * Creates a new branch in a repository and points the branch to a commit. * </p> * <note>Calling the create branch operation does not set a repository's * default branch. To do this, call the update default branch * operation.</note> * * @param createBranchRequest * Represents the input of a create branch operation. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws BranchNameRequiredException * A branch name is required but was not specified. * @throws BranchNameExistsException * The specified branch name already exists. * @throws InvalidBranchNameException * The specified branch name is not valid. * @throws CommitIdRequiredException * A commit ID was not specified. * @throws CommitDoesNotExistException * The specified commit does not exist or no commit was specified, * and the specified repository has no default branch. * @throws InvalidCommitIdException * The specified commit ID is not valid. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ void createBranch(CreateBranchRequest createBranchRequest); /** * <p> * Creates a new, empty repository. 
* </p> * * @param createRepositoryRequest * Represents the input of a create repository operation. * @return Result of the CreateRepository operation returned by the service. * @throws RepositoryNameExistsException * The specified repository name already exists. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws InvalidRepositoryDescriptionException * The specified repository description is not valid. * @throws RepositoryLimitExceededException * A repository resource limit was exceeded. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ CreateRepositoryResult createRepository( CreateRepositoryRequest createRepositoryRequest); /** * <p> * Deletes a repository. If a specified repository was already deleted, a * null repository ID will be returned. * </p> * <important>Deleting a repository also deletes all associated objects and * metadata. After a repository is deleted, all future push calls to the * deleted repository will fail.</important> * * @param deleteRepositoryRequest * Represents the input of a delete repository operation. * @return Result of the DeleteRepository operation returned by the service. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. 
* @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ DeleteRepositoryResult deleteRepository( DeleteRepositoryRequest deleteRepositoryRequest); /** * <p> * Retrieves information about a repository branch, including its name and * the last commit ID. * </p> * * @param getBranchRequest * Represents the input of a get branch operation. * @return Result of the GetBranch operation returned by the service. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws BranchNameRequiredException * A branch name is required but was not specified. * @throws InvalidBranchNameException * The specified branch name is not valid. * @throws BranchDoesNotExistException * The specified branch does not exist. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. 
* @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ GetBranchResult getBranch(GetBranchRequest getBranchRequest); /** * <p> * Gets information about a repository. * </p> * <note> * <p> * The description field for a repository accepts all HTML characters and * all valid Unicode characters. Applications that do not HTML-encode the * description and display it in a web page could expose users to * potentially malicious code. Make sure that you HTML-encode the * description field in any application that uses this API to display the * repository description on a web page. * </p> * </note> * * @param getRepositoryRequest * Represents the input of a get repository operation. * @return Result of the GetRepository operation returned by the service. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ GetRepositoryResult getRepository(GetRepositoryRequest getRepositoryRequest); /** * <p> * Gets information about one or more branches in a repository. 
* </p> * * @param listBranchesRequest * Represents the input of a list branches operation. * @return Result of the ListBranches operation returned by the service. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. * @throws InvalidContinuationTokenException * The specified continuation token is not valid. */ ListBranchesResult listBranches(ListBranchesRequest listBranchesRequest); /** * <p> * Gets information about one or more repositories. * </p> * * @param listRepositoriesRequest * Represents the input of a list repositories operation. * @return Result of the ListRepositories operation returned by the service. * @throws InvalidSortByException * The specified sort by value is not valid. * @throws InvalidOrderException * The specified sort order is not valid. * @throws InvalidContinuationTokenException * The specified continuation token is not valid. */ ListRepositoriesResult listRepositories( ListRepositoriesRequest listRepositoriesRequest); /** * <p> * Sets or changes the default branch name for the specified repository. 
* </p> * <note>If you use this operation to change the default branch name to the * current default branch name, a success message is returned even though * the default branch did not change.</note> * * @param updateDefaultBranchRequest * Represents the input of an update default branch operation. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws BranchNameRequiredException * A branch name is required but was not specified. * @throws InvalidBranchNameException * The specified branch name is not valid. * @throws BranchDoesNotExistException * The specified branch does not exist. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ void updateDefaultBranch( UpdateDefaultBranchRequest updateDefaultBranchRequest); /** * <p> * Sets or changes the comment or description for a repository. * </p> * <note> * <p> * The description field for a repository accepts all HTML characters and * all valid Unicode characters. Applications that do not HTML-encode the * description and display it in a web page could expose users to * potentially malicious code. 
Make sure that you HTML-encode the * description field in any application that uses this API to display the * repository description on a web page. * </p> * </note> * * @param updateRepositoryDescriptionRequest * Represents the input of an update repository description * operation. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. * @throws InvalidRepositoryDescriptionException * The specified repository description is not valid. * @throws EncryptionIntegrityChecksFailedException * An encryption integrity check failed. * @throws EncryptionKeyAccessDeniedException * An encryption key could not be accessed. * @throws EncryptionKeyDisabledException * The encryption key is disabled. * @throws EncryptionKeyNotFoundException * No encryption key was found. * @throws EncryptionKeyUnavailableException * The encryption key is not available. */ void updateRepositoryDescription( UpdateRepositoryDescriptionRequest updateRepositoryDescriptionRequest); /** * <p> * Renames a repository. * </p> * * @param updateRepositoryNameRequest * Represents the input of an update repository description * operation. * @throws RepositoryDoesNotExistException * The specified repository does not exist. * @throws RepositoryNameExistsException * The specified repository name already exists. * @throws RepositoryNameRequiredException * A repository name is required but was not specified. * @throws InvalidRepositoryNameException * At least one specified repository name is not valid.</p> * <note>This exception only occurs when a specified repository name * is not valid. 
Other exceptions occur when a required repository * parameter is missing, or when a specified repository does not * exist. */ void updateRepositoryName( UpdateRepositoryNameRequest updateRepositoryNameRequest); /** * Shuts down this client object, releasing any resources that might be held * open. This is an optional method, and callers are not expected to call * it, but can if they want to explicitly release any open resources. Once a * client has been shutdown, it should not be used to make any more * requests. */ void shutdown(); /** * Returns additional metadata for a previously executed successful request, * typically used for debugging issues where a service isn't acting as * expected. This data isn't considered part of the result data returned by * an operation, so it's available through this separate, diagnostic * interface. * <p> * Response metadata is only cached for a limited period of time, so if you * need to access this extra diagnostic information for an executed request, * you should use this method to retrieve it as soon as possible after * executing a request. * * @param request * The originally executed request. * * @return The response metadata for the specified request, or null if none * is available. */ ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request); }
apache-2.0
echalkpad/t4f-data
rpc/rest/cxf/tmp/spring-security/src/demo/jaxrs/service/Product.java
1646
/****************************************************************
 * Licensed to the AOS Community (AOS) under one or more        *
 * contributor license agreements.  See the NOTICE file         *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The AOS licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/
package demo.jaxrs.service;

import javax.xml.bind.annotation.XmlRootElement;

/**
 * JAXB-bound transfer object representing a product, serialized as the
 * {@code <Product>} root element.
 */
@XmlRootElement(name = "Product")
public class Product {

    private long id;
    private String description;

    /**
     * Returns the product identifier.
     *
     * @return the numeric product id
     */
    public long getId() {
        return id;
    }

    /**
     * Sets the product identifier.
     *
     * @param id the numeric product id
     */
    public void setId(long id) {
        this.id = id;
    }

    /**
     * Returns the human-readable product description.
     *
     * @return the description text, or {@code null} if none was set
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the human-readable product description.
     *
     * @param description the description text
     */
    public void setDescription(String description) {
        this.description = description;
    }
}
apache-2.0
wanlihuan/phone-touch
business-service/business-common-service/src/main/java/com/assistivetouch/widget/VolumeAdjust.java
1906
package com.assistivetouch.widget; import android.content.Context; import android.media.AudioManager; public class VolumeAdjust extends ToastProgressBar{ public AudioManager audiomanage; private int maxVolume; private int mVolumeType = AudioManager.STREAM_MUSIC; /** * * @param context * @param volumeType 如:AudioManager.STREAM_MUSIC */ public VolumeAdjust(Context context) { this(context, null, 0); // TODO Auto-generated constructor stub } public VolumeAdjust(Context context, CharSequence text, int duration, int volumeType) { this(context, text, duration); // TODO Auto-generated constructor stub mVolumeType = volumeType; maxVolume = audiomanage.getStreamMaxVolume(volumeType); } private VolumeAdjust(Context context, CharSequence text, int duration) { super(context, text, duration); // TODO Auto-generated constructor stub audiomanage = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); } public int getMaxVolume(){ return maxVolume; } public void updateVolume(int volumeValue){ int currentVolume = getCurrentStreamVolume(); //获取当前值 if(currentVolume > maxVolume || currentVolume < 0) return; audiomanage.setStreamVolume(mVolumeType, volumeValue, 0); } public int getCurrentStreamVolume(){ return audiomanage.getStreamVolume(mVolumeType); //获取当前值 } public int volume2progress(int volume){ float progressPercent = (float)volume / (float)maxVolume;//百分比 int currentProgress = (int)(progressPercent * maxProgress); return currentProgress; } public int progress2volume(int progress){ float progressPercent = progress / (float)maxProgress; int currentValue = (int) (progressPercent * maxVolume); return currentValue; } }
apache-2.0
dkpro/dkpro-jwktl
src/main/java/de/tudarmstadt/ukp/jwktl/parser/ru/wikokit/base/wikipedia/language/WikimediaSisterProject.java
7108
/*******************************************************************************
 * Copyright 2008 Andrew Krizhanovsky <andrew.krizhanovsky at gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package de.tudarmstadt.ukp.jwktl.parser.ru.wikokit.base.wikipedia.language;

import java.util.HashMap;
import java.util.Map;

import de.tudarmstadt.ukp.jwktl.parser.ru.wikokit.base.wikipedia.util.StringUtil;

/** Wikimedia sister projects: code and short codes, e.g. 'wikipedia' and 'w', 'wikt'.
 *
 *  See w:Wikipedia:Interwikimedia_links and w:Wikipedia:Wikimedia_sister_projects
 */
public class WikimediaSisterProject {

    /** Long form, e.g. 'wikipedia', 'commons', 'meta', etc. Also the value
     *  returned by {@link #toString()}. */
    private final String long_form;

    /** Map from both the long form and the shortcut to the project object.
     *  Populated by the constructor of every constant below. */
    private static Map<String, WikimediaSisterProject> code2project = new HashMap<>();

    // List of projects. Empty strings mean "no such code for this project";
    // they are not registered in the map.
    public static final WikimediaSisterProject Wikipedia   = new WikimediaSisterProject("wikipedia",  "w",    "Wikipedia");
    public static final WikimediaSisterProject Wiktionary  = new WikimediaSisterProject("wiktionary", "wikt", "Wiktionary");
    public static final WikimediaSisterProject Wikinews    = new WikimediaSisterProject("wikinews",   "n",    "Wikinews");
    public static final WikimediaSisterProject Wikibooks   = new WikimediaSisterProject("wikibooks",  "b",    "Wikibooks");
    public static final WikimediaSisterProject Wikiquote   = new WikimediaSisterProject("wikiquote",  "q",    "Wikiquote");
    public static final WikimediaSisterProject Wikisource  = new WikimediaSisterProject("wikisource", "s",    "Wikisource");
    public static final WikimediaSisterProject Wikispecies = new WikimediaSisterProject("wikispecies", "",    "Wikispecies");
    public static final WikimediaSisterProject Wikiversity = new WikimediaSisterProject("",           "v",    "Wikiversity");

    // In reality there are two long forms: [[wikimedia:]] and [[foundation:]], and no shortcuts
    public static final WikimediaSisterProject Wikimedia_Foundation = new WikimediaSisterProject("wikimedia", "foundation", "Wikimedia_Foundation");
    // NOTE(review): the long form below contains a trailing colon ("commons:"),
    // so lookups for "commons" (without colon) will NOT match — confirm this is
    // intended before changing it.
    public static final WikimediaSisterProject Wikimedia_Commons    = new WikimediaSisterProject("commons:",  "",   "Wikimedia_Commons");
    public static final WikimediaSisterProject Wikimedia_Meta_Wiki  = new WikimediaSisterProject("meta",      "m",  "Wikimedia_Meta-Wiki");
    public static final WikimediaSisterProject Wikimedia_Incubator  = new WikimediaSisterProject("incubator", "",   "Wikimedia_Incubator");
    public static final WikimediaSisterProject MediaWiki            = new WikimediaSisterProject("",          "mw", "MediaWiki");
    public static final WikimediaSisterProject MediaZilla           = new WikimediaSisterProject("mediazilla", "",  "MediaZilla");

    /** Registers the project under its long form and shortcut (skipping empty
     *  codes). The shortcut and name are used only for registration; only the
     *  long form is stored.
     *
     * @param long_form long project code, e.g. "wikipedia" (may be empty)
     * @param shortcut  short project code, e.g. "w" (may be empty)
     * @param name      human-readable project name (currently unused)
     */
    private WikimediaSisterProject(String long_form, String shortcut, String name) {
        this.long_form = long_form;

        if (long_form.length() > 0) {
            code2project.put(long_form, this);
        }
        if (shortcut.length() > 0) {
            code2project.put(shortcut, this);
        }
    }

    /** Checks whether a project with the long form or shortcut 'code' exists. */
    public static boolean existsCode(String code) {
        return code2project.containsKey(code);
    }

    @Override
    public String toString() {
        return long_form;
    }

    /** Returns true if the project has this 'long_form' (case-insensitive).
     *  NOTE: this is an overload of Object.equals, not an override — it does
     *  not participate in collection lookups. */
    public boolean equals(String long_form) {
        return long_form.equalsIgnoreCase(this.long_form);
    }

    /** Gets a WM sister project by long form or shortcut.
     *
     * @param code long form or shortcut, e.g. "wikipedia" or "w"
     * @return the matching project
     * @throws NullPointerException if no project is registered under 'code'
     *         (kept for backward compatibility with existing callers)
     */
    public static WikimediaSisterProject get(String code) throws NullPointerException {
        if (code2project.containsKey(code)) {
            return code2project.get(code);
        }
        throw new NullPointerException("Null WikimediaSisterProject");
    }

    /** Gets the text of an interwikimedia link. E.g.
     *  [[wikt:Wiktionary:Statistics#Detail|statistics]] -&gt; "statistics"
     *
     * @param text_inside_link e.g. "wikipedia:" in [[wikipedia:]]
     */
    public static String getLinkText(String text_inside_link) {
        // Split on the first colon (if any) and delegate to the two-part form.
        if (-1 != text_inside_link.indexOf(':')) {
            String before = StringUtil.getTextBeforeFirstColumn(text_inside_link);
            String after  = StringUtil.getTextAfterFirstColumn(text_inside_link);
            return getLinkText(before, after);
        }
        return text_inside_link;
    }

    /** Gets the text of an interwikimedia link. E.g.
     *  [[wikt:Wiktionary:Statistics#Detail|statistics]] -&gt; "statistics"
     *
     * @param before the text before the first colon, e.g. "wikipedia" in [[wikipedia:]]
     * @param after  e.g. "Wikipedia:Wikimedia_sister_projects" in
     *               [[w:Wikipedia:Wikimedia_sister_projects]]
     */
    public static String getLinkText(String before, String after) {
        // Empty prefix covers links such as
        // [[:Image:Wiktionary-logo-gl.png|a logo that depicts a dictionary]].
        if (0 == before.length() || existsCode(before)) {
            if (-1 != after.indexOf('|')) {
                // Piped link: the visible text follows the pipe.
                return StringUtil.getTextAfterFirstVerticalPipe(after);
            } else if (-1 != after.indexOf(':')) {
                // E.g. [[:de:Hauptseite]], after = "de:Hauptseite":
                // strip a leading language code if present.
                String lang_code = StringUtil.getTextBeforeFirstColumn(after);
                if (LanguageType.has(lang_code)) {
                    return StringUtil.getTextAfterFirstColumn(after);
                }
                return after;
            }
            // NOTE(review): a recognized prefix with no pipe and no further
            // colon (e.g. [[wikt:statistics]]) falls through and is rejoined
            // below as "wikt:statistics" — confirm this is the intended result.
        }
        return before + ":" + after;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/transform/ThingDocumentJsonUnmarshaller.java
4567
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iot.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.iot.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * ThingDocument JSON Unmarshaller.
 * <p>
 * Code-generated (do not hand-edit): reads a streamed JSON object from the
 * unmarshaller context and populates a {@link ThingDocument}, matching known
 * field names one nesting level below the object's own depth.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ThingDocumentJsonUnmarshaller implements Unmarshaller<ThingDocument, JsonUnmarshallerContext> {

    /**
     * Unmarshalls a ThingDocument from the current position of the JSON stream.
     *
     * @param context streaming JSON parser context positioned at the object
     * @return the populated ThingDocument, or null if the JSON value is null
     * @throws Exception on any parse failure (generated-code convention)
     */
    public ThingDocument unmarshall(JsonUnmarshallerContext context) throws Exception {
        ThingDocument thingDocument = new ThingDocument();

        // Remember where this object starts so nested objects/arrays can be
        // distinguished from this object's own fields.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this object live exactly one level below its start depth.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole document maps to a null result.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Each testExpression matches a field name at targetDepth;
                // nextToken() advances to that field's value before reading it.
                if (context.testExpression("thingName", targetDepth)) {
                    context.nextToken();
                    thingDocument.setThingName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("thingId", targetDepth)) {
                    context.nextToken();
                    thingDocument.setThingId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("thingTypeName", targetDepth)) {
                    context.nextToken();
                    thingDocument.setThingTypeName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("thingGroupNames", targetDepth)) {
                    context.nextToken();
                    // JSON array of strings.
                    thingDocument.setThingGroupNames(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

                    .unmarshall(context));
                }
                if (context.testExpression("attributes", targetDepth)) {
                    context.nextToken();
                    // JSON object treated as a string-to-string map.
                    thingDocument.setAttributes(new MapUnmarshaller<String, String>(context.getUnmarshaller(String.class), context
                            .getUnmarshaller(String.class)).unmarshall(context));
                }
                if (context.testExpression("shadow", targetDepth)) {
                    context.nextToken();
                    thingDocument.setShadow(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("deviceDefender", targetDepth)) {
                    context.nextToken();
                    thingDocument.setDeviceDefender(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("connectivity", targetDepth)) {
                    context.nextToken();
                    // Nested object with its own generated unmarshaller.
                    thingDocument.setConnectivity(ThingConnectivityJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back to (or above) the depth at
                // which this object started, i.e. the object is closed.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return thingDocument;
    }

    // Lazily-created singleton (generated-code convention; not thread-safe,
    // but duplicate instances are stateless and harmless).
    private static ThingDocumentJsonUnmarshaller instance;

    public static ThingDocumentJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ThingDocumentJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
nratanova/java_pft
addressbook-web-tests/src/test/java/ru/pft/addressbook/tests/GroupCreationTests.java
3288
package ru.pft.addressbook.tests;

import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.thoughtworks.xstream.XStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import ru.pft.addressbook.model.GroupData;
import ru.pft.addressbook.model.Groups;

import java.io.*;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Group-creation tests driven by external XML/JSON data files.
 */
public class GroupCreationTests extends TestBase {

    /**
     * Reads the whole file into a single string (lines are concatenated
     * without separators, exactly as the original providers did).
     * Uses a StringBuilder instead of repeated String concatenation,
     * which was O(n^2) in the file size.
     *
     * @param filePath path to the data file
     * @return file content with line breaks removed
     * @throws IOException if the file cannot be read
     */
    private static String readFile(String filePath) throws IOException {
        try (BufferedReader reader = new BufferedReader(new FileReader(new File(filePath)))) {
            StringBuilder content = new StringBuilder();
            String line = reader.readLine();
            while (line != null) {
                content.append(line);
                line = reader.readLine();
            }
            return content.toString();
        }
    }

    /** Supplies valid groups deserialized from src/test/resources/groups.xml. */
    @DataProvider
    public Iterator<Object[]> validGroupsXML() throws IOException {
        String xml = readFile("src/test/resources/groups.xml");
        XStream xstream = new XStream();
        xstream.processAnnotations(GroupData.class);
        List<GroupData> groups = (List<GroupData>) xstream.fromXML(xml);
        return groups.stream().map((g) -> new Object[]{g}).collect(Collectors.toList()).iterator();
    }

    /** Supplies valid groups deserialized from src/test/resources/groups.json. */
    @DataProvider
    public Iterator<Object[]> validGroupsJson() throws IOException {
        String json = readFile("src/test/resources/groups.json");
        Gson gson = new Gson();
        // TypeToken is needed because List<GroupData>.class is not legal Java;
        // it captures the full generic type for Gson.
        List<GroupData> groups = gson.fromJson(json, new TypeToken<List<GroupData>>() {
        }.getType());
        return groups.stream().map((g) -> new Object[]{g}).collect(Collectors.toList()).iterator();
    }

    /** Creates a group and verifies both the count and the full group set. */
    @Test(dataProvider = "validGroupsJson")
    public void testGroupCreation(GroupData group) {
        app.goTo().groupPage();
        Groups before = app.db().groups();
        app.group().create(group);
        // Fast pre-check: compare counts before the slower full comparison below.
        assertThat(app.group().count(), equalTo(before.size() + 1));
        Groups after = app.db().groups();
        // The new group's id is assigned by the application, so take the
        // maximum id from the post-creation set.
        assertThat(after, equalTo
                (before.withAdded(group.withId(after.stream().mapToInt((g) -> g.getId()).max().getAsInt()))));
    }

    /**
     * Negative test: a group whose name contains an invalid character
     * must not be created.
     */
    @Test(enabled = false)
    public void testBadGroupCreation() {
        app.goTo().groupPage();
        Groups before = app.group().all();
        GroupData group = new GroupData().withName("Group2'"); // name with an invalid quote character
        app.group().create(group);
        // The group count must be unchanged after the failed creation.
        assertThat(app.group().count(), equalTo(before.size()));
        Groups after = app.group().all();
        assertThat(after, equalTo(before));
    }
}
apache-2.0
medicayun/medicayundicom
dcm4jboss-all/trunk/dcm4jboss-ejb/target/xdoclet/java/org/dcm4chex/archive/ejb/session/FixPatientAttributesSession.java
791
/*
 * Generated by XDoclet - Do not edit!
 */
package org.dcm4chex.archive.ejb.session;

/**
 * Session layer for FixPatientAttributes.
 * <p>
 * XDoclet-generated EJB 2.x session-bean wrapper: it extends the hand-written
 * bean implementation and supplies the {@link javax.ejb.SessionBean} lifecycle
 * callbacks the container requires. All business logic lives in
 * {@code FixPatientAttributesBean}; this class must stay in sync with the
 * generator and should not be edited by hand.
 * @xdoclet-generated at ${TODAY}
 * @copyright The XDoclet Team
 * @author XDoclet
 * @version 2.19.0-SNAPSHOT
 */
public class FixPatientAttributesSession
   extends org.dcm4chex.archive.ejb.session.FixPatientAttributesBean
   implements javax.ejb.SessionBean
{
   // Container callback after activation from passive state; no state to restore.
   public void ejbActivate()
   {
   }

   // Container callback before passivation; no state to release.
   public void ejbPassivate()
   {
   }

   // Delegates context injection to the bean implementation.
   public void setSessionContext(javax.ejb.SessionContext ctx)
   {
      super.setSessionContext(ctx);
   }

   // Clears the previously injected context via the bean implementation.
   public void unsetSessionContext()
   {
      super.unsetSessionContext();
   }

   // Container callback before the bean instance is removed; nothing to clean up.
   public void ejbRemove()
   {
   }

   // Container callback after instantiation; nothing to initialize.
   public void ejbCreate() throws javax.ejb.CreateException
   {
   }

}
apache-2.0
jrkinley/flume-interceptor-analytics
src/main/java/org/apache/flume/source/PeriodicEmissionSource.java
4139
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flume.source;

import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.Source;
import org.apache.flume.conf.Configurable;
import org.apache.flume.interceptor.AnalyticInterceptor;
import org.apache.flume.interceptor.Interceptor;
import org.apache.flume.tools.InterceptorRegistry;
import org.apache.log4j.Logger;

/**
 * A Flume Source that connects to an {@link AnalyticInterceptor} and periodically emits its results
 */
public class PeriodicEmissionSource extends AbstractSource implements EventDrivenSource,
    Configurable {

  private static final Logger LOG = Logger.getLogger(PeriodicEmissionSource.class);
  private static final String EMIT_FREQ_MS = "emitFreqMS";
  private static final String INTERCEPTOR_CLASS = "interceptorClass";
  private int emitFreqMS;
  private Class<?> interceptorClass;
  private ExecutorService service;

  /**
   * {@inheritDoc}
   * <p>
   * Requires {@code emitFreqMS} (emission period in milliseconds) and
   * {@code interceptorClass} (fully-qualified name of a class implementing
   * {@link AnalyticInterceptor}).
   */
  @Override
  public void configure(Context context) {
    this.emitFreqMS = context.getInteger(EMIT_FREQ_MS);
    try {
      this.interceptorClass = Class.forName(context.getString(INTERCEPTOR_CLASS));
    } catch (ClassNotFoundException e) {
      throw new IllegalArgumentException(e);
    }
    if (!AnalyticInterceptor.class.isAssignableFrom(interceptorClass)) {
      throw new IllegalArgumentException(
          "interceptorClass must implement the AnalyticInterceptor interface");
    }
    LOG.info(String.format(
        "Initializing PeriodicEmissionSource: emitFreqMS=%d, interceptorClass=%s", emitFreqMS,
        interceptorClass));
  }

  /** {@inheritDoc} */
  @Override
  public synchronized void start() {
    service = Executors.newSingleThreadExecutor();
    Runnable handler = new PeriodicHandler(this, this.emitFreqMS, this.interceptorClass);
    service.execute(handler);
    // Fix: advance the Flume lifecycle state; the original never called
    // super.start(), leaving the source reported as not started.
    super.start();
  }

  /**
   * Stops the emission thread and releases the executor.
   * <p>
   * Fix: the original never overrode stop(), so the single-thread executor
   * (and its never-terminating handler loop) leaked on shutdown.
   */
  @Override
  public synchronized void stop() {
    if (service != null) {
      // Interrupts the handler's sleep, causing its loop to exit.
      service.shutdownNow();
      service = null;
    }
    super.stop();
  }

  /**
   * Periodically collects every registered interceptor of the configured class
   * and forwards its stats events to the source's channel processor.
   */
  public static class PeriodicHandler implements Runnable {

    private Source source;
    private int emitFreqMS;
    private Class<? extends AnalyticInterceptor> interceptorClass;

    @SuppressWarnings("unchecked")
    public PeriodicHandler(Source source, int emitFreqMS, Class<?> interceptorClass) {
      this.source = source;
      this.emitFreqMS = emitFreqMS;
      this.interceptorClass = (Class<? extends AnalyticInterceptor>) interceptorClass;
    }

    /** {@inheritDoc} */
    @Override
    public void run() {
      // Fix: loop until interrupted (was while(true), which could never exit).
      while (!Thread.currentThread().isInterrupted()) {
        if (!sleep()) {
          break;
        }
        Set<? extends Interceptor> interceptors = InterceptorRegistry.getInstances(interceptorClass);
        if (LOG.isDebugEnabled()) {
          LOG.debug(String.format("Emitting results for %d interceptors", interceptors.size()));
        }
        for (Interceptor i : interceptors) {
          for (Event e : ((AnalyticInterceptor) i).getStatsEvents()) {
            if (LOG.isDebugEnabled()) {
              LOG.debug(String.format("Emit: Header: %s, Body: %s", e.getHeaders(),
                  new String(e.getBody())));
            }
            source.getChannelProcessor().processEvent(e);
          }
        }
      }
    }

    /**
     * Sleeps for one emission period.
     *
     * @return true if the full period elapsed; false if interrupted. Fix: the
     *         original swallowed InterruptedException (only logging it), which
     *         both lost the interrupt status and made the loop unstoppable;
     *         we now restore the interrupt flag and signal the caller to exit.
     */
    private boolean sleep() {
      try {
        Thread.sleep(emitFreqMS);
        return true;
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return false;
      }
    }
  }
}
apache-2.0
CChengz/dot.r
workspace/fits/jexi-1.0b-all/src/com/crackj2ee/jexi/core/command/CommandManager.java
3982
/*
 * Created on 2004-8-4
 * Author: Xuefeng, Copyright (C) 2004, Xuefeng.
 */
package com.crackj2ee.jexi.core.command;

import java.util.*;

import com.crackj2ee.jexi.core.*;
import com.crackj2ee.jexi.ui.*;

/**
 * Command manager manage all commands that can undo and redo.
 * <p>
 * Singleton. Executed commands that support undo are recorded in a list;
 * {@code current} is the index of the most recently executed (or redone)
 * command, and entries after {@code current} form the redo history.
 *
 * @author Xuefeng
 */
public final class CommandManager {

    private static CommandManager instance = new CommandManager();

    // Executed commands, oldest first. Raw List kept for source compatibility
    // with the rest of this (pre-generics era) code base.
    private List commands = new ArrayList();

    // Index of the current command (the one just executed); -1 when empty.
    private int current = (-1);

    // Prevent the client to create instance:
    private CommandManager() {}

    /**
     * Get the only instance of the CommandManager.
     *
     * @return The instance of the CommandManager.
     */
    public static CommandManager instance() {
        return instance;
    }

    /**
     * Records a command that was just executed.
     * <p>
     * Fix: entries beyond {@code current} (the redo history) are discarded
     * first — executing a new command after undo must invalidate redo,
     * otherwise the history is corrupted.
     *
     * @param cmd the executed command; must support undo
     */
    private void addToCommandList(Command cmd) {
        Assert.checkTrue(cmd.canUndo()); // must support undo.
        // Drop any commands that were undone and are now unreachable.
        commands.subList(current + 1, commands.size()).clear();
        commands.add(cmd);
        current++;
    }

    /**
     * Undo the last command. Caller must check {@link #canUndo()} first.
     */
    public void undo() {
        Assert.checkTrue(canUndo());
        // TODO: undo the last command (and decrement 'current'):
    }

    /**
     * Can redo the last undo command?
     *
     * @return True if can redo.
     */
    public boolean canRedo() {
        return current < commands.size() - 1;
    }

    /**
     * Redo the last cancelled command. Caller must check {@link #canRedo()} first.
     */
    public void redo() {
        Assert.checkTrue(canRedo());
        // TODO: redo the last cancelled command (and increment 'current'):
    }

    /**
     * Can undo the last command?
     *
     * @return True if can undo.
     */
    public boolean canUndo() {
        return current >= 0;
    }

    /**
     * Executes a command and records it in the undo history if it both
     * succeeded and supports undo. (Extracted: this pattern was duplicated
     * in every factory method below.)
     *
     * @param cmd the command to execute
     */
    private void executeAndRecord(Command cmd) {
        if (cmd.execute() && cmd.canUndo()) {
            addToCommandList(cmd);
        }
    }

    /**
     * Create a new insert command and execute it.
     *
     * @param doc The document object.
     * @param c The char of the key.
     */
    public void newInsertCommand(Document doc, char c) {
        executeAndRecord(new InsertCommand(doc, c));
    }

    /**
     * Create a new insert picture command and execute it.
     *
     * @param doc The document object.
     * @param filename The picture file name.
     */
    public void newInsertPictureCommand(Document doc, String filename) {
        executeAndRecord(new InsertPictureCommand(doc, filename));
    }

    /**
     * Create a new format command and execute it.
     *
     * @param doc The document object.
     * @param fontName The font name, or null if ignore.
     * @param fontSize The font size, or null if ignore.
     * @param bold The bold attribute, or null if ignore.
     * @param italic The italic attribute, or null if ignore.
     * @param underlined The underlined attribute, or null if ignore.
     * @param color The color, or null if ignore.
     */
    public void newFormatCommand(Document doc, String fontName, Integer fontSize,
            Boolean bold, Boolean italic, Boolean underlined, Color color) {
        executeAndRecord(new FormatCommand(doc, fontName, fontSize, bold, italic, underlined, color));
    }

    /**
     * Create a new delete command and execute it.
     *
     * @param doc The document object.
     */
    public void newDeleteCommand(Document doc) {
        executeAndRecord(new DeleteCommand(doc));
    }

    /**
     * Create a new break command and execute it.
     *
     * @param doc The document object.
     */
    public void newSplitCommand(Document doc) {
        executeAndRecord(new SplitCommand(doc));
    }
}
apache-2.0
azusa/hatunatu
hatunatu-util/src/main/java/jp/fieldnotes/hatunatu/util/log/Logger.java
13780
/* * Copyright 2004-2012 the Seasar Foundation and the Others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package jp.fieldnotes.hatunatu.util.log; import java.util.Map; import jp.fieldnotes.hatunatu.util.collection.ArrayUtil; import jp.fieldnotes.hatunatu.util.exception.SIllegalArgumentException; import jp.fieldnotes.hatunatu.util.message.MessageFormatter; import jp.fieldnotes.hatunatu.util.misc.Disposable; import jp.fieldnotes.hatunatu.util.misc.DisposableUtil; import static jp.fieldnotes.hatunatu.util.collection.CollectionsUtil.*; import static jp.fieldnotes.hatunatu.util.misc.AssertionUtil.*; /** * ログ出力を提供するクラスです。 * * @author higa */ public class Logger { /** * ログの出力レベルです。 */ public enum LogLevel { /** デバッグ */ DEBUG, /** 情報 */ INFO, /** 警告 */ WARN, /** エラー */ ERROR, /** 致命的 */ FATAL, } /** ロガーアダプタのファクトリ */ protected static final LoggerAdapterFactory factory = getLoggerAdapterFactory(); /** クラスをキーとするロガー のマップ */ protected static final Map<Class<?>, Logger> loggers = newHashMap(); /** 初期化済みを示すフラグ */ private static boolean initialized; /** ロガーアダプタ */ private final LoggerAdapter log; /** * {@link Logger}を返します。 * * @param clazz * ロガーのカテゴリとなるクラス。{@literal null}であってはいけません * @return {@link Logger} */ public static synchronized Logger getLogger(final Class<?> clazz) { assertArgumentNotNull("clazz", clazz); if (!initialized) { initialize(); } Logger logger = loggers.get(clazz); if (logger == null) { logger = new Logger(clazz); loggers.put(clazz, logger); } return logger; } /** * 
フォーマットされたメッセージ文字列を返します。 * * @param messageCode * メッセージコード。{@literal null}や空文字列であってはいけません * @param args * 引数 * @return フォーマットされたメッセージ文字列 */ public static LogMessage format(final String messageCode, final Object... args) { assertArgumentNotEmpty("messageCode", messageCode); final char messageType = messageCode.charAt(0); final String message = MessageFormatter.getSimpleMessage(messageCode, args); switch (messageType) { case 'D': return new LogMessage(LogLevel.DEBUG, message); case 'I': return new LogMessage(LogLevel.INFO, message); case 'W': return new LogMessage(LogLevel.WARN, message); case 'E': return new LogMessage(LogLevel.ERROR, message); case 'F': return new LogMessage(LogLevel.FATAL, message); default: throw new SIllegalArgumentException( "messageCode", "EUTL0009", ArrayUtil.asArray(messageCode, "messageCode : " + messageCode)); } } /** * {@link Logger}を初期化します。 */ protected static synchronized void initialize() { DisposableUtil.addFirst(new Disposable() { @Override public void dispose() { initialized = false; loggers.clear(); factory.releaseAll(); } }); initialized = true; } /** * ログアダプタのファクトリを返します。 * <p> * Commons Loggingが使える場合はCommons Loggingを利用するためのファクトリを返します。 * 使えない場合はjava.util.loggingロガーを利用するためのファクトリを返します。 * </p> * * @return ログアダプタのファクトリ */ protected static LoggerAdapterFactory getLoggerAdapterFactory() { try { Class.forName("org.apache.commons.logging.LogFactory"); return new JclLoggerAdapterFactory(); } catch (final Throwable ignore) { return new JulLoggerAdapterFactory(); } } /** * インスタンスを構築します。 * * @param clazz * ログ出力のカテゴリとなるクラス */ protected Logger(final Class<?> clazz) { log = factory.getLoggerAdapter(clazz); } /** * DEBUG情報が出力されるかどうかを返します。 * * @return DEBUG情報が出力されるかどうか */ public boolean isDebugEnabled() { return log.isDebugEnabled(); } /** * DEBUG情報を出力します。 * * @param message * メッセージ * @param throwable * 例外 */ public void debug(final Object message, final Throwable throwable) { if (isDebugEnabled()) { log.debug(toString(message), throwable); } } /** 
* DEBUG情報を出力します。 * * @param message * メッセージ */ public void debug(final Object message) { if (isDebugEnabled()) { log.debug(toString(message)); } } /** * INFO情報が出力されるかどうかを返します。 * * @return INFO情報が出力されるかどうか */ public boolean isInfoEnabled() { return log.isInfoEnabled(); } /** * INFO情報を出力します。 * * @param message * メッセージ * @param throwable * 例外 */ public void info(final Object message, final Throwable throwable) { if (isInfoEnabled()) { log.info(toString(message), throwable); } } /** * INFO情報を出力します。 * * @param message * メッセージ */ public void info(final Object message) { if (isInfoEnabled()) { log.info(toString(message)); } } /** * WARN情報を出力します。 * * @param message * メッセージ * @param throwable * 例外 */ public void warn(final Object message, final Throwable throwable) { log.warn(toString(message), throwable); } /** * WARN情報を出力します。 * * @param message * メッセージ */ public void warn(final Object message) { log.warn(message.toString()); } /** * ERROR情報を出力します。 * * @param message * メッセージ * @param throwable * 例外 */ public void error(final Object message, final Throwable throwable) { log.error(message.toString(), throwable); } /** * ERROR情報を出力します。 * * @param message * メッセージ */ public void error(final Object message) { log.error(message.toString()); } /** * FATAL情報を出力します。 * * @param message * メッセージ * @param throwable * 例外 */ public void fatal(final Object message, final Throwable throwable) { log.fatal(message.toString(), throwable); } /** * FATAL情報を出力します。 * * @param message * メッセージ */ public void fatal(final Object message) { log.fatal(message.toString()); } /** * ログを出力します。 * * @param throwable * 例外。{@literal null}であってはいけません */ public void log(final Throwable throwable) { assertArgumentNotNull("throwable", throwable); error(throwable.getMessage(), throwable); } /** * ログを出力します。 * * @param messageCode * メッセージコード。{@literal null}や空文字列であってはいけません * @param args * 引数 */ public void log(final String messageCode, final Object... 
args) { assertArgumentNotEmpty("messageCode", messageCode); log(format(messageCode, args)); } /** * ログを出力します。 * <p> * ログメッセージは{@link #format(String, Object...)}メソッドで作成します。 * {@link #format(String, Object...)}を{@literal static import}しておくと便利です。 * </p> * * <pre> * import static org.seasar.util.log.Logger.format; * * Logger logger = Logger.getLogger(Xxx.class); * logger.log(format("DXXX0000", arg1, arg2, arg3)); * </pre> * * @param logMessage * ログメッセージ。{@literal null}であってはいけません */ public void log(final LogMessage logMessage) { assertArgumentNotNull("logMessage", logMessage); log(logMessage, null); } /** * ログを出力します。 * <p> * ログメッセージは{@link #format(String, Object...)}メソッドで作成します。 * {@link #format(String, Object...)}を{@literal static import}しておくと便利です。 * </p> * * <pre> * import static org.seasar.util.log.Logger.format; * * Logger logger = Logger.getLogger(Xxx.class); * logger.log(format("DXXX0000", arg1, arg2, arg3), t); * </pre> * * @param logMessage * ログメッセージ。{@literal null}であってはいけません * @param throwable * 例外 */ public void log(final LogMessage logMessage, final Throwable throwable) { assertArgumentNotNull("logMessage", logMessage); final LogLevel level = logMessage.getLevel(); if (isEnabledFor(level)) { final String message = logMessage.getMessage(); switch (level) { case DEBUG: log.debug(message, throwable); break; case INFO: log.info(message, throwable); break; case WARN: log.warn(message, throwable); break; case ERROR: log.error(message, throwable); break; case FATAL: log.fatal(message, throwable); break; } } } /** * 指定のログレベルが有効なら{@literal true}を返します. 
* * @param logLevel * ログレベル * @return 指定のログレベルが有効なら{@literal true} */ protected boolean isEnabledFor(final LogLevel logLevel) { switch (logLevel) { case DEBUG: return log.isDebugEnabled(); case INFO: return log.isInfoEnabled(); case WARN: return log.isWarnEnabled(); case ERROR: return log.isErrorEnabled(); case FATAL: return log.isFatalEnabled(); default: throw new SIllegalArgumentException( "logLevel", "EUTL0009", ArrayUtil.asArray(logLevel, logLevel)); } } /** * メッセージオブジェクトの文字列表現を返します。 * * @param message * メッセージオブジェクト * @return メッセージオブジェクトの文字列表現 */ protected static String toString(final Object message) { if (message == null) { return "null"; } if (message instanceof String) { return (String) message; } return message.toString(); } /** * ログ出力するメッセージです。 * * @author koichik */ public static class LogMessage { /** ログレベル */ protected final LogLevel level; /** ログメッセージ */ protected final String message; /** * インスタンスを構築します。 * * @param level * ログレベル。{@literal null}であってはいけません * @param message * ログメッセージ */ public LogMessage(final LogLevel level, final String message) { assertArgumentNotNull("level", level); this.level = level; this.message = message; } /** * 出力レベルを返します。 * * @return 出力レベル */ public LogLevel getLevel() { return level; } /** * メッセージを返します。 * * @return メッセージ */ public String getMessage() { return message; } } }
apache-2.0
eltonsrc/biblioteca
biblioteca-webservice/src/main/java/br/org/am/biblioteca/rest/UsuarioController.java
2709
package br.org.am.biblioteca.rest; import java.util.HashSet; import java.util.Set; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.shiro.crypto.hash.Sha256Hash; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import br.org.am.biblioteca.model.Grupo; import br.org.am.biblioteca.model.Usuario; import br.org.am.biblioteca.rest.json.View; import br.org.am.biblioteca.service.GrupoService; @Controller @Path("usuario") public class UsuarioController extends BaseRestController { private GrupoService grupoService; @Autowired public void setGrupoService(GrupoService grupoService) { this.grupoService = grupoService; } @GET @Path("/") @Produces(JSON_UTF8) public Response listUsuario() { return Response.status(200) .entity(parseToJson(usuarioService.list(), View.Public.class)).build(); } @GET @Path("/{id}") @Produces(JSON_UTF8) public Response getUsuario(@PathParam("id") String id) { return Response.status(200) .entity(parseToJson(usuarioService.findById(id), View.Public.class)) .build(); } @POST @Path("/") @Consumes(JSON_UTF8) public Response createUsuario(Usuario usuario) { saveUsuario(usuario); return Response.status(200).entity("").build(); } @PUT @Path("/") @Consumes(JSON_UTF8) public Response updateUsuario(Usuario usuario) { saveUsuario(usuario); return Response.status(200).entity("").build(); } @DELETE @Path("/{id}") public Response deleteUsuario(@PathParam("id") String id) { if (!usuarioLogadoAdmin()) { return Response.status(401).entity("").build(); } if (getUsuarioLogado().getId().equals(id)) { return Response.status(401).entity("").build(); } Usuario usuario = usuarioService.findById(id); usuarioService.delete(usuario); return 
Response.status(200).entity("").build(); } private void saveUsuario(Usuario usuario) { Set<Grupo> grupoSet = new HashSet<Grupo>(); for (Grupo grupo : usuario.getGrupoSet()) { grupoSet.add(grupoService.findByNome(grupo.getNome())); } usuario.setGrupoSet(grupoSet); usuario.setSenha(new Sha256Hash(usuario.getSenha()).toHex()); usuarioService.save(usuario); } }
apache-2.0
splunk/splunk-shuttl
src/java/com/splunk/shuttl/archiver/filesystem/glacier/GlacierThawingException.java
863
// Copyright (C) 2011 Splunk Inc. // // Splunk Inc. licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.splunk.shuttl.archiver.filesystem.glacier; public class GlacierThawingException extends RuntimeException { private static final long serialVersionUID = 1L; public GlacierThawingException(String msg) { super(msg); } }
apache-2.0
izumin5210/Droidux
examples/todomvc/src/main/java/info/izumin/android/droidux/example/todomvc/action/ClearCompletedTodoAction.java
291
package info.izumin.android.droidux.example.todomvc.action; import info.izumin.android.droidux.Action; /** * Created by izumin on 11/5/15. */ public class ClearCompletedTodoAction implements Action { public static final String TAG = ClearCompletedTodoAction.class.getSimpleName(); }
apache-2.0
rhuss/kubernetes-client
kubernetes-tests/src/test/java/io/fabric8/kubernetes/client/mock/KubernetesClusterRoleCrudTest.java
9273
/** * Copyright (C) 2015 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.fabric8.kubernetes.client.mock; import io.fabric8.kubernetes.api.model.rbac.KubernetesClusterRole; import io.fabric8.kubernetes.api.model.rbac.KubernetesClusterRoleBuilder; import io.fabric8.kubernetes.api.model.rbac.KubernetesClusterRoleList; import io.fabric8.kubernetes.api.model.rbac.KubernetesPolicyRuleBuilder; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.server.mock.KubernetesServer; import org.junit.Rule; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; public class KubernetesClusterRoleCrudTest { private static final Logger logger = LoggerFactory.getLogger(KubernetesClusterRoleCrudTest.class); @Rule public KubernetesServer kubernetesServer = new KubernetesServer(true,true); @Test public void crudTest(){ KubernetesClient client = kubernetesServer.getClient(); KubernetesClusterRole kubernetesClusterRole = new KubernetesClusterRoleBuilder() .withNewMetadata() .withName("node-reader") .endMetadata() .addToRules(0, new KubernetesPolicyRuleBuilder() .addToApiGroups(0,"") .addToNonResourceURLs(0,"/healthz") .addToResourceNames(0,"my-node") .addToResources(0,"nodes") .addToVerbs(0, "get") .addToVerbs(1, "watch") .addToVerbs(2, "list") .build() ) .build(); //test of creation 
kubernetesClusterRole = client.rbac().kubernetesClusterRoles().create(kubernetesClusterRole); assertNotNull(kubernetesClusterRole); assertEquals("ClusterRole", kubernetesClusterRole.getKind()); assertEquals("rbac.authorization.k8s.io/v1", kubernetesClusterRole.getApiVersion()); assertNotNull(kubernetesClusterRole.getMetadata()); assertEquals("node-reader", kubernetesClusterRole.getMetadata().getName()); assertNotNull(kubernetesClusterRole.getRules()); assertEquals(1, kubernetesClusterRole.getRules().size()); assertNotNull(kubernetesClusterRole.getRules().get(0).getApiGroups()); assertEquals(1, kubernetesClusterRole.getRules().get(0).getApiGroups().size()); assertEquals("", kubernetesClusterRole.getRules().get(0).getApiGroups().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getNonResourceURLs()); assertEquals(1, kubernetesClusterRole.getRules().get(0).getNonResourceURLs().size()); assertEquals("/healthz", kubernetesClusterRole.getRules().get(0).getNonResourceURLs().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getResourceNames()); assertEquals(1, kubernetesClusterRole.getRules().get(0).getResourceNames().size()); assertEquals("my-node", kubernetesClusterRole.getRules().get(0).getResourceNames().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getResources()); assertEquals(1, kubernetesClusterRole.getRules().get(0).getResources().size()); assertEquals("nodes", kubernetesClusterRole.getRules().get(0).getResources().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getVerbs()); assertEquals(3, kubernetesClusterRole.getRules().get(0).getVerbs().size()); assertEquals("get", kubernetesClusterRole.getRules().get(0).getVerbs().get(0)); assertEquals("watch", kubernetesClusterRole.getRules().get(0).getVerbs().get(1)); assertEquals("list", kubernetesClusterRole.getRules().get(0).getVerbs().get(2)); //test of list KubernetesClusterRoleList kubernetesClusterRoleList = client.rbac().kubernetesClusterRoles().list(); 
assertNotNull(kubernetesClusterRoleList); assertNotNull(kubernetesClusterRoleList.getItems()); assertEquals(1, kubernetesClusterRoleList.getItems().size()); assertNotNull(kubernetesClusterRoleList.getItems().get(0)); assertEquals("ClusterRole", kubernetesClusterRoleList.getItems().get(0).getKind()); assertEquals("rbac.authorization.k8s.io/v1", kubernetesClusterRoleList.getItems().get(0).getApiVersion()); assertNotNull(kubernetesClusterRoleList.getItems().get(0).getMetadata()); assertEquals("node-reader", kubernetesClusterRoleList.getItems().get(0).getMetadata().getName()); assertNotNull(kubernetesClusterRoleList.getItems().get(0).getRules()); assertEquals(1, kubernetesClusterRoleList.getItems().get(0).getRules().size()); assertNotNull(kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getApiGroups()); assertEquals(1, kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getApiGroups().size()); assertEquals("", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getApiGroups().get(0)); assertNotNull(kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getNonResourceURLs()); assertEquals(1, kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getNonResourceURLs().size()); assertEquals("/healthz", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getNonResourceURLs().get(0)); assertNotNull(kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getResourceNames()); assertEquals(1, kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getResourceNames().size()); assertEquals("my-node", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getResourceNames().get(0)); assertNotNull(kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getResources()); assertEquals(1, kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getResources().size()); assertEquals("nodes", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getResources().get(0)); 
assertNotNull(kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getVerbs()); assertEquals(3, kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getVerbs().size()); assertEquals("get", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getVerbs().get(0)); assertEquals("watch", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getVerbs().get(1)); assertEquals("list", kubernetesClusterRoleList.getItems().get(0).getRules().get(0).getVerbs().get(2)); //test of updation kubernetesClusterRole = client.rbac().kubernetesClusterRoles().withName("node-reader").edit() .editRule(0).addToApiGroups(1, "extensions").endRule().done(); assertNotNull(kubernetesClusterRole); assertEquals("ClusterRole", kubernetesClusterRole.getKind()); assertEquals("rbac.authorization.k8s.io/v1", kubernetesClusterRole.getApiVersion()); assertNotNull(kubernetesClusterRole.getMetadata()); assertEquals("node-reader", kubernetesClusterRole.getMetadata().getName()); assertNotNull(kubernetesClusterRole.getRules()); assertEquals(1, kubernetesClusterRole.getRules().size()); assertNotNull(kubernetesClusterRole.getRules().get(0).getApiGroups()); assertEquals(2, kubernetesClusterRole.getRules().get(0).getApiGroups().size()); assertEquals("", kubernetesClusterRole.getRules().get(0).getApiGroups().get(0)); assertEquals("extensions", kubernetesClusterRole.getRules().get(0).getApiGroups().get(1)); assertNotNull(kubernetesClusterRole.getRules().get(0).getNonResourceURLs()); assertEquals(1, kubernetesClusterRole.getRules().get(0).getNonResourceURLs().size()); assertEquals("/healthz", kubernetesClusterRole.getRules().get(0).getNonResourceURLs().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getResourceNames()); assertEquals(1, kubernetesClusterRole.getRules().get(0).getResourceNames().size()); assertEquals("my-node", kubernetesClusterRole.getRules().get(0).getResourceNames().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getResources()); 
assertEquals(1, kubernetesClusterRole.getRules().get(0).getResources().size()); assertEquals("nodes", kubernetesClusterRole.getRules().get(0).getResources().get(0)); assertNotNull(kubernetesClusterRole.getRules().get(0).getVerbs()); assertEquals(3, kubernetesClusterRole.getRules().get(0).getVerbs().size()); assertEquals("get", kubernetesClusterRole.getRules().get(0).getVerbs().get(0)); assertEquals("watch", kubernetesClusterRole.getRules().get(0).getVerbs().get(1)); assertEquals("list", kubernetesClusterRole.getRules().get(0).getVerbs().get(2)); //test of deletion boolean deleted = client.rbac().kubernetesClusterRoles().delete(); assertTrue(deleted); kubernetesClusterRoleList = client.rbac().kubernetesClusterRoles().list(); assertEquals(0,kubernetesClusterRoleList.getItems().size()); } }
apache-2.0
laccore/coretools
coretools-graphics/src/main/java/org/andrill/coretools/graphics/driver/Java2DDriver.java
16821
/* * Copyright (c) Josh Reed, 2009. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.andrill.coretools.graphics.driver; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.FontMetrics; import java.awt.GradientPaint; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Point; import java.awt.Rectangle; import java.awt.Shape; import java.awt.TexturePaint; import java.awt.geom.AffineTransform; import java.awt.geom.Arc2D; import java.awt.geom.Ellipse2D; import java.awt.geom.GeneralPath; import java.awt.geom.Line2D; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Stack; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import javax.imageio.ImageIO; import javax.swing.JComponent; import org.andrill.coretools.Platform; import org.andrill.coretools.ResourceLoader; import org.andrill.coretools.graphics.fill.ColorFill; import org.andrill.coretools.graphics.fill.Fill; import org.andrill.coretools.graphics.fill.GradientFill; import org.andrill.coretools.graphics.fill.MultiFill; import org.andrill.coretools.graphics.fill.TextureFill; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An implementation of the Driver interface for Java2D. 
* * @author Josh Reed (jareed@andrill.org) */ public class Java2DDriver implements Driver { protected static class State { Fill fill = null; Color lineColor = null; LineStyle lineStyle = LineStyle.SOLID; int lineThickness = 1; } private static final Logger LOGGER = LoggerFactory.getLogger(Java2DDriver.class); private static final Rectangle ANCHOR = new Rectangle(0, 0, 32, 32); protected static final float LINE_DASH[] = { 18, 9 }; protected static final float LINE_DASH_DOT[] = { 9, 3, 3, 3 }; protected static final float LINE_DOT[] = { 3, 3 }; protected Fill background = new ColorFill(Color.white); protected Graphics2D g2d = null; protected final ImageCache cache; protected final ResourceLoader loader; protected Color lineColor = Color.black; protected LineStyle lineStyle = LineStyle.SOLID; protected int lineThickness = 1; protected Rectangle2D originalClip = null; protected AffineTransform originalTransform = null; protected boolean scaleStrokes = false; protected BasicStroke stroke = null; protected Stack<AffineTransform> transforms = new Stack<AffineTransform>(); protected Stack<State> stateStack = new Stack<State>(); protected JComponent interactive = null; protected BufferedImage imageError = null; protected BufferedImage imageLoading = null; /** * Create a new Java2DDriver. * * @param graphics * the Java2D graphics object. */ public Java2DDriver(final Graphics2D graphics) { this(graphics, false, null); } /** * Create a new Java2DDriver. * * @param graphics * the Java2D graphics object. * @param scaleStrokes * true if stroke widths should be scaled, false otherwise. * @param interactive * the component who the Graphics2D object belongs to, or null. 
*/ public Java2DDriver(final Graphics2D graphics, final boolean scaleStrokes, final JComponent interactive) { g2d = graphics; this.scaleStrokes = scaleStrokes; originalTransform = g2d.getTransform(); originalClip = g2d.getClipBounds(); transforms.push(new AffineTransform()); this.interactive = interactive; loader = Platform.getService(ResourceLoader.class); cache = Platform.getService(ImageCache.class); } private List<Paint> createPaints(final Fill fill, final Shape shape) { final List<Paint> paints = new ArrayList<Paint>(); // if no fill, then just fill with white if (fill == null) { paints.add(Color.white); return paints; } // create paints based on fill style switch (fill.getStyle()) { case COLOR: final ColorFill c = (ColorFill) fill; paints.add(c.getColor()); break; case GRADIENT: final GradientFill g = (GradientFill) fill; final Rectangle2D r = shape.getBounds2D(); Point2D p1, p2; if (g.isHorizontal()) { p1 = new Point2D.Double(r.getMinX(), r.getCenterY()); p2 = new Point2D.Double(r.getMaxX(), r.getCenterY()); } else { p1 = new Point2D.Double(r.getCenterX(), r.getMinY()); p2 = new Point2D.Double(r.getCenterX(), r.getMaxY()); } paints.add(new GradientPaint(p1, g.getStart(), p2, g.getEnd())); break; case TEXTURE: final TextureFill t = (TextureFill) fill; BufferedImage iimage; try { iimage = cache.get(t.getTexture(), 1, interactive).get(); if (iimage != null) { paints .add(new TexturePaint(iimage, new Rectangle(0, 0, iimage.getWidth(), iimage.getHeight()))); } else { LOGGER.error("Unable to load texture {}", t.getTexture().toExternalForm()); } } catch (InterruptedException e) { LOGGER.error("Unable to load texture {}: {}", t.getTexture().toExternalForm(), e.getMessage()); } catch (ExecutionException e) { LOGGER.error("Unable to load texture {}: {}", t.getTexture().toExternalForm(), e.getMessage()); } break; case MULTI: final MultiFill m = (MultiFill) fill; for (final Fill f : m.getFills()) { paints.addAll(createPaints(f, shape)); } } return paints; } private 
Shape createPolygon(final List<Point2D> points) { final GeneralPath path = new GeneralPath(GeneralPath.WIND_EVEN_ODD); boolean first = true; for (final Point2D p : points) { if (first) { path.moveTo((float) p.getX(), (float) p.getY()); } else { path.lineTo((float) p.getX(), (float) p.getY()); } first = false; } path.closePath(); return path; } private void createStroke() { switch (lineStyle) { case SOLID: stroke = new BasicStroke(lineThickness, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER, lineThickness, null, 0); break; case DASHED: stroke = new BasicStroke(lineThickness, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER, lineThickness, LINE_DASH, 0); break; case DOTTED: stroke = new BasicStroke(lineThickness, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER, lineThickness, LINE_DOT, 0); break; case DASH_DOTTED: stroke = new BasicStroke(lineThickness, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER, lineThickness, LINE_DASH_DOT, 0); break; default: stroke = new BasicStroke(lineThickness, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_MITER, lineThickness, null, 0); break; } } /** * {@inheritDoc} */ public void dispose() { if (g2d != null) { g2d.dispose(); g2d = null; } interactive = null; stateStack.clear(); transforms.clear(); originalClip = null; originalTransform = null; } private void draw(final Shape s) { prepareDraw(); if (scaleStrokes) { g2d.draw(s); } else { g2d.setTransform(originalTransform); g2d.draw(transforms.peek().createTransformedShape(s)); g2d.transform(transforms.peek()); } } /** * {@inheritDoc} */ public void drawArc(final Rectangle2D bounds, final double start, final double extent, final ArcStyle style) { draw(new Arc2D.Double(bounds, start, extent, getArcType(style))); } /** * {@inheritDoc} */ public void drawImage(final Point2D point, final URL image) { prepareDraw(); Future<BufferedImage> future = cache.get(image, 1, interactive); if ((interactive == null) || future.isDone()) { try { BufferedImage bi = future.get(); if (bi != null) { g2d.drawImage(bi, 
(int) point.getX(), (int) point.getY(), null); } else { drawImageError(new Rectangle2D.Double(point.getX(), point.getY(), ANCHOR.getWidth(), ANCHOR .getHeight()), image); } } catch (InterruptedException e) { LOGGER.error("drawImage() error", e); drawImageError( new Rectangle2D.Double(point.getX(), point.getY(), ANCHOR.getWidth(), ANCHOR.getHeight()), image); } catch (ExecutionException e) { LOGGER.error("drawImage() error", e); drawImageError( new Rectangle2D.Double(point.getX(), point.getY(), ANCHOR.getWidth(), ANCHOR.getHeight()), image); } } else { drawImageLoading(new Rectangle2D.Double(point.getX(), point.getY(), ANCHOR.getWidth(), ANCHOR.getHeight()), image); } } /** * {@inheritDoc} */ public void drawImage(final Rectangle2D rect, final URL image) { drawImageScaled(rect, image, true); } /** * {@inheritDoc} */ public void embedImage(final Rectangle2D rect, final URL image) { drawImageScaled(rect, image, false); } // Internal draw method to accommodate drawImage and embedImage protected void drawImageScaled(final Rectangle2D rect, final URL image, final boolean scaleToRect) { prepareDraw(); Future<BufferedImage> future = null; if (scaleToRect) { future = cache.get(image, new Dimension((int) rect.getWidth(), (int) rect.getHeight()), interactive); } else { future = cache.get(image, interactive); } if ((interactive == null) || future.isDone()) { try { BufferedImage bi = future.get(); if (bi != null) { g2d.drawImage(bi, (int) rect.getX(), (int) rect.getY(), (int) rect.getWidth(), (int) rect .getHeight(), null); } else { drawImageError(rect, image); } } catch (InterruptedException e) { LOGGER.error("drawImage() error", e); drawImageError(rect, image); } catch (ExecutionException e) { LOGGER.error("drawImage() error", e); drawImageError(rect, image); } } else { drawImageLoading(rect, image); } } protected void drawImageError(final Rectangle2D r, final URL orig) { if (imageError == null) { try { imageError = 
ImageIO.read(loader.getResource("rsrc:org/andrill/coretools/graphics/driver/error.png")); } catch (IOException e) { LOGGER.error("Unable to load 'error.png'"); } } g2d.setPaint(new TexturePaint(imageError, ANCHOR)); g2d.fill(r); } protected void drawImageLoading(final Rectangle2D r, final URL url) { // try a placeholder image first Future<BufferedImage> placeholder = cache.getPlaceholderImage(url, new Dimension((int) r.getWidth(), (int) r .getHeight())); if (placeholder == null) { internalDrawImageLoading(r); } else { try { BufferedImage image = placeholder.get(); if (image == null) { internalDrawImageLoading(r); } else { g2d.drawImage(image, (int) r.getX(), (int) r.getY(), (int) r.getWidth(), (int) r.getHeight(), null); } } catch (InterruptedException e) { LOGGER.error("drawImageLoading() error", e); internalDrawImageLoading(r); } catch (ExecutionException e) { LOGGER.error("drawImageLoading() error", e); internalDrawImageLoading(r); } } } /** * {@inheritDoc} */ public void drawLine(final Point2D start, final Point2D end) { draw(new Line2D.Double(start, end)); } /** * {@inheritDoc} */ public void drawOval(final Rectangle2D bounds) { draw(new Ellipse2D.Double(bounds.getX(), bounds.getY(), bounds.getWidth(), bounds.getHeight())); } /** * {@inheritDoc} */ public void drawPoint(final Point2D point) { prepareDraw(); draw(new Line2D.Double(point, point)); } /** * {@inheritDoc} */ public void drawPolygon(final List<Point2D> points) { draw(createPolygon(points)); } /** * {@inheritDoc} */ public void drawRectangle(final Rectangle2D rect) { draw(rect); } /** * {@inheritDoc} */ public void drawString(final Point2D point, final Font font, final String string) { prepareDraw(); g2d.setFont(font); g2d.drawString(string, (int) point.getX(), (int) point.getY() + font.getSize()); } /** * {@inheritDoc} */ public void drawStringRotated(Point2D point, Font font, String string, double theta) { AffineTransform oldTransform = g2d.getTransform(); g2d.translate(point.getX(), point.getY()); 
g2d.rotate(theta); drawString(new Point(0, 0), font, string); g2d.setTransform(oldTransform); } private void fill(final Shape s) { for (final Paint p : createPaints(background, s)) { g2d.setPaint(p); g2d.fill(s); } } /** * {@inheritDoc} */ public void fillArc(final Rectangle2D bounds, final double start, final double extent, final ArcStyle style) { fill(new Arc2D.Double(bounds, start, extent, getArcType(style))); } /** * {@inheritDoc} */ public void fillOval(final Rectangle2D bounds) { fill(new Ellipse2D.Double(bounds.getX(), bounds.getY(), bounds.getWidth(), bounds.getHeight())); } /** * {@inheritDoc} */ public void fillPolygon(final List<Point2D> points) { fill(createPolygon(points)); } /** * {@inheritDoc} */ public void fillRectangle(final Rectangle2D rect) { fill(rect); } private int getArcType(final ArcStyle style) { switch (style) { case OPEN: return Arc2D.OPEN; case CLOSED: return Arc2D.CHORD; case SECTOR: return Arc2D.PIE; default: return -1; } } /** * {@inheritDoc} */ public Rectangle2D getClip() { return g2d.getClipBounds(); } /** * {@inheritDoc} */ public Fill getFill() { return background; } /** * {@inheritDoc} */ public Color getLineColor() { return lineColor; } /** * {@inheritDoc} */ public LineStyle getLineStyle() { return lineStyle; } /** * {@inheritDoc} */ public int getLineThickness() { return lineThickness; } /** * {@inheritDoc} */ public Rectangle2D getStringBounds(final Font font, final String string) { final FontMetrics metrics = g2d.getFontMetrics(font); return metrics.getStringBounds(string, g2d); } protected void internalDrawImageLoading(final Rectangle2D r) { if (imageLoading == null) { try { imageLoading = ImageIO.read(loader .getResource("rsrc:org/andrill/coretools/graphics/driver/loading.png")); } catch (IOException e) { LOGGER.error("Unable to load 'loading.png'"); } } g2d.setPaint(new TexturePaint(imageLoading, ANCHOR)); g2d.fill(r); } /** * Restores the previous state of the context. 
*/ public void popState() { if (!stateStack.isEmpty()) { State state = stateStack.pop(); setFill(state.fill); setLineColor(state.lineColor); setLineStyle(state.lineStyle); setLineThickness(state.lineThickness); } } /** * {@inheritDoc} */ public void popTransform() { if (!transforms.isEmpty()) { transforms.pop(); g2d.setTransform(originalTransform); if (!transforms.isEmpty()) { g2d.transform(transforms.peek()); } } } private void prepareDraw() { if (stroke == null) { createStroke(); } g2d.setStroke(stroke); g2d.setPaint(lineColor); } /** * Saves the current state of the context. */ public void pushState() { State state = new State(); state.fill = getFill(); state.lineColor = getLineColor(); state.lineStyle = getLineStyle(); state.lineThickness = getLineThickness(); stateStack.push(state); } /** * {@inheritDoc} */ public void pushTransform(final AffineTransform transform) { final AffineTransform tx = new AffineTransform(transforms.peek()); tx.concatenate(transform); transforms.push(tx); g2d.transform(transform); } /** * {@inheritDoc} */ public void setClip(final Rectangle2D r) { if (r == null) { g2d.setTransform(originalTransform); g2d.setClip(originalClip); g2d.transform(transforms.peek()); } else if (originalClip != null) { g2d.setTransform(originalTransform); g2d.setClip(transforms.peek().createTransformedShape(r).getBounds2D().createIntersection(originalClip)); g2d.transform(transforms.peek()); } else { g2d.setClip(r); } } /** * {@inheritDoc} */ public void setFill(final Fill fill) { background = fill; } /** * {@inheritDoc} */ public void setLineColor(final Color color) { lineColor = color; stroke = null; } /** * {@inheritDoc} */ public void setLineStyle(final LineStyle style) { lineStyle = style; stroke = null; } /** * {@inheritDoc} */ public void setLineThickness(final int thickness) { lineThickness = thickness; stroke = null; } }
apache-2.0
arnohaase/java-macros
java-macro/src/main/java/com/ajjpj/macro/impl/GenericMacroAnnotationProcessorImpl.java
1637
package com.ajjpj.macro.impl; import javax.annotation.processing.*; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.TypeElement; import java.io.IOException; import java.net.URL; import java.util.Enumeration; import java.util.ServiceLoader; import java.util.Set; /** * @author arno */ @SupportedAnnotationTypes("*") @SupportedSourceVersion(SourceVersion.RELEASE_8) public class GenericMacroAnnotationProcessorImpl extends AbstractProcessor { private MacroProcessor macroProcessor; @Override public synchronized void init (ProcessingEnvironment env) { super.init (env); // actual processors are presumably available via the same class loader that loaded this class for (MacroProcessor candidate: ServiceLoader.load (MacroProcessor.class, getClass ().getClassLoader ())) { if (candidate.canHandle(env)) { this.macroProcessor = candidate; this.macroProcessor.init (env); return; } } throw new RuntimeException ("no macro processor for this compiler"); //TODO error handling } @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if(roundEnv.processingOver()) { return false; } for(Element rootEl: roundEnv.getRootElements()) { if(rootEl.getKind() != ElementKind.CLASS) { continue; } macroProcessor.process(rootEl); } return false; } }
apache-2.0
rmpestano/arquillian-extension-persistence
int-tests/src/test/java/org/jboss/arquillian/integration/persistence/testextension/PersistenceExtensionTesterArchiveAppender.java
1905
/* * JBoss, Home of Professional Open Source * Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors * as indicated by the @authors tag. All rights reserved. * See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.arquillian.integration.persistence.testextension; import org.jboss.arquillian.container.test.spi.RemoteLoadableExtension; import org.jboss.arquillian.container.test.spi.client.deployment.CachedAuxilliaryArchiveAppender; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.Filters; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; public class PersistenceExtensionTesterArchiveAppender extends CachedAuxilliaryArchiveAppender { @Override protected Archive<?> buildArchive() { return ShrinkWrap.create(JavaArchive.class, "arquillian-persistence-tester.jar") .addPackages(true, Filters.exclude(PersistenceExtensionTesterArchiveAppender.class, PersistenceExtensionTester.class), this.getClass().getPackage()) .addPackages(true, "org.assertj.core") .addAsServiceProvider(RemoteLoadableExtension.class, PersistenceExtensionRemoteTester.class); } }
apache-2.0
android-art-intel/Nougat
art-extension/opttests/src/OptimizationTests/ConstantCalculationSinking/OperationNotSupportedIntLong_002/Main.java
1771
/* * Copyright (C) 2016 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package OptimizationTests.ConstantCalculationSinking.OperationNotSupportedIntLong_002; /** * * Expected result: no sinking, operation is not supported * **/ public class Main { final int iterations = 5; public int testLoopInt(int n) { int testVar = 2; int additionalVar = 0; for (int i = 0; i < iterations; i++) { testVar >>= 2; additionalVar += (i*2)%5 + i%3 + i%4 + n; } testVar += additionalVar; return testVar; } public long testLoopLong(long n) { long testVar = 2; long additionalVar = 0; for (long i = 0; i < iterations; i++) { testVar >>= 2; additionalVar += (i*2)%5 + i%3 + i%4 + n; } testVar += additionalVar; return testVar; } public static void main(String[] args) { System.out.println(new Main().testLoopInt(10)); System.out.println(new Main().testLoopLong(10)); } }
apache-2.0
ceph/rados-java
src/main/java/com/ceph/rbd/Rbd.java
11354
/* * RADOS Java - Java bindings for librados and librbd * * Copyright (C) 2013 Wido den Hollander <wido@42on.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package com.ceph.rbd; import com.ceph.rados.IoCTX; import com.sun.jna.ptr.IntByReference; import com.sun.jna.ptr.LongByReference; import com.sun.jna.Pointer; import com.sun.jna.Memory; import com.sun.jna.Native; import static com.ceph.rbd.Library.rbd; public class Rbd { Pointer io; /** * Get the librbd version * * @return a int array with the minor, major and extra version */ public static int[] getVersion() { IntByReference minor = new IntByReference(); IntByReference major = new IntByReference(); IntByReference extra = new IntByReference(); rbd.rbd_version(minor, major, extra); int[] returnValue = {minor.getValue(), major.getValue(), extra.getValue()}; return returnValue; } public Rbd(IoCTX io) { this.io = io.getPointer(); } /** * Create a new RBD image * * @param name * The name of the new image * @param size * The size of the new image in bytes * @param order * Object/block size, as a power of two * @throws RbdException */ public void create(String name, long size, int order) throws RbdException { IntByReference orderRef = new IntByReference(order); int r = rbd.rbd_create(this.io, name, size, orderRef); if (r < 0) { throw new RbdException("Failed to create image " + name, r); } } /** * Create a new RBD image * * @param name * The name of the new image * @param size * The size of the new image in bytes * @throws 
RbdException */ public void create(String name, long size) throws RbdException { this.create(name, size, 0); } /** * Create a new RBD v2 image * * @param name * The name of the new image * @param size * The size of the new image in bytes * @param features * Initial feature bits * @param order * Object/block size, as a power of two * @throws RbdException */ public void create(String name, long size, long features, int order) throws RbdException { IntByReference orderRef = new IntByReference(order); int r = rbd.rbd_create2(this.io, name, size, features, orderRef); if (r < 0) { throw new RbdException("Failed to create image " + name, r); } } /** * Create a new RBD v2 image * * @param name * The name of the new image * @param size * The size of the new image in bytes * @param features * Initial feature bits * @throws RbdException */ public void create(String name, long size, long features) throws RbdException { this.create(name, size, features, 0); } /** * Create a new RBD v2 image * * @param name * The name of the new image * @param size * The size of the new image in bytes * @param features * Initial feature bits * @param order * Object/block size, as a power of two * @param stripe_unit * Stripe unit size, in bytes. 
* @param stripe_count * Number of objects to stripe over before looping * @throws RbdException */ public void create(String name, long size, long features, int order, long stripe_unit, long stripe_count) throws RbdException { IntByReference orderRef = new IntByReference(order); int r = rbd.rbd_create3(this.io, name, size, features, orderRef, stripe_unit, stripe_count); if (r < 0) { throw new RbdException("Failed to create image " + name, r); } } /** * Remove a RBD image * * @param name * The name of the image * @throws RbdException */ public void remove(String name) throws RbdException { int r = rbd.rbd_remove(this.io, name); if (r < 0) { throw new RbdException("Failed to remove image " + name, r); } } /** * Rename a RBD image * * @param srcName * The source name * @param destName * The new name for the image * @throws RbdException */ public void rename(String srcName, String destName) throws RbdException { int r = rbd.rbd_rename(this.io, srcName, destName); if (r < 0) { throw new RbdException("Failed to rename image " + srcName + " to " + destName, r); } } /** * List all RBD images in this pool * * @return String[] * @throws RbdException */ public String[] list() throws RbdException { int initialBufferSize = 1024; return list(initialBufferSize); } /** * List all RBD images in this pool * * @param initialBufferSize * Initial size of the byte buffer holding image names * @return String[] * Array of image names in the pool * @throws RbdException */ public String[] list(int initialBufferSize) throws RbdException { LongByReference sizePointer = new LongByReference(initialBufferSize); byte[] names = new byte[initialBufferSize]; int r = rbd.rbd_list(this.io, names, sizePointer); if (r < 0 && r != -34) { throw new RbdException("Failed to list RBD images", r); } // -34 (-ERANGE) is returned if the byte buffers are not big enough if (r == -34 || sizePointer.getValue() > initialBufferSize) { names = new byte[(int) sizePointer.getValue()]; r = rbd.rbd_list(this.io, names, 
sizePointer); if (r < 0) { throw new RbdException("Failed to list RBD images", r); } } return new String(names).split("\0"); } /** * Open a RBD image * * @param name * The name of the image you want to open * @throws RbdException * @return RbdImage */ public RbdImage open(String name) throws RbdException { return this.open(name, null); } /** * Open a RBD image with a specific snapshot * * @param name * The name of the image you want to open * @param snapName * The name of the snapshot to open * @throws RbdException * @return RbdImage */ public RbdImage open(String name, String snapName) throws RbdException { Pointer p = new Memory(Native.POINTER_SIZE); int r = rbd.rbd_open(this.io, name, p, snapName); if (r < 0) { throw new RbdException("Failed to open image " + name, r); } return new RbdImage(p, name); } /** * Open a RBD image read only * * @param name * The name of the image you want to open * @throws RbdException * @return RbdImage */ public RbdImage openReadOnly(String name) throws RbdException { return this.openReadOnly(name, null); } /** * Open a RBD image with a specific snapshot read only * * @param name * The name of the image you want to open * @param snapName * The name of the snapshot to open * @throws RbdException * @return RbdImage */ public RbdImage openReadOnly(String name, String snapName) throws RbdException { Pointer p = new Memory(Native.POINTER_SIZE); int r = rbd.rbd_open_read_only(this.io, name, p, snapName); if (r < 0) { throw new RbdException("Failed to open image " + name, r); } return new RbdImage(p, name); } /** * Close a RBD image * * @param image * The RbdImage object * @throws RbdException */ public void close(RbdImage image) throws RbdException { int r = rbd.rbd_close(image.getPointer()); if (r < 0) { throw new RbdException("Failed to close image", r); } } /** * Clone a RBD image * * @param parentImage * The name of the parent image * @param parentSnap * The snapshot of the parent image (has to be protected) * @param childIo * The 
IoCTX for the child image * @param childName * The name for the child image * @param features * The RBD features * @param order * Object/block size, as a power of two * @param stripe_unit * Stripe unit size, in bytes. * @param stripe_count * Number of objects to stripe over before looping * @throws RbdException */ public void clone(String parentImage, String parentSnap, IoCTX childIo, String childName, long features, int order, long stripe_unit, long stripe_count) throws RbdException { IntByReference orderRef = new IntByReference(order); int r = rbd.rbd_clone2(this.io, parentImage, parentSnap, childIo.getPointer(), childName, features, orderRef, stripe_unit, stripe_count); if (r < 0) { throw new RbdException("Failed to clone image " + parentImage + "@" + parentSnap + " to " + childName, r); } } /** * Clone a RBD image * * @param parentImage * The name of the parent image * @param parentSnap * The snapshot of the parent image (has to be protected) * @param childIo * The IoCTX for the child image * @param childName * The name for the child image * @param features * The RBD features * @param order * Object/block size, as a power of two * @throws RbdException */ public void clone(String parentImage, String parentSnap, IoCTX childIo, String childName, long features, int order) throws RbdException { IntByReference orderRef = new IntByReference(order); int r = rbd.rbd_clone(this.io, parentImage, parentSnap, childIo.getPointer(), childName, features, orderRef); if (r < 0) { throw new RbdException("Failed to clone image " + parentImage + "@" + parentSnap + " to " + childName, r); } } /** * Copy a RBD image * * @param sourceImage * The source RbdImage * @param destImage * The destination RbdImage * @throws RbdException */ public void copy(RbdImage sourceImage, RbdImage destImage) throws RbdException { int r = rbd.rbd_copy2(sourceImage.getPointer(), destImage.getPointer()); if (r < 0) { throw new RbdException("Failed to copy image " + sourceImage.getName() + " to " + 
destImage.getName(), r); } } }
apache-2.0
cinderella/incubator-cloudstack
awsapi/src/com/amazon/ec2/DescribeRegionsResponse.java
15997
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. /** * DescribeRegionsResponse.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT) */ package com.amazon.ec2; /** * DescribeRegionsResponse bean class */ public class DescribeRegionsResponse implements org.apache.axis2.databinding.ADBBean{ public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName( "http://ec2.amazonaws.com/doc/2009-10-31/", "DescribeRegionsResponse", "ns1"); private static java.lang.String generatePrefix(java.lang.String namespace) { if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){ return "ns1"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * field for DescribeRegionsResponse */ protected com.amazon.ec2.DescribeRegionsResponseType localDescribeRegionsResponse ; /** * Auto generated getter method * @return com.amazon.ec2.DescribeRegionsResponseType */ public com.amazon.ec2.DescribeRegionsResponseType getDescribeRegionsResponse(){ return localDescribeRegionsResponse; } /** * Auto generated setter method * @param param DescribeRegionsResponse */ public void setDescribeRegionsResponse(com.amazon.ec2.DescribeRegionsResponseType 
param){ this.localDescribeRegionsResponse=param; } /** * isReaderMTOMAware * @return true if the reader supports MTOM */ public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) { boolean isReaderMTOMAware = false; try{ isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE)); }catch(java.lang.IllegalArgumentException e){ isReaderMTOMAware = false; } return isReaderMTOMAware; } /** * * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getOMElement ( final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{ org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME){ public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { DescribeRegionsResponse.this.serialize(MY_QNAME,factory,xmlWriter); } }; return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl( MY_QNAME,factory,dataSource); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ serialize(parentQName,factory,xmlWriter,false); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ //We can safely assume an element has only one type associated with it if (localDescribeRegionsResponse==null){ throw new org.apache.axis2.databinding.ADBException("Property cannot be null!"); } 
localDescribeRegionsResponse.serialize(MY_QNAME,factory,xmlWriter); } /** * Util method to write an attribute with the ns prefix */ private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (xmlWriter.getPrefix(namespace) == null) { xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace,attName,attValue); } /** * Util method to write an attribute without the ns prefix */ private void writeAttribute(java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (namespace.equals("")) { xmlWriter.writeAttribute(attName,attValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace,attName,attValue); } } /** * Util method to write an attribute without the ns prefix */ private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName, javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } } /** * method to handle Qnames */ private void writeQName(javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter 
xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String namespaceURI = qname.getNamespaceURI(); if (namespaceURI != null) { java.lang.String prefix = xmlWriter.getPrefix(namespaceURI); if (prefix == null) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } else { // i.e this is the default namespace xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } else { xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } private void writeQNames(javax.xml.namespace.QName[] qnames, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (qnames != null) { // we have to store this data until last moment since it is not possible to write any // namespace data after writing the charactor data java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer(); java.lang.String namespaceURI = null; java.lang.String prefix = null; for (int i = 0; i < qnames.length; i++) { if (i > 0) { stringToWrite.append(" "); } namespaceURI = qnames[i].getNamespaceURI(); if (namespaceURI != null) { prefix = xmlWriter.getPrefix(namespaceURI); if ((prefix == null) || (prefix.length() == 0)) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } 
xmlWriter.writeCharacters(stringToWrite.toString()); } } /** * Register a namespace prefix */ private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException { java.lang.String prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) { prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } return prefix; } /** * databinding method to get an XML representation of this object * */ public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException{ //We can safely assume an element has only one type associated with it return localDescribeRegionsResponse.getPullParser(MY_QNAME); } /** * Factory class that keeps the parse method */ public static class Factory{ /** * static method to create the object * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element * Postcondition: If this object is an element, the reader is positioned at its end element * If this object is a complex type, the reader is positioned at the end element of its outer element */ public static DescribeRegionsResponse parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{ DescribeRegionsResponse object = new DescribeRegionsResponse(); int event; java.lang.String nillableValue = null; java.lang.String prefix =""; java.lang.String namespaceuri =""; try { while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); // Note all attributes that were handled. 
Used to differ normal attributes // from anyAttributes. java.util.Vector handledAttributes = new java.util.Vector(); while(!reader.isEndElement()) { if (reader.isStartElement() ){ if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","DescribeRegionsResponse").equals(reader.getName())){ object.setDescribeRegionsResponse(com.amazon.ec2.DescribeRegionsResponseType.Factory.parse(reader)); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } } else { reader.next(); } } // end of while loop } catch (javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }//end of factory class }
apache-2.0
fluentxml4j/fluentxml4j
core/src/test/java/com/github/fluentxml4j/examples/validate/ValidateExample.java
1076
package com.github.fluentxml4j.examples.validate;

import com.github.fluentxml4j.junit.XmlSource;
import com.github.fluentxml4j.validate.ValidationResult;
import org.junit.Rule;
import org.junit.Test;

import static com.github.fluentxml4j.FluentXml.validate;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Example test demonstrating XSD validation of an XML document via the
 * fluent {@code validate(...)} entry point.
 */
public class ValidateExample
{
	// Inline document under test: three <person> elements inside <persons>.
	@Rule
	public XmlSource xmlSource = XmlSource.withData("<data><persons>" //
			+ "<person id='p1'><name>meiser</name><firstname>hans</firstname></person>" //
			+ "<person id='p2'><name>moeckl</name><firstname>joachim</firstname></person>" //
			+ "<person id='p3'><name>kerkeling</name><firstname>hans-peter</firstname></person>" //
			+ "</persons></data>");

	// Schema loaded from the classpath resource "schema.xsd" next to this class.
	@Rule
	public XmlSource schemaSource = XmlSource.withDataFrom(ValidateExample.class, "schema.xsd");

	@Test
	public void validateDocAgainstDoc()
	{
		// Validate the document against the schema and expect no errors.
		ValidationResult result = validate(xmlSource.asDocument())
				.againstSchema(schemaSource.asDocument())
				.getResult();

		assertThat(result.hasErrors(), is(false));
	}
}
apache-2.0
frtu/SimpleToolbox
SimpleScanner/src/main/java/com/github/frtu/simple/scan/filters/ExtensionFilenameFilter.java
1354
package com.github.frtu.simple.scan.filters;

import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

import org.apache.commons.io.FilenameUtils;

/**
 * {@link FileFilter} that accepts files whose extension (case-insensitive) is in a
 * configured whitelist. Directories are always accepted so that recursive scanners
 * can descend into them.
 */
public class ExtensionFilenameFilter implements FileFilter {
    private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExtensionFilenameFilter.class);

    // Accepted extensions, stored lower-case without a leading dot.
    // A Set gives O(1) membership tests instead of a linear scan per file.
    private final Set<String> acceptedFileExtension = new HashSet<String>();

    /** Creates a filter with an empty whitelist; add extensions with {@link #addAcceptedFileExtension}. */
    public ExtensionFilenameFilter() {
        super();
    }

    /**
     * Creates a filter accepting the given extensions.
     *
     * @param extensions extensions to accept, with or without a leading dot (e.g. "txt" or ".txt")
     */
    public ExtensionFilenameFilter(String... extensions) {
        super();
        addAcceptedFileExtension(extensions);
    }

    /**
     * Registers additional accepted extensions. A value containing a dot
     * (e.g. ".txt" or "name.txt") is reduced to the text after the last dot
     * before being stored.
     *
     * @param extensions extensions to accept, with or without a leading dot
     */
    public void addAcceptedFileExtension(String... extensions) {
        for (String extension : extensions) {
            String filteredExtension = extension;
            if (extension.contains(".")) {
                filteredExtension = FilenameUtils.getExtension(extension);
            }
            // Locale.ROOT avoids locale-dependent lower-casing (e.g. Turkish dotless i).
            acceptedFileExtension.add(filteredExtension.toLowerCase(Locale.ROOT));
        }
    }

    /**
     * @return {@code true} for directories (so callers can recurse into them) and for
     *         files whose extension is in the whitelist; {@code false} otherwise
     */
    @Override
    public boolean accept(File file) {
        if (file.isDirectory()) {
            // Always accept directories so callers can recurse into them.
            return true;
        }
        String filename = file.getName();
        String extension = FilenameUtils.getExtension(filename).toLowerCase(Locale.ROOT);
        logger.debug("Found extension={} for filename={}", extension, filename);
        return acceptedFileExtension.contains(extension);
    }
}
apache-2.0
facebook/screenshot-tests-for-android
core/src/main/java/com/facebook/testing/screenshot/internal/TestNameDetector.java
3456
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.testing.screenshot.internal;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import javax.annotation.Nullable;

/** Detect the test name and class that is being run currently. */
public class TestNameDetector {
  private static final String JUNIT_TEST_CASE = "junit.framework.TestCase";
  private static final String JUNIT_RUN_WITH = "org.junit.runner.RunWith";
  private static final String JUNIT_TEST = "org.junit.Test";
  private static final String UNKNOWN = "unknown";

  private TestNameDetector() {}

  /**
   * Get the current test class in a standard JUnit3 or JUnit4 test, or "unknown" if we couldn't
   * detect it.
   */
  public static String getTestClass() {
    StackTraceElement element = getFirstTestElement(new Throwable().getStackTrace());
    if (element == null) {
      return UNKNOWN;
    }
    return element.getClassName();
  }

  /**
   * Get the current test name in a standard JUnit3 or JUnit4 test, or "unknown" if we couldn't
   * detect it.
   */
  public static String getTestName() {
    StackTraceElement[] stack = new Throwable().getStackTrace();
    StackTraceElement testElement = getFirstTestElement(stack);
    if (testElement == null) {
      return UNKNOWN;
    }
    String methodName = testElement.getMethodName();
    // Walk the whole stack and keep the LAST (outermost) frame in the same class,
    // so a test method that calls through helper methods is still reported by
    // its own name rather than the helper's.
    for (StackTraceElement element : stack) {
      if (testElement.getClassName().equals(element.getClassName())) {
        methodName = element.getMethodName();
      }
    }
    return methodName;
  }

  /**
   * Returns the innermost stack frame that belongs to a test (JUnit3 TestCase subclass,
   * {@code @RunWith}-annotated class, or {@code @Test}-annotated method), or {@code null}
   * if no frame qualifies.
   */
  private static @Nullable StackTraceElement getFirstTestElement(StackTraceElement[] stack) {
    for (StackTraceElement element : stack) {
      try {
        Class<?> clazz = Class.forName(element.getClassName());
        Method method = clazz.getMethod(element.getMethodName());
        if (isTestClass(clazz) || isTestMethod(method)) {
          return element;
        }
      } catch (NoSuchMethodException | ClassNotFoundException ignored) {
        // Frame does not map to a loadable class or a public no-arg method
        // (e.g. synthetic/native frames); not actionable, move to the next element.
      }
    }
    return null;
  }

  /** True if the class is a JUnit3 TestCase or carries @RunWith (checked up the hierarchy). */
  private static boolean isTestClass(Class<?> clazz) {
    return clazz != null
        && (JUNIT_TEST_CASE.equals(clazz.getCanonicalName())
            || hasAnnotation(clazz.getAnnotations(), JUNIT_RUN_WITH)
            || isTestClass(clazz.getSuperclass()));
  }

  /** True if the method carries the JUnit4 @Test annotation. */
  private static boolean isTestMethod(Method method) {
    return hasAnnotation(method.getAnnotations(), JUNIT_TEST);
  }

  /** True if any annotation's canonical name matches (compared by name, not by class identity). */
  private static boolean hasAnnotation(Annotation[] annotations, String annotationCanonicalName) {
    for (Annotation annotation : annotations) {
      if (annotationCanonicalName.equalsIgnoreCase(
          annotation.annotationType().getCanonicalName())) {
        return true;
      }
    }
    return false;
  }
}
apache-2.0
industrieit/ohr
src/main/java/com/industrieit/ohr/Reify.java
940
/*
Copyright 2013 Industrie IT Pty Ltd

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.industrieit.ohr;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.ElementType;

/**
 * Runtime method-level marker annotation for the OHR framework.
 *
 * <p>The {@code consistency} element defaults to {@link Consistency#NORMAL};
 * presumably it selects the consistency mode applied to the annotated
 * method's reified value — confirm against the {@code Consistency} enum's
 * documentation.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Reify {
	// Consistency mode for the annotated method; NORMAL unless overridden.
	Consistency consistency() default Consistency.NORMAL;
}
apache-2.0
banq/jdonframework
example/jdonMVC+CQRS+ES/src/main/java/com/jdon/framework/test/domain/UserModel.java
3330
/**
 * Copyright 2005 Jdon.com
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jdon.framework.test.domain;

import com.jdon.annotation.Model;
import com.jdon.annotation.model.Inject;
import com.jdon.annotation.model.OnCommand;
import com.jdon.framework.test.domain.command.UpdateCommand;
import com.jdon.framework.test.domain.event.UserUpdatedEvent;
import com.jdon.framework.test.domain.vo.UploadVO;
import com.jdon.framework.test.event.domain.publisher.EventSourcing;
import com.jdon.framework.test.event.domain.publisher.LazyLoaderRole;

/**
 * Aggregate root
 *
 * Jdon-framework managed user aggregate. Fields annotated with {@code @Inject}
 * are populated by the container; {@code @OnCommand} routes the named command
 * to {@link #update(UpdateCommand)}.
 *
 * @author banq
 */
@Model
public class UserModel {

	// Aggregate identity.
	private String userId;
	private String username;
	private String email;
	private String password;
	private String verifypassword;
	// Lazily created in getAttachment(); holds the user's uploaded file.
	private Attachment attachment;

	// Injected by the container: publishes domain events / persists uploads.
	@Inject
	public EventSourcing es;

	// Injected by the container: used to lazily load the attachment contents.
	@Inject
	public LazyLoaderRole lazyLoaderRole;

	private int count = -1;

	/**
	 * Command handler for "UserSaveCommand": applies the new user data,
	 * publishes a UserUpdatedEvent, and stores the uploaded file.
	 * Silently ignores commands whose userId does not match this aggregate
	 * (only an error line is printed).
	 */
	@OnCommand("UserSaveCommand")
	public void update(UpdateCommand updateCommand) {
		if (!updateCommand.getNewUserDTO().getUserId().equals(this.userId)) {
			System.err.print("update not this user");
			return;
		}
		this.username = updateCommand.getNewUserDTO().getUsername();
		this.email = updateCommand.getNewUserDTO().getEmail();
		// Publish the state change as a domain event before handling the upload.
		this.es.updated(new UserUpdatedEvent(updateCommand.getNewUserDTO()));
		this.setUploadFile(updateCommand.getUploadVO());
	}

	public void setUserId(String userId) {
		this.userId = userId;
	}

	public String getUsername() {
		return username;
	}

	public void setUsername(String username) {
		this.username = username;
	}

	public String getUserId() {
		return userId;
	}

	public int getCount() {
		return count;
	}

	public void setCount(int count) {
		this.count = count;
	}

	public String getEmail() {
		return email;
	}

	public void setEmail(String email) {
		this.email = email;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String password) {
		this.password = password;
	}

	public String getVerifypassword() {
		return verifypassword;
	}

	public void setVerifypassword(String verifypassword) {
		this.verifypassword = verifypassword;
	}

	/**
	 * Lazily creates the Attachment on first access, wiring in the injected
	 * lazy loader so file contents are only fetched when needed.
	 */
	public Attachment getAttachment() {
		if (attachment == null)
			attachment = new Attachment(this.getUserId(), this.lazyLoaderRole);
		return attachment;
	}

	public UploadFile getUploadFile() {
		return getAttachment().getUploadFile();
	}

	/**
	 * Persists the upload via event sourcing, then mirrors it into the
	 * in-memory attachment. Note: the VO parameter is named "event" in the
	 * original code although it is an UploadVO.
	 */
	public void setUploadFile(UploadVO event) {
		es.saveUpload(event);
		UploadFile uploadFile = new UploadFile();
		uploadFile.setData(event.getFilesData());
		uploadFile.setContentType(event.getContextType());
		uploadFile.setName(event.getFilename());
		getAttachment().setUploadFile(uploadFile);
	}
}
apache-2.0
romankagan/DDBWorkbench
plugins/svn4idea/src/org/jetbrains/idea/svn/dialogs/SvnFormatWorker.java
7749
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.svn.dialogs;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.vcs.AbstractVcsHelper;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.LocalChangeList;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.idea.svn.SvnBundle;
import org.jetbrains.idea.svn.SvnUtil;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.WorkingCopyFormat;
import org.jetbrains.idea.svn.api.ClientFactory;
import org.tmatesoft.svn.core.SVNCancelException;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.wc.ISVNEventHandler;
import org.tmatesoft.svn.core.wc.SVNEvent;
import org.tmatesoft.svn.core.wc.SVNEventAction;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * Background task that upgrades one or more Subversion working copies to a new
 * working-copy format, reporting progress through the IDE's progress indicator
 * and surfacing accumulated errors once the task finishes.
 */
public class SvnFormatWorker extends Task.Backgroundable {
  // Errors collected per working copy during run(); shown to the user in onSuccess().
  private List<Throwable> myExceptions;
  private final Project myProject;
  private final WorkingCopyFormat myNewFormat;
  // Working copies to convert; may be pruned by checkForOutsideCopies().
  private final List<WCInfo> myWcInfos;
  // Change lists captured before conversion so they can be restored afterwards.
  private List<LocalChangeList> myBeforeChangeLists;
  private final SvnVcs myVcs;

  public SvnFormatWorker(final Project project, final WorkingCopyFormat newFormat, final List<WCInfo> wcInfos) {
    super(project, SvnBundle.message("action.change.wcopy.format.task.title"), false, DEAF);
    myProject = project;
    myNewFormat = newFormat;
    myExceptions = new ArrayList<Throwable>();
    myWcInfos = wcInfos;
    myVcs = SvnVcs.getInstance(myProject);
  }

  /** Convenience constructor for converting a single working copy. */
  public SvnFormatWorker(final Project project, final WorkingCopyFormat newFormat, final WCInfo wcInfo) {
    this(project, newFormat, Collections.singletonList(wcInfo));
  }

  /**
   * For each entry that is not itself a working-copy root, asks the user whether the
   * enclosing root should be converted too. "No" removes just that entry; "Cancel"
   * aborts the whole operation by clearing the list.
   */
  public void checkForOutsideCopies() {
    boolean canceled = false;
    for (Iterator<WCInfo> iterator = myWcInfos.iterator(); iterator.hasNext();) {
      final WCInfo wcInfo = iterator.next();
      if (! wcInfo.isIsWcRoot()) {
        File path = new File(wcInfo.getPath());
        path = SvnUtil.getWorkingCopyRoot(path);
        int result = Messages.showYesNoCancelDialog(SvnBundle.message("upgrade.format.clarify.for.outside.copies.text", path),
                                                    SvnBundle.message("action.change.wcopy.format.task.title"),
                                                    Messages.getWarningIcon());
        if (DialogWrapper.CANCEL_EXIT_CODE == result) {
          canceled = true;
          break;
        }
        else if (DialogWrapper.OK_EXIT_CODE != result) {
          // no - for this copy only. maybe other
          iterator.remove();
        }
      }
    }
    if (canceled) {
      myWcInfos.clear();
    }
  }

  /** @return true if at least one working copy is still queued for conversion. */
  public boolean haveStuffToConvert() {
    return ! myWcInfos.isEmpty();
  }

  @Override
  public void onCancel() {
    // Cancellation still reports any errors collected so far.
    onSuccess();
  }

  @Override
  public void onSuccess() {
    if (myProject.isDisposed()) {
      return;
    }
    if (! myExceptions.isEmpty()) {
      // Collapse all collected throwables into one VcsException for the error view.
      final List<String> messages = new ArrayList<String>();
      for (Throwable exception : myExceptions) {
        messages.add(exception.getMessage());
      }
      AbstractVcsHelper.getInstance(myProject)
        .showErrors(Collections.singletonList(new VcsException(messages)),
                    SvnBundle.message("action.change.wcopy.format.task.title"));
    }
  }

  /**
   * Converts each queued working copy to the target format. Runs under a background
   * VCS operation; per-copy failures are collected rather than aborting the loop.
   */
  public void run(@NotNull final ProgressIndicator indicator) {
    ProjectLevelVcsManager.getInstanceChecked(myProject).startBackgroundVcsOperation();
    indicator.setIndeterminate(true);
    final boolean supportsChangelists = myNewFormat.supportsChangelists();
    if (supportsChangelists) {
      // Snapshot change lists so they can be re-applied after conversion.
      myBeforeChangeLists = ChangeListManager.getInstance(myProject).getChangeListsCopy();
    }
    try {
      for (WCInfo wcInfo : myWcInfos) {
        File path = new File(wcInfo.getPath());
        if (! wcInfo.isIsWcRoot()) {
          // Convert the enclosing working-copy root, not the nested directory.
          path = SvnUtil.getWorkingCopyRoot(path);
        }
        try {
          String cleanupMessage = SvnBundle.message("action.Subversion.cleanup.progress.text", path.getAbsolutePath());
          String upgradeMessage = SvnBundle.message("action.change.wcopy.format.task.progress.text", path.getAbsolutePath(),
                                                    SvnUtil.formatRepresentation(wcInfo.getFormat()),
                                                    SvnUtil.formatRepresentation(myNewFormat));
          ISVNEventHandler handler = createUpgradeHandler(indicator, cleanupMessage, upgradeMessage);
          getFactory(path, myNewFormat).createUpgradeClient().upgrade(path, myNewFormat, handler);
        }
        catch (Throwable e) {
          // Collect and continue with the next working copy.
          myExceptions.add(e);
        }
      }
    }
    finally {
      ProjectLevelVcsManager.getInstance(myProject).stopBackgroundVcsOperation();
      // to map to native
      if (supportsChangelists) {
        SvnVcs.getInstance(myProject).processChangeLists(myBeforeChangeLists);
      }
      ApplicationManager.getApplication().getMessageBus().syncPublisher(SvnVcs.WC_CONVERTED).run();
    }
  }

  /**
   * Picks the client factory that supports the requested format; falls back to the
   * primary factory when neither (or both) support it.
   */
  @NotNull
  private ClientFactory getFactory(@NotNull File path, @NotNull WorkingCopyFormat format) throws VcsException {
    ClientFactory factory = myVcs.getFactory(path);
    ClientFactory otherFactory = myVcs.getOtherFactory(factory);
    List<WorkingCopyFormat> factoryFormats = factory.createUpgradeClient().getSupportedFormats();
    List<WorkingCopyFormat> otherFactoryFormats = otherFactory.createUpgradeClient().getSupportedFormats();

    return factoryFormats.contains(format) || !otherFactoryFormats.contains(format) ? factory : otherFactory;
  }

  /**
   * Builds an SVNKit event handler that updates the progress indicator text as
   * cleanup/upgrade events arrive and forwards cancellation to the indicator.
   */
  private static ISVNEventHandler createUpgradeHandler(@NotNull final ProgressIndicator indicator,
                                                       @NotNull final String cleanupMessage,
                                                       @NotNull final String upgradeMessage) {
    return new ISVNEventHandler() {
      @Override
      public void handleEvent(SVNEvent event, double progress) throws SVNException {
        if (event.getFile() != null) {
          if (SVNEventAction.UPGRADED_PATH.equals(event.getAction())) {
            indicator.setText2("Upgraded path " + VcsUtil.getPathForProgressPresentation(event.getFile()));
          }
          // fake event indicating cleanup start
          if (SVNEventAction.UPDATE_STARTED.equals(event.getAction())) {
            indicator.setText(cleanupMessage);
          }
          // fake event indicating upgrade start
          if (SVNEventAction.UPDATE_COMPLETED.equals(event.getAction())) {
            indicator.setText(upgradeMessage);
          }
        }
      }

      @Override
      public void checkCancelled() throws SVNCancelException {
        indicator.checkCanceled();
      }
    };
  }
}
apache-2.0
jmostella/armeria
core/src/main/java/com/linecorp/armeria/client/circuitbreaker/CircuitBreakerRpcClient.java
5077
/*
 * Copyright 2018 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.linecorp.armeria.client.circuitbreaker;

import java.util.function.Function;

import com.linecorp.armeria.client.Client;
import com.linecorp.armeria.client.ClientRequestContext;
import com.linecorp.armeria.client.circuitbreaker.KeyedCircuitBreakerMapping.KeySelector;
import com.linecorp.armeria.common.RpcRequest;
import com.linecorp.armeria.common.RpcResponse;

/**
 * A {@link Client} decorator that handles failures of RPC remote invocation based on circuit breaker pattern.
 */
public final class CircuitBreakerRpcClient extends CircuitBreakerClient<RpcRequest, RpcResponse> {

    /**
     * Returns a decorator function that applies the given {@link CircuitBreaker} to every request,
     * judging success or failure with the given {@link CircuitBreakerStrategy}.
     *
     * <p>A {@link CircuitBreaker} is a unit of failure detection; sharing one instance between
     * unrelated services mixes their failure statistics, so avoid reusing it across them.
     *
     * @param circuitBreaker The {@link CircuitBreaker} instance to be used
     */
    public static Function<Client<RpcRequest, RpcResponse>, CircuitBreakerRpcClient>
    newDecorator(CircuitBreaker circuitBreaker, CircuitBreakerStrategy<RpcResponse> strategy) {
        final CircuitBreakerMapping constantMapping = (ctx, req) -> circuitBreaker;
        return newDecorator(constantMapping, strategy);
    }

    /**
     * Returns a decorator function that selects a {@link CircuitBreaker} per request via the given
     * {@link CircuitBreakerMapping} and judges responses with the given {@link CircuitBreakerStrategy}.
     */
    public static Function<Client<RpcRequest, RpcResponse>, CircuitBreakerRpcClient>
    newDecorator(CircuitBreakerMapping mapping, CircuitBreakerStrategy<RpcResponse> strategy) {
        return delegate -> {
            return new CircuitBreakerRpcClient(delegate, mapping, strategy);
        };
    }

    /**
     * Returns a decorator function that maintains one {@link CircuitBreaker} per RPC method name.
     *
     * @param factory A function that takes an RPC method name and creates a new {@link CircuitBreaker}.
     */
    public static Function<Client<RpcRequest, RpcResponse>, CircuitBreakerRpcClient>
    newPerMethodDecorator(Function<String, CircuitBreaker> factory, CircuitBreakerStrategy<RpcResponse> strategy) {
        final CircuitBreakerMapping perMethod = new KeyedCircuitBreakerMapping<>(KeySelector.METHOD, factory);
        return newDecorator(perMethod, strategy);
    }

    /**
     * Returns a decorator function that maintains one {@link CircuitBreaker} per remote host.
     *
     * @param factory a function that takes a host name and creates a new {@link CircuitBreaker}
     */
    public static Function<Client<RpcRequest, RpcResponse>, CircuitBreakerRpcClient>
    newPerHostDecorator(Function<String, CircuitBreaker> factory, CircuitBreakerStrategy<RpcResponse> strategy) {
        final CircuitBreakerMapping perHost = new KeyedCircuitBreakerMapping<>(KeySelector.HOST, factory);
        return newDecorator(perHost, strategy);
    }

    /**
     * Returns a decorator function that maintains one {@link CircuitBreaker} per host/method pair.
     *
     * @param factory a function that takes a host+method and creates a new {@link CircuitBreaker}
     */
    public static Function<Client<RpcRequest, RpcResponse>, CircuitBreakerRpcClient>
    newPerHostAndMethodDecorator(Function<String, CircuitBreaker> factory,
                                 CircuitBreakerStrategy<RpcResponse> strategy) {
        final CircuitBreakerMapping perHostAndMethod =
                new KeyedCircuitBreakerMapping<>(KeySelector.HOST_AND_METHOD, factory);
        return newDecorator(perHostAndMethod, strategy);
    }

    /**
     * Creates a new instance that decorates the specified {@link Client}.
     */
    CircuitBreakerRpcClient(Client<RpcRequest, RpcResponse> delegate, CircuitBreakerMapping mapping,
                            CircuitBreakerStrategy<RpcResponse> strategy) {
        super(delegate, mapping, strategy);
    }

    @Override
    protected RpcResponse doExecute(ClientRequestContext ctx, RpcRequest req, CircuitBreaker circuitBreaker)
            throws Exception {
        final RpcResponse res;
        try {
            res = delegate().execute(ctx, req);
        } catch (Throwable t) {
            // Delegate threw synchronously: let the strategy judge the failure, then rethrow.
            final RpcResponse failure = RpcResponse.ofFailure(t);
            reportSuccessOrFailure(circuitBreaker, strategy().shouldReportAsSuccess(failure));
            throw t;
        }

        // Report asynchronously once the (possibly still pending) response completes.
        res.whenComplete((unusedResult, unusedCause) ->
                reportSuccessOrFailure(circuitBreaker, strategy().shouldReportAsSuccess(res)));
        return res;
    }
}
apache-2.0
leafclick/intellij-community
platform/lang-impl/src/com/intellij/psi/impl/include/FileIncludeProvider.java
2264
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.include;

import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiFileSystemItem;
import com.intellij.util.Consumer;
import com.intellij.util.indexing.FileContent;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Extension point ("com.intellij.include.provider") through which plugins tell the
 * platform which files include which other files, so that include relations can be
 * indexed and resolved.
 *
 * @author Dmitry Avdeev
 */
public abstract class FileIncludeProvider {

  public static final ExtensionPointName<FileIncludeProvider> EP_NAME = ExtensionPointName.create("com.intellij.include.provider");

  /** Unique identifier of this provider. */
  @NotNull
  public abstract String getId();

  /** Returns true if this provider can extract include information from the given file. */
  public abstract boolean acceptFile(VirtualFile file);

  /** Feeds the sink every file type whose files must be indexed for this provider. */
  public abstract void registerFileTypesUsedForIndexing(@NotNull Consumer<FileType> fileTypeSink);

  /** Extracts the include declarations found in the given file content. */
  public abstract FileIncludeInfo @NotNull [] getIncludeInfos(FileContent content);

  /**
   * If all providers return {@code null} then {@code FileIncludeInfo} is resolved in a standard way
   * using {@code FileReferenceSet}
   */
  @Nullable
  public PsiFileSystemItem resolveIncludedFile(@NotNull final FileIncludeInfo info, @NotNull final PsiFile context) {
    return null;
  }

  /**
   * Override this method and increment returned value each time when you change the logic of your provider.
   */
  public int getVersion() {
    return 0;
  }

  /**
   * @return Possible name in included paths. For example if a provider returns FileIncludeInfos
   * without file extensions
   */
  @NotNull
  public String getIncludeName(@NotNull PsiFile file, @NotNull String originalName) {
    return originalName;
  }
}
apache-2.0
zoozooll/MyExercise
meep/MeepStore2/src/com/oregonscientific/meep/store2/ctrl/ImageDownloadCtrl.java
6725
package com.oregonscientific.meep.store2.ctrl;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayDeque;
import java.util.ArrayList;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;

import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.http.AndroidHttpClient;
import android.util.Log;

import com.oregonscientific.meep.store2.global.MeepStoreApplication;
import com.oregonscientific.meep.store2.global.MeepStoreLog;
import com.oregonscientific.meep.store2.object.BannerItem;
import com.oregonscientific.meep.store2.object.DownloadImageItem;
import com.oregonscientific.meep.store2.object.MeepStoreItem;

/**
 * Queue-based image downloader: items are enqueued from the UI side and a single
 * background thread drains the queue over HTTP, notifying a {@link DownloadListener}
 * per item. The image cache (DB-backed) is consulted before hitting the network.
 */
public class ImageDownloadCtrl {

	// Pending downloads; accessed from both the UI thread and the worker thread,
	// always under "synchronized (this)" via the synchronized methods below.
	ArrayDeque<DownloadImageItem> mDownloadQ;
	private boolean mIsDownloading = false;
	DownloadListener mDownloadListener;
	Thread mHttpThread;
	// NOTE(review): created in the constructor but never used or closed — the worker
	// creates its own per-item client. Kept for interface stability; candidate for removal.
	AndroidHttpClient mHttpClient;
	StoreItemCacheCtrl mCacheCtrl;
	Context mContext;
	// Read by the worker thread inside readBytes() and set from clearCurrentDownload();
	// volatile so the write is visible across threads without holding the monitor.
	// NOTE(review): once set with no download in progress, the flag aborts the NEXT
	// download's readBytes() — confirm this is the intended cancellation semantics.
	volatile boolean mStopDownload = false;

	/** Callback for download completion; downloadAborted=true means the transfer was cut short. */
	public interface DownloadListener {
		public abstract void onDownloadCompleted(boolean downloadAborted, DownloadImageItem item);
	}

	public synchronized DownloadListener getDownloadListener() {
		return mDownloadListener;
	}

	public synchronized void setDownloadListener(DownloadListener downloadListener) {
		this.mDownloadListener = downloadListener;
	}

	public ImageDownloadCtrl(Context context, SQLiteDatabase db, String name) {
		mContext = context;
		mCacheCtrl = new StoreItemCacheCtrl(db);
		mDownloadQ = new ArrayDeque<DownloadImageItem>();
		mHttpClient = AndroidHttpClient.newInstance("AndroidDownloader");
	}

	/** Starts the worker thread. Each call spawns a new thread; call once per controller. */
	public void startDownload() {
		mHttpThread = new Thread(mHttpDownloadRunnable);
		mHttpThread.start();
	}

	/**
	 * Enqueues an item unless its image is already cached, in which case the
	 * listener is invoked immediately with downloadAborted=false.
	 */
	public synchronized void addToDownloadQ(DownloadImageItem item) {
		MeepStoreLog.logcatMessage("restoreimagedownload", "add to downloadQ" + item.getId());
		Bitmap bmp = mCacheCtrl.getImageCache(item.getId());
		if (bmp != null) {
			MeepStoreLog.logcatMessage("restoreimagedownload", "found on db" + item.getId());
			mDownloadListener.onDownloadCompleted(false, item);
			return;
		}
		mDownloadQ.add(item);
	}

	/** @return the next queued item, or null if the queue is empty. */
	public synchronized DownloadImageItem getDownloadItemFromQueue() {
		return mDownloadQ.poll();
	}

	public synchronized boolean isDownloading() {
		return mIsDownloading;
	}

	public synchronized void setIsDownloading(boolean mIsDownloading) {
		this.mIsDownloading = mIsDownloading;
	}

	// Worker loop: poll the queue, fetch each item over HTTP, decode the bitmap,
	// notify the listener; sleep 1s when the queue is empty.
	private Runnable mHttpDownloadRunnable = new Runnable() {

		@Override
		public void run() {
			while (true) {
				DownloadImageItem downloadingItem = getDownloadItemFromQueue();
				if (downloadingItem != null) {
					AndroidHttpClient client = AndroidHttpClient.newInstance("android/meep");
					setIsDownloading(true);
					MeepStoreLog.logcatMessage("restoreimagedownload", "start http download" + downloadingItem.getId());
					try {
						// get file
						HttpGet httpGet = new HttpGet(downloadingItem.getUrl());
						HttpResponse response = client.execute(httpGet);
						HttpEntity entity = response.getEntity();
						if (entity != null) {
							InputStream is = entity.getContent();
							Bitmap downloadBitmap = BitmapFactory.decodeStream(is);
							downloadingItem.setImage(downloadBitmap);
							// NOTE(review): decodeStream() above has already consumed the stream,
							// so readBytes(is) normally returns an empty (non-null) array; it only
							// returns null when a stop was requested mid-read. Confirm intent.
							byte[] bytes = readBytes(is);
							is.close();
							DownloadListener listener = getDownloadListener();
							if (bytes == null) {
								// Stop was requested: report the download as aborted.
								if (listener != null)
									listener.onDownloadCompleted(true, downloadingItem);
							} else {
								if (listener != null)
									listener.onDownloadCompleted(false, downloadingItem);
							}
							entity.consumeContent();
						}
					} catch (Exception e) {
						// NOTE(review): failures are reported with downloadAborted=false,
						// same as success — looks suspicious but preserved as-is.
						DownloadListener listener = getDownloadListener();
						if (listener != null)
							listener.onDownloadCompleted(false, downloadingItem);
						Log.e("restoreimagedownload", e.toString());
						e.printStackTrace();
					} finally {
						client.close();
						setIsDownloading(false);
					}
				} else {
					try {
						// Idle poll: nothing queued, wait a second before checking again.
						Thread.sleep(1000);
					} catch (InterruptedException e) {
						e.printStackTrace();
						Log.e("restoreimagedownload", e.toString());
					}
				}
			}
		}
	};

	// Drops all pending items and flags the in-flight read (if any) to stop.
	private synchronized void clearCurrentDownload() {
		mDownloadQ.clear();
		mStopDownload = true;
	}

	/** Replaces the queue with icon downloads for the given store items. */
	public synchronized void downloadMeepStoreItems(ArrayList<MeepStoreItem> itemList) {
		clearCurrentDownload();
		for (int i = 0; i < itemList.size(); i++) {
			MeepStoreItem item = itemList.get(i);
			DownloadImageItem dlItem = new DownloadImageItem(item.getItemId(), getIconUrl(item.getIconUrl()));
			mDownloadQ.add(dlItem);
		}
	}

	/** Appends icon downloads for the given store items without clearing the queue. */
	public synchronized void addMeepStoreItems(ArrayList<MeepStoreItem> itemList) {
		for (int i = 0; i < itemList.size(); i++) {
			MeepStoreItem item = itemList.get(i);
			DownloadImageItem dlItem = new DownloadImageItem(item.getItemId(), getIconUrl(item.getIconUrl()));
			mDownloadQ.add(dlItem);
		}
	}

	// Prefixes relative icon URLs with the login-supplied URL prefix; absolute
	// URLs (containing "http") pass through unchanged.
	private String getIconUrl(String iconUrl) {
		MeepStoreApplication app = (MeepStoreApplication) mContext.getApplicationContext();
		String prefix = app.getLoginInfo().url_prefix;
		if (!iconUrl.toLowerCase().contains("http")) {
			return prefix + iconUrl;
		}
		return iconUrl;
	}

	/** Replaces the queue with banner image downloads. */
	public synchronized void downloadBanner(ArrayList<BannerItem> bannerList) {
		clearCurrentDownload();
		for (int i = 0; i < bannerList.size(); i++) {
			BannerItem banner = bannerList.get(i);
			DownloadImageItem item = new DownloadImageItem(banner.id, banner.image);
			mDownloadQ.add(item);
		}
	}

	/**
	 * Reads the stream to end-of-stream into a byte array.
	 *
	 * @return the bytes read, or {@code null} if a stop was requested mid-read
	 * @throws IOException on read failure
	 */
	public byte[] readBytes(InputStream inputStream) throws IOException {
		// this dynamically extends to take the bytes you read
		ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();

		// this is storage overwritten on each iteration with bytes
		int bufferSize = 1024;
		byte[] buffer = new byte[bufferSize];

		// BUGFIX: the loop previously also required inputStream.available() > 0,
		// but available()==0 does not mean end-of-stream (especially on network
		// streams), so downloads could be silently truncated. Only read()==-1
		// reliably signals end-of-stream.
		int len = 0;
		while ((len = inputStream.read(buffer)) != -1) {
			if (mStopDownload) {
				mStopDownload = false;
				return null;
			}
			byteBuffer.write(buffer, 0, len);
		}

		// and then we can return your byte array.
		return byteBuffer.toByteArray();
	}
}
apache-2.0
o3project/openflowj-otn
src/main/java/org/projectfloodlight/openflow/protocol/ver14/OFActionIdBsnMirrorVer14.java
6307
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.4 action-id for the BSN "mirror" experimenter action.
 *
 * <p>This class is stateless: every field it serializes is a fixed value
 * (type 0xffff, length 12, experimenter 0x5c16c7, subtype 0x1), so a single
 * shared {@link #INSTANCE} is used instead of per-message objects and no
 * builder is supported.
 *
 * <p>Generated by LoxiGen — do not hand-edit the logic.
 */
class OFActionIdBsnMirrorVer14 implements OFActionIdBsnMirror {
    private static final Logger logger = LoggerFactory.getLogger(OFActionIdBsnMirrorVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // Fixed on-wire length of this structure in bytes.
    final static int LENGTH = 12;

    // OF message fields
//
    // Immutable default instance
    final static OFActionIdBsnMirrorVer14 DEFAULT = new OFActionIdBsnMirrorVer14(
    );

    // Shared singleton returned by the reader; safe because the class has no state.
    final static OFActionIdBsnMirrorVer14 INSTANCE = new OFActionIdBsnMirrorVer14();

    // private empty constructor - use shared instance!
    private OFActionIdBsnMirrorVer14() {
    }

    // Accessors for OF message fields
    @Override
    public OFActionType getType() {
        return OFActionType.EXPERIMENTER;
    }

    @Override
    public long getExperimenter() {
        // Big Switch Networks experimenter ID.
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x1L;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    // no data members - do not support builder
    public OFActionIdBsnMirror.Builder createBuilder() {
        throw new UnsupportedOperationException("OFActionIdBsnMirrorVer14 has no mutable properties -- builder unneeded");
    }

    final static Reader READER = new Reader();

    /**
     * Deserializes this action-id from a channel buffer, validating every
     * fixed-value field and throwing {@link OFParseError} on any mismatch.
     */
    static class Reader implements OFMessageReader<OFActionIdBsnMirror> {
        @Override
        public OFActionIdBsnMirror readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 65535
            short type = bb.readShort();
            if(type != (short) 0xffff)
                throw new OFParseError("Wrong type: Expected=OFActionType.EXPERIMENTER(65535), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 12)
                throw new OFParseError("Wrong length: Expected=12(12), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                // Rewind so the caller can retry once more bytes arrive.
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x1L
            int subtype = bb.readInt();
            if(subtype != 0x1)
                throw new OFParseError("Wrong subtype: Expected=0x1L(0x1L), got="+subtype);
            if(logger.isTraceEnabled())
                logger.trace("readFrom - returning shared instance={}", INSTANCE);
            return INSTANCE;
        }
    }

    // Feeds this object's canonical wire representation into a Guava hasher.
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFActionIdBsnMirrorVer14Funnel FUNNEL = new OFActionIdBsnMirrorVer14Funnel();

    /** Guava {@link Funnel} emitting the same fixed fields the writer serializes. */
    static class OFActionIdBsnMirrorVer14Funnel implements Funnel<OFActionIdBsnMirrorVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFActionIdBsnMirrorVer14 message, PrimitiveSink sink) {
            // fixed value property type = 65535
            sink.putShort((short) 0xffff);
            // fixed value property length = 12
            sink.putShort((short) 0xc);
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x1L
            sink.putInt(0x1);
        }
    }

    // Serializes this action-id into the given buffer.
    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Writes the 12-byte fixed wire form of this action-id. */
    static class Writer implements OFMessageWriter<OFActionIdBsnMirrorVer14> {
        @Override
        public void write(ChannelBuffer bb, OFActionIdBsnMirrorVer14 message) {
            // fixed value property type = 65535
            bb.writeShort((short) 0xffff);
            // fixed value property length = 12
            bb.writeShort((short) 0xc);
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x1L
            bb.writeInt(0x1);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFActionIdBsnMirrorVer14(");
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        // No data members: any two instances of this exact class are equal.
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Constant hash, consistent with the stateless equals() above.
        int result = 1;
        return result;
    }
}
apache-2.0
treejames/android-task
library/src/main/java/net/vrallev/android/task/TaskCacheFragment.java
4068
package net.vrallev.android.task;

import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.os.Bundle;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Retained, view-less {@link Fragment} that acts as a per-activity key/value
 * cache implementing {@link TaskCacheFragmentInterface}. Because
 * {@code setRetainInstance(true)} is set, the instance (and its cache)
 * survives configuration changes of the host activity.
 *
 * <p>It also tracks whether the activity is currently in a lifecycle state
 * where instance state may still be saved ({@link #canSaveInstanceState()}),
 * and re-posts any pending task results in {@link #onStart()}.
 *
 * @author rwondratschek
 */
@SuppressWarnings("UnusedDeclaration")
public final class TaskCacheFragment extends Fragment implements TaskCacheFragmentInterface {

    private static final String TAG = "TaskCacheFragment";

    /**
     * Returns the cache fragment attached to {@code activity}, creating and
     * attaching one if necessary. Falls back to the temporary cache kept by
     * {@code Helper} when the fragment transaction cannot be executed
     * immediately.
     */
    /*package*/ static TaskCacheFragment getFrom(Activity activity) {
        FragmentManager fragmentManager = activity.getFragmentManager();
        Fragment fragment = fragmentManager.findFragmentByTag(TAG);
        if (fragment instanceof TaskCacheFragment) {
            return (TaskCacheFragment) fragment;
        }

        // Not attached yet — maybe a previous call already parked one in the temp cache.
        TaskCacheFragmentInterface cacheFragment = Helper.getTempCacheFragment(activity);
        if (cacheFragment instanceof TaskCacheFragment) {
            return (TaskCacheFragment) cacheFragment;
        }

        TaskCacheFragment result = new TaskCacheFragment();
        result.mActivity = activity;
        fragmentManager.beginTransaction()
                .add(result, TAG)
                .commitAllowingStateLoss();

        try {
            fragmentManager.executePendingTransactions();
        } catch (IllegalStateException ignored) {
            // may throw java.lang.IllegalStateException: Recursive entry to executePendingTransactions
            // Remember the fragment in a temp cache so getFrom() still finds it
            // before the transaction has actually executed.
            TaskCacheFragmentInterface.Helper.putTempCacheFragment(activity, result);
        }

        return result;
    }

    // Backing store; wrapped in a synchronized map for thread-safe access.
    private final Map<String, Object> mCache;
    // True only while the activity is in a state where saving state is allowed.
    private boolean mCanSaveInstanceState;
    private Activity mActivity;

    public TaskCacheFragment() {
        // Keep this instance (and the cache) across configuration changes.
        setRetainInstance(true);
        mCache = Collections.synchronizedMap(new HashMap<String, Object>());
    }

    @Override
    public void onAttach(Activity activity) {
        // Store the activity before calling super so it is available immediately.
        mActivity = activity;
        super.onAttach(activity);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mCanSaveInstanceState = true;
    }

    @Override
    public void onStart() {
        super.onStart();
        mCanSaveInstanceState = true;

        // Deliver any results that arrived while the activity was stopped.
        // NOTE(review): PENDING_RESULT_KEY is presumably declared on
        // TaskCacheFragmentInterface — not visible in this file.
        List<TaskPendingResult> list = get(PENDING_RESULT_KEY);
        if (list != null && !list.isEmpty()) {
            TaskCacheFragmentInterface.Helper.postPendingResults(list, this);
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        mCanSaveInstanceState = true;
    }

    @Override
    public void onStop() {
        // After onStop() the framework may have saved state; flip the flag first.
        mCanSaveInstanceState = false;
        super.onStop();
    }

    @Override
    public void onDetach() {
        // Only drop the activity reference when it is going away for good;
        // on configuration changes the retained instance is re-attached.
        if (mActivity.isFinishing()) {
            mActivity = null;
        }
        super.onDetach();
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        mCanSaveInstanceState = false;
        super.onSaveInstanceState(outState);
    }

    @Override
    public boolean canSaveInstanceState() {
        return mCanSaveInstanceState;
    }

    /** Returns the cached value for {@code key}, cast to the caller's type, or null. */
    @SuppressWarnings("unchecked")
    @Override
    public synchronized <T> T get(String key) {
        return (T) mCache.get(key);
    }

    /** Stores {@code object} under {@code key}; returns the previous value, if any. */
    @SuppressWarnings("unchecked")
    @Override
    public synchronized <T> T put(String key, Object object) {
        return (T) mCache.put(key, object);
    }

    /** Removes and returns the value stored under {@code key}, if any. */
    @SuppressWarnings("unchecked")
    @Override
    public synchronized <T> T remove(String key) {
        return (T) mCache.remove(key);
    }

    /**
     * Queues a task result to be delivered in {@link #onStart()}; lazily
     * creates the synchronized pending-result list on first use.
     */
    @Override
    public synchronized void putPendingResult(TaskPendingResult pendingResult) {
        List<TaskPendingResult> list = get(PENDING_RESULT_KEY);
        if (list == null) {
            list = Collections.synchronizedList(new ArrayList<TaskPendingResult>());
            put(PENDING_RESULT_KEY, list);
        }
        list.add(pendingResult);
    }

    @Override
    public Activity getParentActivity() {
        return mActivity;
    }
}
apache-2.0
smanvi-pivotal/geode
geode-core/src/test/java/org/apache/geode/internal/cache/ClearTXLockingDUnitTest.java
13277
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * ClearRvvLockingDUnitTest.java
 *
 * Created on September 6, 2005, 2:57 PM
 */
package org.apache.geode.internal.cache;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

import org.assertj.core.api.JUnitSoftAssertions;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheEvent;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.Scope;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.DistributedTest;

/**
 * Test class to verify proper locking interaction between transactions and the CLEAR region
 * operation.
 *
 * GEODE-1740: It was observed that operations performed within a transaction were not holding
 * region modification locks for the duration of commit processing. This lock is used to ensure
 * region consistency during CLEAR processing. By not holding the lock for the duration of commit
 * processing, a window was opened that allowed region operations such as clear to occur in
 * mid-commit.
 *
 * The fix for GEODE-1740 was to acquire and hold read locks for any region involved in the commit.
 * This forces CLEAR to wait until commit processing is complete.
 */
@SuppressWarnings("serial")
@Category(DistributedTest.class)
public class ClearTXLockingDUnitTest extends JUnit4CacheTestCase {
  @Rule
  public transient JUnitSoftAssertions softly = new JUnitSoftAssertions();

  /*
   * This test performs operations within a transaction and during commit processing schedules a
   * clear to be performed on the relevant region. The scheduled clear should wait until commit
   * processing is complete before clearing the region. Failure to do so, would result in region
   * inconsistencies.
   */
  private static final String THE_KEY = "theKey";
  private static final String THE_VALUE = "theValue";

  // Each test lap puts this many entries into each of the two regions.
  private static final int NUMBER_OF_PUTS = 2;

  private static final String REGION_NAME1 = "testRegion1";
  private static final String REGION_NAME2 = "testRegion2";

  // Static so remote (per-VM) lambdas share the same cache/latch instances within a VM.
  static Cache cache;
  // Released mid-commit to let commit processing resume after the region op is staged.
  static CountDownLatch opsLatch;
  // Released to start the staged region operation (clear/close/destroy).
  private static CountDownLatch regionLatch;
  // Released once the region operation finished, unblocking verification.
  private static CountDownLatch verifyLatch;

  private VM vm0;
  private VM vm1;

  // opsVM runs the transaction; regionVM runs the competing region operation.
  private VM opsVM;
  private VM regionVM;

  @Before
  public void setup() {
    Host host = Host.getHost(0);
    vm0 = host.getVM(0);
    vm1 = host.getVM(1);
    createCache(vm0);
    createCache(vm1);
  }

  @Test
  public void testPutWithClearSameVM() {
    setupRegions(vm0, vm0);
    setClearHook(REGION_NAME1, opsVM, regionVM);
    performTestAndCheckResults();
  }

  @Test
  public void testPutWithClearDifferentVM() {
    setupRegions(vm0, vm1);
    setClearHook(REGION_NAME1, opsVM, regionVM);
    performTestAndCheckResults();
  }

  /*
   * The CLOSE tests are ignored until the close operation has been updated to acquire a write lock
   * during processing.
   */
  @Ignore
  @Test
  public void testPutWithCloseSameVM() {
    setupRegions(vm0, vm0);
    setCloseHook(REGION_NAME1, opsVM, regionVM);
    performTestAndCheckResults();
  }

  @Ignore
  @Test
  public void testPutWithCloseDifferentVM() {
    setupRegions(vm0, vm1);
    setCloseHook(REGION_NAME1, opsVM, regionVM);
    performTestAndCheckResults();
  }

  /*
   * The DESTROY_REGION tests are ignored until the destroy operation has been updated to acquire a
   * write lock during processing.
   */
  @Ignore
  @Test
  public void testPutWithDestroyRegionSameVM() {
    setupRegions(vm0, vm0);
    setDestroyRegionHook(REGION_NAME1, opsVM, regionVM);
    performTestAndCheckResults();
  }

  @Ignore
  @Test
  public void testPutWithDestroyRegionDifferentVM() {
    setupRegions(vm0, vm1);
    setDestroyRegionHook(REGION_NAME1, opsVM, regionVM);
    performTestAndCheckResults();
  }

  // Local methods

  /*
   * This method executes a runnable test and then checks for region consistency
   */
  private void performTestAndCheckResults() {
    try {
      opsVM.invoke(this::putOperationsTest);
      checkForConsistencyErrors(REGION_NAME1);
      checkForConsistencyErrors(REGION_NAME2);
    } finally {
      // Always clear the ARM hook so later tests see unmodified lock behavior.
      opsVM.invoke(() -> resetArmHook(REGION_NAME1));
    }
  }

  /*
   * Set which vm will perform the transaction and which will perform the region operation and
   * create the regions on the vms
   */
  private void setupRegions(VM opsTarget, VM regionTarget) {
    opsVM = opsTarget;
    regionVM = regionTarget;
    vm0.invoke(() -> createRegion(REGION_NAME1));
    vm0.invoke(() -> createRegion(REGION_NAME2));
    vm1.invoke(() -> createRegion(REGION_NAME1));
    vm1.invoke(() -> createRegion(REGION_NAME2));
  }

  private void putOperationsTest() {
    opsVM.invoke(() -> doPuts(getCache(), regionVM));
  }

  /*
   * Set arm hook to detect when region operation is attempting to acquire write lock and stage the
   * clear that will be released half way through commit processing.
   */
  private void setClearHook(String rname, VM whereOps, VM whereClear) {
    whereOps.invoke(() -> setArmHook(rname));
    // invokeAsync: the staged operation blocks on regionLatch until released.
    whereClear.invokeAsync(() -> stageClear(rname, whereOps));
  }

  // remote test methods

  /*
   * Wait to be notified and then execute the clear. Once the clear completes, notify waiter to
   * perform region verification.
   */
  private static void stageClear(String rname, VM whereOps) throws InterruptedException {
    regionLatch = new CountDownLatch(1);
    regionOperationWait(regionLatch);
    LocalRegion r = (LocalRegion) cache.getRegion(rname);
    r.clear();
    whereOps.invoke(() -> releaseVerify());
  }

  /*
   * Set and stage method for close and destroy are the same as clear
   */
  private void setCloseHook(String rname, VM whereOps, VM whereClear) {
    whereOps.invoke(() -> setArmHook(rname));
    whereClear.invokeAsync(() -> stageClose(rname, whereOps));
  }

  private static void stageClose(String rname, VM whereOps) throws InterruptedException {
    regionLatch = new CountDownLatch(1);
    regionOperationWait(regionLatch);
    LocalRegion r = (LocalRegion) cache.getRegion(rname);
    r.close();
    whereOps.invoke(() -> releaseVerify());
  }

  private void setDestroyRegionHook(String rname, VM whereOps, VM whereClear) {
    whereOps.invoke(() -> setArmHook(rname));
    whereClear.invokeAsync(() -> stageDestroyRegion(rname, whereOps));
  }

  private static void stageDestroyRegion(String rname, VM whereOps) throws InterruptedException {
    regionLatch = new CountDownLatch(1);
    regionOperationWait(regionLatch);
    LocalRegion r = (LocalRegion) cache.getRegion(rname);
    r.destroyRegion();
    whereOps.invoke(() -> releaseVerify());
  }

  /*
   * Set the abstract region map lock hook to detect attempt to acquire write lock by region
   * operation.
   */
  private void setArmHook(String rname) {
    LocalRegion r = (LocalRegion) cache.getRegion(rname);
    ArmLockHook theArmHook = new ArmLockHook();
    ((AbstractRegionMap) r.entries).setARMLockTestHook(theArmHook);
  }

  /*
   * Cleanup arm lock hook by setting it null
   */
  private void resetArmHook(String rname) {
    LocalRegion r = (LocalRegion) cache.getRegion(rname);
    ((AbstractRegionMap) r.entries).setARMLockTestHook(null);
  }

  /*
   * Wait to be notified it is time to perform region operation (i.e. CLEAR)
   */
  private static void regionOperationWait(CountDownLatch latch) throws InterruptedException {
    latch.await();
    /*
     * regionLatch = new CountDownLatch(1); regionLatch.await();
     */
  }

  /*
   * A simple transaction that will have a region operation execute during commit. opsLatch is used
   * to wait until region operation has been scheduled during commit and verifyLatch is used to
   * ensure commit and clear processing have both completed.
   */
  private static void doPuts(Cache cache, VM whereRegion) throws InterruptedException {
    TXManagerImpl txManager = (TXManagerImpl) cache.getCacheTransactionManager();
    opsLatch = new CountDownLatch(1);
    verifyLatch = new CountDownLatch(1);
    txManager.begin();
    // Install the mid-commit callback directly on the underlying TXState.
    TXStateInterface txState = ((TXStateProxyImpl) txManager.getTXState()).getRealDeal(null, null);
    ((TXState) txState).setDuringApplyChanges(new CommitTestCallback(whereRegion));
    Region region1 = cache.getRegion(REGION_NAME1);
    Region region2 = cache.getRegion(REGION_NAME2);
    for (int i = 0; i < NUMBER_OF_PUTS; i++) {
      region1.put(REGION_NAME1 + THE_KEY + i, THE_VALUE + i);
      region2.put(REGION_NAME2 + THE_KEY + i, THE_VALUE + i);
    }
    txManager.commit();
    verifyLatch.await();
  }

  /*
   * Release the region operation that has been previously staged
   */
  private static void releaseRegionOperation(VM whereRegion) {
    whereRegion.invoke(() -> regionLatch.countDown());
  }

  /*
   * Region operation has been scheduled, now resume commit processing
   */
  private static void releaseOps() {
    opsLatch.countDown();
  }

  /*
   * Notify waiter it is time to verify region contents
   */
  private static void releaseVerify() {
    verifyLatch.countDown();
  }

  private InternalDistributedMember createCache(VM vm) {
    return vm.invoke(() -> {
      cache = getCache(new CacheFactory().set("conserve-sockets", "true"));
      return getSystem().getDistributedMember();
    });
  }

  private static void createRegion(String rgnName) {
    RegionFactory<Object, Object> factory = cache.createRegionFactory(RegionShortcut.REPLICATE);
    factory.setConcurrencyChecksEnabled(true);
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.create(rgnName);
  }

  /*
   * Get region contents from each member and verify they are consistent
   */
  private void checkForConsistencyErrors(String regionName) {
    Map<Object, Object> r0Contents = vm0.invoke(() -> getRegionContents(regionName));
    Map<Object, Object> r1Contents = vm1.invoke(() -> getRegionContents(regionName));

    for (int i = 0; i < NUMBER_OF_PUTS; i++) {
      String theKey = regionName + THE_KEY + i;
      if (r0Contents.containsKey(theKey)) {
        softly.assertThat(r1Contents.get(theKey))
            .as("region contents are not consistent for key %s", theKey)
            .isEqualTo(r0Contents.get(theKey));
      } else {
        softly.assertThat(r1Contents).as("expected containsKey for %s to return false", theKey)
            .doesNotContainKey(theKey);
      }
    }
  }

  @SuppressWarnings("rawtypes")
  private static Map<Object, Object> getRegionContents(String rname) {
    LocalRegion r = (LocalRegion) cache.getRegion(rname);
    Map<Object, Object> result = new HashMap<>();
    for (Iterator i = r.entrySet().iterator(); i.hasNext();) {
      Region.Entry e = (Region.Entry) i.next();
      result.put(e.getKey(), e.getValue());
    }
    return result;
  }

  /*
   * Test callback called for each operation during commit processing. Half way through commit
   * processing, release the region operation.
   */
  static class CommitTestCallback implements Runnable {
    private VM whereRegionOperation;
    private int callCount;
    /* entered twice for each put lap since there are 2 regions */
    private int releasePoint = NUMBER_OF_PUTS;

    CommitTestCallback(VM whereRegion) {
      whereRegionOperation = whereRegion;
      callCount = 0;
    }

    public void run() {
      callCount++;
      if (callCount == releasePoint) {
        releaseRegionOperation(whereRegionOperation);
        try {
          // Block commit until the region op signals it has been scheduled.
          opsLatch.await();
        } catch (InterruptedException e) {
          // Intentionally ignored in this test callback; commit simply resumes.
        }
      }
    }
  }

  /*
   * The region operations attempt to acquire the write lock will hang while commit processing is
   * occurring. Before this occurs, resume commit processing.
   */
  public class ArmLockHook extends ARMLockTestHookAdapter {
    @Override
    public void beforeLock(InternalRegion owner, CacheEvent event) {
      if (event != null) {
        if (event.getOperation().isClear() || event.getOperation().isRegionDestroy()
            || event.getOperation().isClose()) {
          releaseOps();
        }
      }
    }
  }
}
apache-2.0
InspurUSA/kudu
java/kudu-client/src/main/java/org/apache/kudu/client/KuduScanner.java
7481
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.kudu.client;

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;

import org.apache.kudu.Schema;
import org.apache.kudu.client.AsyncKuduScanner.ReadMode;

/**
 * Synchronous version of {@link AsyncKuduScanner}. Offers the same API but with blocking methods.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class KuduScanner implements Iterable<RowResult> {
  // Underlying async scanner; every method below delegates to it, joining
  // on the returned Deferred where the operation is asynchronous.
  private final AsyncKuduScanner asyncScanner;

  // Package-private: instances are created via KuduScannerBuilder / KuduClient.
  KuduScanner(AsyncKuduScanner asyncScanner) {
    this.asyncScanner = asyncScanner;
  }

  /**
   * Tells if the last rpc returned that there might be more rows to scan.
   * @return true if there might be more data to scan, else false
   */
  public boolean hasMoreRows() {
    return asyncScanner.hasMoreRows();
  }

  /**
   * If set to true, the {@link RowResult} object returned by the {@link RowResultIterator}
   * will be reused with each call to {@link RowResultIterator#next()}.
   * This can be a useful optimization to reduce the number of objects created.
   *
   * Note: DO NOT use this if the RowResult is stored between calls to next().
   * Enabling this optimization means that a call to next() mutates the previously returned
   * RowResult. Accessing the previously returned RowResult after a call to next(), by storing all
   * RowResults in a collection and accessing them later for example, will lead to all of the
   * stored RowResults being mutated as per the data in the last RowResult returned.
   */
  public void setReuseRowResult(boolean reuseRowResult) {
    asyncScanner.setReuseRowResult(reuseRowResult);
  }

  /**
   * Scans a number of rows.
   * <p>
   * Once this method returns {@code null} once (which indicates that this
   * {@code Scanner} is done scanning), calling it again leads to an undefined
   * behavior.
   * @return a list of rows.
   * @throws KuduException if anything went wrong.
   */
  public RowResultIterator nextRows() throws KuduException {
    // Blocks on the async scanner's Deferred, translating errors to KuduException.
    return KuduClient.joinAndHandleException(asyncScanner.nextRows());
  }

  /**
   * Keep the current remote scanner alive.
   * <p>
   * Keep the current remote scanner alive on the Tablet server for an
   * additional time-to-live. This is useful if the interval in between
   * nextRows() calls is big enough that the remote scanner might be garbage
   * collected. The scanner time-to-live can be configured on the tablet
   * server via the --scanner_ttl_ms configuration flag and has a default
   * of 60 seconds.
   * <p>
   * This does not invalidate any previously fetched results.
   * <p>
   * Note that an exception thrown by this method should not be taken as indication
   * that the scan has failed. Subsequent calls to nextRows() might still be successful,
   * particularly if the scanner is configured to be fault tolerant.
   * @throws KuduException if anything went wrong.
   */
  public final void keepAlive() throws KuduException {
    KuduClient.joinAndHandleException(asyncScanner.keepAlive());
  }

  /**
   * @return true if the scanner has been closed.
   */
  public boolean isClosed() {
    return asyncScanner.isClosed();
  }

  /**
   * Closes this scanner (don't forget to call this when you're done with it!).
   * <p>
   * Closing a scanner already closed has no effect.
   * @return a deferred object that indicates the completion of the request
   * @throws KuduException if anything went wrong.
   */
  public RowResultIterator close() throws KuduException {
    return KuduClient.joinAndHandleException(asyncScanner.close());
  }

  /**
   * Returns the maximum number of rows that this scanner was configured to return.
   * @return a long representing the maximum number of rows that can be returned
   */
  public long getLimit() {
    return asyncScanner.getLimit();
  }

  /**
   * Returns if this scanner was configured to cache data blocks or not.
   * @return true if this scanner will cache blocks, else else.
   */
  public boolean getCacheBlocks() {
    return asyncScanner.getCacheBlocks();
  }

  /**
   * Returns the maximum number of bytes returned by the scanner, on each batch.
   * @return a long representing the maximum number of bytes that a scanner can receive at once
   * from a tablet server
   */
  public long getBatchSizeBytes() {
    return asyncScanner.getBatchSizeBytes();
  }

  /**
   * Returns the ReadMode for this scanner.
   * @return the configured read mode for this scanner
   */
  public ReadMode getReadMode() {
    return asyncScanner.getReadMode();
  }

  /**
   * Returns the projection schema of this scanner. If specific columns were
   * not specified during scanner creation, the table schema is returned.
   * @return the projection schema for this scanner
   */
  public Schema getProjectionSchema() {
    return asyncScanner.getProjectionSchema();
  }

  /**
   * Returns the RemoteTablet currently being scanned, if any.
   */
  @InterfaceAudience.LimitedPrivate("Test")
  public RemoteTablet currentTablet() {
    return asyncScanner.currentTablet();
  }

  /**
   * Gets the replica selection mechanism being used.
   *
   * @return the replica selection mechanism
   */
  @InterfaceAudience.LimitedPrivate("Test")
  ReplicaSelection getReplicaSelection() {
    return asyncScanner.getReplicaSelection();
  }

  /**
   * Returns the current value of the scanner's scan request timeout.
   * @return the timeout value, in milliseconds
   */
  public long getScanRequestTimeout() {
    return asyncScanner.getScanRequestTimeout();
  }

  // Iterator that transparently fetches batches and keeps the scanner alive.
  @Override
  public KuduScannerIterator iterator() {
    return new KuduScannerIterator(this, asyncScanner.getKeepAlivePeriodMs());
  }

  /**
   * A Builder class to build {@link KuduScanner}.
   * Use {@link KuduClient#newScannerBuilder} in order to get a builder instance.
   */
  @InterfaceAudience.Public
  @InterfaceStability.Evolving
  public static class KuduScannerBuilder
      extends AbstractKuduScannerBuilder<KuduScannerBuilder, KuduScanner> {

    KuduScannerBuilder(AsyncKuduClient client, KuduTable table) {
      super(client, table);
    }

    /**
     * Builds a {@link KuduScanner} using the passed configurations.
     * @return a new {@link KuduScanner}
     */
    @Override
    public KuduScanner build() {
      // Wraps a fully configured AsyncKuduScanner in the synchronous facade.
      return new KuduScanner(new AsyncKuduScanner(
          client, table, projectedColumnNames, projectedColumnIndexes, readMode, isFaultTolerant,
          scanRequestTimeout, predicates, limit, cacheBlocks, prefetching, lowerBoundPrimaryKey,
          upperBoundPrimaryKey, startTimestamp, htTimestamp, batchSizeBytes,
          PartitionPruner.create(this), replicaSelection, keepAlivePeriodMs));
    }
  }
}
apache-2.0
fadeoutsoftware/acronetwork
AcronetworkServer/AcronetDaemon/src/it/fadeout/acronet/daemon/AcronetDaemonConfiguration.java
1390
package it.fadeout.acronet.daemon;

import it.fadeout.acronetwork.business.AnagTableInfo;
import it.fadeout.acronetwork.business.ChartInfo;

import java.util.ArrayList;

/**
 * Mutable configuration bean for the Acronet daemon.
 *
 * <p>Holds the file-repository location, the polling cadence, the chart time
 * range, and the chart / anagraphic-table definitions the daemon works on.
 * Plain JavaBean accessors; not thread-safe.
 */
public class AcronetDaemonConfiguration {

    // Path of the daemon's file repository (no default).
    String fileRepositoryPath;

    // Polling period, in minutes. Defaults to 1.
    int minutesPolling = 1;

    // Number of days each generated chart covers. Defaults to 15.
    int chartTimeRangeDays = 15;

    // Chart definitions; starts empty, never null.
    ArrayList<ChartInfo> chartsInfo = new ArrayList<>();

    // Anagraphic table definitions; starts empty, never null.
    ArrayList<AnagTableInfo> anagTablesInfo = new ArrayList<>();

    /** @return the configured file repository path, or null if unset */
    public String getFileRepositoryPath() {
        return fileRepositoryPath;
    }

    /** @param fileRepositoryPath new file repository path */
    public void setFileRepositoryPath(String fileRepositoryPath) {
        this.fileRepositoryPath = fileRepositoryPath;
    }

    /** @return polling period in minutes */
    public int getMinutesPolling() {
        return minutesPolling;
    }

    /** @param minutesPolling new polling period in minutes */
    public void setMinutesPolling(int minutesPolling) {
        this.minutesPolling = minutesPolling;
    }

    /** @return chart time range in days */
    public int getChartTimeRangeDays() {
        return chartTimeRangeDays;
    }

    /** @param chartTimeRangeDays new chart time range in days */
    public void setChartTimeRangeDays(int chartTimeRangeDays) {
        this.chartTimeRangeDays = chartTimeRangeDays;
    }

    /** @return the live list of chart definitions (not a copy) */
    public ArrayList<ChartInfo> getChartsInfo() {
        return chartsInfo;
    }

    /** @param chartsInfo replacement list of chart definitions */
    public void setChartsInfo(ArrayList<ChartInfo> chartsInfo) {
        this.chartsInfo = chartsInfo;
    }

    /** @return the live list of anagraphic table definitions (not a copy) */
    public ArrayList<AnagTableInfo> getAnagTablesInfo() {
        return anagTablesInfo;
    }

    /** @param anagTablesInfo replacement list of anagraphic table definitions */
    public void setAnagTablesInfo(ArrayList<AnagTableInfo> anagTablesInfo) {
        this.anagTablesInfo = anagTablesInfo;
    }
}
apache-2.0
dbrant/apps-android-wikipedia
app/src/test/java/org/wikipedia/page/NamespaceTest.java
3885
package org.wikipedia.page;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.wikipedia.dataclient.WikiSite;

import java.util.Locale;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.wikipedia.page.Namespace.FILE;
import static org.wikipedia.page.Namespace.MAIN;
import static org.wikipedia.page.Namespace.MEDIA;
import static org.wikipedia.page.Namespace.SPECIAL;
import static org.wikipedia.page.Namespace.TALK;
import static org.wikipedia.page.Namespace.USER;
import static org.wikipedia.page.Namespace.USER_TALK;

/**
 * Unit tests for {@link Namespace}: numeric code lookup, the deprecated
 * legacy-string conversions (localized namespace names in several languages),
 * and the namespace-category predicates (special/main/file/talk).
 */
@RunWith(RobolectricTestRunner.class)
public class NamespaceTest {
    // Saved so the JVM-wide default locale can be restored after this class runs.
    private static Locale PREV_DEFAULT_LOCALE;

    @BeforeClass
    public static void setUp() {
        // Force English so locale-sensitive behavior is deterministic across machines.
        PREV_DEFAULT_LOCALE = Locale.getDefault();
        Locale.setDefault(Locale.ENGLISH);
    }

    @AfterClass
    public static void tearDown() {
        Locale.setDefault(PREV_DEFAULT_LOCALE);
    }

    /** Namespace.of should map a numeric code back to its constant. */
    @Test
    public void testOf() {
        assertThat(Namespace.of(SPECIAL.code()), is(SPECIAL));
    }

    /** A null legacy string denotes the main namespace. */
    @Test
    public void testFromLegacyStringMain() {
        //noinspection deprecation
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("test"), null), is(MAIN));
    }

    /** Localized (Hebrew) file-namespace name resolves to FILE. */
    @Test
    public void testFromLegacyStringFile() {
        //noinspection deprecation
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("he"), "קובץ"), is(FILE));
    }

    /** Localized (Lezgian wiki, Cyrillic) special-namespace name resolves to SPECIAL. */
    @Test
    public void testFromLegacyStringSpecial() {
        //noinspection deprecation
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("lez"), "Служебная"), is(SPECIAL));
    }

    /** Talk namespace resolves from both English and Russian names. */
    @Test
    public void testFromLegacyStringTalk() {
        //noinspection deprecation
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("en"), "Talk"), is(TALK));
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("ru"), "Обсуждение"), is(TALK));
    }

    /** User namespace resolves from both English and Afrikaans names. */
    @Test
    public void testFromLegacyStringUser() {
        //noinspection deprecation
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("en"), "User"), is(USER));
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("af"), "Gebruiker"), is(USER));
    }

    /** User-talk namespace resolves from both English and Vietnamese names. */
    @Test
    public void testFromLegacyStringUserTalk() {
        //noinspection deprecation
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("en"), "User talk"), is(USER_TALK));
        assertThat(Namespace.fromLegacyString(WikiSite.forLanguageCode("vi"), "Thảo luận Thành viên"), is(USER_TALK));
    }

    /** MediaWiki numeric codes: main is 0, its talk page is 1. */
    @Test
    public void testCode() {
        assertThat(MAIN.code(), is(0));
        assertThat(TALK.code(), is(1));
    }

    @Test
    public void testSpecial() {
        assertThat(SPECIAL.special(), is(true));
        assertThat(MAIN.special(), is(false));
    }

    @Test
    public void testMain() {
        assertThat(MAIN.main(), is(true));
        assertThat(TALK.main(), is(false));
    }

    @Test
    public void testFile() {
        assertThat(FILE.file(), is(true));
        assertThat(MAIN.file(), is(false));
    }

    /** Namespaces with negative codes are never talk namespaces. */
    @Test
    public void testTalkNegative() {
        assertThat(MEDIA.talk(), is(false));
        assertThat(SPECIAL.talk(), is(false));
    }

    /** Code zero (main) is not a talk namespace. */
    @Test
    public void testTalkZero() {
        assertThat(MAIN.talk(), is(false));
    }

    /** Odd positive codes are talk namespaces. */
    @Test
    public void testTalkOdd() {
        assertThat(TALK.talk(), is(true));
    }

    /** The main namespace has no legacy string representation. */
    @Test
    public void testToLegacyStringMain() {
        //noinspection deprecation
        assertThat(MAIN.toLegacyString(), nullValue());
    }

    @Test
    public void testToLegacyStringNonMain() {
        //noinspection deprecation
        assertThat(TALK.toLegacyString(), is("Talk"));
    }
}
apache-2.0
Axway/ats-framework
corelibrary/src/main/java/com/axway/ats/core/utils/ExceptionUtils.java
4090
/*
 * Copyright 2017 Axway Software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.axway.ats.core.utils;

import java.io.PrintWriter;
import java.io.StringWriter;

/**
 * Formatting exceptions to plain String
 */
public class ExceptionUtils {

    /**
     * Formats an exception (message, cause, type and full stack trace) as a
     * multi-line string.
     *
     * @param e the exception to parse
     * @return a formatted string representing the provided exception
     */
    public static String getExceptionMsg( Throwable e ) {

        return getExceptionMsg(e, null);
    }

    /**
     * Formats an exception as a multi-line string, optionally prefixed by a
     * user-supplied message.
     *
     * @param e the exception to parse
     * @param usrMsg an additional message passed by user
     * @return a formatted string representing the provided exception; empty
     *         string when {@code e} is null
     */
    public static String getExceptionMsg( Throwable e, String usrMsg ) {

        if (e == null) {
            return "";
        }

        StringBuilder sb = new StringBuilder("EXCEPTION\n");

        if (!StringUtils.isNullOrEmpty(usrMsg)) {
            sb.append("USER message:\n\t").append(usrMsg).append("\n");
        }

        Throwable cause = e.getCause();
        if (cause != null) {
            sb.append("Cause:\n\t").append(cause.toString()).append("\n");
        }

        // prefer the plain message, then the localized one, then report "null"
        String message = e.getMessage() != null
                ? e.getMessage()
                : e.getLocalizedMessage();
        if (message != null) {
            sb.append("Message:\n\t").append(getMsgLines(message)).append("\n");
        } else {
            sb.append("Message: ").append("null").append("\n");
        }

        sb.append("TYPE:\n\t").append(e.getClass().toString());

        // capture the full stack trace via a string-backed writer
        sb.append("\nCALL STACK:\n");
        StringWriter stackWriter = new StringWriter();
        e.printStackTrace(new PrintWriter(stackWriter));
        sb.append(stackWriter.toString());

        return sb.toString();
    }

    // Indents each line of a (possibly multi-line) message with a tab.
    private static StringBuilder getMsgLines( String msg ) {

        StringBuilder indented = new StringBuilder();
        for (String line : msg.split("\n")) {
            indented.append(line).append("\n\t");
        }
        return indented;
    }

    /**
     * Check if exception contains certain message
     *
     * @param message the exception message as java.lang.String
     * @param exception the exception
     */
    public static boolean containsMessage( String message, Exception exception ) {

        return containsMessage(message, exception, true);
    }

    /**
     * Check if exception contains certain message
     *
     * @param message the exception message as java.lang.String
     * @param exception the exception
     * @param deepSearch whether to search for the message in the entire cause
     *            chain (true) or just the top-most exception (false)
     */
    public static boolean containsMessage( String message, Exception exception, boolean deepSearch ) {

        // walk the cause chain, stopping after the first link unless deepSearch
        for (Throwable current = exception; current != null; current = current.getCause()) {
            String currentMsg = current.getMessage();
            if (!StringUtils.isNullOrEmpty(currentMsg) && currentMsg.contains(message)) {
                return true;
            }
            if (!deepSearch) {
                return false;
            }
        }
        return false;
    }
}
apache-2.0
madanadit/alluxio
job/server/src/main/java/alluxio/master/job/command/CommandManager.java
4122
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.master.job.command; import alluxio.grpc.CancelTaskCommand; import alluxio.grpc.JobCommand; import alluxio.grpc.RunTaskCommand; import alluxio.job.JobConfig; import alluxio.job.util.SerializationUtils; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.protobuf.ByteString; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; import java.util.Map; import javax.annotation.concurrent.ThreadSafe; /** * A command manager that manages the commands to issue to the workers. */ @ThreadSafe public final class CommandManager { private static final Logger LOG = LoggerFactory.getLogger(CommandManager.class); // TODO(yupeng) add retry support private final Map<Long, List<JobCommand>> mWorkerIdToPendingCommands = Maps.newHashMap(); /** * Constructs a new {@link CommandManager}. */ public CommandManager() {} /** * Submits a run-task command to a specified worker. 
* * @param jobId the id of the job * @param taskId the id of the task * @param jobConfig the job configuration * @param taskArgs the arguments passed to the executor on the worker * @param workerId the id of the worker */ public synchronized void submitRunTaskCommand(long jobId, int taskId, JobConfig jobConfig, Object taskArgs, long workerId) { RunTaskCommand.Builder runTaskCommand = RunTaskCommand.newBuilder(); runTaskCommand.setJobId(jobId); runTaskCommand.setTaskId(taskId); try { runTaskCommand.setJobConfig(ByteString.copyFrom(SerializationUtils.serialize(jobConfig))); if (taskArgs != null) { runTaskCommand.setTaskArgs(ByteString.copyFrom(SerializationUtils.serialize(taskArgs))); } } catch (IOException e) { // TODO(yupeng) better exception handling LOG.info("Failed to serialize the run task command:" + e); return; } JobCommand.Builder command = JobCommand.newBuilder(); command.setRunTaskCommand(runTaskCommand); if (!mWorkerIdToPendingCommands.containsKey(workerId)) { mWorkerIdToPendingCommands.put(workerId, Lists.<JobCommand>newArrayList()); } mWorkerIdToPendingCommands.get(workerId).add(command.build()); } /** * Submits a cancel-task command to a specified worker. * * @param jobId the job id * @param taskId the task id * @param workerId the worker id */ public synchronized void submitCancelTaskCommand(long jobId, int taskId, long workerId) { CancelTaskCommand.Builder cancelTaskCommand = CancelTaskCommand.newBuilder(); cancelTaskCommand.setJobId(jobId); cancelTaskCommand.setTaskId(taskId); JobCommand.Builder command = JobCommand.newBuilder(); command.setCancelTaskCommand(cancelTaskCommand); if (!mWorkerIdToPendingCommands.containsKey(workerId)) { mWorkerIdToPendingCommands.put(workerId, Lists.<JobCommand>newArrayList()); } mWorkerIdToPendingCommands.get(workerId).add(command.build()); } /** * Polls all the pending commands to a worker and removes the commands from the queue. 
* * @param workerId id of the worker to send the commands to * @return the list of the commends polled */ public synchronized List<alluxio.grpc.JobCommand> pollAllPendingCommands(long workerId) { if (!mWorkerIdToPendingCommands.containsKey(workerId)) { return Lists.newArrayList(); } List<JobCommand> commands = Lists.newArrayList(mWorkerIdToPendingCommands.get(workerId)); mWorkerIdToPendingCommands.get(workerId).clear(); return commands; } }
apache-2.0
consulo/consulo
modules/base/util/util-io/src/main/java/consulo/util/io/CharSequenceReader.java
1973
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package consulo.util.io; import consulo.util.lang.CharArrayCharSequence; import javax.annotation.Nonnull; import java.io.Reader; /** * @author max */ public class CharSequenceReader extends Reader { private final CharSequence myText; private int myCurPos; public CharSequenceReader(@Nonnull CharSequence text) { myText = text; myCurPos = 0; } @Override public void close() { } @Override public int read(char[] cbuf, int off, int len) { if (off < 0 || off > cbuf.length || len < 0 || off + len > cbuf.length || off + len < 0) { throw new IndexOutOfBoundsException("cbuf.length=" + cbuf.length + "; off=" + off + "; len=" + len); } if (len == 0) { return 0; } if (myText instanceof CharArrayCharSequence) { // Optimization final int readChars = ((CharArrayCharSequence)myText).readCharsTo(myCurPos, cbuf, off, len); if (readChars < 0) return -1; myCurPos += readChars; return readChars; } int charsToCopy = Math.min(len, myText.length() - myCurPos); if (charsToCopy <= 0) return -1; for (int n = 0; n < charsToCopy; n++) { cbuf[n + off] = myText.charAt(n + myCurPos); } myCurPos += charsToCopy; return charsToCopy; } @Override public int read() { if (myCurPos >= myText.length()) return -1; return myText.charAt(myCurPos++); } }
apache-2.0
qiscus/qiscus-sdk-android
chat-core/src/main/java/com/qiscus/sdk/chat/core/data/model/QiscusCommentSendingInterceptor.java
204
package com.qiscus.sdk.chat.core.data.model;

/**
 * Interceptor hook given a {@link QiscusComment} and expected to return the
 * comment to proceed with (the same instance or a modified one). Presumably
 * invoked by the SDK before the comment is dispatched — confirm with callers.
 *
 * @author yuana
 * @since 10/20/17
 */
@FunctionalInterface
public interface QiscusCommentSendingInterceptor {
    /**
     * @param qiscusComment the comment about to be sent
     * @return the comment to continue with
     */
    QiscusComment sendComment(QiscusComment qiscusComment);
}
apache-2.0
operasoftware/AndroidOperaLink
src/com/opera/link/apilib/android/items/Note.java
1481
package com.opera.link.apilib.android.items; import java.util.Date; import java.util.HashMap; import org.json.JSONException; import org.json.JSONObject; public class Note extends NoteFolderEntry { public static final String ITEM_TYPE = "note"; public String content; public Date created; public String uri; protected Note() { } public Note(String content) { this(); this.content = content; this.created = new Date(); } @Override void loadParameters(JSONObject json) throws JSONException { if (json.has(Element.CONTENT_JSON_FIELD)) { content = json.getString(Element.CONTENT_JSON_FIELD); } if (json.has(Element.URI_JSON_FIELD)) { uri = json.getString(Element.URI_JSON_FIELD); } if (json.has(Element.CREATED_JSON_FIELD)) { created = parseDate(json.getString(Element.CREATED_JSON_FIELD)); } } @Override public HashMap<String, String> createParamsDict() { final String createdString; if (created != null) { createdString = dateToString(created); } else { createdString = null; } HashMap<String, String> params = new HashMap<String, String>() { private static final long serialVersionUID = 1L; { put(Element.CONTENT_JSON_FIELD, content); put(Element.URI_JSON_FIELD, uri); put(Element.CREATED_JSON_FIELD, createdString); } }; return skipNullParams(params); } @Override public String getItemType() { return ITEM_TYPE; } @Override public boolean isNote() { return true; } }
apache-2.0
SnappyDataInc/snappy-store
gemfirexd/tools/src/dunit/java/com/pivotal/gemfirexd/internal/engine/distributed/offheap/OffHeapPreparedStatementDUnit.java
5326
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package com.pivotal.gemfirexd.internal.engine.distributed.offheap;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

import com.pivotal.gemfirexd.TestUtil;
import com.pivotal.gemfirexd.internal.engine.distributed.PreparedStatementDUnit;
import com.pivotal.gemfirexd.jdbc.Bugs3Test.DataGenerator;

/**
 * Runs the inherited {@link PreparedStatementDUnit} suite against tables
 * stored off-heap (enabled for all tables in {@link #setUp()}), and adds
 * LOB/UDT insert coverage for replicated and partitioned off-heap tables.
 */
public class OffHeapPreparedStatementDUnit extends PreparedStatementDUnit {

  public OffHeapPreparedStatementDUnit(String name) {
    super(name);
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // Make every table created by the inherited tests use off-heap storage.
    this.configureDefaultOffHeap(true);
  }

  @Override
  public String getSuffix() {
    // DDL suffix appended by the base-class tests when creating tables.
    return " offheap ";
  }

  /**
   * Inserts rows containing LOB and UDT columns (clob, long varchar, UDTs,
   * bit data) into a replicated off-heap table and verifies at least one
   * securities row was produced and consumed.
   */
  public void testLobsInsertForReplicate() throws Exception {
    // one locator/client VM plus three servers
    startVMs(1, 3);
    Connection conn = TestUtil.getConnection();
    Statement st = conn.createStatement();
    ResultSet rs = null;
    st.execute("CREATE TYPE trade.UDTPrice EXTERNAL NAME 'udtexamples.UDTPrice' LANGUAGE JAVA");
    st.execute("CREATE TYPE trade.UUID EXTERNAL NAME 'java.util.UUID' LANGUAGE JAVA");
    st.execute("create table trade.securities (sec_id int not null, symbol varchar(10) not null, price decimal (30, 20), exchange varchar(10) not null, tid int, constraint sec_pk primary key (sec_id), "
        + "constraint sec_uq unique (symbol, exchange),"
        + " constraint exc_ch check (exchange in ('nasdaq', 'nye', 'amex', 'lse', 'fse', 'hkse', 'tse'))) replicate offheap");
    PreparedStatement psSec = conn
        .prepareStatement("insert into trade.securities values (?, ?, ?,?, ?)");
    DataGenerator dg = new DataGenerator();
    // seed a single securities row
    for (int i = 1; i < 2; ++i) {
      dg.insertIntoSecurities(psSec, i);
    }
    st.execute("create table trade.companies (symbol varchar(10) not null, exchange varchar(10) not null, companytype smallint, "
        + "uid CHAR(16) FOR BIT DATA, uuid trade.UUID, companyname char(100), companyinfo clob, "
        + "note long varchar, histprice trade.udtprice, asset bigint, logo varchar(100) for bit data, tid int, "
        + "constraint comp_pk primary key (symbol, exchange)) replicate offheap");
    PreparedStatement psComp = conn
        .prepareStatement("insert into trade.companies (symbol, exchange, companytype,"
            + " uid, uuid, companyname, companyinfo, note, histprice, asset, logo, tid) values (?,?,?,?,?,?,?,?,?,?,?,?)");
    rs = st.executeQuery("select * from trade.securities");
    int k = 0;
    // insert one companies row (with LOB/UDT columns) per securities row
    while (rs.next()) {
      String symbol = rs.getString(2);
      String exchange = rs.getString(4);
      dg.insertIntoCompanies(psComp, symbol, exchange);
      ++k;
    }
    assertTrue(k >= 1);
  }

  /**
   * Same LOB/UDT insert scenario as {@link #testLobsInsertForReplicate()} but
   * against partitioned off-heap tables with redundancy 2.
   */
  public void testLobsInsertForPartitioned() throws Exception {
    startVMs(1, 3);
    Connection conn = TestUtil.getConnection();
    Statement st = conn.createStatement();
    ResultSet rs = null;
    st.execute("CREATE TYPE trade.UDTPrice EXTERNAL NAME 'udtexamples.UDTPrice' LANGUAGE JAVA");
    st.execute("CREATE TYPE trade.UUID EXTERNAL NAME 'java.util.UUID' LANGUAGE JAVA");
    st.execute("create table trade.securities (sec_id int not null, symbol varchar(10) not null, price decimal (30, 20), exchange varchar(10) not null, tid int, constraint sec_pk primary key (sec_id), "
        + "constraint sec_uq unique (symbol, exchange), "
        + "constraint exc_ch check (exchange in ('nasdaq', 'nye', 'amex', 'lse', 'fse', 'hkse', 'tse'))) offheap redundancy 2");
    PreparedStatement psSec = conn
        .prepareStatement("insert into trade.securities values (?, ?, ?,?, ?)");
    DataGenerator dg = new DataGenerator();
    // seed a single securities row
    for (int i = 1; i < 2; ++i) {
      dg.insertIntoSecurities(psSec, i);
    }
    st.execute("create table trade.companies (symbol varchar(10) not null, exchange varchar(10) not null, "
        + "companytype smallint, uid CHAR(16) FOR BIT DATA, uuid trade.UUID, "
        + "companyname char(100), companyinfo clob, note long varchar, histprice trade.udtprice, "
        + "asset bigint, logo varchar(100) for bit data, tid int,"
        + " constraint comp_pk primary key (symbol, exchange)) offheap redundancy 2");
    PreparedStatement psComp = conn
        .prepareStatement("insert into trade.companies (symbol, exchange, companytype,"
            + " uid, uuid, companyname, companyinfo, note, histprice, asset, logo, tid) values (?,?,?,?,?,?,?,?,?,?,?,?)");
    rs = st.executeQuery("select * from trade.securities");
    int k = 0;
    // insert one companies row (with LOB/UDT columns) per securities row
    while (rs.next()) {
      String symbol = rs.getString(2);
      String exchange = rs.getString(4);
      dg.insertIntoCompanies(psComp, symbol, exchange);
      ++k;
    }
    assertTrue(k >= 1);
  }
}
apache-2.0
bingoohuang/aoc
src/main/java/org/n3r/aoc/utils/Aocs.java
5274
package org.n3r.aoc.utils; import com.google.common.base.Objects; import org.apache.commons.lang3.StringUtils; import org.joor.Reflect; import org.n3r.aoc.AocContext; import org.n3r.aoc.PropertiesAware; import org.n3r.aoc.SimpleConfigAware; import org.n3r.aoc.check.impl.order.RecordOrder; import org.n3r.eql.util.EqlUtils; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.Properties; public class Aocs { public static Properties loadClasspathProperties(String resourceName) { InputStream is = classResourceToInputStream(resourceName, false); Properties properties = new Properties(); try { properties.load(is); } catch (IOException e) { throw new RuntimeException(resourceName + " can not loaded to properties"); } return properties; } /** * Return the context classloader. BL: if this is command line operation, the classloading issues are more sane. * During servlet execution, we explicitly set the ClassLoader. * * @return The context classloader. 
*/ public static ClassLoader getClassLoader() { return Objects.firstNonNull( Thread.currentThread().getContextClassLoader(), Aocs.class.getClassLoader()); } public static InputStream classResourceToInputStream(String pathname, boolean silent) { InputStream is = classResourceToStream(pathname); if (is != null || silent) return is; throw new RuntimeException("fail to find " + pathname + " in classpath"); } public static InputStream classResourceToStream(String resourceName) { return getClassLoader().getResourceAsStream(resourceName); } public static <T> T loadObject(Properties rootProperties, Properties properties, String config) { String classNameWithPrefix = config; if (classNameWithPrefix.startsWith("@")) classNameWithPrefix = classNameWithPrefix.substring(1); int leftBracketPos = classNameWithPrefix.indexOf('('); String className; String prefix; if (leftBracketPos > 0) { int rightBracketPos = classNameWithPrefix.indexOf(')', leftBracketPos); if (rightBracketPos < 0) throw new RuntimeException("there is no matched brackets in " + config); className = classNameWithPrefix.substring(0, leftBracketPos); prefix = classNameWithPrefix.substring(leftBracketPos + 1, rightBracketPos); } else { className = classNameWithPrefix; prefix = ""; } String aliasKey = "alias." 
+ className; if (rootProperties.containsKey(aliasKey)) { className = rootProperties.getProperty(aliasKey); } T obj = Reflect.on(className).create().get(); if (obj instanceof PropertiesAware) { ((PropertiesAware) obj).setProperties(rootProperties, subProperties(properties, prefix)); } if (obj instanceof SimpleConfigAware) { ((SimpleConfigAware) obj).setSimpleConfig(prefix); } return obj; } public static Properties subProperties(Properties properties, String prefix) { if (StringUtils.isEmpty(prefix)) return properties; Properties newProperties = new Properties(); String fullPrefix = prefix + "."; int prefixSize = fullPrefix.length(); for (String key : properties.stringPropertyNames()) { if (key.indexOf(fullPrefix) == 0) { String newKey = key.substring(prefixSize); if (StringUtils.isEmpty(newKey)) continue; newProperties.put(newKey, properties.getProperty(key)); } } return newProperties; } /** * Create a temporary file */ public static File tempFile() { File f; try { f = File.createTempFile("aoc", ".tmp"); } catch (IOException e) { throw new RuntimeException(e); } f.deleteOnExit(); return f; } public static void sleepMilis(long mills) { try { Thread.sleep(mills); } catch (InterruptedException e) { // ignore } } public static void checkRequired(String ftpHost, String name) { if (StringUtils.isNotEmpty(ftpHost)) return; throw new RuntimeException(name + " is requied"); } public static String substitute(AocContext aocContext, String before) { return Substituters.parse(before, aocContext.getAocContext()); } public static RecordOrder mapRow(ResultSet rs, int rowNum) throws SQLException { ResultSetMetaData metaData = rs.getMetaData(); Object[] fieldsValue = new Object[metaData.getColumnCount()]; String[] fieldsName = new String[metaData.getColumnCount()]; for (int i = 0, ii = metaData.getColumnCount(); i < ii; ++i) { fieldsName[i] = EqlUtils.lookupColumnName(metaData, i + 1); fieldsValue[i] = EqlUtils.getResultSetValue(rs, i + 1); } return new RecordOrder(fieldsValue, 
fieldsName, null); } }
apache-2.0
eddavisson/google-cloud-datastore
java/datastore/src/main/java/com/google/datastore/v1/client/DatastoreEmulatorOptions.java
1488
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/** An immutable object containing settings for a {@link DatastoreEmulator}. */
public class DatastoreEmulatorOptions {
  // Unmodifiable snapshot of the environment variables; never null.
  private final Map<String, String> envVars;

  DatastoreEmulatorOptions(Map<String, String> envVars) {
    // Fixed: the Builder's live map used to be stored directly, so later
    // Builder.addEnvVar calls (or mutation through getEnvVars) silently
    // changed an already-built "immutable" instance. Take a defensive copy
    // and expose it read-only.
    this.envVars = Collections.unmodifiableMap(new HashMap<String, String>(envVars));
  }

  /** Builder for {@link DatastoreEmulatorOptions}. */
  public static class Builder {
    private final Map<String, String> envVars = new HashMap<String, String>();

    /** Builds an options instance from the values accumulated so far. */
    public DatastoreEmulatorOptions build() {
      return new DatastoreEmulatorOptions(envVars);
    }

    /** Adds an environment variable to pass to the emulator. */
    public Builder addEnvVar(String var, String value) {
      envVars.put(var, value);
      return this;
    }
  }

  /** @return an unmodifiable view of the environment variables to pass to the emulator */
  public Map<String, String> getEnvVars() {
    return envVars;
  }
}
apache-2.0
haocdp/EasyGo
app/src/main/java/com/unicorn/easygo/EGOApplication.java
10877
package com.unicorn.easygo; import android.widget.Toast; import com.unicorn.easygo.db.litepal.Account; import com.unicorn.easygo.db.litepal.Goods; import com.unicorn.easygo.db.litepal.OrderAlias; import com.unicorn.easygo.db.litepal.ShoppingCart; import com.unicorn.easygo.db.litepal.User; import com.unicorn.easygo.entity.UserProfile; import com.unicorn.easygo.utils.DateUtil; import org.litepal.LitePal; import org.litepal.LitePalApplication; import java.util.Date; import java.util.HashMap; import java.util.Map; /** * Created by haoc_dp on 2017/6/25. */ public class EGOApplication extends LitePalApplication { private static EGOApplication egoApplication; public static EGOApplication getInstance() { return egoApplication; } /*public EGOApplication() { super(); }*/ /** * 用户信息 */ private UserProfile userProfile; private boolean hasBundCart; private String shoppingCartNo; /** * 超市信息 */ private String marketName = ""; /** * 订单信息 */ private String orderNo = ""; private Map<Integer, Double> goodsIdList = new HashMap<>(); //存放订单里的商品ID和价格 //建立数据库 // private goodDBdao gooddbdao; // private accountDBdao acdbdao; // private scanRecordDBdao scandbdao; // private shoppingCartDBdao sCartdbdao; // private userDBdao userdbdao; // private testDBdao testdbdao; @Override public void onCreate() { super.onCreate(); egoApplication = this; userProfile = new UserProfile(); hasBundCart = false; LitePal.getDatabase(); initDB(); } /** * 初始化数据库 */ private void initDB() { // gooddbdao = new goodDBdao(getApplicationContext()); // acdbdao = new accountDBdao(getApplicationContext()); // scandbdao = new scanRecordDBdao(getApplicationContext()); // sCartdbdao = new shoppingCartDBdao(getApplicationContext()); // userdbdao = new userDBdao(getApplicationContext()); // testdbdao = new testDBdao(getApplicationContext()); // //添加商品信息 // gooddbdao.add("Geer vr 头戴眼镜",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // 
gooddbdao.add("好趣友薯片",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("蒙牛酸酸乳",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("蒙牛真果粒",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("得力笔记本",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("六神花露水",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("丝塔芙洁面乳",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("得力笔记本",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("得力订书机",100,8.5,20170621,11,"357千焦/每100克","15克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("伊利益消复原乳",450,7.8,20170615,21,"357千焦/每100克","2.8克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("雅漾大喷",100,8.5,201706221,12,"357千焦/每100克","2.8克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("蒙牛纯牛奶",100,8.5,201706221,10,"357千焦/每100克","2.8克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("维达抽纸巾",100,8.5,201706221,9,"357千焦/每100克","2.8克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // gooddbdao.add("乐事薯片",100,8.5,201706221,8,"357千焦/每100克","2.8克/每100克","11克/每100克","3.3克/每100克","70毫克/每100克","images/liulian.png"); // //添加账户信息 // acdbdao.add("女","是",238,1); // acdbdao.add("男","是",238,2); // acdbdao.add("男","是",1738,3); // acdbdao.add("女","是",538,4); // //扫描信息 // scandbdao.add(1,1,20170623); // scandbdao.add(2,1,20170623); // scandbdao.add(3,1,20170623); // //购物车信息 // sCartdbdao.add(1); // 
sCartdbdao.add(2); // sCartdbdao.add(3); // sCartdbdao.add(4); // //用户信息 // userdbdao.add("zhangsan","123456"); // userdbdao.add("lisi","123456"); // userdbdao.add("login","123456"); // //测试 // testdbdao.add(1,627834674,20170624,1);//(goodId,orderNumber,dealDate,userId) // testdbdao.add(2,761267897,20170423,1); // testdbdao.add(3,761267897,20170423,1); // testdbdao.add(4,761267897,20170423,1); // testdbdao.add(5,761267897,20170423,1); // testdbdao.add(6,761267897,20170423,1); // testdbdao.add(7,761267897,20170423,1); // testdbdao.add(8,761267459,20170417,1); // testdbdao.add(9,767853897,20170411,1); // testdbdao.add(10,761267013,20170403,1); // testdbdao.add(11,745723896,20170401,1); Account account1 = new Account("amdin", "123456"); account1.save(); User user1 = new User( account1.getUsername(), 1, "", 100); user1.save(); account1.setUser(user1); account1.save(); try { Goods goods1 = new Goods( "士力架", "69029875", "50g", 4, DateUtil.string2Date("2017-06-21"), DateUtil.string2Date("2018-06-21"), "357千焦", "15克", "11克", "3.3克", "4%", "8%", "10%", "2%", "images/士力架.png"); goods1.save(); Goods goods2 = new Goods( "炫迈口香糖", "6954432710720", "20g", 9, DateUtil.string2Date("2017-06-21"), DateUtil.string2Date("2018-06-21"), "357千焦", "15克", "11克", "3.3克", "4%", "8%", "10%", "2%", "images/炫迈口香糖.png"); goods2.save(); Goods goods3 = new Goods( "维达纸巾", "6901236341292", "150g", 24, DateUtil.string2Date("2017-06-21"), DateUtil.string2Date("2018-06-21"), "357千焦", "15克", "11克", "3.3克", "4%", "8%", "10%", "2%", "images/维达纸巾.png"); goods3.save(); ShoppingCart shoppingCart = new ShoppingCart( "sc-12434342774", false ); shoppingCart.save(); OrderAlias orderAlias = new OrderAlias( "201706221530234", new Date(), 30 ); //orderAlias.save(); /*Goods goods1 = DataSupport.findAll(Goods.class).get(0); Goods goods2 = DataSupport.findAll(Goods.class).get(1); Goods goods3 = DataSupport.findAll(Goods.class).get(2); OrderAlias orderAlias = DataSupport.where("id = ?", 
"1").find(OrderAlias.class).get(0);*/ orderAlias.getGoodsList().add(goods1); orderAlias.getGoodsList().add(goods2); orderAlias.getGoodsList().add(goods3); orderAlias.save(); /*goods1.getOrderAliasList().add(orderAlias); goods1.save(); goods2.getOrderAliasList().add(orderAlias); goods2.save(); goods3.getOrderAliasList().add(orderAlias); goods3.save();*/ /*OrderGoods orderGoods1 = new OrderGoods( orderAlias, goods1, 1 ); orderGoods1.save(); OrderGoods orderGoods2 = new OrderGoods( orderAlias, goods2, 1 ); orderGoods2.save(); OrderGoods orderGoods3 = new OrderGoods( orderAlias, goods3, 1 ); orderGoods3.save();*/ } catch (Exception e) { e.printStackTrace(); } Toast.makeText(getApplicationContext(), "数据添加成功 !", Toast.LENGTH_SHORT).show(); } public UserProfile getUserProfile() { return userProfile; } public boolean hasBundCart() { return hasBundCart; } public void setHasBundCart(boolean hasBundCart) { this.hasBundCart = hasBundCart; } public String getShoppingCartNo() { return shoppingCartNo; } public void setShoppingCartNo(String shoppingCartNo) { this.shoppingCartNo = shoppingCartNo; } public String getOrderNo() { return orderNo; } public void setOrderNo(String orderNo) { this.orderNo = orderNo; } public Map<Integer, Double> getGoodsIdList() { return goodsIdList; } public String getMarketName() { return marketName; } public void setMarketName(String marketName) { this.marketName = marketName; } }
apache-2.0
metamx/druid
java-util/src/test/java/io/druid/java/util/metrics/SigarLoadTest.java
1154
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.java.util.metrics; import junit.framework.Assert; import org.hyperic.sigar.Sigar; import org.hyperic.sigar.SigarException; import org.junit.Test; public class SigarLoadTest { @Test public void testSigarLoad() throws SigarException { Sigar sigar = SigarUtil.getSigar(); Assert.assertTrue(sigar.getPid() > 0); } }
apache-2.0
bhagatsingh/ApprovalTests.Java.Maven
src/test/java/org/approvaltests/reporters/tests/QueryableDiffReporterHarness.java
1026
package org.approvaltests.reporters.tests; import org.apache.hadoop.util.StringUtils; import org.approvaltests.reporters.GenericDiffReporter; import com.spun.util.persistence.ExecutableQuery; public class QueryableDiffReporterHarness implements ExecutableQuery { private final GenericDiffReporter reporter; private final String file1; private final String file2; public QueryableDiffReporterHarness(GenericDiffReporter reporter, String file1, String file2) { this.reporter = reporter; this.file1 = file1; this.file2 = file2; } @Override public String getQuery() throws Exception { String[] commandLine = reporter.getCommandLine("%s", "%s"); return StringUtils.join(" ", commandLine); } @Override public String executeQuery(String query) throws Exception { query = String.format(query, file1, file2); try { Runtime.getRuntime().exec(query); } catch (Throwable e) { return e.toString(); } return query; } }
apache-2.0
raviperi/storm
storm-client/src/jvm/org/apache/storm/streams/WindowedProcessorBolt.java
5300
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.streams; import com.google.common.collect.Multimap; import org.apache.storm.streams.windowing.SlidingWindows; import org.apache.storm.streams.windowing.TumblingWindows; import org.apache.storm.streams.windowing.Window; import org.apache.storm.task.OutputCollector; import org.apache.storm.task.TopologyContext; import org.apache.storm.topology.OutputFieldsDeclarer; import org.apache.storm.topology.base.BaseWindowedBolt; import org.apache.storm.tuple.Tuple; import org.apache.storm.windowing.TupleWindow; import org.jgrapht.DirectedGraph; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Date; import java.util.List; import java.util.Map; import static org.apache.storm.streams.WindowNode.PUNCTUATION; /** * Stream bolt that executes windowing operations. 
*/ class WindowedProcessorBolt extends BaseWindowedBolt implements StreamBolt { private static final Logger LOG = LoggerFactory.getLogger(WindowedProcessorBolt.class); private final ProcessorBoltDelegate delegate; private final Window<?, ?> window; WindowedProcessorBolt(String id, DirectedGraph<Node, Edge> graph, List<ProcessorNode> nodes, Window<?, ?> window) { delegate = new ProcessorBoltDelegate(id, graph, nodes); this.window = window; setWindowConfig(); } @Override public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { delegate.prepare(stormConf, context, collector); } @Override public void execute(TupleWindow inputWindow) { LOG.trace("Window triggered at {}, inputWindow {}", new Date(), inputWindow); if (delegate.isEventTimestamp()) { delegate.setEventTimestamp(inputWindow.getEndTimestamp()); } for (Tuple tuple : inputWindow.get()) { Pair<Object, String> valueAndStream = delegate.getValueAndStream(tuple); if (!StreamUtil.isPunctuation(valueAndStream.getFirst())) { delegate.process(valueAndStream.getFirst(), valueAndStream.getSecond()); } } for (String stream : delegate.getInitialStreams()) { delegate.process(PUNCTUATION, stream); } } @Override public void declareOutputFields(OutputFieldsDeclarer declarer) { delegate.declareOutputFields(declarer); } @Override public void setTimestampField(String fieldName) { delegate.setTimestampField(fieldName); } @Override public String getId() { return delegate.getId(); } private void setWindowConfig() { if (window instanceof SlidingWindows) { setSlidingWindowParams(window.getWindowLength(), window.getSlidingInterval()); } else if (window instanceof TumblingWindows) { setTumblingWindowParams(window.getWindowLength()); } if (window.getTimestampField() != null) { withTimestampField(window.getTimestampField()); } if (window.getLag() != null) { withLag(window.getLag()); } if (window.getLateTupleStream() != null) { withLateTupleStream(window.getLateTupleStream()); } } private void 
setSlidingWindowParams(Object windowLength, Object slidingInterval) { if (windowLength instanceof Count) { if (slidingInterval instanceof Count) { withWindow((Count) windowLength, (Count) slidingInterval); } else if (slidingInterval instanceof Duration) { withWindow((Count) windowLength, (Duration) slidingInterval); } } else if (windowLength instanceof Duration) { if (slidingInterval instanceof Count) { withWindow((Duration) windowLength, (Count) slidingInterval); } else if (slidingInterval instanceof Duration) { withWindow((Duration) windowLength, (Duration) slidingInterval); } } } private void setTumblingWindowParams(Object windowLength) { if (windowLength instanceof Count) { withTumblingWindow((Count) windowLength); } else if (windowLength instanceof Duration) { withTumblingWindow((Duration) windowLength); } } void setStreamToInitialProcessors(Multimap<String, ProcessorNode> streamToInitialProcessors) { delegate.setStreamToInitialProcessors(streamToInitialProcessors); } }
apache-2.0
neykov/incubator-brooklyn
policy/src/main/java/brooklyn/enricher/TimeWeightedDeltaEnricher.java
5148
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package brooklyn.enricher; import groovy.lang.Closure; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import brooklyn.enricher.basic.AbstractTypeTransformingEnricher; import brooklyn.entity.Entity; import brooklyn.event.AttributeSensor; import brooklyn.event.Sensor; import brooklyn.event.SensorEvent; import brooklyn.util.GroovyJavaMethods; import brooklyn.util.flags.SetFromFlag; import com.google.common.base.Function; import com.google.common.base.Functions; /** * Converts an absolute sensor into a delta sensor (i.e. the diff between the current and previous value), * presented as a units/timeUnit based on the event timing. * <p> * NB for time (e.g. "total milliseconds consumed") use {@link TimeFractionDeltaEnricher} */ public class TimeWeightedDeltaEnricher<T extends Number> extends AbstractTypeTransformingEnricher<T,Double> { private static final Logger LOG = LoggerFactory.getLogger(TimeWeightedDeltaEnricher.class); Number lastValue; long lastTime = -1; /** unitMillis is the number of milliseconds to apply for the conversion from input to output; * e.g. 1000 for counting things per second; * NB for time (e.g. 
"total milliseconds consumed") use {@link TimeFractionDeltaEnricher} */ @SetFromFlag int unitMillis; @SetFromFlag Function<Double,Double> postProcessor; // default 1 second public static <T extends Number> TimeWeightedDeltaEnricher<T> getPerSecondDeltaEnricher(Entity producer, Sensor<T> source, Sensor<Double> target) { return new TimeWeightedDeltaEnricher<T>(producer, source, target, 1000); } public TimeWeightedDeltaEnricher() { // for rebind } public TimeWeightedDeltaEnricher(Entity producer, Sensor<T> source, Sensor<Double> target, int unitMillis) { this(producer, source, target, unitMillis, Functions.<Double>identity()); } public TimeWeightedDeltaEnricher(Entity producer, Sensor<T> source, Sensor<Double> target, int unitMillis, Closure<Double> postProcessor) { this(producer, source, target, unitMillis, GroovyJavaMethods.<Double,Double>functionFromClosure(postProcessor)); } public TimeWeightedDeltaEnricher(Entity producer, Sensor<T> source, Sensor<Double> target, int unitMillis, Function<Double,Double> postProcessor) { super(producer, source, target); this.unitMillis = unitMillis; this.postProcessor = postProcessor; } @Override public void onEvent(SensorEvent<T> event) { onEvent(event, event.getTimestamp()); } public void onEvent(SensorEvent<T> event, long eventTime) { Number current = event.getValue(); if (current == null) { // Can't compute a delta; // don't assume current=zero because then things like requestCount->requestsPerSecond is negative! 
// instead assume same as last time, so delta == 0 double deltaPostProcessed = postProcessor.apply(0d); entity.setAttribute((AttributeSensor<Double>)target, deltaPostProcessed); if (LOG.isTraceEnabled()) LOG.trace("set {} to {}, {} -> {} at {}", new Object[] {this, deltaPostProcessed, lastValue, current, eventTime}); return; } if (eventTime > lastTime) { if (lastValue == null) { // cannot calculate time-based delta with a single value if (LOG.isTraceEnabled()) LOG.trace("{} received event but no last value so will not emit, null -> {} at {}", new Object[] {this, current, eventTime}); } else { double duration = (lastTime < 0) ? unitMillis : eventTime - lastTime; if (eventTime == lastTime) duration = 0.1; // 0.1 of a millisecond is a relatively small number: double delta = (current.doubleValue() - lastValue.doubleValue()) / (duration / unitMillis); double deltaPostProcessed = postProcessor.apply(delta); entity.setAttribute((AttributeSensor<Double>)target, deltaPostProcessed); if (LOG.isTraceEnabled()) LOG.trace("set {} to {}, {} -> {} at {}", new Object[] {this, deltaPostProcessed, lastValue, current, eventTime}); } lastValue = current; lastTime = eventTime; } } }
apache-2.0
edwtjo/nixitch
gen/org/nixos/idea/psi/NixFnLambda.java
354
// This is a generated file. Not intended for manual editing. package org.nixos.idea.psi; import java.util.List; import org.jetbrains.annotations.*; import com.intellij.psi.PsiElement; public interface NixFnLambda extends PsiElement { @NotNull NixFormalSet getFormalSet(); @NotNull PsiElement getId(); @NotNull PsiElement getNamed(); }
apache-2.0
tuwiendsg/MELA
MELA-Core/MELA-Common/src/main/java/at/ac/tuwien/dsg/mela/common/elasticityAnalysis/concepts/elasticityPathway/som/SOMStrategy.java
2580
/** * Copyright 2013 Technische Universitat Wien (TUW), Distributed Systems Group * E184 * * This work was partially supported by the European Commission in terms of the * CELAR FP7 project (FP7-ICT-2011-8 \#317790) * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package at.ac.tuwien.dsg.mela.common.elasticityAnalysis.concepts.elasticityPathway.som; import java.io.Serializable; /** * Author: Daniel Moldovan E-Mail: d.moldovan@dsg.tuwien.ac.at * * */ public abstract class SOMStrategy implements Serializable { protected Double distanceRestraintFactor; protected Double learningFactor; private Double toleranceRange; protected int neighbourhoodSize; { distanceRestraintFactor = 1d; learningFactor = 1d; } protected SOMStrategy(Double distanceRestraintFactor, Double learningFactor, int neighbourhoodSize, Double toleranceRange) { this.distanceRestraintFactor = distanceRestraintFactor; this.learningFactor = learningFactor; this.toleranceRange = toleranceRange; this.neighbourhoodSize = neighbourhoodSize; } public abstract Double getDistanceRestraintFactor(int distanceLevel, int neighboursCount); public abstract Double geLearningRestraintFactor(int distanceLevel); public Double getToleranceRange() { return toleranceRange; } public int getNeighbourhoodSize() { return neighbourhoodSize; } public SOMStrategy withDistanceRestraintFactor(final Double distanceRestraintFactor) { this.distanceRestraintFactor = distanceRestraintFactor; return this; } public SOMStrategy withLearningFactor(final 
Double learningFactor) { this.learningFactor = learningFactor; return this; } public SOMStrategy withToleranceRange(final Double toleranceRange) { this.toleranceRange = toleranceRange; return this; } public SOMStrategy withNeighbourhoodSize(final int neighbourhoodSize) { this.neighbourhoodSize = neighbourhoodSize; return this; } }
apache-2.0
nafae/developer
examples/dfp_axis/src/main/java/dfp/axis/v201408/customtargetingservice/CreateCustomTargetingKeysAndValues.java
7183
// Copyright 2014 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package dfp.axis.v201408.customtargetingservice; import com.google.api.ads.common.lib.auth.OfflineCredentials; import com.google.api.ads.common.lib.auth.OfflineCredentials.Api; import com.google.api.ads.dfp.axis.factory.DfpServices; import com.google.api.ads.dfp.axis.v201408.CustomTargetingKey; import com.google.api.ads.dfp.axis.v201408.CustomTargetingKeyType; import com.google.api.ads.dfp.axis.v201408.CustomTargetingServiceInterface; import com.google.api.ads.dfp.axis.v201408.CustomTargetingValue; import com.google.api.ads.dfp.axis.v201408.CustomTargetingValueMatchType; import com.google.api.ads.dfp.lib.client.DfpSession; import com.google.api.client.auth.oauth2.Credential; import java.util.Random; /** * This example creates new custom targeting keys and values. To determine which * custom targeting keys and values exist, run * GetAllCustomTargetingKeysAndValue.java. To target these custom * targeting keys and values, run TargetCustomCriteria.java. * * Credentials and properties in {@code fromFile()} are pulled from the * "ads.properties" file. See README for more info. 
* * Tags: CustomTargetingService.createCustomTargetingKeys * Tags: CustomTargetingService.createCustomTargetingValues * * @author Adam Rogal */ public class CreateCustomTargetingKeysAndValues { public static void runExample(DfpServices dfpServices, DfpSession session) throws Exception { // Get the CustomTargetingService. CustomTargetingServiceInterface customTargetingService = dfpServices.get(session, CustomTargetingServiceInterface.class); // Create predefined key. CustomTargetingKey genderKey = new CustomTargetingKey(); genderKey.setDisplayName("gender"); genderKey.setName("g" + new Random().nextInt(1000)); genderKey.setType(CustomTargetingKeyType.PREDEFINED); // Create predefined key that may be used for content targeting. CustomTargetingKey genreKey = new CustomTargetingKey(); genreKey.setDisplayName("genre"); genreKey.setName("genre" + new Random().nextInt(1000)); genreKey.setType(CustomTargetingKeyType.PREDEFINED); // Create free-form key. CustomTargetingKey carModelKey = new CustomTargetingKey(); carModelKey.setDisplayName("car model"); carModelKey.setName("c" + new Random().nextInt(1000)); carModelKey.setType(CustomTargetingKeyType.FREEFORM); // Create the custom targeting keys on the server. CustomTargetingKey[] customTargetingKeys = customTargetingService.createCustomTargetingKeys( new CustomTargetingKey[] {genderKey, genreKey, carModelKey}); for (CustomTargetingKey createdCustomTargetingKey : customTargetingKeys) { System.out.printf("A custom targeting key with ID \"%d\", name \"%s\", and display name " + "\"%s\" was created.\n", createdCustomTargetingKey.getId(), createdCustomTargetingKey.getName(), createdCustomTargetingKey.getDisplayName()); } // Set the created custom targeting keys. genderKey = customTargetingKeys[0]; genreKey = customTargetingKeys[1]; carModelKey = customTargetingKeys[2]; // Create custom targeting value for the predefined gender key. 
CustomTargetingValue genderMaleValue = new CustomTargetingValue(); genderMaleValue.setCustomTargetingKeyId(genderKey.getId()); genderMaleValue.setDisplayName("male"); // Name is set to 1 so that the actual name can be hidden from website // users. genderMaleValue.setName("1"); genderMaleValue.setMatchType(CustomTargetingValueMatchType.EXACT); CustomTargetingValue genderFemaleValue = new CustomTargetingValue(); genderFemaleValue.setCustomTargetingKeyId(genderKey.getId()); genderFemaleValue.setDisplayName("female"); // Name is set to 2 so that the actual name can be hidden from website // users. genderFemaleValue.setName("2"); genderFemaleValue.setMatchType(CustomTargetingValueMatchType.EXACT); // Create custom targeting value for the predefined genre key. CustomTargetingValue genreComedyValue = new CustomTargetingValue(); genreComedyValue.setCustomTargetingKeyId(genreKey.getId()); genreComedyValue.setDisplayName("comedy"); genreComedyValue.setName("comedy"); genreComedyValue.setMatchType(CustomTargetingValueMatchType.EXACT); CustomTargetingValue genreDramaValue = new CustomTargetingValue(); genreDramaValue.setCustomTargetingKeyId(genreKey.getId()); genreDramaValue.setDisplayName("drama"); genreDramaValue.setName("drama"); genreDramaValue.setMatchType(CustomTargetingValueMatchType.EXACT); // Create custom targeting value for the free-form car model key. These are // values that would be suggested in the UI or can be used when targeting // with a FreeFormCustomCriteria. CustomTargetingValue carModelHondaValue = new CustomTargetingValue(); carModelHondaValue.setCustomTargetingKeyId(carModelKey.getId()); carModelHondaValue.setDisplayName("~honda"); carModelHondaValue.setName("honda"); // A match type of broad will match anything including "honda", // i.e. "~honda". carModelHondaValue.setMatchType(CustomTargetingValueMatchType.BROAD); // Create the custom targeting values on the server. 
CustomTargetingValue[] customTargetingValues = customTargetingService.createCustomTargetingValues(new CustomTargetingValue[] { genderMaleValue, genderFemaleValue, genreComedyValue, genreDramaValue, carModelHondaValue}); for (CustomTargetingValue createdCustomTargetingValue : customTargetingValues) { System.out.printf("A custom targeting value with ID \"%d\", belonging to key with ID \"%d\", " + "with name \"%s\" and display name \"%s\" was created.\n", createdCustomTargetingValue.getId(), createdCustomTargetingValue.getCustomTargetingKeyId(), createdCustomTargetingValue.getName(), createdCustomTargetingValue.getDisplayName()); } } public static void main(String[] args) throws Exception { // Generate a refreshable OAuth2 credential similar to a ClientLogin token // and can be used in place of a service account. Credential oAuth2Credential = new OfflineCredentials.Builder() .forApi(Api.DFP) .fromFile() .build() .generateCredential(); // Construct a DfpSession. DfpSession session = new DfpSession.Builder() .fromFile() .withOAuth2Credential(oAuth2Credential) .build(); DfpServices dfpServices = new DfpServices(); runExample(dfpServices, session); } }
apache-2.0
chmyga/component-runtime
component-runtime-manager/src/main/java/org/talend/sdk/component/runtime/manager/service/api/Unwrappable.java
883
/** * Copyright (C) 2006-2020 Talend Inc. - www.talend.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.talend.sdk.component.runtime.manager.service.api; // marks a class as being unwrappable which means some internals are accessible in an unsafe and advanced manner. @FunctionalInterface public interface Unwrappable { <T> T unwrap(Class<T> type); }
apache-2.0
zoozooll/MyExercise
meep/MeepOpenBox/src/com/oregonscientific/meep/meepopenbox/MeepOpenBoxTimeZoneSelect.java
7237
package com.oregonscientific.meep.meepopenbox; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.TimeZone; import java.util.concurrent.TimeUnit; import org.xmlpull.v1.XmlPullParserException; import android.app.Activity; import android.app.AlarmManager; import android.app.DialogFragment; import android.content.Context; import android.content.Intent; import android.content.res.XmlResourceParser; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.Button; import android.widget.ListView; import android.widget.TextView; import com.oregonscientific.meep.meepopenbox.view.MeepOpenBoxDialogFragment; import com.oregonscientific.meep.meepopenbox.view.MeepOpenBoxTimeZoneArrayAdapter; import com.oregonscientific.meep.meepopenbox.view.MeepOpenBoxViewManager; /** * Activity for Time Zone Select page * @author Charles */ public class MeepOpenBoxTimeZoneSelect extends MeepOpenBoxBaseActivity { public static final String TAG = MeepOpenBoxTimeZoneSelect.class.getSimpleName(); public static final String GMT = "GMT"; public static final String UTC = "UTC"; private static final String XMLTAG_TIMEZONE = "timezone"; private static final String KEY_ID = "id"; private static final String KEY_DISPLAYNAME = "name"; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.open_box_time_zone_select_layout); final ListView listView = (ListView) findViewById(R.id.timeZoneSelectListView); final ArrayList<String> timeZoneList = new ArrayList<String>(); List<HashMap<String, String>> zones = getZones(); for (HashMap<String, String> map : zones) { timeZoneList.add(map.get(KEY_DISPLAYNAME) + " " + 
getUTCBias(TimeZone.getTimeZone(map.get(KEY_ID)))); } Collections.sort(timeZoneList, new TimeZoneComparator()); final MeepOpenBoxTimeZoneArrayAdapter adapter = new MeepOpenBoxTimeZoneArrayAdapter(this, timeZoneList); listView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick( AdapterView<?> parent, View view, int position, long id) { adapter.setSelectedValue((String) listView.getItemAtPosition(position)); adapter.notifyDataSetChanged(); } }); listView.setAdapter(adapter); Button backButton = (Button) findViewById(R.id.timeZoneSelectBackBtn); backButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View view) { onBackPressed(); } }); Button nextButton = (Button) findViewById(R.id.timeZoneSelectNextBtn); nextButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View view) { if (checkTimeZone(timeZoneList, adapter.getSelectedValue())) { MeepOpenBoxViewManager.goToNextPage(MeepOpenBoxTimeZoneSelect.this); } } }); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (resultCode == Activity.RESULT_OK) { setResult(Activity.RESULT_OK); finish(); } } @Override public void hideBackButton() { Button backButton = (Button) findViewById(R.id.timeZoneSelectBackBtn); backButton.setVisibility(View.INVISIBLE); TextView backButtonText = (TextView) findViewById(R.id.timeZoneSelectBackText); backButtonText.setVisibility(View.INVISIBLE); } @Override public void setNextButtonEnabled(boolean enabled) { Button nextButton = (Button) findViewById(R.id.timeZoneSelectNextBtn); nextButton.setEnabled(enabled); } private List<HashMap<String, String>> getZones() { List<HashMap<String, String>> myData = new ArrayList<HashMap<String, String>>(); try { XmlResourceParser xrp = getResources().getXml(R.xml.timezones); while (xrp.next() != XmlResourceParser.START_TAG) ; xrp.next(); while (xrp.getEventType() != 
XmlResourceParser.END_TAG) { while (xrp.getEventType() != XmlResourceParser.START_TAG) { if (xrp.getEventType() == XmlResourceParser.END_DOCUMENT) { return myData; } xrp.next(); } if (xrp.getName().equals(XMLTAG_TIMEZONE)) { String id = xrp.getAttributeValue(0); String displayName = xrp.nextText(); HashMap<String, String> map = new HashMap<String, String>(); map.put(KEY_ID, id); map.put(KEY_DISPLAYNAME, displayName); myData.add(map); } while (xrp.getEventType() != XmlResourceParser.END_TAG) { xrp.next(); } xrp.next(); } xrp.close(); } catch (XmlPullParserException xppe) { Log.e(TAG, "Ill-formatted timezones.xml file"); } catch (java.io.IOException ioe) { Log.e(TAG, "Unable to read timezones.xml file"); } return myData; } /** * Checks whether selected time zone can be set * @param mLv ListView of time zone * @param value name of time zone selected * @return true if time zone is set, false otherwise */ private boolean checkTimeZone(ArrayList<String> timeZoneList, String value) { if (value != null && timeZoneList.contains(value)) { int pos = value.indexOf(GMT); if (pos != -1) { String timeZone = value.substring(pos, pos + 9); setTimeZone(timeZone); } return true; } DialogFragment newFragment = MeepOpenBoxDialogFragment.newInstance(MeepOpenBoxDialogFragment.TIMEZONE_NOT_SELECTED_DIALOG_ID); newFragment.show(getFragmentManager(), "dialog"); setNextButtonEnabled(false); return false; } /** * Sets time zone * @param gmt Time Zone in format "GMT+00:00" */ private void setTimeZone(String gmt) { if (gmt != null) { AlarmManager alarm = (AlarmManager) getSystemService(Context.ALARM_SERVICE); alarm.setTimeZone(gmt); TimeZone.setDefault(TimeZone.getTimeZone(gmt)); } } /** * Gets UTC bias from a time zone * @param timeZone time zone * @return UTC bias of time zone */ private String getUTCBias(TimeZone timeZone) { String number = "+00"; if (timeZone == null) { return "(" + GMT + number + ":00)"; } boolean inDaylightTime = timeZone.inDaylightTime(new Date()); int offset = 
timeZone.getRawOffset(); offset = inDaylightTime ? offset + timeZone.getDSTSavings() : offset; String formatString = offset >= 0 ? "+%02d:%02d" : "-%02d:%02d"; offset = Math.abs(offset); number = String.format( Locale.ENGLISH, formatString, TimeUnit.MILLISECONDS.toHours(offset), TimeUnit.MILLISECONDS.toMinutes(offset) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(offset))); return "(" + GMT + number + ")"; } private class TimeZoneComparator implements Comparator<String> { @Override public int compare(String stringA, String stringB) { if (stringA == null && stringB == null) { return 0; } else if (stringA != null && stringB == null) { return 1; } else if (stringA == null && stringB != null) { return -1; } else { return stringA.compareTo(stringB); } } } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-managedblockchain/src/main/java/com/amazonaws/services/managedblockchain/model/Framework.java
1755
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.managedblockchain.model; import javax.annotation.Generated; /** * */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public enum Framework { HYPERLEDGER_FABRIC("HYPERLEDGER_FABRIC"); private String value; private Framework(String value) { this.value = value; } @Override public String toString() { return this.value; } /** * Use this in place of valueOf. * * @param value * real value * @return Framework corresponding to the value * * @throws IllegalArgumentException * If the specified value does not map to one of the known values in this enum. */ public static Framework fromValue(String value) { if (value == null || "".equals(value)) { throw new IllegalArgumentException("Value cannot be null or empty!"); } for (Framework enumEntry : Framework.values()) { if (enumEntry.toString().equals(value)) { return enumEntry; } } throw new IllegalArgumentException("Cannot create enum from " + value + " value!"); } }
apache-2.0
pbailis/cassandra-pbs
src/java/org/apache/cassandra/config/CFMetaData.java
41833
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.config; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.nio.ByteBuffer; import java.util.*; import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.avro.util.Utf8; import org.apache.cassandra.db.*; import org.apache.cassandra.db.compaction.AbstractCompactionStrategy; import org.apache.cassandra.db.marshal.*; import org.apache.cassandra.db.migration.Migration; import org.apache.cassandra.db.migration.avro.ColumnDef; import org.apache.cassandra.io.IColumnSerializer; import org.apache.cassandra.io.compress.CompressionParameters; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.cassandra.utils.ByteBufferUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public final class CFMetaData { // // !! Important !! // This class can be tricky to modify. Please read http://wiki.apache.org/cassandra/ConfigurationNotes // for how to do so safely. 
// private static Logger logger = LoggerFactory.getLogger(CFMetaData.class); public final static double DEFAULT_READ_REPAIR_CHANCE = 0.1; public final static boolean DEFAULT_REPLICATE_ON_WRITE = true; public final static int DEFAULT_GC_GRACE_SECONDS = 864000; public final static int DEFAULT_MIN_COMPACTION_THRESHOLD = 4; public final static int DEFAULT_MAX_COMPACTION_THRESHOLD = 32; public final static double DEFAULT_MERGE_SHARDS_CHANCE = 0.1; public final static String DEFAULT_COMPACTION_STRATEGY_CLASS = "SizeTieredCompactionStrategy"; public final static ByteBuffer DEFAULT_KEY_NAME = ByteBufferUtil.bytes("KEY"); public static final CFMetaData StatusCf = newSystemMetadata(SystemTable.STATUS_CF, 0, "persistent metadata for the local node", BytesType.instance, null); public static final CFMetaData HintsCf = newSystemMetadata(HintedHandOffManager.HINTS_CF, 1, "hinted handoff data", BytesType.instance, BytesType.instance); public static final CFMetaData MigrationsCf = newSystemMetadata(Migration.MIGRATIONS_CF, 2, "individual schema mutations", TimeUUIDType.instance, null); public static final CFMetaData SchemaCf = newSystemMetadata(Migration.SCHEMA_CF, 3, "current state of the schema", UTF8Type.instance, null); public static final CFMetaData IndexCf = newSystemMetadata(SystemTable.INDEX_CF, 5, "indexes that have been completed", UTF8Type.instance, null); public static final CFMetaData NodeIdCf = newSystemMetadata(SystemTable.NODE_ID_CF, 6, "nodeId and their metadata", TimeUUIDType.instance, null); public static final CFMetaData VersionCf = newSystemMetadata(SystemTable.VERSION_CF, 7, "server version information", UTF8Type.instance, null); static { try { VersionCf.keyAlias(ByteBufferUtil.bytes("component")) .keyValidator(UTF8Type.instance) .columnMetadata(Collections.singletonMap(ByteBufferUtil.bytes("version"), new ColumnDefinition(ByteBufferUtil.bytes("version"), UTF8Type.instance, null, null, null))); } catch (ConfigurationException e) { throw new 
RuntimeException(e); } } //REQUIRED public final Integer cfId; // internal id, never exposed to user public final String ksName; // name of keyspace public final String cfName; // name of this column family public final ColumnFamilyType cfType; // standard, super public final AbstractType comparator; // bytes, long, timeuuid, utf8, etc. public final AbstractType subcolumnComparator; // like comparator, for supercolumns //OPTIONAL private String comment; // default none, for humans only private double readRepairChance; // default 1.0 (always), chance [0.0,1.0] of read repair private boolean replicateOnWrite; // default false private int gcGraceSeconds; // default 864000 (ten days) private AbstractType defaultValidator; // default BytesType (no-op), use comparator types private AbstractType keyValidator; // default BytesType (no-op), use comparator types private int minCompactionThreshold; // default 4 private int maxCompactionThreshold; // default 32 // mergeShardsChance is now obsolete, but left here so as to not break // thrift compatibility private double mergeShardsChance; // default 0.1, chance [0.0, 1.0] of merging old shards during replication private ByteBuffer keyAlias; // default NULL private Double bloomFilterFpChance; // default NULL private Map<ByteBuffer, ColumnDefinition> column_metadata; public Class<? 
extends AbstractCompactionStrategy> compactionStrategyClass; public Map<String, String> compactionStrategyOptions; private CompressionParameters compressionParameters; public CFMetaData comment(String prop) { comment = enforceCommentNotNull(prop); return this;} public CFMetaData readRepairChance(double prop) {readRepairChance = prop; return this;} public CFMetaData replicateOnWrite(boolean prop) {replicateOnWrite = prop; return this;} public CFMetaData gcGraceSeconds(int prop) {gcGraceSeconds = prop; return this;} public CFMetaData defaultValidator(AbstractType prop) {defaultValidator = prop; return this;} public CFMetaData keyValidator(AbstractType prop) {keyValidator = prop; return this;} public CFMetaData minCompactionThreshold(int prop) {minCompactionThreshold = prop; return this;} public CFMetaData maxCompactionThreshold(int prop) {maxCompactionThreshold = prop; return this;} public CFMetaData mergeShardsChance(double prop) {mergeShardsChance = prop; return this;} public CFMetaData keyAlias(ByteBuffer prop) {keyAlias = prop; return this;} public CFMetaData columnMetadata(Map<ByteBuffer,ColumnDefinition> prop) {column_metadata = prop; return this;} public CFMetaData compactionStrategyClass(Class<? 
extends AbstractCompactionStrategy> prop) {compactionStrategyClass = prop; return this;} public CFMetaData compactionStrategyOptions(Map<String, String> prop) {compactionStrategyOptions = prop; return this;} public CFMetaData compressionParameters(CompressionParameters prop) {compressionParameters = prop; return this;} public CFMetaData bloomFilterFpChance(Double prop) { bloomFilterFpChance = prop; return this;} public CFMetaData(String keyspace, String name, ColumnFamilyType type, AbstractType comp, AbstractType subcc) { this(keyspace, name, type, comp, subcc, Schema.instance.nextCFId()); } private CFMetaData(String keyspace, String name, ColumnFamilyType type, AbstractType comp, AbstractType subcc, int id) { // Final fields must be set in constructor ksName = keyspace; cfName = name; cfType = type; comparator = comp; subcolumnComparator = enforceSubccDefault(type, subcc); // System cfs have specific ids, and copies of old CFMDs need // to copy over the old id. cfId = id; this.init(); } private AbstractType enforceSubccDefault(ColumnFamilyType cftype, AbstractType subcc) { return (subcc == null) && (cftype == ColumnFamilyType.Super) ? BytesType.instance : subcc; } private static String enforceCommentNotNull (CharSequence comment) { return (comment == null) ? "" : comment.toString(); } private void init() { // Set a bunch of defaults readRepairChance = DEFAULT_READ_REPAIR_CHANCE; replicateOnWrite = DEFAULT_REPLICATE_ON_WRITE; gcGraceSeconds = DEFAULT_GC_GRACE_SECONDS; minCompactionThreshold = DEFAULT_MIN_COMPACTION_THRESHOLD; maxCompactionThreshold = DEFAULT_MAX_COMPACTION_THRESHOLD; mergeShardsChance = DEFAULT_MERGE_SHARDS_CHANCE; // Defaults strange or simple enough to not need a DEFAULT_T for defaultValidator = BytesType.instance; keyValidator = BytesType.instance; comment = ""; keyAlias = null; // This qualifies as a 'strange default'. 
column_metadata = new HashMap<ByteBuffer,ColumnDefinition>(); try { compactionStrategyClass = createCompactionStrategy(DEFAULT_COMPACTION_STRATEGY_CLASS); } catch (ConfigurationException e) { throw new AssertionError(e); } compactionStrategyOptions = new HashMap<String, String>(); compressionParameters = new CompressionParameters(null); } private static CFMetaData newSystemMetadata(String cfName, int cfId, String comment, AbstractType comparator, AbstractType subcc) { ColumnFamilyType type = subcc == null ? ColumnFamilyType.Standard : ColumnFamilyType.Super; CFMetaData newCFMD = new CFMetaData(Table.SYSTEM_TABLE, cfName, type, comparator, subcc, cfId); return newCFMD.comment(comment) .readRepairChance(0) .gcGraceSeconds(0) .mergeShardsChance(0.0); } public static CFMetaData newIndexMetadata(CFMetaData parent, ColumnDefinition info, AbstractType columnComparator) { return new CFMetaData(parent.ksName, parent.indexColumnFamilyName(info), ColumnFamilyType.Standard, columnComparator, null) .keyValidator(info.getValidator()) .readRepairChance(0.0) .gcGraceSeconds(parent.gcGraceSeconds) .minCompactionThreshold(parent.minCompactionThreshold) .maxCompactionThreshold(parent.maxCompactionThreshold); } // Create a new CFMD by changing just the cfName public static CFMetaData rename(CFMetaData cfm, String newName) { return copyOpts(new CFMetaData(cfm.ksName, newName, cfm.cfType, cfm.comparator, cfm.subcolumnComparator, cfm.cfId), cfm); } // Create a new CFMD by changing just the ksName public static CFMetaData renameTable(CFMetaData cfm, String ksName) { return copyOpts(new CFMetaData(ksName, cfm.cfName, cfm.cfType, cfm.comparator, cfm.subcolumnComparator, cfm.cfId), cfm); } private static CFMetaData copyOpts(CFMetaData newCFMD, CFMetaData oldCFMD) { return newCFMD.comment(oldCFMD.comment) .readRepairChance(oldCFMD.readRepairChance) .replicateOnWrite(oldCFMD.replicateOnWrite) .gcGraceSeconds(oldCFMD.gcGraceSeconds) .defaultValidator(oldCFMD.defaultValidator) 
.minCompactionThreshold(oldCFMD.minCompactionThreshold) .maxCompactionThreshold(oldCFMD.maxCompactionThreshold) .columnMetadata(oldCFMD.column_metadata) .compactionStrategyClass(oldCFMD.compactionStrategyClass) .compactionStrategyOptions(oldCFMD.compactionStrategyOptions) .compressionParameters(oldCFMD.compressionParameters) .bloomFilterFpChance(oldCFMD.bloomFilterFpChance); } /** * generate a column family name for an index corresponding to the given column. * This is NOT the same as the index's name! This is only used in sstable filenames and is not exposed to users. * * @param info A definition of the column with index * * @return name of the index ColumnFamily */ public String indexColumnFamilyName(ColumnDefinition info) { // TODO simplify this when info.index_name is guaranteed to be set return cfName + "." + (info.getIndexName() == null ? ByteBufferUtil.bytesToHex(info.name) : info.getIndexName()); } // converts CFM to avro CfDef public org.apache.cassandra.db.migration.avro.CfDef toAvro() { org.apache.cassandra.db.migration.avro.CfDef cf = new org.apache.cassandra.db.migration.avro.CfDef(); cf.id = cfId; cf.keyspace = new Utf8(ksName); cf.name = new Utf8(cfName); cf.column_type = new Utf8(cfType.name()); cf.comparator_type = new Utf8(comparator.toString()); if (subcolumnComparator != null) { assert cfType == ColumnFamilyType.Super : String.format("%s CF %s should not have subcomparator %s defined", cfType, cfName, subcolumnComparator); cf.subcomparator_type = new Utf8(subcolumnComparator.toString()); } cf.comment = new Utf8(enforceCommentNotNull(comment)); cf.read_repair_chance = readRepairChance; cf.replicate_on_write = replicateOnWrite; cf.gc_grace_seconds = gcGraceSeconds; cf.default_validation_class = defaultValidator == null ? 
null : new Utf8(defaultValidator.toString()); cf.key_validation_class = new Utf8(keyValidator.toString()); cf.min_compaction_threshold = minCompactionThreshold; cf.max_compaction_threshold = maxCompactionThreshold; cf.merge_shards_chance = mergeShardsChance; cf.key_alias = keyAlias; cf.column_metadata = new ArrayList<ColumnDef>(column_metadata.size()); for (ColumnDefinition cd : column_metadata.values()) cf.column_metadata.add(cd.toAvro()); cf.compaction_strategy = new Utf8(compactionStrategyClass.getName()); if (compactionStrategyOptions != null) { cf.compaction_strategy_options = new HashMap<CharSequence, CharSequence>(); for (Map.Entry<String, String> e : compactionStrategyOptions.entrySet()) cf.compaction_strategy_options.put(new Utf8(e.getKey()), new Utf8(e.getValue())); } cf.compression_options = compressionParameters.asAvroOptions(); cf.bloom_filter_fp_chance = bloomFilterFpChance; return cf; } public static CFMetaData fromAvro(org.apache.cassandra.db.migration.avro.CfDef cf) { AbstractType comparator; AbstractType subcolumnComparator = null; AbstractType validator; AbstractType keyValidator; try { comparator = TypeParser.parse(cf.comparator_type.toString()); if (cf.subcomparator_type != null) subcolumnComparator = TypeParser.parse(cf.subcomparator_type); validator = TypeParser.parse(cf.default_validation_class); keyValidator = TypeParser.parse(cf.key_validation_class); } catch (Exception ex) { throw new RuntimeException("Could not inflate CFMetaData for " + cf, ex); } Map<ByteBuffer, ColumnDefinition> column_metadata = new TreeMap<ByteBuffer, ColumnDefinition>(BytesType.instance); for (ColumnDef aColumn_metadata : cf.column_metadata) { ColumnDefinition cd = ColumnDefinition.fromAvro(aColumn_metadata); if (cd.getIndexType() != null && cd.getIndexName() == null) cd.setIndexName(getDefaultIndexName(cf.name.toString(), comparator, cd.name)); column_metadata.put(cd.name, cd); } CFMetaData newCFMD = new CFMetaData(cf.keyspace.toString(), cf.name.toString(), 
ColumnFamilyType.create(cf.column_type.toString()), comparator, subcolumnComparator, cf.id); // When we pull up an old avro CfDef which doesn't have these arguments, // it doesn't default them correctly. Without explicit defaulting, // grandfathered metadata becomes wrong or causes crashes. // Isn't AVRO supposed to handle stuff like this? if (cf.min_compaction_threshold != null) { newCFMD.minCompactionThreshold(cf.min_compaction_threshold); } if (cf.max_compaction_threshold != null) { newCFMD.maxCompactionThreshold(cf.max_compaction_threshold); } if (cf.merge_shards_chance != null) { newCFMD.mergeShardsChance(cf.merge_shards_chance); } if (cf.key_alias != null) { newCFMD.keyAlias(cf.key_alias); } if (cf.compaction_strategy != null) { try { newCFMD.compactionStrategyClass = createCompactionStrategy(cf.compaction_strategy.toString()); } catch (ConfigurationException e) { throw new RuntimeException(e); } } if (cf.compaction_strategy_options != null) { for (Map.Entry<CharSequence, CharSequence> e : cf.compaction_strategy_options.entrySet()) newCFMD.compactionStrategyOptions.put(e.getKey().toString(), e.getValue().toString()); } CompressionParameters cp; try { cp = CompressionParameters.create(cf.compression_options); } catch (ConfigurationException e) { throw new RuntimeException(e); } return newCFMD.comment(cf.comment.toString()) .readRepairChance(cf.read_repair_chance) .replicateOnWrite(cf.replicate_on_write) .gcGraceSeconds(cf.gc_grace_seconds) .defaultValidator(validator) .keyValidator(keyValidator) .columnMetadata(column_metadata) .compressionParameters(cp) .bloomFilterFpChance(cf.bloom_filter_fp_chance); } public String getComment() { return comment; } public double getReadRepairChance() { return readRepairChance; } public double getMergeShardsChance() { return mergeShardsChance; } public boolean getReplicateOnWrite() { return replicateOnWrite; } public int getGcGraceSeconds() { return gcGraceSeconds; } public AbstractType getDefaultValidator() { return 
defaultValidator; } public AbstractType getKeyValidator() { return keyValidator; } public Integer getMinCompactionThreshold() { return minCompactionThreshold; } public Integer getMaxCompactionThreshold() { return maxCompactionThreshold; } public ByteBuffer getKeyName() { return keyAlias == null ? DEFAULT_KEY_NAME : keyAlias; } public CompressionParameters compressionParameters() { return compressionParameters; } public Map<ByteBuffer, ColumnDefinition> getColumn_metadata() { return Collections.unmodifiableMap(column_metadata); } public AbstractType getComparatorFor(ByteBuffer superColumnName) { return superColumnName == null ? comparator : subcolumnComparator; } public Double getBloomFilterFpChance() { return bloomFilterFpChance; } public boolean equals(Object obj) { if (obj == this) { return true; } else if (obj == null || obj.getClass() != getClass()) { return false; } CFMetaData rhs = (CFMetaData) obj; return new EqualsBuilder() .append(ksName, rhs.ksName) .append(cfName, rhs.cfName) .append(cfType, rhs.cfType) .append(comparator, rhs.comparator) .append(subcolumnComparator, rhs.subcolumnComparator) .append(comment, rhs.comment) .append(readRepairChance, rhs.readRepairChance) .append(replicateOnWrite, rhs.replicateOnWrite) .append(gcGraceSeconds, rhs.gcGraceSeconds) .append(defaultValidator, rhs.defaultValidator) .append(keyValidator, rhs.keyValidator) .append(minCompactionThreshold, rhs.minCompactionThreshold) .append(maxCompactionThreshold, rhs.maxCompactionThreshold) .append(cfId.intValue(), rhs.cfId.intValue()) .append(column_metadata, rhs.column_metadata) .append(mergeShardsChance, rhs.mergeShardsChance) .append(keyAlias, rhs.keyAlias) .append(compactionStrategyClass, rhs.compactionStrategyClass) .append(compactionStrategyOptions, rhs.compactionStrategyOptions) .append(compressionParameters, rhs.compressionParameters) .append(bloomFilterFpChance, rhs.bloomFilterFpChance) .isEquals(); } public int hashCode() { return new HashCodeBuilder(29, 1597) 
.append(ksName) .append(cfName) .append(cfType) .append(comparator) .append(subcolumnComparator) .append(comment) .append(readRepairChance) .append(replicateOnWrite) .append(gcGraceSeconds) .append(defaultValidator) .append(keyValidator) .append(minCompactionThreshold) .append(maxCompactionThreshold) .append(cfId) .append(column_metadata) .append(mergeShardsChance) .append(keyAlias) .append(compactionStrategyClass) .append(compactionStrategyOptions) .append(compressionParameters) .append(bloomFilterFpChance) .toHashCode(); } public AbstractType getValueValidator(ByteBuffer column) { return getValueValidator(column_metadata.get(column)); } public AbstractType getValueValidator(ColumnDefinition columnDefinition) { return columnDefinition == null ? defaultValidator : columnDefinition.getValidator(); } /** applies implicit defaults to cf definition. useful in updates */ public static void applyImplicitDefaults(org.apache.cassandra.thrift.CfDef cf_def) { if (!cf_def.isSetComment()) cf_def.setComment(""); if (!cf_def.isSetReplicate_on_write()) cf_def.setReplicate_on_write(CFMetaData.DEFAULT_REPLICATE_ON_WRITE); if (!cf_def.isSetMin_compaction_threshold()) cf_def.setMin_compaction_threshold(CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD); if (!cf_def.isSetMax_compaction_threshold()) cf_def.setMax_compaction_threshold(CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD); if (!cf_def.isSetMerge_shards_chance()) cf_def.setMerge_shards_chance(CFMetaData.DEFAULT_MERGE_SHARDS_CHANCE); if (null == cf_def.compaction_strategy) cf_def.compaction_strategy = DEFAULT_COMPACTION_STRATEGY_CLASS; if (null == cf_def.compaction_strategy_options) cf_def.compaction_strategy_options = Collections.emptyMap(); } public static CFMetaData fromThrift(org.apache.cassandra.thrift.CfDef cf_def) throws InvalidRequestException, ConfigurationException { ColumnFamilyType cfType = ColumnFamilyType.create(cf_def.column_type); if (cfType == null) { throw new InvalidRequestException("Invalid column type " + 
cf_def.column_type); } applyImplicitDefaults(cf_def); CFMetaData newCFMD = new CFMetaData(cf_def.keyspace, cf_def.name, cfType, TypeParser.parse(cf_def.comparator_type), cf_def.subcomparator_type == null ? null : TypeParser.parse(cf_def.subcomparator_type), cf_def.isSetId() ? cf_def.id : Schema.instance.nextCFId()); if (cf_def.isSetGc_grace_seconds()) { newCFMD.gcGraceSeconds(cf_def.gc_grace_seconds); } if (cf_def.isSetMin_compaction_threshold()) { newCFMD.minCompactionThreshold(cf_def.min_compaction_threshold); } if (cf_def.isSetMax_compaction_threshold()) { newCFMD.maxCompactionThreshold(cf_def.max_compaction_threshold); } if (cf_def.isSetMerge_shards_chance()) { newCFMD.mergeShardsChance(cf_def.merge_shards_chance); } if (cf_def.isSetKey_alias()) { newCFMD.keyAlias(cf_def.key_alias); } if (cf_def.isSetKey_validation_class()) { newCFMD.keyValidator(TypeParser.parse(cf_def.key_validation_class)); } if (cf_def.isSetCompaction_strategy()) newCFMD.compactionStrategyClass = createCompactionStrategy(cf_def.compaction_strategy); if (cf_def.isSetCompaction_strategy_options()) newCFMD.compactionStrategyOptions(new HashMap<String, String>(cf_def.compaction_strategy_options)); if (cf_def.isSetBloom_filter_fp_chance()) newCFMD.bloomFilterFpChance(cf_def.bloom_filter_fp_chance); CompressionParameters cp = CompressionParameters.create(cf_def.compression_options); return newCFMD.comment(cf_def.comment) .readRepairChance(cf_def.read_repair_chance) .replicateOnWrite(cf_def.replicate_on_write) .defaultValidator(TypeParser.parse(cf_def.default_validation_class)) .keyValidator(TypeParser.parse(cf_def.key_validation_class)) .columnMetadata(ColumnDefinition.fromThrift(cf_def.column_metadata)) .compressionParameters(cp) .validate(); } /** updates CFMetaData in-place to match cf_def */ public void apply(org.apache.cassandra.db.migration.avro.CfDef cf_def) throws ConfigurationException { logger.debug("applying {} to {}", cf_def, this); // validate if 
(!cf_def.keyspace.toString().equals(ksName)) throw new ConfigurationException(String.format("Keyspace mismatch (found %s; expected %s)", cf_def.keyspace, ksName)); if (!cf_def.name.toString().equals(cfName)) throw new ConfigurationException(String.format("Column family mismatch (found %s; expected %s)", cf_def.name, cfName)); if (!cf_def.id.equals(cfId)) throw new ConfigurationException(String.format("Column family ID mismatch (found %s; expected %s)", cf_def.id, cfId)); if (!cf_def.column_type.toString().equals(cfType.name())) throw new ConfigurationException("types do not match."); if (comparator != TypeParser.parse(cf_def.comparator_type)) throw new ConfigurationException("comparators do not match."); if (cf_def.subcomparator_type == null || cf_def.subcomparator_type.equals("")) { if (subcolumnComparator != null) throw new ConfigurationException("subcolumncomparators do not match."); // else, it's null and we're good. } else if (subcolumnComparator != TypeParser.parse(cf_def.subcomparator_type)) throw new ConfigurationException("subcolumncomparators do not match."); validateMinMaxCompactionThresholds(cf_def); comment = enforceCommentNotNull(cf_def.comment); readRepairChance = cf_def.read_repair_chance; replicateOnWrite = cf_def.replicate_on_write; gcGraceSeconds = cf_def.gc_grace_seconds; defaultValidator = TypeParser.parse(cf_def.default_validation_class); keyValidator = TypeParser.parse(cf_def.key_validation_class); minCompactionThreshold = cf_def.min_compaction_threshold; maxCompactionThreshold = cf_def.max_compaction_threshold; mergeShardsChance = cf_def.merge_shards_chance; keyAlias = cf_def.key_alias; if (cf_def.bloom_filter_fp_chance != null) bloomFilterFpChance = cf_def.bloom_filter_fp_chance; // adjust column definitions. figure out who is coming and going. 
Set<ByteBuffer> toRemove = new HashSet<ByteBuffer>(); Set<ByteBuffer> newColumns = new HashSet<ByteBuffer>(); Set<org.apache.cassandra.db.migration.avro.ColumnDef> toAdd = new HashSet<org.apache.cassandra.db.migration.avro.ColumnDef>(); for (org.apache.cassandra.db.migration.avro.ColumnDef def : cf_def.column_metadata) { newColumns.add(def.name); if (!column_metadata.containsKey(def.name)) toAdd.add(def); } for (ByteBuffer name : column_metadata.keySet()) if (!newColumns.contains(name)) toRemove.add(name); // remove the ones leaving. for (ByteBuffer indexName : toRemove) { column_metadata.remove(indexName); } // update the ones staying for (org.apache.cassandra.db.migration.avro.ColumnDef def : cf_def.column_metadata) { ColumnDefinition oldDef = column_metadata.get(def.name); if (oldDef == null) continue; oldDef.setValidator(TypeParser.parse(def.validation_class)); oldDef.setIndexType(def.index_type == null ? null : org.apache.cassandra.thrift.IndexType.valueOf(def.index_type.name()), ColumnDefinition.getStringMap(def.index_options)); oldDef.setIndexName(def.index_name == null ? null : def.index_name.toString()); } // add the new ones coming in. for (org.apache.cassandra.db.migration.avro.ColumnDef def : toAdd) { AbstractType dValidClass = TypeParser.parse(def.validation_class); ColumnDefinition cd = new ColumnDefinition(def.name, dValidClass, def.index_type == null ? null : org.apache.cassandra.thrift.IndexType.valueOf(def.index_type.toString()), ColumnDefinition.getStringMap(def.index_options), def.index_name == null ? 
null : def.index_name.toString()); column_metadata.put(cd.name, cd); } if (cf_def.compaction_strategy != null) compactionStrategyClass = createCompactionStrategy(cf_def.compaction_strategy.toString()); if (null != cf_def.compaction_strategy_options) { compactionStrategyOptions = new HashMap<String, String>(); for (Map.Entry<CharSequence, CharSequence> e : cf_def.compaction_strategy_options.entrySet()) compactionStrategyOptions.put(e.getKey().toString(), e.getValue().toString()); } compressionParameters = CompressionParameters.create(cf_def.compression_options); logger.debug("application result is {}", this); } public static Class<? extends AbstractCompactionStrategy> createCompactionStrategy(String className) throws ConfigurationException { className = className.contains(".") ? className : "org.apache.cassandra.db.compaction." + className; try { return (Class<? extends AbstractCompactionStrategy>) Class.forName(className); } catch (Exception e) { throw new ConfigurationException("Could not create Compaction Strategy of type " + className, e); } } public AbstractCompactionStrategy createCompactionStrategyInstance(ColumnFamilyStore cfs) { try { Constructor constructor = compactionStrategyClass.getConstructor(new Class[] { ColumnFamilyStore.class, Map.class // options }); return (AbstractCompactionStrategy)constructor.newInstance(new Object[] { cfs, compactionStrategyOptions}); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { throw new RuntimeException(e); } } // converts CFM to thrift CfDef public org.apache.cassandra.thrift.CfDef toThrift() { org.apache.cassandra.thrift.CfDef def = new org.apache.cassandra.thrift.CfDef(ksName, cfName); def.setId(cfId); def.setColumn_type(cfType.name()); def.setComparator_type(comparator.toString()); if (subcolumnComparator != null) { assert 
cfType == ColumnFamilyType.Super : String.format("%s CF %s should not have subcomparator %s defined", cfType, cfName, subcolumnComparator); def.setSubcomparator_type(subcolumnComparator.toString()); } def.setComment(enforceCommentNotNull(comment)); def.setRead_repair_chance(readRepairChance); def.setReplicate_on_write(replicateOnWrite); def.setGc_grace_seconds(gcGraceSeconds); def.setDefault_validation_class(defaultValidator.toString()); def.setKey_validation_class(keyValidator.toString()); def.setMin_compaction_threshold(minCompactionThreshold); def.setMax_compaction_threshold(maxCompactionThreshold); def.setMerge_shards_chance(mergeShardsChance); def.setKey_alias(getKeyName()); List<org.apache.cassandra.thrift.ColumnDef> column_meta = new ArrayList<org.apache.cassandra.thrift.ColumnDef>(column_metadata.size()); for (ColumnDefinition cd : column_metadata.values()) { org.apache.cassandra.thrift.ColumnDef tcd = new org.apache.cassandra.thrift.ColumnDef(); tcd.setIndex_name(cd.getIndexName()); tcd.setIndex_type(cd.getIndexType()); tcd.setIndex_options(cd.getIndexOptions()); tcd.setName(cd.name); tcd.setValidation_class(cd.getValidator().toString()); column_meta.add(tcd); } def.setColumn_metadata(column_meta); def.setCompaction_strategy(compactionStrategyClass.getName()); def.setCompaction_strategy_options(new HashMap<String, String>(compactionStrategyOptions)); def.setCompression_options(compressionParameters.asThriftOptions()); if (bloomFilterFpChance != null) def.setBloom_filter_fp_chance(bloomFilterFpChance); return def; } public static void validateMinMaxCompactionThresholds(org.apache.cassandra.db.migration.avro.CfDef cf_def) throws ConfigurationException { if (cf_def.min_compaction_threshold != null && cf_def.max_compaction_threshold != null) { if ((cf_def.min_compaction_threshold > cf_def.max_compaction_threshold) && cf_def.max_compaction_threshold != 0) { throw new ConfigurationException("min_compaction_threshold cannot be greater than 
max_compaction_threshold"); } } else if (cf_def.min_compaction_threshold != null) { if (cf_def.min_compaction_threshold > DEFAULT_MAX_COMPACTION_THRESHOLD) { throw new ConfigurationException("min_compaction_threshold cannot be greather than max_compaction_threshold (default " + DEFAULT_MAX_COMPACTION_THRESHOLD + ")"); } } else if (cf_def.max_compaction_threshold != null) { if (cf_def.max_compaction_threshold < DEFAULT_MIN_COMPACTION_THRESHOLD && cf_def.max_compaction_threshold != 0) { throw new ConfigurationException("max_compaction_threshold cannot be less than min_compaction_threshold"); } } else { //Defaults are valid. } } public ColumnDefinition getColumnDefinition(ByteBuffer name) { return column_metadata.get(name); } public ColumnDefinition getColumnDefinitionForIndex(String indexName) { for (ColumnDefinition def : column_metadata.values()) { if (indexName.equals(def.getIndexName())) return def; } return null; } /** * Convert a null index_name to appropriate default name according to column status * @param cf_def Thrift ColumnFamily Definition */ public static void addDefaultIndexNames(org.apache.cassandra.thrift.CfDef cf_def) throws InvalidRequestException { if (cf_def.column_metadata == null) return; AbstractType comparator; try { comparator = TypeParser.parse(cf_def.comparator_type); } catch (ConfigurationException e) { throw new InvalidRequestException(e.getMessage()); } for (org.apache.cassandra.thrift.ColumnDef column : cf_def.column_metadata) { if (column.index_type != null && column.index_name == null) column.index_name = getDefaultIndexName(cf_def.name, comparator, column.name); } } public static String getDefaultIndexName(String cfName, AbstractType comparator, ByteBuffer columnName) { return (cfName + "_" + comparator.getString(columnName) + "_idx").replaceAll("\\W", ""); } public IColumnSerializer getColumnSerializer() { if (cfType == ColumnFamilyType.Standard) return Column.serializer(); return SuperColumn.serializer(subcolumnComparator); } 
    /**
     * Sanity-checks this column family definition and returns {@code this} so the
     * call can be chained onto a builder-style construction.
     *
     * Rejects CounterColumnType in any position where it is not a legal type
     * (comparator, sub-column comparator, key validator), and enforces that counter
     * and non-counter columns are never mixed in one column family (see the
     * referenced ticket #2614 below).
     *
     * @return this metadata instance, unchanged
     * @throws ConfigurationException if any of the checks above fails
     */
    public CFMetaData validate() throws ConfigurationException
    {
        // Counters are not orderable/comparable types, so they can never act as a
        // comparator or key validator.
        if (comparator instanceof CounterColumnType)
            throw new ConfigurationException("CounterColumnType is not a valid comparator");
        if (subcolumnComparator instanceof CounterColumnType)
            throw new ConfigurationException("CounterColumnType is not a valid sub-column comparator");
        if (keyValidator instanceof CounterColumnType)
            throw new ConfigurationException("CounterColumnType is not a valid key validator");

        // Mixing counter with non counter columns is not supported (#2614)
        if (defaultValidator instanceof CounterColumnType)
        {
            // Counter CF: every explicitly-defined column must also be a counter.
            for (ColumnDefinition def : column_metadata.values())
                if (!(def.getValidator() instanceof CounterColumnType))
                    throw new ConfigurationException("Cannot add a non counter column (" + comparator.getString(def.name) + ") in a counter column family");
        }
        else
        {
            // Non-counter CF: no explicitly-defined column may be a counter.
            for (ColumnDefinition def : column_metadata.values())
                if (def.getValidator() instanceof CounterColumnType)
                    throw new ConfigurationException("Cannot add a counter column (" + comparator.getString(def.name) + ") in a non counter column family");
        }
        return this;
    }

    /**
     * Debug-oriented dump of every metadata field via Commons-Lang's
     * ToStringBuilder; used e.g. by the apply()/toThrift() debug logging above.
     */
    @Override
    public String toString()
    {
        return new ToStringBuilder(this)
            .append("cfId", cfId)
            .append("ksName", ksName)
            .append("cfName", cfName)
            .append("cfType", cfType)
            .append("comparator", comparator)
            .append("subcolumncomparator", subcolumnComparator)
            .append("comment", comment)
            .append("readRepairChance", readRepairChance)
            .append("replicateOnWrite", replicateOnWrite)
            .append("gcGraceSeconds", gcGraceSeconds)
            .append("defaultValidator", defaultValidator)
            .append("keyValidator", keyValidator)
            .append("minCompactionThreshold", minCompactionThreshold)
            .append("maxCompactionThreshold", maxCompactionThreshold)
            .append("mergeShardsChance", mergeShardsChance)
            .append("keyAlias", keyAlias)
            .append("column_metadata", column_metadata)
            .append("compactionStrategyClass", compactionStrategyClass)
            .append("compactionStrategyOptions", compactionStrategyOptions)
            .append("compressionOptions", compressionParameters.asThriftOptions())
            .append("bloomFilterFpChance", bloomFilterFpChance)
            .toString();
    }
}
apache-2.0
sean-liang/stardict
src/main/java/com/orangereading/stardict/exporter/DictionaryExporter.java
1033
package com.orangereading.stardict.exporter; import java.io.IOException; import com.orangereading.stardict.cli.CommandExport; import com.orangereading.stardict.domain.DictionaryItem; import com.orangereading.stardict.domain.ImmutableDictionaryInfo; public interface DictionaryExporter { /** * * Initialize the exporter. Will be called by the container right after new * instance created. * * @param info * dictionary info * @param name * dictionary file name(without extension) * @param command * cli command * */ public void init(final ImmutableDictionaryInfo info, final String name, final CommandExport command); /** * * Append a dictionary item. * * @param item * dictionary item * */ public void append(DictionaryItem item); /** * * Dump result and do cleanup works. * * @throws IOException * IO Exception * */ public void done() throws IOException; }
apache-2.0
DaveVoorhis/Rel
ServerV0000/src/org/reldb/rel/v0/vm/instructions/core/OpJump.java
400
package org.reldb.rel.v0.vm.instructions.core; import org.reldb.rel.v0.vm.Context; import org.reldb.rel.v0.vm.Instruction; public class OpJump extends Instruction { private int address; public OpJump(int address) { this.address = address; } public final void execute(Context context) { context.jump(address); } public String toString() { return getName() + " " + address; } }
apache-2.0
lsmaira/gradle
subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/ivyservice/projectmodule/ProjectDependencyResolver.java
8906
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.api.internal.artifacts.ivyservice.projectmodule;

import org.gradle.api.artifacts.component.ComponentArtifactIdentifier;
import org.gradle.api.artifacts.component.ComponentIdentifier;
import org.gradle.api.artifacts.component.ProjectComponentIdentifier;
import org.gradle.api.artifacts.component.ProjectComponentSelector;
import org.gradle.api.internal.artifacts.component.ComponentIdentifierFactory;
import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.ComponentResolvers;
import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.ErrorHandlingArtifactResolver;
import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionSelector;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.artifact.ArtifactSet;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.artifact.DefaultArtifactSet;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.artifact.ResolvableArtifact;
import org.gradle.api.internal.artifacts.ivyservice.resolveengine.excludes.ModuleExclusion;
import org.gradle.api.internal.artifacts.type.ArtifactTypeRegistry;
import org.gradle.api.internal.attributes.ImmutableAttributes;
import org.gradle.api.internal.component.ArtifactType;
import org.gradle.api.internal.project.ProjectStateRegistry;
import org.gradle.internal.Factory;
import org.gradle.internal.component.local.model.DefaultProjectComponentSelector;
import org.gradle.internal.component.local.model.LocalComponentArtifactMetadata;
import org.gradle.internal.component.local.model.LocalComponentMetadata;
import org.gradle.internal.component.model.ComponentArtifactMetadata;
import org.gradle.internal.component.model.ComponentOverrideMetadata;
import org.gradle.internal.component.model.ComponentResolveMetadata;
import org.gradle.internal.component.model.ConfigurationMetadata;
import org.gradle.internal.component.model.DependencyMetadata;
import org.gradle.internal.component.model.ModuleSource;
import org.gradle.internal.resolve.ModuleVersionResolveException;
import org.gradle.internal.resolve.resolver.ArtifactResolver;
import org.gradle.internal.resolve.resolver.ComponentMetaDataResolver;
import org.gradle.internal.resolve.resolver.DependencyToComponentIdResolver;
import org.gradle.internal.resolve.resolver.OriginArtifactSelector;
import org.gradle.internal.resolve.result.BuildableArtifactResolveResult;
import org.gradle.internal.resolve.result.BuildableArtifactSetResolveResult;
import org.gradle.internal.resolve.result.BuildableComponentIdResolveResult;
import org.gradle.internal.resolve.result.BuildableComponentResolveResult;

import javax.annotation.Nullable;
import java.io.File;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Resolver for dependencies that point at other projects in the same build.
 *
 * Implements the full resolver stack (component-id resolution, metadata
 * resolution, artifact selection and artifact resolution) for
 * {@link ProjectComponentIdentifier}/{@link ProjectComponentSelector} targets,
 * looking components up in a {@link LocalComponentRegistry}. Every method
 * silently ignores inputs that are not project components, leaving them to
 * other resolvers in the chain.
 */
public class ProjectDependencyResolver implements ComponentMetaDataResolver, DependencyToComponentIdResolver, ArtifactResolver, OriginArtifactSelector, ComponentResolvers {
    private final LocalComponentRegistry localComponentRegistry;
    private final ComponentIdentifierFactory componentIdentifierFactory;
    private final ProjectStateRegistry projectStateRegistry;
    // This resolver wrapped in error handling; handed to DefaultArtifactSet in
    // resolveArtifacts() so artifact resolution failures are reported uniformly.
    private final ArtifactResolver self;

    // This should live closer to the project itself
    // Shared, thread-safe cache of resolvable artifacts across all projects.
    private final Map<ComponentArtifactIdentifier, ResolvableArtifact> allProjectArtifacts = new ConcurrentHashMap<ComponentArtifactIdentifier, ResolvableArtifact>();

    public ProjectDependencyResolver(LocalComponentRegistry localComponentRegistry, ComponentIdentifierFactory componentIdentifierFactory, ProjectStateRegistry projectStateRegistry) {
        this.localComponentRegistry = localComponentRegistry;
        this.componentIdentifierFactory = componentIdentifierFactory;
        this.projectStateRegistry = projectStateRegistry;
        self = new ErrorHandlingArtifactResolver(this);
    }

    @Override
    public ArtifactResolver getArtifactResolver() {
        return this;
    }

    @Override
    public DependencyToComponentIdResolver getComponentIdResolver() {
        return this;
    }

    @Override
    public ComponentMetaDataResolver getComponentResolver() {
        return this;
    }

    @Override
    public OriginArtifactSelector getArtifactSelector() {
        return this;
    }

    /**
     * Resolves a dependency whose selector is a project selector to the local
     * component's metadata. Note: the version selectors (acceptor/rejector) are
     * not consulted here — project dependencies are matched by identity only.
     * Non-project selectors leave {@code result} untouched.
     */
    @Override
    public void resolve(DependencyMetadata dependency, VersionSelector acceptor, VersionSelector rejector, BuildableComponentIdResolveResult result) {
        if (dependency.getSelector() instanceof ProjectComponentSelector) {
            ProjectComponentSelector selector = (ProjectComponentSelector) dependency.getSelector();
            ProjectComponentIdentifier projectId = componentIdentifierFactory.createProjectComponentIdentifier(selector);
            LocalComponentMetadata componentMetaData = localComponentRegistry.getComponent(projectId);
            if (componentMetaData == null) {
                result.failed(new ModuleVersionResolveException(selector, projectId + " not found."));
            } else {
                result.resolved(componentMetaData);
            }
        }
    }

    /**
     * Resolves a project component identifier to its local metadata, failing the
     * result when the registry has no entry for it. Non-project identifiers are
     * ignored (left for other resolvers).
     */
    @Override
    public void resolve(ComponentIdentifier identifier, ComponentOverrideMetadata componentOverrideMetadata, final BuildableComponentResolveResult result) {
        if (isProjectModule(identifier)) {
            ProjectComponentIdentifier projectId = (ProjectComponentIdentifier) identifier;
            LocalComponentMetadata componentMetaData = localComponentRegistry.getComponent(projectId);
            if (componentMetaData == null) {
                result.failed(new ModuleVersionResolveException(DefaultProjectComponentSelector.newSelector(projectId), projectId + " not found."));
            } else {
                result.resolved(componentMetaData);
            }
        }
    }

    // Local lookups require no network or repository access.
    @Override
    public boolean isFetchingMetadataCheap(ComponentIdentifier identifier) {
        return true;
    }

    @Override
    public void resolveArtifactsWithType(ComponentResolveMetadata component, ArtifactType artifactType, BuildableArtifactSetResolveResult result) {
        if (isProjectModule(component.getId())) {
            throw new UnsupportedOperationException("Resolving artifacts by type is not yet supported for project modules");
        }
    }

    /**
     * Builds the artifact set for a project component's configuration. The work
     * runs inside the owning project's mutable-state lock (via
     * {@link ProjectStateRegistry}). Returns {@code null} for non-project
     * components so the next selector in the chain can handle them.
     */
    @Nullable
    @Override
    public ArtifactSet resolveArtifacts(final ComponentResolveMetadata component, final ConfigurationMetadata configuration, final ArtifactTypeRegistry artifactTypeRegistry, final ModuleExclusion exclusions, final ImmutableAttributes overriddenAttributes) {
        if (isProjectModule(component.getId())) {
            ProjectComponentIdentifier projectId = (ProjectComponentIdentifier) component.getId();
            return projectStateRegistry.stateFor(projectId).withMutableState(new Factory<ArtifactSet>() {
                @Override
                public ArtifactSet create() {
                    return DefaultArtifactSet.multipleVariants(component.getId(), component.getModuleVersionId(), component.getSource(), exclusions, configuration.getVariants(), component.getAttributesSchema(), self, allProjectArtifacts, artifactTypeRegistry, overriddenAttributes);
                }
            });
        } else {
            return null;
        }
    }

    /**
     * Resolves a project artifact to its file, again under the owning project's
     * mutable-state lock. A null file marks the artifact as not found.
     * Non-project artifacts leave {@code result} untouched.
     */
    @Override
    public void resolveArtifact(ComponentArtifactMetadata artifact, ModuleSource moduleSource, final BuildableArtifactResolveResult result) {
        if (isProjectModule(artifact.getComponentId())) {
            final LocalComponentArtifactMetadata projectArtifact = (LocalComponentArtifactMetadata) artifact;
            ProjectComponentIdentifier projectId = (ProjectComponentIdentifier) artifact.getComponentId();
            projectStateRegistry.stateFor(projectId).withMutableState(new Runnable() {
                @Override
                public void run() {
                    File localArtifactFile = projectArtifact.getFile();
                    if (localArtifactFile != null) {
                        result.resolved(localArtifactFile);
                    } else {
                        result.notFound(projectArtifact.getId());
                    }
                }
            });
        }
    }

    // A component belongs to this resolver iff it is identified as a project.
    private boolean isProjectModule(ComponentIdentifier componentId) {
        return componentId instanceof ProjectComponentIdentifier;
    }
}
apache-2.0
hortonworks/cloudbreak
cloud-api/src/main/java/com/sequenceiq/cloudbreak/cloud/model/catalog/Versions.java
1354
package com.sequenceiq.cloudbreak.cloud.model.catalog; import java.util.Collections; import java.util.List; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; @JsonIgnoreProperties(ignoreUnknown = true) @JsonInclude(JsonInclude.Include.NON_EMPTY) public class Versions { private static final String CLOUDBREAK = "cloudbreak"; private static final String FREEIPA = "freeipa"; private final List<CloudbreakVersion> cloudbreakVersions; private final List<CloudbreakVersion> freeipaVersions; @JsonCreator public Versions(@JsonProperty(CLOUDBREAK) List<CloudbreakVersion> cloudbreakVersions, @JsonProperty(FREEIPA) List<CloudbreakVersion> freeipaVersions) { this.cloudbreakVersions = (cloudbreakVersions == null) ? Collections.emptyList() : cloudbreakVersions; this.freeipaVersions = (freeipaVersions == null) ? Collections.emptyList() : freeipaVersions; } @JsonProperty(CLOUDBREAK) public List<CloudbreakVersion> getCloudbreakVersions() { return cloudbreakVersions; } @JsonProperty(FREEIPA) public List<CloudbreakVersion> getFreeipaVersions() { return freeipaVersions; } }
apache-2.0
miswenwen/My_bird_work
Bird_work/我的项目/Settings/src/com/mediatek/widget/ChartBandwidthNetworkSeriesView.java
5880
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.mediatek.widget;

import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Path;
import android.net.NetworkStats;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;

import com.android.internal.util.Preconditions;
import com.android.settings.R;
import com.android.settings.widget.ChartAxis;

/**
 * {@link NetworkStats} series to render inside a {@link ChartView},
 * using {@link ChartAxis} to map into screen coordinates.
 *
 * Keeps a sliding window of the last {@link #TOTAL_LEN} per-update byte deltas
 * (rx + tx) and renders them as a single stroked {@link Path}.
 */
public class ChartBandwidthNetworkSeriesView extends View {
    private static final String TAG = "ChartBandwidthNetworkSeriesView";
    // Capacity of the sliding window of samples (also hard-coded as 90/89 below).
    private static final int TOTAL_LEN = 90;
    private static final long MB_IN_BYTES = 1024 * 1024;

    private ChartAxis mHoriz;
    private ChartAxis mVert;

    private Paint mPaintStroke;

    private Path mPathStroke;

    // NOTE(review): mStart/mEnd/mLeftBound are never read or written in this
    // class as visible here — presumably leftovers; confirm before removing.
    private long mStart;
    private long mEnd;
    private long mLeftBound;
    private NetworkStats mStats;
    // Ring-ish buffer of per-update byte deltas; mCurrentLen counts valid entries.
    private long [] mCurrentBytes;
    private int mCurrentLen;
    // Cumulative rx+tx bytes observed across all updates so far.
    private long mTotalUsed;

    /** Series will be extended to reach this end time. */
    // NOTE(review): mEndTime is never referenced in the visible code; the
    // comment above appears inherited from the original ChartNetworkSeriesView.
    private long mEndTime = Long.MIN_VALUE;

    private boolean mPathValid = false;

    // Largest single sample seen during the last generatePath() pass.
    private long mMax;

    public ChartBandwidthNetworkSeriesView(Context context) {
        this(context, null, 0);
    }

    public ChartBandwidthNetworkSeriesView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public ChartBandwidthNetworkSeriesView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        // Stroke color comes from the ChartNetworkSeriesView styleable; red by default.
        final TypedArray a = context.obtainStyledAttributes(
                attrs, R.styleable.ChartNetworkSeriesView, defStyle, 0);

        final int stroke = a.getColor(R.styleable.ChartNetworkSeriesView_strokeColor, Color.RED);
        setSeriesColor(stroke);
        setWillNotDraw(false);

        a.recycle();

        mPathStroke = new Path();
        mCurrentBytes = new long[TOTAL_LEN];
        mCurrentLen = 0;
    }

    /** (Re)creates the stroke paint with the given color. */
    public void setSeriesColor(int stroke) {
        mPaintStroke = new Paint();
        mPaintStroke.setStrokeWidth(3);
        mPaintStroke.setColor(stroke);
        mPaintStroke.setStyle(Style.STROKE);
        mPaintStroke.setAntiAlias(true);
    }

    /** Binds the axes used to map data values into screen coordinates. */
    void init(ChartAxis horiz, ChartAxis vert) {
        mHoriz = Preconditions.checkNotNull(horiz, "missing horiz");
        mVert = Preconditions.checkNotNull(vert, "missing vert");
    }

    /** Returns the largest observed sample, floored at 1 MiB. */
    public long getMaxBytes() {
        return mMax > MB_IN_BYTES ? mMax : MB_IN_BYTES;
    }

    /** Returns cumulative rx+tx bytes observed across all updates. */
    public long getTotalUsedData() {
        return mTotalUsed;
    }

    /** Accepts a fresh stats snapshot and regenerates the rendered path. */
    public void setNetworkStates(NetworkStats networkStats) {
        mStats = networkStats;
        generatePath();
    }

    /** Marks the path stale so the next draw regenerates it. */
    public void invalidatePath() {
        mPathValid = false;
        mMax = 0;
        invalidate();
    }

    /**
     * Erase any existing {@link Path} and generate series outline based on
     * currently bound {@link NetworkStats} data.
     *
     * The new sample is the delta between this snapshot's total bytes and the
     * previously recorded total; the window shifts left once full.
     */
    public void generatePath() {
        long range = getMaxBytes();
        mMax = 0;
        mPathStroke.reset();

        // bail when not enough stats to render
        if (mStats == null || mStats.size() < 1) {
            return;
        }
        mPathValid = true;
        long totalData = 0;
        long currentData = 0;
        // Sum rx+tx over every entry in the snapshot.
        for (int i = 0 ; i < mStats.size(); i++) {
            NetworkStats.Entry entry = null;
            entry = mStats.getValues(i, entry);
            Log.d("@M_" + TAG, "index = " + i + ", rxBytes = " + entry.rxBytes + ", txBytes = " + entry.txBytes);
            totalData += entry.rxBytes + entry.txBytes;
        }
        Log.d("@M_" + TAG, "totalData = " + totalData + ", mTotalUsed = " + mTotalUsed);
        // First-ever sample is forced to 0 so the initial total doesn't spike the chart.
        currentData = mTotalUsed == 0 ? 0 : totalData - mTotalUsed;
        mTotalUsed = totalData;
        Log.d("@M_" + TAG, "currentData = " + currentData);

        if (mCurrentLen < 90) {
            mCurrentBytes[mCurrentLen] = currentData;
            mCurrentLen++;
        } else {
            // Window full: shift everything left one slot and append at the end.
            System.arraycopy(mCurrentBytes, 1, mCurrentBytes, 0, 89);
            mCurrentBytes[89] = currentData;
        }

        // NOTE(review): mCurrentBytes[0] / range * 100 is integer (long) math —
        // it truncates to 0 whenever the sample is below `range`; also the loop
        // below passes raw byte values to convertToPoint while this moveTo
        // passes a percentage. Looks inconsistent — confirm intended scaling.
        mPathStroke.moveTo(mHoriz.convertToPoint(100 - mCurrentLen + 1), mVert.convertToPoint(mCurrentBytes[0] / range * 100));
        for (int i = 0; i < mCurrentLen; i++) {
            mPathStroke.lineTo(mHoriz.convertToPoint(100 - mCurrentLen + 1 + i),
                    mVert.convertToPoint((long) mCurrentBytes[i]));
            mMax = mMax < mCurrentBytes[i] ? mCurrentBytes[i] : mMax;
        }
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        int save;

        // Lazily rebuild the path if invalidatePath() was called since last draw.
        if (!mPathValid) {
            generatePath();
        }

        // Clip to the horizontal extent of the chart (data domain 0..100).
        final float primaryLeftPoint = mHoriz.convertToPoint(0);
        final float primaryRightPoint = mHoriz.convertToPoint(100);

        save = canvas.save();
        canvas.clipRect(primaryLeftPoint, 0, primaryRightPoint, getHeight());
        canvas.drawPath(mPathStroke, mPaintStroke);
        canvas.restoreToCount(save);
    }
}
apache-2.0
clonetwin26/buck
test/com/facebook/buck/cxx/toolchain/CxxPlatformsTest.java
8154
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.cxx.toolchain;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import com.facebook.buck.config.BuckConfig;
import com.facebook.buck.config.FakeBuckConfig;
import com.facebook.buck.cxx.toolchain.linker.DefaultLinkerProvider;
import com.facebook.buck.cxx.toolchain.linker.LinkerProvider;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.ConstantToolProvider;
import com.facebook.buck.rules.HashedFileTool;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.TestBuildRuleResolver;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.nio.file.Paths;
import java.util.Optional;
import org.hamcrest.junit.ExpectedException;
import org.junit.Rule;
import org.junit.Test;

/** Unit tests for {@link CxxPlatforms}. */
public class CxxPlatformsTest {
  @Rule public final ExpectedException expectedException = ExpectedException.none();

  /**
   * A cxx.default_platform value that names a known platform should select that
   * platform over the system default.
   */
  @Test
  public void returnsKnownDefaultPlatformSetInConfig() {
    ImmutableMap<String, ImmutableMap<String, String>> sections =
        ImmutableMap.of("cxx", ImmutableMap.of("default_platform", "borland_cxx_452"));
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    // Build a fully-populated fake platform keyed by the flavor named in config.
    CompilerProvider compiler =
        new CompilerProvider(
            Suppliers.ofInstance(PathSourcePath.of(filesystem, Paths.get("borland"))),
            Optional.of(CxxToolProvider.Type.GCC));
    PreprocessorProvider preprocessor =
        new PreprocessorProvider(
            Suppliers.ofInstance(PathSourcePath.of(filesystem, Paths.get("borland"))),
            Optional.of(CxxToolProvider.Type.GCC));
    HashedFileTool borland = new HashedFileTool(PathSourcePath.of(filesystem, Paths.get("borland")));
    CxxPlatform borlandCxx452Platform =
        CxxPlatform.builder()
            .setFlavor(InternalFlavor.of("borland_cxx_452"))
            .setAs(compiler)
            .setAspp(preprocessor)
            .setCc(compiler)
            .setCpp(preprocessor)
            .setCxx(compiler)
            .setCxxpp(preprocessor)
            .setLd(
                new DefaultLinkerProvider(
                    LinkerProvider.Type.GNU, new ConstantToolProvider(borland)))
            .setStrip(borland)
            .setSymbolNameTool(new PosixNmSymbolNameTool(borland))
            .setAr(ArchiverProvider.from(new GnuArchiver(borland)))
            .setRanlib(new ConstantToolProvider(borland))
            .setSharedLibraryExtension("so")
            .setSharedLibraryVersionedExtensionFormat(".so.%s")
            .setStaticLibraryExtension("a")
            .setObjectFileExtension("so")
            .setCompilerDebugPathSanitizer(CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER)
            .setAssemblerDebugPathSanitizer(CxxPlatformUtils.DEFAULT_ASSEMBLER_DEBUG_PATH_SANITIZER)
            .setHeaderVerification(CxxPlatformUtils.DEFAULT_PLATFORM.getHeaderVerification())
            .setPublicHeadersSymlinksEnabled(true)
            .setPrivateHeadersSymlinksEnabled(true)
            .build();

    BuckConfig buckConfig = FakeBuckConfig.builder().setSections(sections).build();
    assertThat(
        CxxPlatforms.getConfigDefaultCxxPlatform(
            new CxxBuckConfig(buckConfig),
            ImmutableMap.of(borlandCxx452Platform.getFlavor(), borlandCxx452Platform),
            CxxPlatformUtils.DEFAULT_PLATFORM),
        equalTo(borlandCxx452Platform));
  }

  /**
   * A cxx.default_platform value that names an unknown platform must fall back
   * to the supplied system-default platform rather than fail.
   */
  @Test
  public void unknownDefaultPlatformSetInConfigFallsBackToSystemDefault() {
    ImmutableMap<String, ImmutableMap<String, String>> sections =
        ImmutableMap.of("cxx", ImmutableMap.of("default_platform", "borland_cxx_452"));
    BuckConfig buckConfig = FakeBuckConfig.builder().setSections(sections).build();
    assertThat(
        CxxPlatforms.getConfigDefaultCxxPlatform(
            new CxxBuckConfig(buckConfig), ImmutableMap.of(), CxxPlatformUtils.DEFAULT_PLATFORM),
        equalTo(CxxPlatformUtils.DEFAULT_PLATFORM));
  }

  /**
   * Helper: builds a platform whose cxx.linker_platform is forced to the given
   * type and returns the resulting linker provider.
   */
  public LinkerProvider getPlatformLinker(LinkerProvider.Type linkerType) {
    ImmutableMap<String, ImmutableMap<String, String>> sections =
        ImmutableMap.of(
            "cxx",
            ImmutableMap.of(
                "ld", Paths.get("fake_path").toString(),
                "linker_platform", linkerType.name()));

    CxxBuckConfig buckConfig =
        new CxxBuckConfig(
            FakeBuckConfig.builder()
                .setSections(sections)
                .setFilesystem(new FakeProjectFilesystem(ImmutableSet.of(Paths.get("fake_path"))))
                .build());

    return CxxPlatformUtils.build(buckConfig).getLd();
  }

  @Test
  public void linkerOverriddenByConfig() {
    assertThat(
        "MACOS linker was not a DarwinLinker instance",
        getPlatformLinker(LinkerProvider.Type.DARWIN).getType(),
        is(LinkerProvider.Type.DARWIN));
    assertThat(
        "LINUX linker was not a GnuLinker instance",
        getPlatformLinker(LinkerProvider.Type.GNU).getType(),
        is(LinkerProvider.Type.GNU));
    assertThat(
        "WINDOWS linker was not a GnuLinker instance",
        getPlatformLinker(LinkerProvider.Type.WINDOWS).getType(),
        is(LinkerProvider.Type.WINDOWS));
  }

  /** An unrecognized cxx.linker_platform value must be rejected at build time. */
  @Test
  public void invalidLinkerOverrideFails() {
    ImmutableMap<String, ImmutableMap<String, String>> sections =
        ImmutableMap.of(
            "cxx",
            ImmutableMap.of(
                "ld", Paths.get("fake_path").toString(),
                "linker_platform", "WRONG_PLATFORM"));

    CxxBuckConfig buckConfig =
        new CxxBuckConfig(
            FakeBuckConfig.builder()
                .setSections(sections)
                .setFilesystem(new FakeProjectFilesystem(ImmutableSet.of(Paths.get("fake_path"))))
                .build());

    expectedException.expect(RuntimeException.class);
    CxxPlatformUtils.build(buckConfig);
  }

  /**
   * Helper: builds a platform whose cxx.archiver_platform is forced to the given
   * host platform and returns the resolved archiver.
   */
  public Archiver getPlatformArchiver(Platform archiverPlatform) {
    ImmutableMap<String, ImmutableMap<String, String>> sections =
        ImmutableMap.of(
            "cxx",
            ImmutableMap.of(
                "ar", Paths.get("fake_path").toString(),
                "archiver_platform", archiverPlatform.name()));

    CxxBuckConfig buckConfig =
        new CxxBuckConfig(
            FakeBuckConfig.builder()
                .setSections(sections)
                .setFilesystem(new FakeProjectFilesystem(ImmutableSet.of(Paths.get("fake_path"))))
                .build());

    BuildRuleResolver ruleResolver = new TestBuildRuleResolver();
    return CxxPlatformUtils.build(buckConfig).getAr().resolve(ruleResolver);
  }

  // NOTE(review): method name has a typo ("archiverr..."); harmless since JUnit
  // discovers tests via @Test, but worth renaming in a dedicated change.
  @Test
  public void archiverrOverriddenByConfig() {
    assertThat(
        "MACOS archiver was not a BsdArchiver instance",
        getPlatformArchiver(Platform.MACOS),
        instanceOf(BsdArchiver.class));
    assertThat(
        "LINUX archiver was not a GnuArchiver instance",
        getPlatformArchiver(Platform.LINUX),
        instanceOf(GnuArchiver.class));
    assertThat(
        "WINDOWS archiver was not a GnuArchiver instance",
        getPlatformArchiver(Platform.WINDOWS),
        instanceOf(WindowsArchiver.class));
  }
}
apache-2.0
kieker-monitoring/kieker
kieker-model/src-gen/kieker/model/analysismodel/trace/util/TraceSwitch.java
3964
/**
 * NOTE(review): this file is EMF-generated — every member is tagged {@code @generated}.
 * Regenerating the Ecore model overwrites manual edits, so only comments were added here.
 */
package kieker.model.analysismodel.trace.util;

import kieker.model.analysismodel.trace.*;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;

import org.eclipse.emf.ecore.util.Switch;

/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)}
 * to invoke the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object
 * and proceeding up the inheritance hierarchy
 * until a non-null result is returned,
 * which is the result of the switch.
 * <!-- end-user-doc -->
 * @see kieker.model.analysismodel.trace.TracePackage
 * @generated
 */
public class TraceSwitch<T> extends Switch<T> {
    /**
     * The cached model package
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected static TracePackage modelPackage;

    /**
     * Creates an instance of the switch.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public TraceSwitch() {
        // Lazily cache the package singleton; all switch instances share it.
        if (modelPackage == null) {
            modelPackage = TracePackage.eINSTANCE;
        }
    }

    /**
     * Checks whether this is a switch for the given package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param ePackage the package in question.
     * @return whether this is a switch for the given package.
     * @generated
     */
    @Override
    protected boolean isSwitchFor(EPackage ePackage) {
        return ePackage == modelPackage;
    }

    /**
     * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the first non-null result returned by a <code>caseXXX</code> call.
     * @generated
     */
    @Override
    protected T doSwitch(int classifierID, EObject theEObject) {
        // Each case delegates to its caseXXX hook and falls back to
        // defaultCase(...) when the hook returns null.
        switch (classifierID) {
            case TracePackage.TRACE: {
                Trace trace = (Trace)theEObject;
                T result = caseTrace(trace);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case TracePackage.OPERATION_CALL: {
                OperationCall operationCall = (OperationCall)theEObject;
                T result = caseOperationCall(operationCall);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            default: return defaultCase(theEObject);
        }
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Trace</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Trace</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseTrace(Trace object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Operation Call</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Operation Call</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseOperationCall(OperationCall object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch, but this is the last case anyway.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject)
     * @generated
     */
    @Override
    public T defaultCase(EObject object) {
        return null;
    }

} //TraceSwitch
apache-2.0
astubbs/wicket.get-portals2
wicket/src/main/java/org/apache/wicket/markup/html/link/ImageMap.java
8393
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.markup.html.link; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.wicket.IClusterable; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.MarkupStream; import org.apache.wicket.markup.html.WebMarkupContainer; /** * An image map holds links with different hot-area shapes. * * @author Jonathan Locke */ public final class ImageMap extends WebMarkupContainer { private static final long serialVersionUID = 1L; /** list of shape links. */ private final List<ShapeLink> shapeLinks = new ArrayList<ShapeLink>(); /** * A shape that has a circle form. */ private static final class CircleLink extends ShapeLink { private static final long serialVersionUID = 1L; /** The circle's radius. */ private final int radius; /** Upper left x */ private final int x; /** Upper left y */ private final int y; /** * Construct. 
* * @param x * left upper x * @param y * left upper y * @param radius * the circles' radius * @param link * the link */ public CircleLink(final int x, final int y, final int radius, final Link<?> link) { super(link); this.x = x; this.y = y; this.radius = radius; } /** * @see org.apache.wicket.markup.html.link.ImageMap.ShapeLink#getCoordinates() */ @Override String getCoordinates() { return x + "," + y + "," + radius; } /** * @see org.apache.wicket.markup.html.link.ImageMap.ShapeLink#getType() */ @Override String getType() { return "circle"; } } /** * A shape that has a free (polygon) form. */ private static final class PolygonLink extends ShapeLink { private static final long serialVersionUID = 1L; /** Its coordinates. */ private final int[] coordinates; /** * Construct. * * @param coordinates * the polygon coordinates * @param link * the link */ public PolygonLink(final int[] coordinates, final Link<?> link) { super(link); this.coordinates = coordinates; } /** * @see org.apache.wicket.markup.html.link.ImageMap.ShapeLink#getCoordinates() */ @Override String getCoordinates() { final StringBuffer buffer = new StringBuffer(); for (int i = 0; i < coordinates.length; i++) { buffer.append(coordinates[i]); if (i < (coordinates.length - 1)) { buffer.append(','); } } return buffer.toString(); } /** * @see org.apache.wicket.markup.html.link.ImageMap.ShapeLink#getType() */ @Override String getType() { return "polygon"; } } /** * A shape that has a rectangular form. */ private static final class RectangleLink extends ShapeLink { private static final long serialVersionUID = 1L; /** left upper x. */ private final int x1; /** right bottom x. */ private final int x2; /** left upper y. */ private final int y1; /** right bottom y. */ private final int y2; /** * Construct. 
* * @param x1 * left upper x * @param y1 * left upper y * @param x2 * right bottom x * @param y2 * right bottom y * @param link * the link */ public RectangleLink(final int x1, final int y1, final int x2, final int y2, final Link<?> link) { super(link); this.x1 = x1; this.y1 = y1; this.x2 = x2; this.y2 = y2; } /** * @see org.apache.wicket.markup.html.link.ImageMap.ShapeLink#getCoordinates() */ @Override String getCoordinates() { return x1 + "," + y1 + "," + x2 + "," + y2; } /** * @see org.apache.wicket.markup.html.link.ImageMap.ShapeLink#getType() */ @Override String getType() { return "rectangle"; } } /** * Base class for shaped links. */ private static abstract class ShapeLink implements IClusterable { /** * */ private static final long serialVersionUID = 1L; /** The link. */ private final Link<?> link; /** * Constructor. * * @param link * The link */ public ShapeLink(final Link<?> link) { this.link = link; } /** * The shape as a string using the given request cycle; will be used for rendering. * * @return The shape as a string */ @Override public String toString() { // Add any popup script final String popupJavaScript; if (link.getPopupSettings() != null) { popupJavaScript = link.getPopupSettings().getPopupJavaScript(); } else { popupJavaScript = null; } return "<area shape=\"" + getType() + "\"" + " coords=\"" + getCoordinates() + "\"" + " href=\"" + link.getURL() + "\"" // Output the markup ID if that was specified, so we can link tooltips, etc. to // it. + (link.getOutputMarkupId() ? " id=\"" + link.getMarkupId() + "\"" : "") + ((popupJavaScript == null) ? "" : (" onClick = \"" + popupJavaScript + "\"")) + ">"; } /** * Gets the coordinates of the shape. * * @return The coordinates of the shape */ abstract String getCoordinates(); /** * Gets the shape type. * * @return the shape type */ abstract String getType(); } /** * Constructor. * * @param id * See Component */ public ImageMap(final String id) { super(id); } /** * Adds a circle link. 
* * @param x1 * top left x * @param y1 * top left y * @param radius * the radius * @param link * the link * @return This */ public ImageMap addCircleLink(final int x1, final int y1, final int radius, final Link<?> link) { add(link); shapeLinks.add(new CircleLink(x1, y1, radius, link)); return this; } /** * Adds a polygon link. * * @param coordinates * the coordinates for the polygon * @param link * the link * @return This */ public ImageMap addPolygonLink(final int[] coordinates, final Link<?> link) { add(link); shapeLinks.add(new PolygonLink(coordinates, link)); return this; } /** * Adds a rectangular link. * * @param x1 * top left x * @param y1 * top left y * @param x2 * bottom right x * @param y2 * bottom right y * @param link * @return This */ public ImageMap addRectangleLink(final int x1, final int y1, final int x2, final int y2, final Link<?> link) { add(link); shapeLinks.add(new RectangleLink(x1, y1, x2, y2, link)); return this; } /** * Renders this component. * * @see org.apache.wicket.Component#onRender(MarkupStream) */ @Override protected void onRender(final MarkupStream markupStream) { // Get mutable copy of next tag final ComponentTag tag = markupStream.getTag().mutable(); // Must be an img tag checkComponentTag(tag, "img"); // Set map name to path tag.put("usemap", "#" + getPath()); // Write out the tag renderComponentTag(tag); markupStream.next(); // Write out the image map final StringBuffer imageMap = new StringBuffer(); imageMap.append("\n<map name=\"").append(getPath()).append("\"> "); for (Iterator<ShapeLink> iterator = shapeLinks.iterator(); iterator.hasNext();) { final ShapeLink shapeLink = iterator.next(); imageMap.append('\n'); imageMap.append(shapeLink.toString()); // Tell framework that this link was actually rendered getPage().componentRendered(shapeLink.link); } imageMap.append("\n</map>"); getResponse().write(imageMap.toString()); } }
apache-2.0
nagyist/marketcetera
trunk/modules/marketdata/marketcetera/src/test/java/org/marketcetera/marketdata/marketcetera/MarketceteraFeedModuleTest.java
2788
package org.marketcetera.marketdata.marketcetera;

import org.junit.Test;
import org.marketcetera.marketdata.Capability;
import org.marketcetera.marketdata.SimulatedMarketDataModuleTestBase;
import org.marketcetera.module.ModuleFactory;
import org.marketcetera.module.ModuleURN;
import org.marketcetera.module.ConfigurationProviderTest.MockConfigurationProvider;

/* $License$ */

/**
 * Tests {@link MarketceteraFeedModule}.
 *
 * <p>Most of the work happens in the {@code SimulatedMarketDataModuleTestBase}
 * superclass; this class only supplies the module factory, instance URN,
 * expected capabilities, and mock configuration for the Marketcetera feed.
 *
 * @author <a href="mailto:colin@marketcetera.com">Colin DuPlantis</a>
 * @version $Id: MarketceteraFeedModuleTest.java 16893 2014-04-25 18:20:56Z colin $
 * @since 1.0.0
 */
public class MarketceteraFeedModuleTest extends SimulatedMarketDataModuleTestBase
{
    /* (non-Javadoc)
     * @see org.marketcetera.marketdata.MarketDataModuleTestBase#populateConfigurationProvider(org.marketcetera.module.ConfigurationProviderTest.MockConfigurationProvider)
     */
    @Override
    protected void populateConfigurationProvider(MockConfigurationProvider inProvider)
    {
        // Supplies the FIX connection settings the module reads at start-up.
        inProvider.addValue(MarketceteraFeedModuleFactory.INSTANCE_URN,
                            "URL",
                            "FIX.4.4://exchange.marketcetera.com:7004");
        inProvider.addValue(MarketceteraFeedModuleFactory.INSTANCE_URN,
                            "SenderCompID",
                            "sender");
        // NOTE(review): nanoTime suffix makes the TargetCompID unique per run —
        // presumably to avoid FIX session state colliding between test runs; confirm.
        inProvider.addValue(MarketceteraFeedModuleFactory.INSTANCE_URN,
                            "TargetCompID",
                            "MRKT-" + System.nanoTime());
    }
    /* (non-Javadoc)
     * @see org.marketcetera.marketdata.MarketDataModuleTestBase#getFactory()
     */
    @Override
    protected ModuleFactory getFactory()
    {
        return new MarketceteraFeedModuleFactory();
    }
    /* (non-Javadoc)
     * @see org.marketcetera.marketdata.MarketDataModuleTestBase#getInstanceURN()
     */
    @Override
    protected ModuleURN getInstanceURN()
    {
        return MarketceteraFeedModuleFactory.INSTANCE_URN;
    }
    /* (non-Javadoc)
     * @see org.marketcetera.marketdata.MarketDataModuleTestBase#getExpectedCapabilities()
     */
    @Override
    protected Capability[] getExpectedCapabilities()
    {
        return new Capability[] { Capability.TOP_OF_BOOK, Capability.LATEST_TICK, Capability.MARKET_STAT };
    }
    // TODO these tests are shimmed in until I can figure out how to simulate data in data feeds
    // Both tests below are intentionally empty placeholders (see TODO above);
    // they are kept so the suite records the cases that still need coverage.
    @Test
    public void dataRequestFromString()
            throws Exception
    {
    }
    @Test
    public void dataRequestProducesData()
            throws Exception
    {
    }
    /* (non-Javadoc)
     * @see org.marketcetera.marketdata.MarketDataModuleTestBase#getProvider()
     */
    @Override
    protected String getProvider()
    {
        return MarketceteraFeedModuleFactory.IDENTIFIER;
    }
}
apache-2.0
pdrados/cas
api/cas-server-core-api-configuration-model/src/main/java/org/apereo/cas/configuration/model/support/redis/RedisClusterProperties.java
936
package org.apereo.cas.configuration.model.support.redis; import org.apereo.cas.configuration.support.RequiresModule; import lombok.Getter; import lombok.Setter; import lombok.experimental.Accessors; import java.io.Serializable; import java.util.ArrayList; import java.util.List; /** * This is {@link RedisClusterProperties}. * * @author Misagh Moayyed * @since 6.3.0 */ @Getter @Setter @Accessors(chain = true) @RequiresModule(name = "cas-server-support-redis-core") public class RedisClusterProperties implements Serializable { private static final long serialVersionUID = 5236837157740950831L; /** * List of nodes available in the redis cluster. */ private List<RedisClusterNodeProperties> nodes = new ArrayList<>(0); /** * The cluster connection's password. */ private String password; /** * The max number of redirects to follow. */ private int maxRedirects; }
apache-2.0
lessthanoptimal/BoofCV
main/boofcv-geo/src/test/java/boofcv/alg/geo/selfcalib/TestRefineDualQuadraticAlgebraicError.java
6836
/* * Copyright (c) 2021, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.geo.selfcalib; import boofcv.alg.geo.selfcalib.RefineDualQuadraticAlgebraicError.CameraState; import boofcv.struct.calib.CameraPinhole; import georegression.struct.point.Point3D_F64; import org.ejml.UtilEjml; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Peter Abeles */ public class TestRefineDualQuadraticAlgebraicError extends CommonAutoCalibrationChecks { @Test void encode_decode() { var alg = new RefineDualQuadraticAlgebraicError(); for (int i = 0; i < 10; i++) { alg.priorCameras.grow().setTo(400 + i, 1.0 + i/20.0, 340, 400); } alg.cameras.resize(alg.priorCameras.size); double[] parameters = new double[(1 + 1 + 2)*10 + 3]; Point3D_F64 p = new Point3D_F64(planeAtInfinity.get(0), planeAtInfinity.get(1), planeAtInfinity.get(2)); alg.encodeParameters(p, alg.priorCameras, parameters); p.setTo(0, 0, 0); alg.decodeParameters(parameters, alg.cameras, p); for (int cameraIdx = 0; cameraIdx < alg.cameras.size; cameraIdx++) { CameraState found = alg.cameras.get(cameraIdx); CameraState expected = alg.cameras.get(cameraIdx); assertEquals(expected.fx, found.fx); assertEquals(expected.aspectRatio, found.aspectRatio); assertEquals(expected.cx, 
found.cx); assertEquals(expected.cy, found.cy); } assertEquals(planeAtInfinity.get(0), p.x); assertEquals(planeAtInfinity.get(1), p.y); assertEquals(planeAtInfinity.get(2), p.z); } @Test void solvePerfect() { List<CameraPinhole> expected = new ArrayList<>(); List<CameraPinhole> found = new ArrayList<>(); for (int i = 0; i < 10; i++) { expected.add(new CameraPinhole(400 + i*5, 420, 0.0, 410, 420, 0, 0)); found.add(new CameraPinhole(expected.get(i))); } var alg = new RefineDualQuadraticAlgebraicError(); checkRefine(alg, expected, found, 1e-6); } @Test void solveNoise() { List<CameraPinhole> expected = new ArrayList<>(); List<CameraPinhole> found = new ArrayList<>(); for (int i = 0; i < 30; i++) { expected.add(new CameraPinhole(400 + i*5, 420, 0.0, 410, 420, 0, 0)); found.add(new CameraPinhole(expected.get(i))); found.get(i).fx += 1.5*rand.nextGaussian(); found.get(i).fy += 1.5*rand.nextGaussian(); found.get(i).cx += 1.5*rand.nextGaussian(); found.get(i).cy += 1.5*rand.nextGaussian(); } var alg = new RefineDualQuadraticAlgebraicError(); checkRefine(alg, expected, found, 1.5); } @Test void solveNoise_solveSingleCamera() { List<CameraPinhole> expected = new ArrayList<>(); List<CameraPinhole> found = new ArrayList<>(); // Just one camera as a constraint for (int i = 0; i < 30; i++) { expected.add(new CameraPinhole(400, 420, 0.0, 410, 420, 0, 0)); } found.add(new CameraPinhole(expected.get(0))); found.get(0).fx += 1.5*rand.nextGaussian(); found.get(0).fy += 1.5*rand.nextGaussian(); found.get(0).cx += 1.5*rand.nextGaussian(); found.get(0).cy += 1.5*rand.nextGaussian(); var alg = new RefineDualQuadraticAlgebraicError(); checkRefine(alg, expected, found, 0.5); } @Test void solveNoise_KnownPrinciplePoint() { List<CameraPinhole> expected = new ArrayList<>(); List<CameraPinhole> found = new ArrayList<>(); for (int i = 0; i < 30; i++) { expected.add(new CameraPinhole(400 + i*5, 420, 0.0, 5, 6, 0, 0)); found.add(new CameraPinhole(expected.get(i))); found.get(i).fx += 
1*rand.nextGaussian(); found.get(i).fy += 1*rand.nextGaussian(); } var alg = new RefineDualQuadraticAlgebraicError(); alg.setKnownPrinciplePoint(true); checkRefine(alg, expected, found, 5); } @Test void solveFixedAspect() { List<CameraPinhole> expected = new ArrayList<>(); List<CameraPinhole> found = new ArrayList<>(); for (int i = 0; i < 30; i++) { expected.add(new CameraPinhole(400 + i*5, 420, 0.0, 410, 420, 0, 0)); found.add(new CameraPinhole(expected.get(i))); double scaleF = 1.0 + (rand.nextDouble()-0.5)/10.0; found.get(i).fx *= scaleF; found.get(i).fy *= scaleF; found.get(i).cx += 2*rand.nextGaussian(); found.get(i).cy += 2*rand.nextGaussian(); } var alg = new RefineDualQuadraticAlgebraicError(); alg.setKnownAspect(true); checkRefine(alg, expected, found, 5); } private void checkRefine( RefineDualQuadraticAlgebraicError alg, List<CameraPinhole> expected, List<CameraPinhole> noisy, double tol ) { // alg.setVerbose(System.out, null); renderGood(expected); setState(alg, noisy, planeAtInfinity.get(0), planeAtInfinity.get(1), planeAtInfinity.get(2)); assertTrue(alg.refine()); List<CameraState> found = alg.getCameras().toList(); assertEquals(noisy.size(), found.size()); // checking against size of noisy, since noisy is number of cameras // if intrinsics are the same than there will only be one camera // estimate gets worse for (int i = 0; i < noisy.size(); i++) { CameraPinhole e = expected.get(i); CameraState f = found.get(i); if (alg.isKnownAspect()) { assertEquals(e.fx, f.fx, tol); assertEquals(e.fy/e.fx, f.aspectRatio, 0.01); } else { assertEquals(e.fx, f.fx, tol); assertEquals(e.fy/e.fx, f.aspectRatio, 0.05); } if (alg.isKnownPrinciplePoint()) { assertEquals(e.cx, f.cx, UtilEjml.TEST_F64_SQ); assertEquals(e.cy, f.cy, UtilEjml.TEST_F64_SQ); } else { assertEquals(e.cx, f.cx, tol); assertEquals(e.cy, f.cy, tol); } } } private void setState( RefineDualQuadraticAlgebraicError alg, List<CameraPinhole> noisy, double px, double py, double pz ) { 
alg.initialize(noisy.size(), listP.size()); for (int i = 0; i < listP.size(); i++) { alg.setProjective(i, listP.get(i)); } for (int i = 0; i < noisy.size(); i++) { CameraPinhole c = noisy.get(i); alg.setCamera(i, c.fx, c.cx, c.cy, c.fy/c.fx); } alg.setPlaneAtInfinity(px, py, pz); if (listP.size() == noisy.size()) { for (int i = 0; i < listP.size(); i++) { alg.setViewToCamera(i, i); } } else { for (int i = 0; i < listP.size(); i++) { alg.setViewToCamera(i, 0); } } } }
apache-2.0
LiuJiJiJi/Spring-Boot-example
Thread/src.main.java/com/mobin/thread/Lock/ReentrackReadLock.java
2430
package com.mobin.thread.Lock; import java.util.HashMap; import java.util.Map; /** * Created by Mobin on 2016/3/23. * 读锁重入 * 条件:当前线程没有写操作或写请求,必须持有读线程(不管是否有读请求) */ public class ReentrackReadLock implements Runnable { Map<Thread, Integer> mapReadLock = new HashMap<Thread, Integer>(); //在读写锁中读线程是允许多个的,所以用map private int write = 0; private int writeRequest = 0; public Boolean isRead(Thread readThread) { //判断是否有读线程,这是读锁重入的条件之一 return mapReadLock.get(readThread) != null; } public Boolean isCanGetReadAccess(Thread readThread) { //判断读锁的重入的条件是否都满足 if (write > 0) return false; if (writeRequest > 0) return false; if (isRead(readThread)) return true; return true; //第一次时以上条件都不满足 } public int getReadThreadCount(Thread readThread) { //获取读线程的总数 Integer count = mapReadLock.get(readThread); return count == null ? 0 : count.intValue(); } public void readLock() throws InterruptedException { Thread readThread = Thread.currentThread(); System.out.println(isCanGetReadAccess(readThread)); if (!isCanGetReadAccess(readThread)) wait(); mapReadLock.put(readThread, getReadThreadCount(readThread) + 1); } public void unReadLock(){ Thread thread = Thread.currentThread(); if(getReadThreadCount(thread) == 1) mapReadLock.remove(thread); else mapReadLock.put(thread,getReadThreadCount( thread) -1); notifyAll(); } public void run() { //等于readLock Thread readThread = Thread.currentThread(); if (!isCanGetReadAccess(readThread)) { try { wait(); } catch (InterruptedException e) { e.printStackTrace(); } } mapReadLock.put(readThread, getReadThreadCount(readThread) + 1); //System.out.println(getReadThreadCount(readThread)); } public static void main(String[] args) throws InterruptedException { Runnable readThread = new ReentrackReadLock(); Thread thread = new Thread(readThread); thread.start(); } }
apache-2.0
radarsh/gradle-test-logger-plugin
src/test-functional/resources/sample-junit5-jupiter-nested-tests/src/test/java/com/adarshr/test/NestedTest.java
979
package com.adarshr.test; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Nested; public class NestedTest { @Nested public class NestedTestsetOne { @Test public void firstTestOfNestedTestsetOne() { Assertions.assertEquals(1, 1); } @Test public void secondTestOfNestedTestsetOne() { Assertions.assertEquals(1, 1); } } @Nested public class NestedTestsetTwo { @Test public void firstTestOfNestedTestsetTwo() { Assertions.assertEquals(1, 1); } @Test public void secondTestOfNestedTestsetTwo() { Assertions.assertEquals(1, 1); } } @Nested public class NestedTestsetThree { @Test public void firstTestOfNestedTestsetThree() { Assertions.assertEquals(1, 1); } } }
apache-2.0
robertroeser/Hystrix
hystrix-contrib/hystrix-reactivesocket-event-stream/src/main/java/com/netflix/hystrix/contrib/reactivesocket/metrics/HystrixCollapserMetricsStream.java
5189
package com.netflix.hystrix.contrib.reactivesocket.metrics; import com.fasterxml.jackson.core.JsonGenerator; import com.netflix.hystrix.HystrixCollapserKey; import com.netflix.hystrix.HystrixCollapserMetrics; import com.netflix.hystrix.HystrixEventType; import com.netflix.hystrix.contrib.reactivesocket.StreamingSupplier; import org.agrona.LangUtil; import rx.functions.Func0; import java.io.ByteArrayOutputStream; import java.util.stream.Stream; public class HystrixCollapserMetricsStream extends StreamingSupplier<HystrixCollapserMetrics> { private static HystrixCollapserMetricsStream INSTANCE = new HystrixCollapserMetricsStream(); private HystrixCollapserMetricsStream() { super(); } public static HystrixCollapserMetricsStream getInstance() { return INSTANCE; } @Override protected Stream getStream() { return HystrixCollapserMetrics.getInstances().stream(); } protected byte[] getPayloadData(final HystrixCollapserMetrics collapserMetrics) { byte[] retVal = null; try { HystrixCollapserKey key = collapserMetrics.getCollapserKey(); ByteArrayOutputStream bos = new ByteArrayOutputStream(); JsonGenerator json = jsonFactory.createGenerator(bos); json.writeStartObject(); json.writeStringField("type", "HystrixCollapser"); json.writeStringField("name", key.name()); json.writeNumberField("currentTime", System.currentTimeMillis()); safelyWriteNumberField(json, "rollingCountRequestsBatched", new Func0<Long>() { @Override public Long call() { return collapserMetrics.getRollingCount(HystrixEventType.Collapser.ADDED_TO_BATCH); } }); safelyWriteNumberField(json, "rollingCountBatches", new Func0<Long>() { @Override public Long call() { return collapserMetrics.getRollingCount(HystrixEventType.Collapser.BATCH_EXECUTED); } }); safelyWriteNumberField(json, "rollingCountResponsesFromCache", new Func0<Long>() { @Override public Long call() { return collapserMetrics.getRollingCount(HystrixEventType.Collapser.RESPONSE_FROM_CACHE); } }); // batch size percentiles 
json.writeNumberField("batchSize_mean", collapserMetrics.getBatchSizeMean()); json.writeObjectFieldStart("batchSize"); json.writeNumberField("25", collapserMetrics.getBatchSizePercentile(25)); json.writeNumberField("50", collapserMetrics.getBatchSizePercentile(50)); json.writeNumberField("75", collapserMetrics.getBatchSizePercentile(75)); json.writeNumberField("90", collapserMetrics.getBatchSizePercentile(90)); json.writeNumberField("95", collapserMetrics.getBatchSizePercentile(95)); json.writeNumberField("99", collapserMetrics.getBatchSizePercentile(99)); json.writeNumberField("99.5", collapserMetrics.getBatchSizePercentile(99.5)); json.writeNumberField("100", collapserMetrics.getBatchSizePercentile(100)); json.writeEndObject(); // shard size percentiles (commented-out for now) //json.writeNumberField("shardSize_mean", collapserMetrics.getShardSizeMean()); //json.writeObjectFieldStart("shardSize"); //json.writeNumberField("25", collapserMetrics.getShardSizePercentile(25)); //json.writeNumberField("50", collapserMetrics.getShardSizePercentile(50)); //json.writeNumberField("75", collapserMetrics.getShardSizePercentile(75)); //json.writeNumberField("90", collapserMetrics.getShardSizePercentile(90)); //json.writeNumberField("95", collapserMetrics.getShardSizePercentile(95)); //json.writeNumberField("99", collapserMetrics.getShardSizePercentile(99)); //json.writeNumberField("99.5", collapserMetrics.getShardSizePercentile(99.5)); //json.writeNumberField("100", collapserMetrics.getShardSizePercentile(100)); //json.writeEndObject(); //json.writeNumberField("propertyValue_metricsRollingStatisticalWindowInMilliseconds", collapserMetrics.getProperties().metricsRollingStatisticalWindowInMilliseconds().get()); json.writeBooleanField("propertyValue_requestCacheEnabled", collapserMetrics.getProperties().requestCacheEnabled().get()); json.writeNumberField("propertyValue_maxRequestsInBatch", collapserMetrics.getProperties().maxRequestsInBatch().get()); 
json.writeNumberField("propertyValue_timerDelayInMilliseconds", collapserMetrics.getProperties().timerDelayInMilliseconds().get()); json.writeNumberField("reportingHosts", 1); // this will get summed across all instances in a cluster json.writeEndObject(); json.close(); retVal = bos.toByteArray(); } catch (Exception e) { LangUtil.rethrowUnchecked(e); } return retVal; } }
apache-2.0
512guanguan/souyou
src/com/llb/souyou/util/Rotate3D.java
2362
package com.llb.souyou.util; import android.graphics.Camera; import android.graphics.Matrix; import android.view.animation.Animation; import android.view.animation.Transformation; public class Rotate3D extends Animation{ private Camera camera;//不是相机那个Camera private float fromDegree;//开始旋转的角度 private float endDegree; private float centerX;//旋转中心 private float centerY; public Rotate3D(float fromDegree, float endDegree,float centerX, float centerY) { super(); this.fromDegree = fromDegree; this.endDegree = endDegree; this.centerX = centerX; this.centerY = centerY; } @Override public void initialize(int width, int height, int parentWidth, int parentHeight) { // TODO Auto-generated method stub super.initialize(width, height, parentWidth, parentHeight); camera=new Camera();//每次动画前都会调用,不知道是放这里还是构造函数中,待验证 } @Override protected void applyTransformation(float interpolatedTime, Transformation t) { super.applyTransformation(interpolatedTime, t); float rotateDegree=fromDegree+(endDegree-fromDegree)*interpolatedTime;//旋转角度 Matrix matrix=t.getMatrix();//返回一个3*3矩阵 // Log.i("Llb","rotate0"+rotateDegree); if (rotateDegree<-70) { rotateDegree=-90.0f;//若转个180°就完全反向了 camera.save();//保存当前状态 camera.rotateY(rotateDegree);//绕Y轴旋转 camera.getMatrix(matrix); camera.restore();//恢复状态 // Log.i("llb","rotate1"+rotateDegree); }else if (rotateDegree>70) { rotateDegree=90.0f; camera.save();//保存当前状态 camera.rotateY(rotateDegree);//绕Y轴旋转 camera.getMatrix(matrix); camera.restore();//恢复状态 }else { camera.save(); camera.translate(0, 0, centerX); // 沿Z轴位移,看起来相当于缩小了 camera.rotateY(rotateDegree); camera.translate(0, 0, -centerX); //复位 camera.getMatrix(matrix); camera.restore(); // Log.i("llb","rotate2"+rotateDegree); } //参数是平移的距离,而不是平移目的地的坐标 确保图片的翻转过程一直处于组件的中心点位置 matrix.preTranslate(-centerX, -centerY);//由于缩放是以(0,0)为中心的,所以为了把界面的中心与(0,0)对齐 setScale前 matrix.postTranslate(centerX, centerY);//setScale后 } }
apache-2.0
consulo/consulo
modules/base/usage-view/src/main/java/com/intellij/usages/PsiNamedElementUsageGroupBase.java
1137
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.usages;

import com.intellij.navigation.NavigationItem;
import com.intellij.psi.PsiNamedElement;
import consulo.ui.image.Image;

import javax.annotation.Nonnull;

/**
 * Usage group backed by a named, navigatable PSI element. This class adds no
 * behavior of its own; it narrows the element type accepted by
 * {@code PsiElementUsageGroupBase} to elements that are both
 * {@link PsiNamedElement} and {@link NavigationItem}.
 *
 * @author Maxim.Mossienko
 */
public class PsiNamedElementUsageGroupBase<T extends PsiNamedElement & NavigationItem> extends PsiElementUsageGroupBase<T> {
  /**
   * Creates a group for {@code element} rendered with the given {@code icon}.
   */
  public PsiNamedElementUsageGroupBase(@Nonnull T element, Image icon) {
    super(element, icon);
  }

  /**
   * Creates a group for {@code element}; the icon is left to the superclass.
   */
  public PsiNamedElementUsageGroupBase(@Nonnull T element) {
    super(element);
  }
}
apache-2.0
jexp/idea2
platform/platform-impl/src/com/intellij/openapi/wm/impl/IdeFrameImpl.java
8296
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.wm.impl;

import com.intellij.ide.AppLifecycleListener;
import com.intellij.ide.DataManager;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.MnemonicHelper;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.DataConstants;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationInfo;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.ex.LayoutFocusTraversalPolicyExt;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.ui.AppUIUtil;
import com.intellij.ui.BalloonLayout;
import com.intellij.ui.FocusTrackback;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;

/**
 * Top-level IDE window: a {@link JFrame} that hosts one project's root pane,
 * status bar, balloon layout and title. Also acts as a {@link DataProvider}
 * for the current project.
 *
 * @author Anton Katilin
 * @author Vladimir Kondratyev
 */
// Made non-final for Fabrique
public class IdeFrameImpl extends JFrame implements IdeFrame, DataProvider {
  // Title parts assembled by updateTitle(): optional frame title, optional
  // file title, then the full application name.
  private String myTitle;
  private String myFileTitle;
  private File myCurrentFile;

  private Project myProject;
  private final LayoutFocusTraversalPolicyExt myLayoutFocusTraversalPolicy;

  private IdeRootPane myRootPane;
  private BalloonLayout myBalloonLayout;

  public IdeFrameImpl(ApplicationInfoEx applicationInfoEx,
                      ActionManager actionManager,
                      UISettings uiSettings,
                      DataManager dataManager,
                      KeymapManager keymapManager,
                      final Application application,
                      final String[] commandLineArgs) {
    super(applicationInfoEx.getFullApplicationName());
    myRootPane = new IdeRootPane(actionManager, uiSettings, dataManager, keymapManager, application, commandLineArgs);
    setRootPane(myRootPane);

    AppUIUtil.updateFrameIcon(this);
    // Initial bounds: almost full screen, inset 10px left/right/top and 40px total vertically.
    final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
    setBounds(10, 10, screenSize.width - 20, screenSize.height - 40);

    myLayoutFocusTraversalPolicy = new LayoutFocusTraversalPolicyExt();
    setFocusTraversalPolicy(myLayoutFocusTraversalPolicy);

    setupCloseAction();
    new MnemonicHelper().register(this);

    myBalloonLayout = new BalloonLayout(myRootPane.getLayeredPane(), new Insets(8, 8, 8, 8));
  }

  /**
   * !!!!! CAUTION !!!!!
   * !!!!! CAUTION !!!!!
   * !!!!! CAUTION !!!!!
   *
   * THIS IS AN "ABSOLUTELY-GURU METHOD".
   * NOBODY SHOULD ADD OTHER USAGES OF IT :)
   * ONLY ANTON AND VOVA ARE PERMITTED TO USE THIS METHOD!!!
   *
   * !!!!! CAUTION !!!!!
   * !!!!! CAUTION !!!!!
   * !!!!! CAUTION !!!!!
   */
  public final void setDefaultFocusableComponent(final JComponent component) {
    myLayoutFocusTraversalPolicy.setOverridenDefaultComponent(component);
  }

  /**
   * This is overriden to get rid of strange Alloy LaF customization of frames. For unknown reason it sets the maxBounds rectangle
   * and it does it plain wrong. Setting bounds to <code>null</code> means default value should be taken from the underlying OS.
   */
  public synchronized void setMaximizedBounds(Rectangle bounds) {
    // NOTE: the incoming bounds are deliberately ignored (Alloy LaF workaround above).
    super.setMaximizedBounds(null);
  }

  // Installs the window-close handler: closing the last project frame exits
  // the application; closing one of several frames closes only its project.
  private void setupCloseAction() {
    setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
    addWindowListener(
      new WindowAdapter() {
        public void windowClosing(final WindowEvent e) {
          final Application app = ApplicationManager.getApplication();
          // Defer to the EDT outside any modal dialog.
          app.invokeLater(new DumbAwareRunnable() {
            public void run() {
              if (app.isDisposed()) {
                ApplicationManagerEx.getApplicationEx().exit();
                return;
              }

              final Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
              if (openProjects.length > 1) {
                // Other frames remain: close just this frame's project.
                if (myProject != null && myProject.isOpen()) {
                  ProjectUtil.closeProject(myProject);
                }
                app.getMessageBus().syncPublisher(AppLifecycleListener.TOPIC).projectFrameClosed();
              }
              else {
                // Last frame: exit the whole application.
                ApplicationManagerEx.getApplicationEx().exit();
              }
            }
          }, ModalityState.NON_MODAL);
        }
      }
    );
  }

  public StatusBarEx getStatusBar() {
    return ((IdeRootPane)getRootPane()).getStatusBar();
  }

  public void updateToolbar() {
    ((IdeRootPane)getRootPane()).updateToolbar();
  }

  public void updateMenuBar(){
    ((IdeRootPane)getRootPane()).updateMainMenuActions();
  }

  /** Sets the frame-title part and rebuilds the full window title. */
  public void setTitle(final String title) {
    myTitle = title;
    updateTitle();
  }

  // Bypasses this class's setTitle() override to set the actual JFrame title.
  private void setFrameTitle(final String text) {
    super.setTitle(text);
  }

  public void setFileTitle(final String fileTitle) {
    setFileTitle(fileTitle, null);
  }

  /** Sets the file-title part (and the file used for the macOS proxy icon). */
  public void setFileTitle(@Nullable final String fileTitle, @Nullable File file) {
    myFileTitle = fileTitle;
    myCurrentFile = file;
    updateTitle();
  }

  // Rebuilds the window title as "<title> - <fileTitle> - <app name>",
  // skipping empty parts, and publishes the current file to the root pane
  // ("Window.documentFile" client property).
  private void updateTitle() {
    final StringBuilder sb = new StringBuilder();
    if (myTitle != null && myTitle.length() > 0) {
      sb.append(myTitle);
      sb.append(" - ");
    }
    if (myFileTitle != null && myFileTitle.length() > 0) {
      sb.append(myFileTitle);
      sb.append(" - ");
    }

    getRootPane().putClientProperty("Window.documentFile", myCurrentFile);

    sb.append(((ApplicationInfoEx)ApplicationInfo.getInstance()).getFullApplicationName());
    setFrameTitle(sb.toString());
  }

  /** DataProvider: exposes the frame's project (only once it is initialized). */
  public Object getData(final String dataId) {
    if (DataConstants.PROJECT.equals(dataId)) {
      if (myProject != null) {
        return myProject.isInitialized() ? myProject : null;
      }
    }
    return null;
  }

  /**
   * Binds this frame to {@code project} (or detaches it when {@code null}),
   * installing/removing the project-specific north components accordingly.
   */
  public void setProject(final Project project) {
    getStatusBar().cleanupCustomComponents();
    myProject = project;
    if (project != null) {
      if (myRootPane != null) {
        myRootPane.installNorthComponents(project);
      }
    }
    else {
      if (myRootPane != null) { //already disposed
        myRootPane.deinstallNorthComponents();
      }
    }

    if (project == null) {
      // No project anymore: drop focus-trackback references to avoid leaks.
      FocusTrackback.release(this);
    }
  }

  public Project getProject() {
    return myProject;
  }

  public void dispose() {
    if (myRootPane != null) {
      myRootPane = null;
    }
    super.dispose();
  }

  @Override
  public void paint(Graphics g) {
    // Apply text-antialiasing/rendering hints before standard painting.
    UIUtil.applyRenderingHints(g);
    super.paint(g);
  }

  /** Suggests bounds for a child frame: this frame's bounds inset by 100px. */
  public Rectangle suggestChildFrameBounds() {
//todo [kirillk] a dummy implementation
    final Rectangle b = getBounds();
    b.x += 100;
    b.width -= 200;
    b.y += 100;
    b.height -= 200;

    return b;
  }

  /**
   * Walks up from {@code c} and returns the nearest enclosing IDE frame or
   * modal dialog, or {@code null} if neither is found.
   */
  @Nullable
  public static Component findNearestModalComponent(@NotNull Component c) {
    Component eachParent = c;
    while (eachParent != null) {
      if (eachParent instanceof IdeFrameImpl) return eachParent;
      if (eachParent instanceof JDialog) {
        if (((JDialog)eachParent).isModal()) return eachParent;
      }
      eachParent = eachParent.getParent();
    }

    return null;
  }

  public final BalloonLayout getBalloonLayout() {
    return myBalloonLayout;
  }
}
apache-2.0
iservport/iservport-politikei
src/main/scala/org/helianto/politikei/Application.java
1394
package org.helianto.politikei; import org.helianto.root.RootApplication; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.security.oauth2.client.EnableOAuth2Sso; import org.springframework.context.annotation.Configuration; import org.springframework.core.annotation.Order; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint; @SpringBootApplication(scanBasePackages={"org.helianto.*.service","org.helianto.*.controller"}) @EnableJpaRepositories(basePackages={"org.helianto.*.repository"}) @EnableOAuth2Sso @EnableResourceServer public class Application extends RootApplication { public static void main(String[] args) { SpringApplication.run(Application.class, args); } }
apache-2.0
crnk-project/crnk-framework
crnk-core/src/test/java/io/crnk/core/mock/repository/TaskRepository.java
2856
package io.crnk.core.mock.repository;

import io.crnk.core.exception.BadRequestException;
import io.crnk.core.mock.models.Project;
import io.crnk.core.mock.models.Task;
import io.crnk.core.queryspec.QuerySpec;
import io.crnk.core.repository.LinksRepository;
import io.crnk.core.repository.MetaRepository;
import io.crnk.core.repository.ResourceRepositoryBase;
import io.crnk.core.resource.links.DefaultLink;
import io.crnk.core.resource.links.DefaultPagedLinksInformation;
import io.crnk.core.resource.links.Link;
import io.crnk.core.resource.links.LinksInformation;
import io.crnk.core.resource.list.ResourceList;
import io.crnk.core.resource.meta.DefaultPagedMetaInformation;
import io.crnk.core.resource.meta.MetaInformation;

import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

/**
 * In-memory mock repository for {@link Task} resources used by the core tests.
 * Stores tasks in a {@link HashSet} and assigns sequential ids on save.
 * Not thread-safe; intended for single-threaded test use only.
 */
public class TaskRepository extends ResourceRepositoryBase<Task, Long> implements MetaRepository<Task>, LinksRepository<Task> {

	/** Saving a task with this name triggers a {@link BadRequestException} (for error-path tests). */
	public static final String BAD_REQUEST_NAME = "badName";

	private Set<Task> tasks = new HashSet<>();

	// next id handed out by save() for entities without an id
	private long nextId = 0;

	public TaskRepository() {
		super(Task.class);
	}

	/**
	 * Returns all stored tasks matching {@code querySpec}, with default paged
	 * meta and links information attached.
	 */
	@Override
	public ResourceList<Task> findAll(QuerySpec querySpec) {
		TaskList list = new TaskList();
		querySpec.apply(tasks, list);
		list.setMeta(new DefaultPagedMetaInformation());
		list.setLinks(new DefaultPagedLinksInformation());
		return list;
	}

	/**
	 * Inserts or replaces {@code entity}, assigning an id if absent.
	 *
	 * @throws BadRequestException if the task is named {@link #BAD_REQUEST_NAME}
	 */
	@Override
	public <S extends Task> S save(S entity) {
		if (BAD_REQUEST_NAME.equals(entity.getName())) {
			throw new BadRequestException("badName");
		}

		if (entity.getId() == null) {
			entity.setId(nextId++);
		}
		delete(entity.getId()); // replace current one

		// maintain bidirectional mapping, not perfect, should be done in the resources, but serves its purpose her.
		Project project = entity.getProject();
		if (project != null && !project.getTasks().contains(entity)) {
			project.getTasks().add(entity);
		}

		tasks.add(entity);
		return entity;
	}

	/** Removes any stored task whose id equals {@code id}; no-op if absent. */
	@Override
	public void delete(Long id) {
		// removeIf replaces the previous manual Iterator loop (same semantics).
		tasks.removeIf(task -> task.getId().equals(id));
	}

	@Override
	public LinksInformation getLinksInformation(Collection<Task> resources, QuerySpec queryParams, LinksInformation current) {
		return new TaskLinksInformation();
	}

	@Override
	public MetaInformation getMetaInformation(Collection<Task> resources, QuerySpec queryParams, MetaInformation current) {
		// anonymous MetaInformation exposing a fixed public field, as the tests expect
		return new MetaInformation() {

			public String name = "value";
		};
	}
}
apache-2.0
wangqi/gameserver
server/src/gensrc/java/com/xinqihd/sns/gameserver/proto/XinqiBseGetValue.java
23359
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: BseGetValue.proto package com.xinqihd.sns.gameserver.proto; public final class XinqiBseGetValue { private XinqiBseGetValue() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface BseGetValueOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string action = 1; boolean hasAction(); String getAction(); // repeated string keys = 2; java.util.List<String> getKeysList(); int getKeysCount(); String getKeys(int index); // repeated string values = 3; java.util.List<String> getValuesList(); int getValuesCount(); String getValues(int index); } public static final class BseGetValue extends com.google.protobuf.GeneratedMessage implements BseGetValueOrBuilder { // Use BseGetValue.newBuilder() to construct. private BseGetValue(Builder builder) { super(builder); } private BseGetValue(boolean noInit) {} private static final BseGetValue defaultInstance; public static BseGetValue getDefaultInstance() { return defaultInstance; } public BseGetValue getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_fieldAccessorTable; } private int bitField0_; // required string action = 1; public static final int ACTION_FIELD_NUMBER = 1; private java.lang.Object action_; public boolean hasAction() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getAction() { java.lang.Object ref = action_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = 
(com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { action_ = s; } return s; } } private com.google.protobuf.ByteString getActionBytes() { java.lang.Object ref = action_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); action_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // repeated string keys = 2; public static final int KEYS_FIELD_NUMBER = 2; private com.google.protobuf.LazyStringList keys_; public java.util.List<String> getKeysList() { return keys_; } public int getKeysCount() { return keys_.size(); } public String getKeys(int index) { return keys_.get(index); } // repeated string values = 3; public static final int VALUES_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList values_; public java.util.List<String> getValuesList() { return values_; } public int getValuesCount() { return values_.size(); } public String getValues(int index) { return values_.get(index); } private void initFields() { action_ = ""; keys_ = com.google.protobuf.LazyStringArrayList.EMPTY; values_ = com.google.protobuf.LazyStringArrayList.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasAction()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getActionBytes()); } for (int i = 0; i < keys_.size(); i++) { output.writeBytes(2, keys_.getByteString(i)); } for (int i = 0; i < values_.size(); i++) { output.writeBytes(3, values_.getByteString(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() 
{ int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getActionBytes()); } { int dataSize = 0; for (int i = 0; i < keys_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(keys_.getByteString(i)); } size += dataSize; size += 1 * getKeysList().size(); } { int dataSize = 0; for (int i = 0; i < values_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(values_.getByteString(i)); } size += dataSize; size += 1 * getValuesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static 
com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_fieldAccessorTable; } // Construct using com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); action_ = ""; bitField0_ = (bitField0_ & ~0x00000001); keys_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); values_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue.getDescriptor(); } public com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue getDefaultInstanceForType() { return com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue.getDefaultInstance(); } public com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue build() { 
com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue buildPartial() { com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue result = new com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.action_ = action_; if (((bitField0_ & 0x00000002) == 0x00000002)) { keys_ = new com.google.protobuf.UnmodifiableLazyStringList( keys_); bitField0_ = (bitField0_ & ~0x00000002); } result.keys_ = keys_; if (((bitField0_ & 0x00000004) == 0x00000004)) { values_ = new com.google.protobuf.UnmodifiableLazyStringList( values_); bitField0_ = (bitField0_ & ~0x00000004); } result.values_ = values_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue) { return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue other) { if (other == com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue.getDefaultInstance()) return this; if (other.hasAction()) { setAction(other.getAction()); } if (!other.keys_.isEmpty()) { if (keys_.isEmpty()) { keys_ = 
other.keys_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureKeysIsMutable(); keys_.addAll(other.keys_); } onChanged(); } if (!other.values_.isEmpty()) { if (values_.isEmpty()) { values_ = other.values_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureValuesIsMutable(); values_.addAll(other.values_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasAction()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; action_ = input.readBytes(); break; } case 18: { ensureKeysIsMutable(); keys_.add(input.readBytes()); break; } case 26: { ensureValuesIsMutable(); values_.add(input.readBytes()); break; } } } } private int bitField0_; // required string action = 1; private java.lang.Object action_ = ""; public boolean hasAction() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getAction() { java.lang.Object ref = action_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); action_ = s; return s; } else { return (String) ref; } } public Builder setAction(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; action_ = value; onChanged(); return this; } public Builder clearAction() { bitField0_ = (bitField0_ & ~0x00000001); action_ = getDefaultInstance().getAction(); 
onChanged(); return this; } void setAction(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000001; action_ = value; onChanged(); } // repeated string keys = 2; private com.google.protobuf.LazyStringList keys_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureKeysIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { keys_ = new com.google.protobuf.LazyStringArrayList(keys_); bitField0_ |= 0x00000002; } } public java.util.List<String> getKeysList() { return java.util.Collections.unmodifiableList(keys_); } public int getKeysCount() { return keys_.size(); } public String getKeys(int index) { return keys_.get(index); } public Builder setKeys( int index, String value) { if (value == null) { throw new NullPointerException(); } ensureKeysIsMutable(); keys_.set(index, value); onChanged(); return this; } public Builder addKeys(String value) { if (value == null) { throw new NullPointerException(); } ensureKeysIsMutable(); keys_.add(value); onChanged(); return this; } public Builder addAllKeys( java.lang.Iterable<String> values) { ensureKeysIsMutable(); super.addAll(values, keys_); onChanged(); return this; } public Builder clearKeys() { keys_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } void addKeys(com.google.protobuf.ByteString value) { ensureKeysIsMutable(); keys_.add(value); onChanged(); } // repeated string values = 3; private com.google.protobuf.LazyStringList values_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureValuesIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { values_ = new com.google.protobuf.LazyStringArrayList(values_); bitField0_ |= 0x00000004; } } public java.util.List<String> getValuesList() { return java.util.Collections.unmodifiableList(values_); } public int getValuesCount() { return values_.size(); } public String getValues(int index) { return values_.get(index); } public Builder setValues( int index, String 
value) { if (value == null) { throw new NullPointerException(); } ensureValuesIsMutable(); values_.set(index, value); onChanged(); return this; } public Builder addValues(String value) { if (value == null) { throw new NullPointerException(); } ensureValuesIsMutable(); values_.add(value); onChanged(); return this; } public Builder addAllValues( java.lang.Iterable<String> values) { ensureValuesIsMutable(); super.addAll(values, values_); onChanged(); return this; } public Builder clearValues() { values_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } void addValues(com.google.protobuf.ByteString value) { ensureValuesIsMutable(); values_.add(value); onChanged(); } // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BseGetValue) } static { defaultInstance = new BseGetValue(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BseGetValue) } private static com.google.protobuf.Descriptors.Descriptor internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\021BseGetValue.proto\022 com.xinqihd.sns.gam" + "eserver.proto\";\n\013BseGetValue\022\016\n\006action\030\001" + " \002(\t\022\014\n\004keys\030\002 \003(\t\022\016\n\006values\030\003 \003(\tB\022B\020Xi" + "nqiBseGetValue" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( 
com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_com_xinqihd_sns_gameserver_proto_BseGetValue_descriptor, new java.lang.String[] { "Action", "Keys", "Values", }, com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue.class, com.xinqihd.sns.gameserver.proto.XinqiBseGetValue.BseGetValue.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
ayingshu/unionall
phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
22786
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.exception; import java.sql.SQLException; import java.sql.SQLTimeoutException; import java.util.Map; import org.apache.phoenix.hbase.index.util.IndexManagementUtil; import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; import org.apache.phoenix.schema.AmbiguousColumnException; import org.apache.phoenix.schema.AmbiguousTableException; import org.apache.phoenix.schema.ColumnAlreadyExistsException; import org.apache.phoenix.schema.ColumnFamilyNotFoundException; import org.apache.phoenix.schema.ColumnNotFoundException; import org.apache.phoenix.schema.ConcurrentTableMutationException; import org.apache.phoenix.schema.ReadOnlyTableException; import org.apache.phoenix.schema.SequenceAlreadyExistsException; import org.apache.phoenix.schema.SequenceNotFoundException; import org.apache.phoenix.schema.StaleRegionBoundaryCacheException; import org.apache.phoenix.schema.TableAlreadyExistsException; import org.apache.phoenix.schema.TableNotFoundException; import org.apache.phoenix.schema.TypeMismatchException; import org.apache.phoenix.schema.types.PDataType; import org.apache.phoenix.util.MetaDataUtil; import com.google.common.collect.Maps; /** * Various SQLException Information. 
Including a vender-specific errorcode and a standard SQLState. * * * @since 1.0 */ public enum SQLExceptionCode { /** * Connection Exception (errorcode 01, sqlstate 08) */ IO_EXCEPTION(101, "08000", "Unexpected IO exception."), MALFORMED_CONNECTION_URL(102, "08001", "Malformed connection url."), CANNOT_ESTABLISH_CONNECTION(103, "08004", "Unable to establish connection."), /** * Data Exception (errorcode 02, sqlstate 22) */ ILLEGAL_DATA(201, "22000", "Illegal data."), DIVIDE_BY_ZERO(202, "22012", "Divide by zero."), TYPE_MISMATCH(203, "22005", "Type mismatch.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new TypeMismatchException(info.getMessage()); } }), VALUE_IN_UPSERT_NOT_CONSTANT(204, "22008", "Values in UPSERT must evaluate to a constant."), MALFORMED_URL(205, "22009", "Malformed URL."), DATA_EXCEEDS_MAX_CAPACITY(206, "22003", "The data exceeds the max capacity for the data type."), MISSING_CHAR_LENGTH(207, "22003", "Missing length for CHAR."), NONPOSITIVE_CHAR_LENGTH(208, "22003", "CHAR or VARCHAR must have a positive length."), DECIMAL_PRECISION_OUT_OF_RANGE(209, "22003", "Decimal precision outside of range. Should be within 1 and " + PDataType.MAX_PRECISION + "."), MISSING_BINARY_LENGTH(210, "22003", "Missing length for BINARY."), NONPOSITIVE_BINARY_LENGTH(211, "22003", "BINARY must have a positive length."), SERVER_ARITHMETIC_ERROR(212, "22012", "Arithmetic error on server."), VALUE_OUTSIDE_RANGE(213,"22003","Value outside range."), VALUE_IN_LIST_NOT_CONSTANT(214, "22008", "Values in IN must evaluate to a constant."), SINGLE_ROW_SUBQUERY_RETURNS_MULTIPLE_ROWS(215, "22015", "Single-row sub-query returns more than one row."), SUBQUERY_RETURNS_DIFFERENT_NUMBER_OF_FIELDS(216, "22016", "Sub-query must return the same number of fields as the left-hand-side expression of 'IN'."), AMBIGUOUS_JOIN_CONDITION(217, "22017", "Amibiguous or non-equi join condition specified. 
Consider using table list with where clause."), /** * Constraint Violation (errorcode 03, sqlstate 23) */ CONCURRENT_TABLE_MUTATION(301, "23000", "Concurrent modification to table.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new ConcurrentTableMutationException(info.getSchemaName(), info.getTableName()); } }), CANNOT_INDEX_COLUMN_ON_TYPE(302, "23100", "The column cannot be index due to its type."), /** * Invalid Cursor State (errorcode 04, sqlstate 24) */ CURSOR_BEFORE_FIRST_ROW(401, "24015","Cursor before first row."), CURSOR_PAST_LAST_ROW(402, "24016", "Cursor past last row."), /** * Syntax Error or Access Rule Violation (errorcode 05, sqlstate 42) */ AMBIGUOUS_TABLE(501, "42000", "Table name exists in more than one table schema and is used without being qualified.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new AmbiguousTableException(info.getTableName(), info.getRootCause()); } }), AMBIGUOUS_COLUMN(502, "42702", "Column reference ambiguous or duplicate names.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new AmbiguousColumnException(info.getColumnName(), info.getRootCause()); } }), INDEX_MISSING_PK_COLUMNS(503, "42602", "Index table missing PK Columns."), COLUMN_NOT_FOUND(504, "42703", "Undefined column.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new ColumnNotFoundException(info.getSchemaName(), info.getTableName(), info.getFamilyName(), info.getColumnName()); } }), READ_ONLY_TABLE(505, "42000", "Table is read only.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new ReadOnlyTableException(info.getMessage(), info.getSchemaName(), info.getTableName(), info.getFamilyName()); } }), CANNOT_DROP_PK(506, "42817", "Primary key column may not be dropped."), PRIMARY_KEY_MISSING(509, "42888", "The table does not have a primary key."), 
PRIMARY_KEY_ALREADY_EXISTS(510, "42889", "The table already has a primary key."), ORDER_BY_NOT_IN_SELECT_DISTINCT(511, "42890", "All ORDER BY expressions must appear in SELECT DISTINCT:"), INVALID_PRIMARY_KEY_CONSTRAINT(512, "42891", "Invalid column reference in primary key constraint"), ARRAY_NOT_ALLOWED_IN_PRIMARY_KEY(513, "42892", "Array type not allowed as primary key constraint"), COLUMN_EXIST_IN_DEF(514, "42892", "A duplicate column name was detected in the object definition or ALTER TABLE statement.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new ColumnAlreadyExistsException(info.getSchemaName(), info.getTableName(), info.getColumnName()); } }), ORDER_BY_ARRAY_NOT_SUPPORTED(515, "42893", "ORDER BY of an array type is not allowed"), NON_EQUALITY_ARRAY_COMPARISON(516, "42894", "Array types may only be compared using = or !="), INVALID_NOT_NULL_CONSTRAINT(517, "42895", "Invalid not null constraint on non primary key column"), /** * Invalid Transaction State (errorcode 05, sqlstate 25) */ READ_ONLY_CONNECTION(518,"25502","Mutations are not permitted for a read-only connection."), VARBINARY_ARRAY_NOT_SUPPORTED(519, "42896", "VARBINARY ARRAY is not supported"), /** * Expression Index exceptions. 
*/ AGGREGATE_EXPRESSION_NOT_ALLOWED_IN_INDEX(520, "42897", "Aggreagaate expression not allowed in an index"), NON_DETERMINISTIC_EXPRESSION_NOT_ALLOWED_IN_INDEX(521, "42898", "Non-deterministic expression not allowed in an index"), STATELESS_EXPRESSION_NOT_ALLOWED_IN_INDEX(522, "42899", "Stateless expression not allowed in an index"), /** * Union All related errors */ ORDER_BY_IN_UNIONALL_SELECT_NOT_SUPPORTED(523, "42900", "ORDER BY in a Union All query is not allowed"), LIMIT_IN_UNIONALL_SELECT_NOT_SUPPORTED(524, "42901", "LIMIT in a Union All query is not allowed"), SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS(525, "42902", "SELECT column number differs in a Union All query is not allowed"), SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS(526, "42903", "SELECT column types differ in a Union All query is not allowed"), /** * HBase and Phoenix specific implementation defined sub-classes. * Column family related exceptions. * * For the following exceptions, use errorcode 10. */ SINGLE_PK_MAY_NOT_BE_NULL(1000, "42I00", "Single column primary key may not be NULL."), COLUMN_FAMILY_NOT_FOUND(1001, "42I01", "Undefined column family.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new ColumnFamilyNotFoundException(info.getFamilyName()); } }), PROPERTIES_FOR_FAMILY(1002, "42I02","Properties may not be defined for an unused family name."), // Primary/row key related exceptions. 
PRIMARY_KEY_WITH_FAMILY_NAME(1003, "42J01", "Primary key columns must not have a family name."), PRIMARY_KEY_OUT_OF_ORDER(1004, "42J02", "Order of columns in primary key constraint must match the order in which they're declared."), VARBINARY_IN_ROW_KEY(1005, "42J03", "The VARBINARY/ARRAY type can only be used as the last part of a multi-part row key."), NOT_NULLABLE_COLUMN_IN_ROW_KEY(1006, "42J04", "Only nullable columns may be added to a multi-part row key."), VARBINARY_LAST_PK(1015, "42J04", "Cannot add column to table when the last PK column is of type VARBINARY or ARRAY."), NULLABLE_FIXED_WIDTH_LAST_PK(1023, "42J04", "Cannot add column to table when the last PK column is nullable and fixed width."), CANNOT_MODIFY_VIEW_PK(1036, "42J04", "Cannot modify the primary key of a VIEW."), BASE_TABLE_COLUMN(1037, "42J04", "Cannot modify columns of base table used by tenant-specific tables."), // Key/value column related errors KEY_VALUE_NOT_NULL(1007, "42K01", "A key/value column may not be declared as not null."), // View related errors. VIEW_WITH_TABLE_CONFIG(1008, "42L01", "A view may not contain table configuration properties."), VIEW_WITH_PROPERTIES(1009, "42L02", "Properties may not be defined for a view."), // Table related errors that are not in standard code. 
CANNOT_MUTATE_TABLE(1010, "42M01", "Not allowed to mutate table."), UNEXPECTED_MUTATION_CODE(1011, "42M02", "Unexpected mutation code."), TABLE_UNDEFINED(1012, "42M03", "Table undefined.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new TableNotFoundException(info.getSchemaName(), info.getTableName()); } }), TABLE_ALREADY_EXIST(1013, "42M04", "Table already exists.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new TableAlreadyExistsException(info.getSchemaName(), info.getTableName()); } }), // Syntax error TYPE_NOT_SUPPORTED_FOR_OPERATOR(1014, "42Y01", "The operator does not support the operand type."), AGGREGATE_IN_GROUP_BY(1016, "42Y26", "Aggregate expressions may not be used in GROUP BY."), AGGREGATE_IN_WHERE(1017, "42Y26", "Aggregate may not be used in WHERE."), AGGREGATE_WITH_NOT_GROUP_BY_COLUMN(1018, "42Y27", "Aggregate may not contain columns not in GROUP BY."), ONLY_AGGREGATE_IN_HAVING_CLAUSE(1019, "42Y26", "Only aggregate maybe used in the HAVING clause."), UPSERT_COLUMN_NUMBERS_MISMATCH(1020, "42Y60", "Number of columns upserting must match number of values."), // Table properties exception. 
INVALID_BUCKET_NUM(1021, "42Y80", "Salt bucket numbers should be with 1 and 256."), NO_SPLITS_ON_SALTED_TABLE(1022, "42Y81", "Should not specify split points on salted table with default row key order."), SALT_ONLY_ON_CREATE_TABLE(1024, "42Y82", "Salt bucket number may only be specified when creating a table."), SET_UNSUPPORTED_PROP_ON_ALTER_TABLE(1025, "42Y83", "Unsupported property set in ALTER TABLE command."), CANNOT_ADD_NOT_NULLABLE_COLUMN(1038, "42Y84", "Only nullable columns may be added for a pre-existing table."), NO_MUTABLE_INDEXES(1026, "42Y85", "Mutable secondary indexes are only supported for HBase version " + MetaDataUtil.decodeHBaseVersionAsString(PhoenixDatabaseMetaData.MUTABLE_SI_VERSION_THRESHOLD) + " and above."), INVALID_FILTER_ON_IMMUTABLE_ROWS(1027, "42Y86", "All columns referenced in a WHERE clause must be available in every index for a table with immutable rows."), INVALID_INDEX_STATE_TRANSITION(1028, "42Y87", "Invalid index state transition."), INVALID_MUTABLE_INDEX_CONFIG(1029, "42Y88", "Mutable secondary indexes must have the " + IndexManagementUtil.WAL_EDIT_CODEC_CLASS_KEY + " property set to " + IndexManagementUtil.INDEX_WAL_EDIT_CODEC_CLASS_NAME + " in the hbase-sites.xml of every region server"), CANNOT_CREATE_TENANT_SPECIFIC_TABLE(1030, "42Y89", "Cannot create table for tenant-specific connection"), CANNOT_DEFINE_PK_FOR_VIEW(1031, "42Y90", "Defining PK columns for a VIEW is not allowed."), DEFAULT_COLUMN_FAMILY_ONLY_ON_CREATE_TABLE(1034, "42Y93", "Default column family may only be specified when creating a table."), INSUFFICIENT_MULTI_TENANT_COLUMNS(1040, "42Y96", "A MULTI_TENANT table must have two or more PK columns with the first column being NOT NULL and of type VARCHAR or CHAR."), VIEW_WHERE_IS_CONSTANT(1045, "43A02", "WHERE clause in VIEW should not evaluate to a constant."), CANNOT_UPDATE_VIEW_COLUMN(1046, "43A03", "Column updated in VIEW may not differ from value specified in WHERE clause."), TOO_MANY_INDEXES(1047, "43A04", 
"Too many indexes have already been created on the physical table."), NO_LOCAL_INDEX_ON_TABLE_WITH_IMMUTABLE_ROWS(1048,"43A05","Local indexes aren't allowed on tables with immutable rows."), COLUMN_FAMILY_NOT_ALLOWED_TABLE_PROPERTY(1049, "43A06", "Column family not allowed for table properties."), COLUMN_FAMILY_NOT_ALLOWED_FOR_TTL(1050, "43A07", "Setting TTL for a column family not supported. You can only have TTL for the entire table."), CANNOT_ALTER_PROPERTY(1051, "43A08", "Property can be specified or changed only when creating a table"), CANNOT_SET_PROPERTY_FOR_COLUMN_NOT_ADDED(1052, "43A09", "Property cannot be specified for a column family that is not being added or modified"), CANNOT_SET_TABLE_PROPERTY_ADD_COLUMN(1053, "43A10", "Table level property cannot be set when adding a column"), NO_LOCAL_INDEXES(1054, "43A11", "Local secondary indexes are not supported for HBase versions " + MetaDataUtil.decodeHBaseVersionAsString(PhoenixDatabaseMetaData.MIN_LOCAL_SI_VERSION_DISALLOW) + " through " + MetaDataUtil.decodeHBaseVersionAsString(PhoenixDatabaseMetaData.MAX_LOCAL_SI_VERSION_DISALLOW) + " inclusive."), UNALLOWED_LOCAL_INDEXES(1055, "43A12", "Local secondary indexes are configured to not be allowed."), /** Sequence related */ SEQUENCE_ALREADY_EXIST(1200, "42Z00", "Sequence already exists.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new SequenceAlreadyExistsException(info.getSchemaName(), info.getTableName()); } }), SEQUENCE_UNDEFINED(1201, "42Z01", "Sequence undefined.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new SequenceNotFoundException(info.getSchemaName(), info.getTableName()); } }), START_WITH_MUST_BE_CONSTANT(1202, "42Z02", "Sequence START WITH value must be an integer or long constant."), INCREMENT_BY_MUST_BE_CONSTANT(1203, "42Z03", "Sequence INCREMENT BY value must be an integer or long constant."), CACHE_MUST_BE_NON_NEGATIVE_CONSTANT(1204, "42Z04", 
"Sequence CACHE value must be a non negative integer constant."), INVALID_USE_OF_NEXT_VALUE_FOR(1205, "42Z05", "NEXT VALUE FOR may only be used as in a SELECT or an UPSERT VALUES expression."), CANNOT_CALL_CURRENT_BEFORE_NEXT_VALUE(1206, "42Z06", "NEXT VALUE FOR must be called before CURRENT VALUE FOR is called."), EMPTY_SEQUENCE_CACHE(1207, "42Z07", "No more cached sequence values"), MINVALUE_MUST_BE_CONSTANT(1208, "42Z08", "Sequence MINVALUE must be an integer or long constant."), MAXVALUE_MUST_BE_CONSTANT(1209, "42Z09", "Sequence MAXVALUE must be an integer or long constant."), MINVALUE_MUST_BE_LESS_THAN_OR_EQUAL_TO_MAXVALUE(1210, "42Z10", "Sequence MINVALUE must be less than or equal to MAXVALUE."), STARTS_WITH_MUST_BE_BETWEEN_MIN_MAX_VALUE(1211, "42Z11", "STARTS WITH value must be greater than or equal to MINVALUE and less than or equal to MAXVALUE"), SEQUENCE_VAL_REACHED_MAX_VALUE(1212, "42Z12", "Reached MAXVALUE of sequence"), SEQUENCE_VAL_REACHED_MIN_VALUE(1213, "42Z13", "Reached MINVALUE of sequence"), INCREMENT_BY_MUST_NOT_BE_ZERO(1214, "42Z14", "Sequence INCREMENT BY value cannot be zero"), /** Parser error. (errorcode 06, sqlState 42P) */ PARSER_ERROR(601, "42P00", "Syntax error.", Factory.SYTAX_ERROR), MISSING_TOKEN(602, "42P00", "Syntax error.", Factory.SYTAX_ERROR), UNWANTED_TOKEN(603, "42P00", "Syntax error.", Factory.SYTAX_ERROR), MISMATCHED_TOKEN(604, "42P00", "Syntax error.", Factory.SYTAX_ERROR), UNKNOWN_FUNCTION(605, "42P00", "Syntax error.", Factory.SYTAX_ERROR), /** * Implementation defined class. Execution exceptions (errorcode 11, sqlstate XCL). 
*/ RESULTSET_CLOSED(1101, "XCL01", "ResultSet is closed."), GET_TABLE_REGIONS_FAIL(1102, "XCL02", "Cannot get all table regions"), EXECUTE_QUERY_NOT_APPLICABLE(1103, "XCL03", "executeQuery may not be used."), EXECUTE_UPDATE_NOT_APPLICABLE(1104, "XCL04", "executeUpdate may not be used."), SPLIT_POINT_NOT_CONSTANT(1105, "XCL05", "Split points must be constants."), BATCH_EXCEPTION(1106, "XCL06", "Exception while executing batch."), EXECUTE_UPDATE_WITH_NON_EMPTY_BATCH(1107, "XCL07", "An executeUpdate is prohibited when the batch is not empty. Use clearBatch to empty the batch first."), STALE_REGION_BOUNDARY_CACHE(1108, "XCL08", "Cache of region boundaries are out of date.", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new StaleRegionBoundaryCacheException(info.getSchemaName(), info.getTableName()); } }), CANNOT_SPLIT_LOCAL_INDEX(1109,"XCL09", "Local index may not be pre-split"), CANNOT_SALT_LOCAL_INDEX(1110,"XCL10", "Local index may not be salted"), /** * Implementation defined class. Phoenix internal error. (errorcode 20, sqlstate INT). */ CANNOT_CALL_METHOD_ON_TYPE(2001, "INT01", "Cannot call method on the argument type."), CLASS_NOT_UNWRAPPABLE(2002, "INT03", "Class not unwrappable"), PARAM_INDEX_OUT_OF_BOUND(2003, "INT04", "Parameter position is out of range."), PARAM_VALUE_UNBOUND(2004, "INT05", "Parameter value unbound"), INTERRUPTED_EXCEPTION(2005, "INT07", "Interrupted exception."), INCOMPATIBLE_CLIENT_SERVER_JAR(2006, "INT08", "Incompatible jars detected between client and server."), OUTDATED_JARS(2007, "INT09", "Outdated jars."), INDEX_METADATA_NOT_FOUND(2008, "INT10", "Unable to find cached index metadata. 
"), UNKNOWN_ERROR_CODE(2009, "INT11", "Unknown error code"), OPERATION_TIMED_OUT(6000, "TIM01", "Operation timed out", new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new SQLTimeoutException(OPERATION_TIMED_OUT.getMessage(), OPERATION_TIMED_OUT.getSQLState(), OPERATION_TIMED_OUT.getErrorCode()); } }) ; private final int errorCode; private final String sqlState; private final String message; private final Factory factory; private SQLExceptionCode(int errorCode, String sqlState, String message) { this(errorCode, sqlState, message, Factory.DEFAULTY); } private SQLExceptionCode(int errorCode, String sqlState, String message, Factory factory) { this.errorCode = errorCode; this.sqlState = sqlState; this.message = message; this.factory = factory; } public String getSQLState() { return sqlState; } public String getMessage() { return message; } public int getErrorCode() { return errorCode; } @Override public String toString() { return "ERROR " + errorCode + " (" + sqlState + "): " + message; } public Factory getExceptionFactory() { return factory; } public static interface Factory { public static final Factory DEFAULTY = new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new SQLException(info.toString(), info.getCode().getSQLState(), info.getCode().getErrorCode(), info.getRootCause()); } }; public static final Factory SYTAX_ERROR = new Factory() { @Override public SQLException newException(SQLExceptionInfo info) { return new PhoenixParserException(info.getMessage(), info.getRootCause()); } }; public SQLException newException(SQLExceptionInfo info); } private static final Map<Integer,SQLExceptionCode> errorCodeMap = Maps.newHashMapWithExpectedSize(SQLExceptionCode.values().length); static { for (SQLExceptionCode code : SQLExceptionCode.values()) { SQLExceptionCode otherCode = errorCodeMap.put(code.getErrorCode(), code); if (otherCode != null) { throw new IllegalStateException("Duplicate error 
code for " + code + " and " + otherCode); } } } public static SQLExceptionCode fromErrorCode(int errorCode) throws SQLException { SQLExceptionCode code = errorCodeMap.get(errorCode); if (code == null) { throw new SQLExceptionInfo.Builder(SQLExceptionCode.UNKNOWN_ERROR_CODE) .setMessage(Integer.toString(errorCode)).build().buildException(); } return code; } }
apache-2.0
rahuldean/Aero
src/android/Aero/app/src/main/java/com/godhc/aero/models/NetworkStateInfo.java
1853
package com.godhc.aero.models;

import com.orm.SugarRecord;

import java.util.Calendar;
import java.util.Date;

/**
 * Persisted snapshot of the device's network connectivity at the moment a
 * connectivity event was raised. Stored through Sugar ORM via {@link SugarRecord}.
 */
public class NetworkStateInfo extends SugarRecord {

    // True when an active network was found at event time.
    boolean isActiveNetworkFound;
    // True when that network was in the connected state.
    boolean isConnected;
    // Transport type string (as captured by the caller).
    String connectionType;
    // Human-readable name of the network connection.
    String networkConnectionName;
    // Timestamp taken when this record instance was created.
    Date eventRaisedDate;

    /** Creates an empty record stamped with the current time. */
    public NetworkStateInfo() {
        super();
        this.eventRaisedDate = Calendar.getInstance().getTime();
    }

    /**
     * Creates a fully populated record stamped with the current time.
     *
     * @param isConnected           whether the network is connected
     * @param connectionType        transport type string
     * @param networkConnectionName name of the connection
     * @param isActiveNetworkFound  whether an active network was found
     */
    public NetworkStateInfo(boolean isConnected, String connectionType,
                            String networkConnectionName, boolean isActiveNetworkFound) {
        this.isConnected = isConnected;
        this.connectionType = connectionType;
        this.networkConnectionName = networkConnectionName;
        this.eventRaisedDate = Calendar.getInstance().getTime();
        this.isActiveNetworkFound = isActiveNetworkFound;
    }

    public boolean isActiveNetworkFound() {
        return isActiveNetworkFound;
    }

    public void setIsActiveNetworkFound(boolean isActiveNetworkFound) {
        this.isActiveNetworkFound = isActiveNetworkFound;
    }

    public boolean isConnected() {
        return isConnected;
    }

    public void setIsConnected(boolean isConnected) {
        this.isConnected = isConnected;
    }

    public String getConnectionType() {
        return connectionType;
    }

    public void setConnectionType(String connectionType) {
        this.connectionType = connectionType;
    }

    public String getNetworkConnectionName() {
        return networkConnectionName;
    }

    public void setNetworkConnectionName(String networkConnectionName) {
        this.networkConnectionName = networkConnectionName;
    }

    public Date getEventRaisedDate() {
        return eventRaisedDate;
    }

    public void setEventRaisedDate(Date eventRaisedDate) {
        this.eventRaisedDate = eventRaisedDate;
    }
}
apache-2.0
FuturingTech/coolweather
app/src/main/java/com/futuring/coolweather/android/util/Utility.java
3677
package com.futuring.coolweather.android.util; import android.text.TextUtils; import com.futuring.coolweather.android.db.City; import com.futuring.coolweather.android.db.County; import com.futuring.coolweather.android.db.Province; import com.futuring.coolweather.android.gson.Weather; import com.google.gson.Gson; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * Created by 爱做梦的严重精神病患者 on 2017/8/28. */ public class Utility { /** * 解析和处理服务器返回的省级数据 * @param response * @return */ public static boolean handleProvinceResponse(String response) { if (!TextUtils.isEmpty(response)) { try{ JSONArray allProvinces = new JSONArray(response); for (int i = 0; i < allProvinces.length(); i++){ JSONObject provinceObject = allProvinces.getJSONObject(i); Province province = new Province(); province.setProvinceName(provinceObject.getString("name")); province.setProvinceCode(provinceObject.getInt("id")); province.save(); } return true; }catch (JSONException e) { e.printStackTrace(); } } return false; } /** * 解析和处理服务器返回的市级数据 * @param response * @param provinceId * @return */ public static boolean handleCityResponse(String response, int provinceId) { if (!TextUtils.isEmpty(response)) { try { JSONArray allCities = new JSONArray(response); for (int i = 0; i < allCities.length(); i++) { JSONObject cityObject = allCities.getJSONObject(i); City city = new City(); city.setCityName(cityObject.getString("name")); city.setCityCode(cityObject.getInt("id")); city.setProvinceId(provinceId); city.save(); } return true; }catch (JSONException e) { e.printStackTrace(); } } return false; } /** * 解析和处理服务器返回的县级数据 * @param response * @param cityId * @return */ public static boolean handleCountyResponse(String response, int cityId) { if (!TextUtils.isEmpty(response)) { try { JSONArray allCounties = new JSONArray(response); for (int i = 0; i < allCounties.length(); i++) { JSONObject countyObject = allCounties.getJSONObject(i); County county = new County(); 
county.setCountyName(countyObject.getString("name")); county.setWeatherId(countyObject.getString("weather_id")); county.setCityId(cityId); county.save(); } return true; }catch (JSONException e) { e.printStackTrace(); } } return false; } public static Weather handleWeatherResponse(String response) { try { JSONObject jsonObject = new JSONObject(response); JSONArray jsonArray = jsonObject.getJSONArray("HeWeather"); String weatherContent = jsonArray.getJSONObject(0).toString(); return new Gson().fromJson(weatherContent, Weather.class); } catch (Exception e) { e.printStackTrace(); } return null; } }
apache-2.0
mcimbora/radargun
plugins/infinispan51/src/main/java/org/radargun/service/KeyAffinityStringKeyGenerator.java
3490
package org.radargun.service;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.infinispan.affinity.KeyAffinityService;
import org.infinispan.affinity.KeyAffinityServiceFactory;
import org.radargun.config.Init;
import org.radargun.config.Property;
import org.radargun.stages.cache.generators.KeyGenerator;
import org.radargun.logging.Log;
import org.radargun.logging.LogFactory;

/**
 * A key generator using a key affinity service. All keys produced by this key generator
 * are local to the node that requested them.
 * The generator does not honour the keyIndex passed as argument and always returns a new unique key.
 *
 * Using shared keys with this generator is possible, but the set of generated keys is different on each node
 * (this is very likely even if the key format was identical, we ensure that by adding the local node address).
 * Initially only the local entries will be loaded into cache. Then, as the node executes PUT requests, the cache
 * will be filled with entries that are considered non-local, but have different keys. This will result in cache
 * with numEntries * numNodes.
 * Previous GET operation will return null values, naturally.
 *
 * Therefore, using shared keys with this generator is not advisable.
 */
public class KeyAffinityStringKeyGenerator implements KeyGenerator {

   protected final Log log = LogFactory.getLog(KeyAffinityStringKeyGenerator.class);

   @Property(doc = "Number of generated keys per node.", optional = false)
   private int keyBufferSize;

   @Property(doc = "Name of the cache where the keys will be stored.", optional = false)
   private String cache;

   // volatile is required: the original code read this field outside the
   // synchronized block after lazy initialization, which is an unsafe
   // publication — another thread could observe a stale null or a partially
   // constructed reference. With volatile the double-checked locking in
   // generateKey() is safe.
   private volatile KeyAffinityService affinityService;
   private ExecutorService executor;
   private AddressAwareStringKeyGenerator generator;
   private Infinispan51EmbeddedService wrapper;

   /**
    * Validates configured properties and captures the service wrapper.
    * Invoked by the framework via {@link Init}.
    */
   @Init
   public void init() {
      if (keyBufferSize <= 0 || cache == null) {
         throw new IllegalArgumentException("Invalid parameters provided, 'keyBufferSize' and 'cache' need to be specified.");
      }
      wrapper = Infinispan51EmbeddedService.getInstance();
   }

   /**
    * Returns a fresh node-local key. The keyIndex argument is deliberately
    * ignored (see class javadoc): the affinity service always hands out a
    * new unique key local to this node.
    */
   @Override
   public Object generateKey(long keyIndex) {
      // Double-checked locking: read once into a local so the fast path does
      // a single volatile read and never dereferences a stale value.
      KeyAffinityService service = affinityService;
      if (service == null) {
         synchronized (this) {
            if (affinityService == null) {
               newKeyAffinityService();
            }
            service = affinityService;
         }
      }
      return service.getKeyForAddress(wrapper.cacheManager.getAddress());
   }

   /**
    * Creates the key affinity service backed by a single-threaded executor.
    * Called at most once, under the lock held by generateKey(). A shutdown
    * hook stops the executor when the JVM exits.
    */
   private void newKeyAffinityService() {
      generator = new AddressAwareStringKeyGenerator(wrapper.cacheManager.getAddress().toString());
      executor = Executors.newSingleThreadExecutor();
      affinityService = KeyAffinityServiceFactory.newLocalKeyAffinityService(wrapper.cacheManager.getCache(cache), generator, executor, keyBufferSize);
      log.info("Created key affinity service with keyBufferSize: " + keyBufferSize);
      Runtime.getRuntime().addShutdownHook(new Thread() {
         public void run() {
            executor.shutdown();
         }
      });
   }

   /**
    * Key generator that embeds the local node address into every key so the
    * key sets produced on different nodes never collide.
    */
   protected class AddressAwareStringKeyGenerator implements org.infinispan.affinity.KeyGenerator {

      private String address;

      // Monotonic counter; not synchronized — presumably only the affinity
      // service's single executor thread calls getKey() (the executor above
      // is single-threaded), TODO confirm against the Infinispan contract.
      private long previousKey;

      /**
       * Address-aware constructor - key generator will generate different keys on each node.
       */
      public AddressAwareStringKeyGenerator(String address) {
         this.address = address;
      }

      @Override
      public Object getKey() {
         return "key_" + address + "_" + previousKey++;
      }
   }
}
apache-2.0
La-Gonette/lagonette-android
app/src/main/java/org/lagonette/app/app/fragment/BaseFragment.java
1564
package org.lagonette.app.app.fragment; import android.os.Bundle; import android.support.annotation.LayoutRes; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; public abstract class BaseFragment extends Fragment { @Override public final void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); construct(); } @Nullable @Override public final View onCreateView( LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { return inflater.inflate(getContentView(), container, false); } @Override public final void onViewCreated(View view, @Nullable Bundle savedInstanceState) { inject(view); } @Override public final void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); construct(getActivity()); if (savedInstanceState == null) { init(); } else { restore(savedInstanceState); } onConstructed(); } protected abstract void construct(); @LayoutRes protected abstract int getContentView(); protected abstract void inject(@NonNull View view); protected abstract void construct(@NonNull FragmentActivity activity); protected abstract void init(); protected abstract void restore(@NonNull Bundle savedInstanceState); protected abstract void onConstructed(); }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-medialive/src/main/java/com/amazonaws/services/medialive/model/transform/AudioTrackSelectionJsonUnmarshaller.java
2843
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.medialive.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.medialive.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * AudioTrackSelection JSON Unmarshaller.
 *
 * Generated code: streams tokens from the shared JsonUnmarshallerContext and
 * populates an AudioTrackSelection. The parser tracks JSON nesting depth so it
 * only consumes tokens belonging to this object and stops when the enclosing
 * scope closes.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AudioTrackSelectionJsonUnmarshaller implements Unmarshaller<AudioTrackSelection, JsonUnmarshallerContext> {

    /**
     * Reads one AudioTrackSelection object from the context.
     *
     * @param context shared streaming-parse context positioned at this object
     * @return the populated object, or null when the JSON value is null
     */
    public AudioTrackSelection unmarshall(JsonUnmarshallerContext context) throws Exception {
        AudioTrackSelection audioTrackSelection = new AudioTrackSelection();
        // Remember the depth at entry: fields of this object live one level
        // deeper (targetDepth), and reaching back to originalDepth means we
        // have left this object's scope.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means the whole AudioTrackSelection is absent.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // "tracks": list of AudioTrack objects directly under this object.
                if (context.testExpression("tracks", targetDepth)) {
                    context.nextToken();
                    audioTrackSelection.setTracks(new ListUnmarshaller<AudioTrack>(AudioTrackJsonUnmarshaller.getInstance())
                            .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once nesting returns to (or above) the entry depth for
                // our own parent element — the object is fully consumed.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return audioTrackSelection;
    }

    private static AudioTrackSelectionJsonUnmarshaller instance;

    // Lazily created singleton. The check-then-set is not synchronized; the
    // unmarshaller is stateless, so the worst case of a race is creating a
    // second throwaway instance (standard pattern in this generated SDK code).
    public static AudioTrackSelectionJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new AudioTrackSelectionJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
spring-projects/spring-data-examples
jpa/deferred/src/main/java/example/service/Customer1747Service.java
225
package example.service;

import example.repo.Customer1747Repository;

import org.springframework.stereotype.Service;

/**
 * Spring-managed service bean associated with {@link Customer1747Repository}.
 *
 * The constructor parameter appears to exist only so the container resolves
 * the repository through constructor injection; the reference is deliberately
 * not retained.
 */
@Service
public class Customer1747Service {

	public Customer1747Service(Customer1747Repository repo) {
		// Intentionally empty: the injected repository is not stored.
	}
}
apache-2.0
Yggard/BrokkGUI
style/src/main/java/net/voxelindustry/brokkgui/style/tree/StyleRule.java
1216
/**
 * Immutable identifier/value pair representing a single style rule
 * (e.g. {@code color: red}).
 *
 * Both components are required to be non-null. The original code only
 * advertised this through source-level {@code @Nonnull} annotations without
 * enforcing it; the contract is now checked at construction time, so
 * equals()/hashCode() can never NPE on a half-built instance.
 */
public class StyleRule
{
    private final String ruleIdentifier;
    private final String ruleValue;

    /**
     * @param ruleIdentifier property name of the rule, never null
     * @param ruleValue      raw value of the rule, never null
     * @throws NullPointerException if either argument is null
     */
    public StyleRule(String ruleIdentifier, String ruleValue)
    {
        this.ruleIdentifier = Objects.requireNonNull(ruleIdentifier, "ruleIdentifier");
        this.ruleValue = Objects.requireNonNull(ruleValue, "ruleValue");
    }

    public String getRuleIdentifier()
    {
        return ruleIdentifier;
    }

    public String getRuleValue()
    {
        return ruleValue;
    }

    @Override
    public String toString()
    {
        // Output format preserved byte-for-byte from the original.
        return "{ruleIdentifier='" + ruleIdentifier + '\'' +
                ", ruleValue='" + ruleValue + '\'' +
                '}';
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;

        StyleRule other = (StyleRule) o;
        return ruleIdentifier.equals(other.ruleIdentifier) && ruleValue.equals(other.ruleValue);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(ruleIdentifier, ruleValue);
    }
}
apache-2.0
madusankapremaratne/carbon-apimgt
components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher/src/gen/java/org/wso2/carbon/apimgt/rest/api/publisher/dto/ApplicationDTO.java
2442
package org.wso2.carbon.apimgt.rest.api.publisher.dto;

import io.swagger.annotations.*;
import com.fasterxml.jackson.annotation.JsonProperty;

import javax.validation.constraints.NotNull;

/**
 * Generated REST DTO carrying application details for the API publisher API.
 * Plain data holder: every field is an optional string exposed through a
 * getter/setter pair and serialized under its {@code @JsonProperty} name.
 */
@ApiModel(description = "")
public class ApplicationDTO {

    private String applicationId = null;
    private String name = null;
    private String subscriber = null;
    private String throttlingTier = null;
    private String description = null;
    private String groupId = null;

    /** Identifier of the application. */
    @ApiModelProperty(value = "")
    @JsonProperty("applicationId")
    public String getApplicationId() {
        return applicationId;
    }

    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    /** Display name of the application. */
    @ApiModelProperty(value = "")
    @JsonProperty("name")
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Subscriber associated with the application. */
    @ApiModelProperty(value = "")
    @JsonProperty("subscriber")
    public String getSubscriber() {
        return subscriber;
    }

    public void setSubscriber(String subscriber) {
        this.subscriber = subscriber;
    }

    /** Throttling tier assigned to the application. */
    @ApiModelProperty(value = "")
    @JsonProperty("throttlingTier")
    public String getThrottlingTier() {
        return throttlingTier;
    }

    public void setThrottlingTier(String throttlingTier) {
        this.throttlingTier = throttlingTier;
    }

    /** Free-form description of the application. */
    @ApiModelProperty(value = "")
    @JsonProperty("description")
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /** Group id the application belongs to. */
    @ApiModelProperty(value = "")
    @JsonProperty("groupId")
    public String getGroupId() {
        return groupId;
    }

    public void setGroupId(String groupId) {
        this.groupId = groupId;
    }

    // Debug representation; literal spacing reproduced exactly as extracted.
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class ApplicationDTO {\n");
        sb.append(" applicationId: ").append(applicationId).append("\n");
        sb.append(" name: ").append(name).append("\n");
        sb.append(" subscriber: ").append(subscriber).append("\n");
        sb.append(" throttlingTier: ").append(throttlingTier).append("\n");
        sb.append(" description: ").append(description).append("\n");
        sb.append(" groupId: ").append(groupId).append("\n");
        sb.append("}\n");
        return sb.toString();
    }
}
apache-2.0
HubSpot/Nebula
NebulaService/src/main/java/com/hubspot/nebula/jersey/BindParametersInjectableProvider.java
1970
package com.hubspot.nebula.jersey;

import java.util.Arrays;
import java.util.Map;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;

import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.sun.jersey.api.core.HttpContext;
import com.sun.jersey.api.model.Parameter;
import com.sun.jersey.core.spi.component.ComponentContext;
import com.sun.jersey.core.spi.component.ComponentScope;
import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
import com.sun.jersey.spi.inject.Injectable;
import com.sun.jersey.spi.inject.InjectableProvider;

// shamelessly borrowed from dropwizard-hubspot
/**
 * Jersey 1.x {@link InjectableProvider} for resource-method parameters
 * annotated with {@code @BindQueryParams}: on each request it collects the
 * request's query parameters into a map and hands them to the injected
 * {@link ParameterConverter} to build an instance of the declared parameter
 * type. Conversion failures surface as HTTP 400 with the failure message in
 * the body.
 */
public class BindParametersInjectableProvider implements InjectableProvider<BindQueryParams, Parameter> {

    // Converts a raw parameter map into the resource method's parameter type.
    private final ParameterConverter converter;

    @Inject
    public BindParametersInjectableProvider(ParameterConverter converter) {
        this.converter = converter;
    }

    /**
     * Builds the per-request injectable for one annotated parameter.
     *
     * @param ic   component context (unused here)
     * @param bind the annotation instance found on the parameter
     * @param p    the declared parameter, supplying the target type
     */
    @Override
    public Injectable<Object> getInjectable(ComponentContext ic, final BindQueryParams bind, final Parameter p) {
        return new AbstractHttpContextInjectable<Object>() {
            @Override
            public Object getValue(HttpContext context) {
                Object parameterObject;
                try {
                    parameterObject = converter.convert(p.getParameterType(), getParameterMap(bind, context));
                } catch (IllegalArgumentException e) {
                    // Invalid/unconvertible query params → 400 with the message
                    // wrapped in a singleton list as the response entity.
                    throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(Arrays.asList(e.getMessage())).build());
                }
                return parameterObject;
            }

            // NOTE(review): the 'bind' argument shadows the enclosing method's
            // parameter and is unused here — presumably kept for symmetry or a
            // future filter; confirm before removing.
            private Map<String, Object> getParameterMap(BindQueryParams bind, HttpContext context) {
                Map<String, Object> mergedParams = Maps.newHashMap();
                // Query parameter values arrive as List<String> per key; they
                // are copied as-is into the Object-valued map.
                mergedParams.putAll(context.getRequest().getQueryParameters());
                return mergedParams;
            }
        };
    }

    // A fresh injectable is created per request (the value depends on the
    // incoming request's query string).
    @Override
    public ComponentScope getScope() {
        return ComponentScope.PerRequest;
    }
}
apache-2.0
dkincade/pentaho-hadoop-shims
shims/mapr510/impl/src/test/java/org/pentaho/hadoop/shim/mapr510/MapR5DistributedCacheUtilImplOSDependentTest.java
3068
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/
package org.pentaho.hadoop.shim.mapr510;

import static org.junit.Assume.assumeTrue;

import java.io.IOException;

import org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest;

/**
 * These tests are skipped because of having issue with setting permissions on hadoop local file system for mapr on
 * Windows.
 *
 * @see <a href=
 * "http://jira.pentaho.com/browse/BAD-601?focusedCommentId=294386&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-294386">BAD-601#comment-294386</a>
 * for more details.
 */
public class MapR5DistributedCacheUtilImplOSDependentTest extends DistributedCacheUtilImplOSDependentTest {

  /**
   * Skips the calling test when running on a Windows environment.
   * Replaces the identical assumeTrue( !isWindows() ) line that was
   * previously duplicated in every override below.
   */
  private void assumeNotWindows() {
    assumeTrue( !isWindows() );
  }

  @Override
  public void stageForCache() throws Exception {
    assumeNotWindows();
    super.stageForCache();
  }

  @Override
  public void stageForCache_destination_exists() throws Exception {
    assumeNotWindows();
    super.stageForCache_destination_exists();
  }

  @Override
  public void stagePluginsForCache() throws Exception {
    assumeNotWindows();
    super.stagePluginsForCache();
  }

  @Override
  public void findFiles_hdfs_native() throws Exception {
    assumeNotWindows();
    super.findFiles_hdfs_native();
  }

  @Override
  public void installKettleEnvironment() throws Exception {
    assumeNotWindows();
    super.installKettleEnvironment();
  }

  @Override
  public void installKettleEnvironment_additional_plugins() throws Exception {
    assumeNotWindows();
    super.installKettleEnvironment_additional_plugins();
  }

  @Override
  public void isPmrInstalledAt() throws IOException {
    assumeNotWindows();
    super.isPmrInstalledAt();
  }

  @Override
  public void configureWithPmr() throws Exception {
    assumeNotWindows();
    super.configureWithPmr();
  }
}
apache-2.0
treason258/TreLibrary
LovelyReaderAS/appbase/src/main/java/com/app/base/ui/adapter/BaseListAdapter.java
2694
package com.app.base.ui.adapter;

import java.util.List;

import android.app.Activity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.LinearLayout;

import com.app.base.ui.UILayoutService;

/**
 * Base class for ListView adapters; in principle every ListView adapter
 * should extend this adapter.
 *
 * @author Tianyu<br>
 *
 *         email:xjzx_tianyu@yeah.net<br>
 *         date: 2014-06-19
 */
public abstract class BaseListAdapter<T> extends BaseAdapter {

    //private final String TAG = this.getClass().getName();

    // Host activity; used as context for layout inflation.
    protected Activity context;
    // Row data backing this adapter.
    protected List<T> data;
    // Total page count; 1 when the list is not paginated.
    protected int totalPage = 1;
    // Page currently displayed; starts at 1.
    protected int currentPage = 1;
    // Optional footer container shown below the list.
    protected LinearLayout footerView;
    // Layout ids for the "load more" / "end of list" footer states; -1 = unset.
    protected int moreLayoutID = -1, endLayoutID = -1;
    public int widthLimit;

    /**
     * Constructor.
     *
     * @param activity
     *            host activity
     * @param data
     *            row data
     * @param totalPage
     *            total page count; pass 1 when the list is not paginated
     * @param footerView
     *            footer layout container
     * @param moreLayout
     *            layout id for the "load more" footer state
     * @param endLayout
     *            layout id for the "end of list" footer state
     */
    public BaseListAdapter(Activity activity, List<T> data, int totalPage, LinearLayout footerView, int moreLayout,
            int endLayout) {
        this(activity, data, totalPage, footerView);
        this.moreLayoutID = moreLayout;
        this.endLayoutID = endLayout;
    }

    /**
     * Constructor without footer state layouts.
     */
    public BaseListAdapter(Activity activity, List<T> data, int totalPage, LinearLayout footerView) {
        this(activity, data, totalPage);
        this.footerView = footerView;
    }

    /**
     * Minimal constructor: activity, data and page count only.
     */
    public BaseListAdapter(Activity activity, List<T> data, int totalPage) {
        this.context = activity;
        this.data = data;
        this.totalPage = totalPage;
    }

    @Override
    public T getItem(int position) {
        return data.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public int getCount() {
        // Guard against a null/empty data list.
        if (null != data && data.size() > 0)
            return data.size();
        else
            return 0;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Delegates row rendering entirely to the subclass.
        return getRowView(position, convertView, parent);
    }

    // Kept for reference (previously used to build the footer view):
    //
    // private View getFootView(LinearLayout.LayoutParams layoutParams, int layoutId) {
    //     View moreView = UILayoutService.getViewFromLayout(context, layoutId);
    //     footerView.addView(moreView, layoutParams);
    //     return footerView;
    // }

    /**
     * Builds the view for a single row (implemented by subclasses).
     *
     * @param position
     *            row index
     * @param convertView
     *            recycled row view, may be null
     * @param parent
     *            the ListView this row belongs to
     * @return the row view to display
     */
    public abstract View getRowView(int position, View convertView, ViewGroup parent);
}
apache-2.0
marksomnian/iCat
src/com/marksomnian/icat/ForwardingList.java
2580
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;

/**
 * A {@link List} decorator that forwards every operation to a backing list
 * (the "forwarding" pattern, see Effective Java). Subclasses can override
 * individual methods to alter behavior while inheriting delegation for the rest.
 *
 * <p>Bug fix: {@code subList} previously called itself recursively
 * ({@code return subList(fromIndex, toIndex);}) instead of delegating, causing
 * a {@link StackOverflowError} on any invocation. It now forwards to the
 * backing list.
 *
 * @author Marks Polakovs <totallyupdown@gmail.com>
 */
public class ForwardingList<E> implements List<E> {

    /** The delegate; all calls are forwarded here. */
    private final List<E> list;

    /**
     * @param list backing list, must not be null
     * @throws NullPointerException if {@code list} is null (fail fast rather
     *         than on first use)
     */
    public ForwardingList(List<E> list) {
        this.list = Objects.requireNonNull(list, "list");
    }

    @Override
    public int size() {
        return list.size();
    }

    @Override
    public boolean isEmpty() {
        return list.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return list.contains(o);
    }

    @Override
    public Iterator<E> iterator() {
        return list.iterator();
    }

    @Override
    public Object[] toArray() {
        return list.toArray();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        return list.toArray(a);
    }

    @Override
    public boolean add(E e) {
        return list.add(e);
    }

    @Override
    public boolean remove(Object o) {
        return list.remove(o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return list.containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends E> c) {
        return list.addAll(c);
    }

    @Override
    public boolean addAll(int index, Collection<? extends E> c) {
        return list.addAll(index, c);
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        return list.removeAll(c);
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        return list.retainAll(c);
    }

    @Override
    public void clear() {
        list.clear();
    }

    @Override
    public E get(int index) {
        return list.get(index);
    }

    @Override
    public E set(int index, E element) {
        return list.set(index, element);
    }

    @Override
    public void add(int index, E element) {
        list.add(index, element);
    }

    @Override
    public E remove(int index) {
        return list.remove(index);
    }

    @Override
    public int indexOf(Object o) {
        return list.indexOf(o);
    }

    @Override
    public int lastIndexOf(Object o) {
        return list.lastIndexOf(o);
    }

    @Override
    public ListIterator<E> listIterator() {
        return list.listIterator();
    }

    @Override
    public ListIterator<E> listIterator(int index) {
        return list.listIterator(index);
    }

    @Override
    public List<E> subList(int fromIndex, int toIndex) {
        // FIX: must delegate to the backing list; calling subList(...) on
        // `this` recursed forever and crashed with StackOverflowError.
        return list.subList(fromIndex, toIndex);
    }
}
apache-2.0
alibaba/dubbo
dubbo-rpc/dubbo-rpc-dubbo/src/main/java/org/apache/dubbo/rpc/protocol/dubbo/DubboInvoker.java
6827
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dubbo.rpc.protocol.dubbo;

import org.apache.dubbo.common.Constants;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.config.ConfigurationUtils;
import org.apache.dubbo.common.utils.AtomicPositiveInteger;
import org.apache.dubbo.remoting.RemotingException;
import org.apache.dubbo.remoting.TimeoutException;
import org.apache.dubbo.remoting.exchange.ExchangeClient;
import org.apache.dubbo.remoting.exchange.ResponseFuture;
import org.apache.dubbo.rpc.AsyncRpcResult;
import org.apache.dubbo.rpc.Invocation;
import org.apache.dubbo.rpc.Invoker;
import org.apache.dubbo.rpc.Result;
import org.apache.dubbo.rpc.RpcContext;
import org.apache.dubbo.rpc.RpcException;
import org.apache.dubbo.rpc.RpcInvocation;
import org.apache.dubbo.rpc.RpcResult;
import org.apache.dubbo.rpc.SimpleAsyncRpcResult;
import org.apache.dubbo.rpc.protocol.AbstractInvoker;
import org.apache.dubbo.rpc.support.RpcUtils;

import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;

/**
 * DubboInvoker: invokes a remote service over one or more {@link ExchangeClient}
 * connections using the dubbo protocol. Supports three call styles, decided per
 * invocation in {@link #doInvoke}: oneway (fire-and-forget), async (future-based)
 * and sync (blocking get on the response future).
 */
public class DubboInvoker<T> extends AbstractInvoker<T> {

    // All exchange clients (connections) available for this invoker.
    private final ExchangeClient[] clients;

    // Monotonically increasing counter used to pick the next client when more
    // than one connection exists (AtomicPositiveInteger presumably wraps without
    // going negative, so the modulo below stays a valid index — see its javadoc).
    private final AtomicPositiveInteger index = new AtomicPositiveInteger();

    // Service version attached to every outgoing invocation.
    private final String version;

    // Guards destroy() so concurrent callers cannot close the clients twice.
    private final ReentrantLock destroyLock = new ReentrantLock();

    // Optional registry of live invokers; this invoker removes itself on destroy.
    private final Set<Invoker<?>> invokers;

    public DubboInvoker(Class<T> serviceType, URL url, ExchangeClient[] clients) {
        this(serviceType, url, clients, null);
    }

    /**
     * @param serviceType remote service interface
     * @param url         provider URL; interface/group/token/timeout parameters
     *                    are copied into the invoker's attachment set by super()
     * @param clients     connections to the provider; must be non-empty
     * @param invokers    shared set this invoker deregisters from on destroy; may be null
     */
    public DubboInvoker(Class<T> serviceType, URL url, ExchangeClient[] clients, Set<Invoker<?>> invokers) {
        super(serviceType, url, new String[]{Constants.INTERFACE_KEY, Constants.GROUP_KEY, Constants.TOKEN_KEY, Constants.TIMEOUT_KEY});
        this.clients = clients;
        // get version.
        this.version = url.getParameter(Constants.VERSION_KEY, "0.0.0");
        this.invokers = invokers;
    }

    /**
     * Performs the remote call. Attaches path and version, selects a client,
     * then dispatches as oneway, async or sync depending on URL/invocation flags.
     *
     * @throws RpcException wrapping timeout or network failures
     */
    @Override
    protected Result doInvoke(final Invocation invocation) throws Throwable {
        RpcInvocation inv = (RpcInvocation) invocation;
        final String methodName = RpcUtils.getMethodName(invocation);
        // Every request carries the service path and version as attachments.
        inv.setAttachment(Constants.PATH_KEY, getUrl().getPath());
        inv.setAttachment(Constants.VERSION_KEY, version);

        // Pick a connection: single client fast-path, otherwise rotate via the
        // positive counter (effectively round-robin across clients).
        ExchangeClient currentClient;
        if (clients.length == 1) {
            currentClient = clients[0];
        } else {
            currentClient = clients[index.getAndIncrement() % clients.length];
        }
        try {
            boolean isAsync = RpcUtils.isAsync(getUrl(), invocation);
            boolean isAsyncFuture = RpcUtils.isReturnTypeFuture(inv);
            boolean isOneway = RpcUtils.isOneway(getUrl(), invocation);
            int timeout = getUrl().getMethodParameter(methodName, Constants.TIMEOUT_KEY, Constants.DEFAULT_TIMEOUT);
            if (isOneway) {
                // Fire-and-forget: optionally wait for the send to complete
                // (SENT_KEY), clear any stale future, return an empty result.
                boolean isSent = getUrl().getMethodParameter(methodName, Constants.SENT_KEY, false);
                currentClient.send(inv, isSent);
                RpcContext.getContext().setFuture(null);
                return new RpcResult();
            } else if (isAsync) {
                ResponseFuture future = currentClient.request(inv, timeout);
                // For compatibility: expose the response future to callers
                // through RpcContext via a java.util.concurrent Future adapter.
                FutureAdapter<Object> futureAdapter = new FutureAdapter<>(future);
                RpcContext.getContext().setFuture(futureAdapter);
                Result result;
                if (isAsyncFuture) {
                    // register resultCallback, sometimes we need the async result being processed by the filter chain.
                    result = new AsyncRpcResult(futureAdapter, futureAdapter.getResultFuture(), false);
                } else {
                    result = new SimpleAsyncRpcResult(futureAdapter, futureAdapter.getResultFuture(), false);
                }
                return result;
            } else {
                // Sync call: clear the context future and block until the
                // response arrives (or the request times out).
                RpcContext.getContext().setFuture(null);
                return (Result) currentClient.request(inv, timeout).get();
            }
        } catch (TimeoutException e) {
            // Preserve the cause; callers distinguish timeout vs. network errors
            // by the RpcException code.
            throw new RpcException(RpcException.TIMEOUT_EXCEPTION, "Invoke remote method timeout. method: " + invocation.getMethodName() + ", provider: " + getUrl() + ", cause: " + e.getMessage(), e);
        } catch (RemotingException e) {
            throw new RpcException(RpcException.NETWORK_EXCEPTION, "Failed to invoke remote method: " + invocation.getMethodName() + ", provider: " + getUrl() + ", cause: " + e.getMessage(), e);
        }
    }

    /**
     * Available when the superclass says so AND at least one client is connected
     * and not marked read-only by the server (read-only channels cannot accept
     * new requests).
     */
    @Override
    public boolean isAvailable() {
        if (!super.isAvailable()) {
            return false;
        }
        for (ExchangeClient client : clients) {
            if (client.isConnected() && !client.hasAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY)) {
                //cannot write == not Available ?
                return true;
            }
        }
        return false;
    }

    @Override
    public void destroy() {
        // in order to avoid closing a client multiple times, a counter is used in case of connection per jvm, every
        // time when client.close() is called, counter counts down once, and when counter reaches zero, client will be
        // closed.
        if (super.isDestroyed()) {
            return;
        } else {
            // double check to avoid dup close
            destroyLock.lock();
            try {
                if (super.isDestroyed()) {
                    return;
                }
                super.destroy();
                // Deregister from the shared invoker set, if one was provided.
                if (invokers != null) {
                    invokers.remove(this);
                }
                // Close every client gracefully within the configured shutdown
                // timeout; a failure to close one client must not prevent the rest.
                for (ExchangeClient client : clients) {
                    try {
                        client.close(ConfigurationUtils.getServerShutdownTimeout());
                    } catch (Throwable t) {
                        logger.warn(t.getMessage(), t);
                    }
                }

            } finally {
                destroyLock.unlock();
            }
        }
    }
}
apache-2.0
GabrielPsantos/LP3---Ambiente-Configurado
appLp3/src/java/mack/controllers/impl/CadastraController.java
1509
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package mack.controllers.impl; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import mack.controllers.AbstractController; import mack.dao.usuario.UsuarioDAO; import mack.dao.usuario.UsuarioDAOFactory; import mack.entities.Usuario; /** * * @author gsantos */ public class CadastraController extends AbstractController { public void execute() { String mensagem = "Erro ao inserir Usuario !"; try { UsuarioDAO dao = UsuarioDAOFactory.getUsuarioDAO(); String nome = this.getRequest().getParameter("nome"); String sobrenome = this.getRequest().getParameter("sobrenome"); dao.criaUsuario(nome,sobrenome); mensagem = "Usuario Inserido com Sucesso"; this.getRequest().setAttribute("mensagem", mensagem); //Adiciona os Usuarios na Request para enviar para a index.jsp List usuarios = new ArrayList<Usuario>(); usuarios = (List) dao.buscaTodosUsuarios(); this.setReturnPage("/index.jsp"); this.getRequest().setAttribute("usuarios", usuarios); } catch (Exception ex) { Logger.getLogger(CadastraController.class.getName()).log(Level.SEVERE, null, ex); } } }
apache-2.0
arquillian/arquillian_deprecated
spi/src/main/java/org/jboss/arquillian/spi/event/suite/BeforeSuite.java
1056
/* * JBoss, Home of Professional Open Source * Copyright 2009, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.arquillian.spi.event.suite; /** * Event fired Before the Suite execution. * * @author <a href="mailto:aslak@conduct.no">Aslak Knutsen</a> * @version $Revision: $ */ public class BeforeSuite extends SuiteEvent { public BeforeSuite() { } }
apache-2.0
qatang/ctm
core/src/main/java/com/qatang/core/form/AbstractForm.java
268
package com.qatang.core.form; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author qatang * @since 2014-12-24 16:00 */ public class AbstractForm implements IForm { protected final Logger logger = LoggerFactory.getLogger(this.getClass()); }
apache-2.0
wequick/Small
Android/DevSample/small-databinding/src/main/java/small/databinding/DataBinderMapper.java
4612
/*
 * Copyright 2015-present wequick.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package small.databinding;

import android.databinding.ViewDataBinding;
import android.databinding.DataBindingComponent;
import android.util.Log;
import android.view.View;

import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.HashMap;

import net.wequick.small.Small;

/**
 * Dispatching DataBinderMapper for Small plugins: resolves a layout's owning
 * package from its resource id, reflectively loads that package's generated
 * {@code <pkg>.databinding.DataBinderMapper}, and forwards binding calls to it.
 * Resolved sub-mappers are cached; packages that fail to resolve are cached as
 * failures and never retried.
 *
 * <p>NOTE(review): not thread-safe — the lazily created caches and the
 * {@code bindingPackageName} field are unsynchronized; presumably all calls
 * happen on the main thread. Confirm before using from other threads.
 */
public class DataBinderMapper {

    private static final String TAG = "SmallDataBinding";
    // Non-zero sentinel returned by getLayoutId(); see the comment there.
    private static final int PASSING_LAYOUT_ID = 1;

    // Cache: resource package name -> that package's generated mapper.
    private HashMap<String, DataBinderMappable> dataBinderMappers;
    // Negative cache: packages whose mapper failed to load (never retried).
    private ArrayList<String> unresolvedPackages;
    // Package of the most recently resolved mapper; consumed by
    // convertBrIdToString and cleared when a view turns out not to be a
    // binding layout.
    private String bindingPackageName;

    /**
     * Resolves the resource package name owning {@code resId}, or null if the
     * id cannot be resolved (logged as a warning).
     */
    private String getPackageName(int resId) {
        try {
            return Small.getContext().getResources().getResourcePackageName(resId);
        } catch (Exception e) {
            Log.w(TAG, "Failed to get package name from resource id: "
                    + String.format("0x%08x", resId));
            return null;
        }
    }

    /** Convenience overload: resolve the package from a layout id first. */
    private DataBinderMappable getSubMapper(int layoutId) {
        return getSubMapper(getPackageName(layoutId));
    }

    /**
     * Returns the (possibly cached) generated mapper for {@code pkg}, loading
     * it reflectively on first use.
     *
     * <p>Side effects: records {@code pkg} as the current binding package on
     * success; on failure adds {@code pkg} to the negative cache and throws
     * (so the put/record lines below are skipped on that path).
     *
     * @return the mapper, or null when {@code pkg} is null or previously failed
     * @throws RuntimeException if the generated mapper cannot be instantiated
     */
    private DataBinderMappable getSubMapper(String pkg) {
        if (pkg == null) {
            return null;
        }
        // Known-bad package: fail fast without retrying reflection.
        if (unresolvedPackages != null && unresolvedPackages.contains(pkg)) {
            return null;
        }

        DataBinderMappable subMapper = null;
        if (dataBinderMappers != null) {
            subMapper = dataBinderMappers.get(pkg);
        }
        if (subMapper == null) {
            try {
                // Each plugin package ships a generated DataBinderMapper class.
                Class bindingClass = Class.forName(pkg + ".databinding.DataBinderMapper");
                Constructor constructor = bindingClass.getConstructor(new Class[]{});
                constructor.setAccessible(true);
                subMapper = (DataBinderMappable) constructor.newInstance();
            } catch (Exception e) {
                // Remember the failure so this package is never probed again,
                // then propagate with the original cause preserved.
                if (unresolvedPackages == null) {
                    unresolvedPackages = new ArrayList<>();
                }
                unresolvedPackages.add(pkg);
                throw new RuntimeException("Failed to get DataBinderMapper for package '"
                        + pkg + "'.", e);
            }
        }

        // Cache (or re-cache) the mapper and mark pkg as the active package.
        if (dataBinderMappers == null) {
            dataBinderMappers = new HashMap<>();
        }
        dataBinderMappers.put(pkg, subMapper);
        bindingPackageName = pkg;
        return subMapper;
    }

    /**
     * Binds a single view: the real layout id is recovered from the view's tag
     * by the sub-mapper (the id passed in is only used to find the package).
     *
     * @throws IllegalArgumentException if the view is not a binding layout
     */
    public ViewDataBinding getDataBinder(DataBindingComponent bindingComponent,
                                         View view, int layoutId) {
        DataBinderMappable subMapper = getSubMapper(layoutId);
        if (subMapper == null) {
            return null;
        }
        layoutId = subMapper.getLayoutId((String) view.getTag());
        if (layoutId == 0) {
            bindingPackageName = null;
            throw new IllegalArgumentException("View is not a binding layout");
        }
        return subMapper.getDataBinder(bindingComponent, view, layoutId);
    }

    /**
     * Binds a multi-root layout; the first root's tag identifies the layout.
     *
     * @throws IllegalArgumentException if the views are not a binding layout
     */
    public ViewDataBinding getDataBinder(DataBindingComponent bindingComponent,
                                         View[] views, int layoutId) {
        DataBinderMappable subMapper = getSubMapper(layoutId);
        if (subMapper == null) {
            return null;
        }
        layoutId = subMapper.getLayoutId((String) views[0].getTag());
        if (layoutId == 0) {
            bindingPackageName = null;
            throw new IllegalArgumentException("View is not a binding layout");
        }
        return subMapper.getDataBinder(bindingComponent, views, layoutId);
    }

    public int getLayoutId(String tag) {
        // Passing a non-zero layout id so that we can invoke the `getDataBinder' method
        // in which we'll resolve the real layout id.
        return PASSING_LAYOUT_ID;
    }

    /**
     * Forwards BR-id-to-name conversion to the mapper of the most recently
     * resolved package; returns null when no package has been resolved yet.
     */
    public String convertBrIdToString(int id) {
        if (bindingPackageName == null) {
            return null;
        }
        DataBinderMappable subMapper = getSubMapper(bindingPackageName);
        if (subMapper == null) {
            return null;
        }
        return subMapper.convertBrIdToString(id);
    }
}
apache-2.0
natobww/MyTest
app/src/main/java/com/example/bgfvg/test/TanmuBean.java
1018
package com.example.bgfvg.test; import android.graphics.Color; /** * 弹幕实体 * Created by hanj on 15-5-28. */ public class TanmuBean { private String[] items; private int color; private int minTextSize; private float range; public TanmuBean() { //init default value color = Color.parseColor("#ffffff"); minTextSize = 20; range = 0f; } public String[] getItems() { return items; } public void setItems(String[] items) { this.items = items; } public int getColor() { return color; } public void setColor(int color) { this.color = color; } /** * min textSize, in dp. */ public int getMinTextSize() { return minTextSize; } public void setMinTextSize(int minTextSize) { this.minTextSize = minTextSize; } public float getRange() { return range; } public void setRange(float range) { this.range = range; } }
apache-2.0