repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
YouDiSN/OpenJDK-Research
jdk9/hotspot/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot/src/org/graalvm/compiler/hotspot/replacements/ObjectCloneSnippets.java
5679
/*
 * Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.hotspot.replacements;

import java.lang.reflect.Method;
import java.util.EnumMap;

import org.graalvm.compiler.api.directives.GraalDirectives;
import org.graalvm.compiler.api.replacements.Snippet;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.hotspot.replacements.arraycopy.ArrayCopyCallNode;
import org.graalvm.compiler.nodes.java.DynamicNewArrayNode;
import org.graalvm.compiler.nodes.java.NewArrayNode;
import org.graalvm.compiler.replacements.Snippets;

import jdk.vm.ci.meta.JavaKind;

/**
 * Snippets used to intrinsify {@code Object.clone()} for arrays: each snippet allocates a
 * destination array of the same length and performs a disjoint (non-overlapping) arraycopy
 * from the source.
 */
public class ObjectCloneSnippets implements Snippets {

    /**
     * Maps each array element kind to the reflective handle of the snippet method that
     * clones an array of that kind. Populated once in the static initializer below.
     */
    public static final EnumMap<JavaKind, Method> arrayCloneMethods = new EnumMap<>(JavaKind.class);

    static {
        arrayCloneMethods.put(JavaKind.Boolean, getCloneMethod("booleanArrayClone", boolean[].class));
        arrayCloneMethods.put(JavaKind.Byte, getCloneMethod("byteArrayClone", byte[].class));
        arrayCloneMethods.put(JavaKind.Char, getCloneMethod("charArrayClone", char[].class));
        arrayCloneMethods.put(JavaKind.Short, getCloneMethod("shortArrayClone", short[].class));
        arrayCloneMethods.put(JavaKind.Int, getCloneMethod("intArrayClone", int[].class));
        arrayCloneMethods.put(JavaKind.Float, getCloneMethod("floatArrayClone", float[].class));
        arrayCloneMethods.put(JavaKind.Long, getCloneMethod("longArrayClone", long[].class));
        arrayCloneMethods.put(JavaKind.Double, getCloneMethod("doubleArrayClone", double[].class));
        arrayCloneMethods.put(JavaKind.Object, getCloneMethod("objectArrayClone", Object[].class));
    }

    /**
     * Looks up a snippet method of this class by name and single parameter type; any
     * reflection failure is a bug in this file, so it is rethrown as a {@link GraalError}.
     */
    private static Method getCloneMethod(String name, Class<?> param) {
        try {
            return ObjectCloneSnippets.class.getDeclaredMethod(name, param);
        } catch (SecurityException | NoSuchMethodException e) {
            throw new GraalError(e);
        }
    }

    /** Clones a {@code boolean[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static boolean[] booleanArrayClone(boolean[] src) {
        boolean[] result = (boolean[]) NewArrayNode.newUninitializedArray(Boolean.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Boolean);
        return result;
    }

    /** Clones a {@code byte[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static byte[] byteArrayClone(byte[] src) {
        byte[] result = (byte[]) NewArrayNode.newUninitializedArray(Byte.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Byte);
        return result;
    }

    /** Clones a {@code short[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static short[] shortArrayClone(short[] src) {
        short[] result = (short[]) NewArrayNode.newUninitializedArray(Short.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Short);
        return result;
    }

    /** Clones a {@code char[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static char[] charArrayClone(char[] src) {
        char[] result = (char[]) NewArrayNode.newUninitializedArray(Character.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Char);
        return result;
    }

    /** Clones an {@code int[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static int[] intArrayClone(int[] src) {
        int[] result = (int[]) NewArrayNode.newUninitializedArray(Integer.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Int);
        return result;
    }

    /** Clones a {@code float[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static float[] floatArrayClone(float[] src) {
        float[] result = (float[]) NewArrayNode.newUninitializedArray(Float.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Float);
        return result;
    }

    /** Clones a {@code long[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static long[] longArrayClone(long[] src) {
        long[] result = (long[]) NewArrayNode.newUninitializedArray(Long.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Long);
        return result;
    }

    /** Clones a {@code double[]} via an uninitialized allocation plus disjoint copy. */
    @Snippet
    public static double[] doubleArrayClone(double[] src) {
        double[] result = (double[]) NewArrayNode.newUninitializedArray(Double.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Double);
        return result;
    }

    /**
     * Clones an {@code Object[]}. Unlike the primitive variants this uses a dynamic (zeroed)
     * allocation keyed off the source's component type, guarded non-null for the lowering.
     */
    @Snippet
    public static Object[] objectArrayClone(Object[] src) {
        /* Since this snippet is lowered early the array must be initialized */
        Object[] result = (Object[]) DynamicNewArrayNode.newArray(GraalDirectives.guardingNonNull(src.getClass().getComponentType()), src.length, JavaKind.Object);
        ArrayCopyCallNode.disjointUninitializedArraycopy(src, 0, result, 0, src.length, JavaKind.Object);
        return result;
    }
}
gpl-2.0
tsdl2013/iBeebo
app/src/main/java/org/zarroboogs/weibo/dao/MapDao.java
2199
package org.zarroboogs.weibo.dao;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.zarroboogs.util.net.HttpUtility;
import org.zarroboogs.util.net.WeiboException;
import org.zarroboogs.util.net.HttpUtility.HttpMethod;
import org.zarroboogs.utils.ImageUtility;
import org.zarroboogs.utils.WeiBoURLs;
import org.zarroboogs.utils.file.FileLocationMethod;
import org.zarroboogs.utils.file.FileManager;
import org.zarroboogs.weibo.support.asyncdrawable.TaskCache;

import android.graphics.Bitmap;
import android.text.TextUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * Fetches a static map image centered on a coordinate from the Weibo static-map API,
 * downloads it to the local file cache, and decodes it into a {@link Bitmap}.
 */
public class MapDao {

    /**
     * Requests the static map URL for this DAO's coordinates, downloads the image and
     * decodes it.
     *
     * @return the decoded map bitmap, or {@code null} if the service response contained
     *         no usable image URL or the download failed
     * @throws WeiboException if the HTTP request to the map service fails
     */
    public Bitmap getMap() throws WeiboException {
        String url = WeiBoURLs.STATIC_MAP;
        Map<String, String> map = new HashMap<String, String>();
        map.put("access_token", access_token);
        // The API expects "lat,lon"; note the field is (mis)named "lan" throughout.
        String coordinates = String.valueOf(lat) + "," + String.valueOf(lan);
        map.put("center_coordinate", coordinates);
        map.put("zoom", "14");
        map.put("size", "600x380");
        String jsonData = HttpUtility.getInstance().executeNormalTask(HttpMethod.Get, url, map);
        String mapUrl = "";
        try {
            JSONObject jsonObject = new JSONObject(jsonData);
            JSONArray array = jsonObject.optJSONArray("map");
            // FIX: optJSONArray returns null when the "map" key is absent. The original
            // code dereferenced it unconditionally, so a missing key threw a
            // NullPointerException that escaped the JSONException catch below.
            if (array != null && array.length() > 0) {
                mapUrl = array.getJSONObject(0).getString("image_url");
            }
        } catch (JSONException ignored) {
            // Best effort: a malformed response is treated the same as a missing URL,
            // and the empty-URL check below returns null.
        }
        if (TextUtils.isEmpty(mapUrl)) {
            return null;
        }
        String filePath = FileManager.getFilePathFromUrl(mapUrl, FileLocationMethod.map);
        // Blocks until the picture download completes (or fails).
        boolean downloaded = TaskCache.waitForPictureDownload(mapUrl, null, filePath, FileLocationMethod.map);
        if (!downloaded) {
            return null;
        }
        // -1/-1 requests the image at its natural size.
        Bitmap bitmap = ImageUtility.readNormalPic(FileManager.getFilePathFromUrl(mapUrl, FileLocationMethod.map), -1, -1);
        return bitmap;
    }

    /**
     * @param token OAuth access token for the map API
     * @param lan   longitude (named "lan" to match existing callers)
     * @param lat   latitude
     */
    public MapDao(String token, double lan, double lat) {
        this.access_token = token;
        this.lan = lan;
        this.lat = lat;
    }

    private String access_token;
    private double lan;
    private double lat;
}
gpl-3.0
Samernieve/EnderIO
src/main/java/crazypants/enderio/machine/capbank/network/InventoryImpl.java
3083
package crazypants.enderio.machine.capbank.network;

import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import cofh.api.energy.IEnergyContainerItem;
import crazypants.enderio.EnderIO;
import crazypants.enderio.machine.capbank.TileCapBank;

/**
 * A 4-slot {@link IInventory} view over the item array of a capacitor bank tile.
 * The backing array is owned by the {@link TileCapBank}; this class only wraps it.
 *
 * NOTE(review): "Emtpy" is a typo for "Empty", but the misspelled names are public
 * API used by callers and cannot be renamed here without a coordinated refactor.
 */
public class InventoryImpl implements IInventory {

    /** Returns true if every slot of the given cap bank's inventory is empty. */
    public static boolean isInventoryEmtpy(TileCapBank cap) {
        for (ItemStack st : cap.getInventory()) {
            if(st != null) {
                return false;
            }
        }
        return true;
    }

    /** Returns true if the array is null or contains no non-null stacks. */
    public static boolean isInventoryEmtpy(ItemStack[] inv) {
        if(inv == null) {
            return true;
        }
        for (ItemStack st : inv) {
            if(st != null) {
                return false;
            }
        }
        return true;
    }

    // Live reference to the cap bank's item array (null when no bank is attached).
    private ItemStack[] inventory;
    // The tile whose inventory is currently wrapped (null when detached).
    private TileCapBank capBank;

    public InventoryImpl() {
    }

    public TileCapBank getCapBank() {
        return capBank;
    }

    /**
     * Attaches this view to a cap bank (or detaches it when {@code cap} is null).
     * The inventory array is shared with the tile, not copied.
     */
    public void setCapBank(TileCapBank cap) {
        capBank = cap;
        if(cap == null) {
            inventory = null;
            return;
        }
        inventory = cap.getInventory();
    }

    /** True if no cap bank is attached or its inventory holds no items. */
    public boolean isEmtpy() {
        return isInventoryEmtpy(inventory);
    }

    /** Direct access to the backing array; may be null when detached. */
    public ItemStack[] getStacks() {
        return inventory;
    }

    /** Returns the stack in the slot, or null when detached or out of range. */
    @Override
    public ItemStack getStackInSlot(int slot) {
        if(inventory == null) {
            return null;
        }
        if(slot < 0 || slot >= inventory.length) {
            return null;
        }
        return inventory[slot];
    }

    /**
     * Removes up to {@code amount} items from the slot.
     * If the whole stack is consumed the slot is cleared and a copy of the removed
     * stack is returned; otherwise the stack is shrunk in place and a copy of the
     * remainder is returned.
     */
    @Override
    public ItemStack decrStackSize(int fromSlot, int amount) {
        if(inventory == null) {
            return null;
        }
        if(fromSlot < 0 || fromSlot >= inventory.length) {
            return null;
        }
        ItemStack item = inventory[fromSlot];
        if(item == null) {
            return null;
        }
        if(item.stackSize <= amount) {
            // Entire stack removed: clear the slot and hand back what was there.
            ItemStack result = item.copy();
            inventory[fromSlot] = null;
            return result;
        }
        item.stackSize -= amount;
        return item.copy();
    }

    /** Silently ignores writes when detached or the slot index is out of range. */
    @Override
    public void setInventorySlotContents(int slot, ItemStack itemstack) {
        if(inventory == null) {
            return;
        }
        if(slot < 0 || slot >= inventory.length) {
            return;
        }
        inventory[slot] = itemstack;
    }

    /** Cap banks always expose exactly 4 slots, even when detached. */
    @Override
    public int getSizeInventory() {
        return 4;
    }

    //--- constant values

    @Override
    public ItemStack getStackInSlotOnClosing(int p_70304_1_) {
        return null;
    }

    @Override
    public String getInventoryName() {
        return EnderIO.blockCapBank.getUnlocalizedName() + ".name";
    }

    @Override
    public boolean hasCustomInventoryName() {
        return false;
    }

    /** One item per slot: slots hold single energy-container items. */
    @Override
    public int getInventoryStackLimit() {
        return 1;
    }

    @Override
    public boolean isUseableByPlayer(EntityPlayer p_70300_1_) {
        return true;
    }

    /** Only items implementing the CoFH energy-container API may be inserted. */
    @Override
    public boolean isItemValidForSlot(int slot, ItemStack itemstack) {
        if(itemstack == null) {
            return false;
        }
        return itemstack.getItem() instanceof IEnergyContainerItem;
    }

    @Override
    public void openInventory() {
    }

    @Override
    public void closeInventory() {
    }

    @Override
    public void markDirty() {
    }
}
unlicense
bit1129/open-source-projects
src/java/test/org/apache/zookeeper/server/quorum/WatchLeakTest.java
12847
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.zookeeper.server.quorum;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.junit.Assert.*;

// NOTE(review): java.util.ArrayList and java.util.List are imported twice (here and
// in the second group below). Duplicate single-type imports are legal but redundant.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.apache.jute.InputArchive;
import org.apache.jute.OutputArchive;
import org.apache.zookeeper.MockPacket;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.proto.ConnectRequest;
import org.apache.zookeeper.proto.ReplyHeader;
import org.apache.zookeeper.proto.RequestHeader;
import org.apache.zookeeper.proto.SetWatches;
import org.apache.zookeeper.server.MockNIOServerCnxn;
import org.apache.zookeeper.server.NIOServerCnxn;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.MockSelectorThread;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooTrace;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Demonstrate ZOOKEEPER-1382 : Watches leak on expired session
 */
@RunWith(Parameterized.class)
public class WatchLeakTest {

    protected static final Logger LOG = LoggerFactory
            .getLogger(WatchLeakTest.class);

    // Fixed session id so the connection request and the revalidation packet agree.
    final long SESSION_ID = 0xBABEL;

    // Parameterized flag: whether the leader reports the session as expired.
    private final boolean sessionTimedout;

    public WatchLeakTest(boolean sessionTimedout) {
        this.sessionTimedout = sessionTimedout;
    }

    /** Runs the test twice: once with a valid session, once with an expired one. */
    @Parameters
    public static Collection<Object[]> configs() {
        return Arrays.asList(new Object[][] {
            { false }, { true },
        });
    }

    /**
     * Check that if session has expired then no watch can be set
     */
    @Test
    public void testWatchesLeak() throws Exception {

        NIOServerCnxnFactory serverCnxnFactory = mock(NIOServerCnxnFactory.class);

        final SelectionKey sk = new FakeSK();
        MockSelectorThread selectorThread = mock(MockSelectorThread.class);
        // When the connection asks for an interest-ops update, apply it directly to
        // the fake key instead of queuing it on a real selector thread.
        when(selectorThread.addInterestOpsUpdateRequest(any(SelectionKey.class))).thenAnswer(new Answer<Boolean>() {
            @Override
            public Boolean answer(InvocationOnMock invocation) throws Throwable {
                SelectionKey sk = (SelectionKey)invocation.getArguments()[0];
                NIOServerCnxn nioSrvCnx = (NIOServerCnxn)sk.attachment();
                sk.interestOps(nioSrvCnx.getInterestOps());
                return true;
            }
        });

        ZKDatabase database = new ZKDatabase(null);
        database.setlastProcessedZxid(2L);
        QuorumPeer quorumPeer = mock(QuorumPeer.class);
        FileTxnSnapLog logfactory = mock(FileTxnSnapLog.class);
        // Directories are not used but we need it to avoid NPE
        when(logfactory.getDataDir()).thenReturn(new File(""));
        when(logfactory.getSnapDir()).thenReturn(new File(""));
        FollowerZooKeeperServer fzks = null;

        try {
            // Create a new follower
            fzks = new FollowerZooKeeperServer(logfactory, quorumPeer, database);
            fzks.startup();
            fzks.setServerCnxnFactory(serverCnxnFactory);
            quorumPeer.follower = new MyFollower(quorumPeer, fzks);
            LOG.info("Follower created");
            // Simulate a socket channel between a client and a follower
            final SocketChannel socketChannel = createClientSocketChannel();
            // Create the NIOServerCnxn that will handle the client requests
            final MockNIOServerCnxn nioCnxn = new MockNIOServerCnxn(fzks,
                    socketChannel, sk, serverCnxnFactory, selectorThread);
            sk.attach(nioCnxn);
            // Send the connection request as a client do
            nioCnxn.doIO(sk);
            LOG.info("Client connection sent");
            // Send the valid or invalid session packet to the follower
            QuorumPacket qp = createValidateSessionPacketResponse(!sessionTimedout);
            quorumPeer.follower.processPacket(qp);
            LOG.info("Session validation sent");
            // OK, now the follower knows that the session is valid or invalid, let's try
            // to send the watches
            nioCnxn.doIO(sk);
            // wait for the the request processor to do his job
            Thread.sleep(1000L);
            LOG.info("Watches processed");
            // If session has not been validated, there must be NO watches
            int watchCount = database.getDataTree().getWatchCount();
            if (sessionTimedout) {
                // Session has not been re-validated !
                LOG.info("session is not valid, watches = {}", watchCount);
                assertEquals("Session is not valid so there should be no watches",
                        0, watchCount);
            } else {
                // Session has been re-validated
                LOG.info("session is valid, watches = {}", watchCount);
                assertEquals("Session is valid so the watch should be there", 1,
                        watchCount);
            }
        } finally {
            if (fzks != null) {
                fzks.shutdown();
            }
        }
    }

    /**
     * A follower with no real leader connection
     */
    public static class MyFollower extends Follower {
        /**
         * Create a follower with a mocked leader connection
         *
         * @param self
         * @param zk
         */
        MyFollower(QuorumPeer self, FollowerZooKeeperServer zk) {
            super(self, zk);
            // Replace the real leader I/O with mocks so processPacket can be
            // driven directly from the test without a live leader.
            leaderOs = mock(OutputArchive.class);
            leaderIs = mock(InputArchive.class);
            bufferedOutput = mock(BufferedOutputStream.class);
        }
    }

    /**
     * Simulate the behavior of a real selection key
     */
    private static class FakeSK extends SelectionKey {

        @Override
        public SelectableChannel channel() {
            return null;
        }

        @Override
        public Selector selector() {
            return mock(Selector.class);
        }

        @Override
        public boolean isValid() {
            return true;
        }

        @Override
        public void cancel() {
        }

        @Override
        public int interestOps() {
            return ops;
        }

        // Starts ready for both read and write so the first doIO can proceed.
        private int ops = OP_WRITE + OP_READ;

        @Override
        public SelectionKey interestOps(int ops) {
            this.ops = ops;
            return this;
        }

        /** Reports exactly the interest ops as ready ops, logging the combination. */
        @Override
        public int readyOps() {
            boolean reading = (ops & OP_READ) != 0;
            boolean writing = (ops & OP_WRITE) != 0;
            if (reading && writing) {
                LOG.info("Channel is ready for reading and writing");
            } else if (reading) {
                LOG.info("Channel is ready for reading only");
            } else if (writing) {
                LOG.info("Channel is ready for writing only");
            }
            return ops;
        }
    }

    /**
     * Create a watches message with a single watch on /
     *
     * @return a message that attempts to set 1 watch on /
     */
    private ByteBuffer createWatchesMessage() {
        List<String> dataWatches = new ArrayList<String>(1);
        dataWatches.add("/");
        List<String> existWatches = Collections.emptyList();
        List<String> childWatches = Collections.emptyList();
        SetWatches sw = new SetWatches(1L, dataWatches, existWatches,
                childWatches);
        RequestHeader h = new RequestHeader();
        h.setType(ZooDefs.OpCode.setWatches);
        // -8 is the reserved xid for setWatches requests.
        h.setXid(-8);
        MockPacket p = new MockPacket(h, new ReplyHeader(), sw, null, null);
        return p.createAndReturnBB();
    }

    /**
     * This is the secret that we use to generate passwords, for the moment it
     * is more of a sanity check.
     */
    static final private long superSecret = 0XB3415C00L;

    /**
     * Create a connection request
     *
     * @return a serialized connection request
     */
    private ByteBuffer createConnRequest() {
        // Password is derived deterministically from the session id, matching
        // the server's own password-generation scheme.
        Random r = new Random(SESSION_ID ^ superSecret);
        byte p[] = new byte[16];
        r.nextBytes(p);
        ConnectRequest conReq = new ConnectRequest(0, 1L, 30000, SESSION_ID, p);
        MockPacket packet = new MockPacket(null, null, conReq, null, null, false);
        return packet.createAndReturnBB();
    }

    /**
     * Mock a client channel with a connection request and a watches message
     * inside.
     *
     * @return a socket channel
     * @throws IOException
     */
    private SocketChannel createClientSocketChannel() throws IOException {

        SocketChannel socketChannel = mock(SocketChannel.class);
        Socket socket = mock(Socket.class);
        InetSocketAddress socketAddress = new InetSocketAddress(1234);
        when(socket.getRemoteSocketAddress()).thenReturn(socketAddress);
        when(socketChannel.socket()).thenReturn(socket);

        // Send watches packet to server connection
        final ByteBuffer connRequest = createConnRequest();
        final ByteBuffer watchesMessage = createWatchesMessage();
        final ByteBuffer request = ByteBuffer.allocate(connRequest.limit()
                + watchesMessage.limit());
        request.put(connRequest);
        request.put(watchesMessage);

        // Each read() drains the pre-built request buffer into the caller's
        // buffer, remembering the position across invocations via 'i'.
        Answer<Integer> answer = new Answer<Integer>() {
            int i = 0;
            @Override
            public Integer answer(InvocationOnMock invocation) throws Throwable {
                Object[] args = invocation.getArguments();
                ByteBuffer bb = (ByteBuffer) args[0];
                for (int k = 0; k < bb.limit(); k++) {
                    bb.put(request.get(i));
                    i = i + 1;
                }
                return bb.limit();
            }
        };
        when(socketChannel.read(any(ByteBuffer.class))).thenAnswer(answer);
        return socketChannel;
    }

    /**
     * Forge an invalid session packet as a LEADER do
     *
     * @param valid <code>true</code> to create a valid session message
     *
     * @throws Exception
     */
    private QuorumPacket createValidateSessionPacketResponse(boolean valid) throws Exception {
        QuorumPacket qp = createValidateSessionPacket();
        // Rewrite the packet payload: keep the session id, append the
        // leader's validity verdict.
        ByteArrayInputStream bis = new ByteArrayInputStream(qp.getData());
        DataInputStream dis = new DataInputStream(bis);
        long id = dis.readLong();
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeLong(id);
        // false means that the session has expired
        dos.writeBoolean(valid);
        qp.setData(bos.toByteArray());
        return qp;
    }

    /**
     * Forge an validate session packet as a LEARNER do
     *
     * @return
     * @throws Exception
     */
    private QuorumPacket createValidateSessionPacket() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeLong(SESSION_ID);
        dos.writeInt(3000);
        dos.close();
        QuorumPacket qp = new QuorumPacket(Leader.REVALIDATE, -1,
                baos.toByteArray(), null);
        return qp;
    }
}
apache-2.0
hello2009chen/spring-boot
spring-boot-project/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/shell/RunProcessCommand.java
1906
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.boot.cli.command.shell;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;

import org.springframework.boot.cli.command.AbstractCommand;
import org.springframework.boot.cli.command.Command;
import org.springframework.boot.cli.command.status.ExitStatus;
import org.springframework.boot.loader.tools.RunProcess;
import org.springframework.util.StringUtils;

/**
 * Special {@link Command} used to run a process from the shell. NOTE: this command is not
 * directly installed into the shell.
 *
 * @author Phillip Webb
 */
class RunProcessCommand extends AbstractCommand {

	private final String[] command;

	private volatile RunProcess process;

	RunProcessCommand(String... command) {
		super(null, null);
		this.command = command;
	}

	@Override
	public ExitStatus run(String... args) throws Exception {
		return run(Arrays.asList(args));
	}

	/**
	 * Launch the external process with the given arguments and translate its exit
	 * code into an {@link ExitStatus}.
	 */
	protected ExitStatus run(Collection<String> args) throws IOException {
		this.process = new RunProcess(this.command);
		int exitCode = this.process.run(true, StringUtils.toStringArray(args));
		return (exitCode == 0) ? ExitStatus.OK
				: new ExitStatus(exitCode, "EXTERNAL_ERROR");
	}

	/**
	 * Forward a SIGINT to the running process.
	 */
	public boolean handleSigInt() {
		return this.process.handleSigInt();
	}

}
apache-2.0
sonchang/cattle
code/iaas/model/src/main/java/io/cattle/platform/core/model/tables/AgentTable.java
7604
/**
 * This class is generated by jOOQ
 */
package io.cattle.platform.core.model.tables;

// NOTE(review): generated code — do not hand-edit; regenerate with the jOOQ code
// generator (version 3.3.0, per the @Generated annotation below) so schema changes
// stay in sync with the database.

/**
 * This class is generated by jOOQ.
 */
@javax.annotation.Generated(value    = { "http://www.jooq.org", "3.3.0" },
                            comments = "This class is generated by jOOQ")
@java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class AgentTable extends org.jooq.impl.TableImpl<io.cattle.platform.core.model.tables.records.AgentRecord> {

	private static final long serialVersionUID = -328097319;

	/**
	 * The singleton instance of <code>cattle.agent</code>
	 */
	public static final io.cattle.platform.core.model.tables.AgentTable AGENT = new io.cattle.platform.core.model.tables.AgentTable();

	/**
	 * The class holding records for this type
	 */
	@Override
	public java.lang.Class<io.cattle.platform.core.model.tables.records.AgentRecord> getRecordType() {
		return io.cattle.platform.core.model.tables.records.AgentRecord.class;
	}

	/**
	 * The column <code>cattle.agent.id</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> ID = createField("id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

	/**
	 * The column <code>cattle.agent.name</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> NAME = createField("name", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>cattle.agent.account_id</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> ACCOUNT_ID = createField("account_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

	/**
	 * The column <code>cattle.agent.kind</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> KIND = createField("kind", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");

	/**
	 * The column <code>cattle.agent.uuid</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> UUID = createField("uuid", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");

	/**
	 * The column <code>cattle.agent.description</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> DESCRIPTION = createField("description", org.jooq.impl.SQLDataType.VARCHAR.length(1024), this, "");

	/**
	 * The column <code>cattle.agent.state</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> STATE = createField("state", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");

	/**
	 * The column <code>cattle.agent.created</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Date> CREATED = createField("created", org.jooq.impl.SQLDataType.TIMESTAMP.asConvertedDataType(new io.cattle.platform.db.jooq.converter.DateConverter()), this, "");

	/**
	 * The column <code>cattle.agent.removed</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Date> REMOVED = createField("removed", org.jooq.impl.SQLDataType.TIMESTAMP.asConvertedDataType(new io.cattle.platform.db.jooq.converter.DateConverter()), this, "");

	/**
	 * The column <code>cattle.agent.remove_time</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Date> REMOVE_TIME = createField("remove_time", org.jooq.impl.SQLDataType.TIMESTAMP.asConvertedDataType(new io.cattle.platform.db.jooq.converter.DateConverter()), this, "");

	/**
	 * The column <code>cattle.agent.data</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Map<String,Object>> DATA = createField("data", org.jooq.impl.SQLDataType.CLOB.length(16777215).asConvertedDataType(new io.cattle.platform.db.jooq.converter.DataConverter()), this, "");

	/**
	 * The column <code>cattle.agent.uri</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> URI = createField("uri", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>cattle.agent.managed_config</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Boolean> MANAGED_CONFIG = createField("managed_config", org.jooq.impl.SQLDataType.BIT.nullable(false).defaulted(true), this, "");

	/**
	 * The column <code>cattle.agent.agent_group_id</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> AGENT_GROUP_ID = createField("agent_group_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

	/**
	 * The column <code>cattle.agent.zone_id</code>.
	 */
	public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> ZONE_ID = createField("zone_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

	/**
	 * Create a <code>cattle.agent</code> table reference
	 */
	public AgentTable() {
		this("agent", null);
	}

	/**
	 * Create an aliased <code>cattle.agent</code> table reference
	 */
	public AgentTable(java.lang.String alias) {
		this(alias, io.cattle.platform.core.model.tables.AgentTable.AGENT);
	}

	private AgentTable(java.lang.String alias, org.jooq.Table<io.cattle.platform.core.model.tables.records.AgentRecord> aliased) {
		this(alias, aliased, null);
	}

	private AgentTable(java.lang.String alias, org.jooq.Table<io.cattle.platform.core.model.tables.records.AgentRecord> aliased, org.jooq.Field<?>[] parameters) {
		super(alias, io.cattle.platform.core.model.CattleTable.CATTLE, aliased, parameters, "");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public org.jooq.Identity<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> getIdentity() {
		return io.cattle.platform.core.model.Keys.IDENTITY_AGENT;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public org.jooq.UniqueKey<io.cattle.platform.core.model.tables.records.AgentRecord> getPrimaryKey() {
		return io.cattle.platform.core.model.Keys.KEY_AGENT_PRIMARY;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public java.util.List<org.jooq.UniqueKey<io.cattle.platform.core.model.tables.records.AgentRecord>> getKeys() {
		return java.util.Arrays.<org.jooq.UniqueKey<io.cattle.platform.core.model.tables.records.AgentRecord>>asList(io.cattle.platform.core.model.Keys.KEY_AGENT_PRIMARY, io.cattle.platform.core.model.Keys.KEY_AGENT_IDX_AGENT_UUID);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public java.util.List<org.jooq.ForeignKey<io.cattle.platform.core.model.tables.records.AgentRecord, ?>> getReferences() {
		return java.util.Arrays.<org.jooq.ForeignKey<io.cattle.platform.core.model.tables.records.AgentRecord, ?>>asList(io.cattle.platform.core.model.Keys.FK_AGENT__ACCOUNT_ID, io.cattle.platform.core.model.Keys.FK_AGENT__AGENT_GROUP_ID, io.cattle.platform.core.model.Keys.FK_AGENT__ZONE_ID);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public io.cattle.platform.core.model.tables.AgentTable as(java.lang.String alias) {
		return new io.cattle.platform.core.model.tables.AgentTable(alias, this);
	}

	/**
	 * Rename this table
	 */
	public io.cattle.platform.core.model.tables.AgentTable rename(java.lang.String name) {
		return new io.cattle.platform.core.model.tables.AgentTable(name, null);
	}
}
apache-2.0
prestodb/presto
presto-ml/src/test/java/com/facebook/presto/ml/AbstractTestMLFunctions.java
1068
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.ml;

import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import org.testng.annotations.BeforeClass;

import static com.facebook.presto.metadata.FunctionExtractor.extractFunctions;

/**
 * Base class for ML function tests: registers every function exported by
 * {@link MLPlugin} into the shared function assertions before any test runs.
 */
abstract class AbstractTestMLFunctions
        extends AbstractTestFunctions
{
    // Runs once per test class; makes the ML plugin's scalar functions resolvable
    // by name in the assertions used by subclasses.
    @BeforeClass
    protected void registerFunctions()
    {
        functionAssertions.getMetadata().registerBuiltInFunctions(
                extractFunctions(new MLPlugin().getFunctions()));
    }
}
apache-2.0
droolsjbpm/jbpm
jbpm-services/jbpm-kie-services/src/test/java/org/jbpm/kie/test/objects/Building.java
720
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.kie.test.objects; public interface Building { public Integer getDoors(); }
apache-2.0
samaitra/ignite
modules/core/src/main/java/org/apache/ignite/internal/client/util/GridClientConsistentHash.java
12863
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.client.util; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.NavigableMap; import java.util.Random; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.ignite.internal.client.GridClientPredicate; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; /** * Controls key to node affinity using consistent hash algorithm. This class is thread-safe * and does not have to be externally synchronized. * <p> * For a good explanation of what consistent hashing is, you can refer to * <a href="http://weblogs.java.net/blog/tomwhite/archive/2007/11/consistent_hash.html">Tom White's Blog</a>. */ public class GridClientConsistentHash<N> { /** Prime number. */ private static final int PRIME = 15485857; /** Random generator. */ private static final Random RAND = new Random(); /** Affinity seed. 
*/ private final Object affSeed; /** Map of hash assignments. */ private final NavigableMap<Integer, SortedSet<N>> circle = new TreeMap<>(); /** Read/write lock. */ private final ReadWriteLock rw = new ReentrantReadWriteLock(); /** Distinct nodes in the hash. */ private Collection<N> nodes = new HashSet<>(); /** Nodes comparator to resolve hash codes collisions. */ private Comparator<N> nodesComp; /** * Constructs consistent hash using empty affinity seed and {@code MD5} hasher function. */ public GridClientConsistentHash() { this(null, null); } /** * Constructs consistent hash using given affinity seed and {@code MD5} hasher function. * * @param affSeed Affinity seed (will be used as key prefix for hashing). */ public GridClientConsistentHash(Object affSeed) { this(null, affSeed); } /** * Constructs consistent hash using given affinity seed and hasher function. * * @param nodesComp Nodes comparator to resolve hash codes collisions. * If {@code null} natural order will be used. * @param affSeed Affinity seed (will be used as key prefix for hashing). */ public GridClientConsistentHash(Comparator<N> nodesComp, Object affSeed) { this.nodesComp = nodesComp; this.affSeed = affSeed == null ? new Integer(PRIME) : affSeed; } /** * Adds nodes to consistent hash algorithm (if nodes are {@code null} or empty, then no-op). * * @param nodes Nodes to add. * @param replicas Number of replicas for every node. */ public void addNodes(Collection<N> nodes, int replicas) { if (nodes == null || nodes.isEmpty()) return; rw.writeLock().lock(); try { for (N node : nodes) addNode(node, replicas); } finally { rw.writeLock().unlock(); } } /** * Adds a node to consistent hash algorithm. * * @param node New node (if {@code null} then no-op). * @param replicas Number of replicas for the node. * @return {@code True} if node was added, {@code false} if it is {@code null} or * is already contained in the hash. 
*/ public boolean addNode(N node, int replicas) { if (node == null) return false; long seed = affSeed.hashCode() * 31 + hash(node); rw.writeLock().lock(); try { if (!nodes.add(node)) return false; int hash = hash(seed); SortedSet<N> set = circle.get(hash); if (set == null) circle.put(hash, set = new TreeSet<>(nodesComp)); set.add(node); for (int i = 1; i <= replicas; i++) { seed = seed * affSeed.hashCode() + i; hash = hash(seed); set = circle.get(hash); if (set == null) circle.put(hash, set = new TreeSet<>(nodesComp)); set.add(node); } return true; } finally { rw.writeLock().unlock(); } } /** * Removes a node and all of its replicas. * * @param node Node to remove (if {@code null}, then no-op). * @return {@code True} if node was removed, {@code false} if node is {@code null} or * not present in hash. */ public boolean removeNode(N node) { if (node == null) return false; rw.writeLock().lock(); try { if (!nodes.remove(node)) return false; for (Iterator<SortedSet<N>> it = circle.values().iterator(); it.hasNext();) { SortedSet<N> set = it.next(); if (!set.remove(node)) continue; if (set.isEmpty()) it.remove(); } return true; } finally { rw.writeLock().unlock(); } } /** * Gets number of distinct nodes, excluding replicas, in consistent hash. * * @return Number of distinct nodes, excluding replicas, in consistent hash. */ public int count() { rw.readLock().lock(); try { return nodes.size(); } finally { rw.readLock().unlock(); } } /** * Gets size of all nodes (including replicas) in consistent hash. * * @return Size of all nodes (including replicas) in consistent hash. */ public int size() { rw.readLock().lock(); try { int size = 0; for (SortedSet<N> set : circle.values()) size += set.size(); return size; } finally { rw.readLock().unlock(); } } /** * Checks if consistent hash has nodes added to it. * * @return {@code True} if consistent hash is empty, {@code false} otherwise. 
*/ public boolean isEmpty() { return count() == 0; } /** * Gets set of all distinct nodes in the consistent hash (in no particular order). * * @return Set of all distinct nodes in the consistent hash. */ public Set<N> nodes() { rw.readLock().lock(); try { return new HashSet<>(nodes); } finally { rw.readLock().unlock(); } } /** * Picks a random node from consistent hash. * * @return Random node from consistent hash or {@code null} if there are no nodes. */ public N random() { return node(RAND.nextLong()); } /** * Gets node for a key. * * @param key Key. * @return Node. */ public N node(Object key) { int hash = hash(key); rw.readLock().lock(); try { Map.Entry<Integer, SortedSet<N>> firstEntry = circle.firstEntry(); if (firstEntry == null) return null; Map.Entry<Integer, SortedSet<N>> tailEntry = circle.tailMap(hash, true).firstEntry(); // Get first node hash in the circle clock-wise. return circle.get(tailEntry == null ? firstEntry.getKey() : tailEntry.getKey()).first(); } finally { rw.readLock().unlock(); } } /** * Gets node for a given key. * * @param key Key to get node for. * @param inc Optional inclusion set. Only nodes contained in this set may be returned. * If {@code null}, then all nodes may be included. * @return Node for key, or {@code null} if node was not found. */ public N node(Object key, Collection<N> inc) { return node(key, inc, null); } /** * Gets node for a given key. * * @param key Key to get node for. * @param inc Optional inclusion set. Only nodes contained in this set may be returned. * If {@code null}, then all nodes may be included. * @param exc Optional exclusion set. Only nodes not contained in this set may be returned. * If {@code null}, then all nodes may be returned. * @return Node for key, or {@code null} if node was not found. 
*/ public N node(Object key, @Nullable final Collection<N> inc, @Nullable final Collection<N> exc) { if (inc == null && exc == null) return node(key); return node(key, new GridClientPredicate<N>() { @Override public boolean apply(N n) { return (inc == null || inc.contains(n)) && (exc == null || !exc.contains(n)); } }); } /** * Gets node for a given key. * * @param key Key to get node for. * @param p Optional predicate for node filtering. * @return Node for key, or {@code null} if node was not found. */ public N node(Object key, GridClientPredicate<N>... p) { if (p == null || p.length == 0) return node(key); int hash = hash(key); rw.readLock().lock(); try { final int size = nodes.size(); if (size == 0) return null; Set<N> failed = null; // Move clock-wise starting from selected position 'hash'. for (SortedSet<N> set : circle.tailMap(hash, true).values()) { for (N n : set) { if (failed != null && failed.contains(n)) continue; if (apply(p, n)) return n; if (failed == null) failed = new HashSet<>(); failed.add(n); if (failed.size() == size) return null; } } // // Copy-paste is used to escape several new objects creation. // // Wrap around moving clock-wise from the circle start. for (SortedSet<N> set : circle.headMap(hash, false).values()) { // Circle head. for (N n : set) { if (failed != null && failed.contains(n)) continue; if (apply(p, n)) return n; if (failed == null) failed = U.newHashSet(size); failed.add(n); if (failed.size() == size) return null; } } return null; } finally { rw.readLock().unlock(); } } /** * Apply predicate to the node. * * @param p Predicate. * @param n Node. * @return {@code True} if filter passed or empty. */ private boolean apply(GridClientPredicate<N>[] p, N n) { if (p != null) { for (GridClientPredicate<? super N> r : p) { if (r != null && !r.apply(n)) return false; } } return true; } /** * Gets hash code for a given object. * * @param o Object to get hash code for. * @return Hash code. 
*/ public static int hash(Object o) { int h = o == null ? 0 : o instanceof byte[] ? Arrays.hashCode((byte[])o) : o.hashCode(); // Spread bits to hash code. h += (h << 15) ^ 0xffffcd7d; h ^= (h >>> 10); h += (h << 3); h ^= (h >>> 6); h += (h << 2) + (h << 14); return h ^ (h >>> 16); } /** {@inheritDoc} */ @Override public String toString() { return getClass().getSimpleName() + " [affSeed=" + affSeed + ", circle=" + circle + ", nodesComp=" + nodesComp + ", nodes=" + nodes + "]"; } }
apache-2.0
MagicWiz/log4j2
log4j-api/src/test/java/org/apache/logging/log4j/AbstractLoggerTest.java
26624
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j; import org.apache.logging.log4j.message.Message; import org.apache.logging.log4j.message.ObjectMessage; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.message.SimpleMessage; import org.apache.logging.log4j.spi.AbstractLogger; import org.junit.Test; import static org.junit.Assert.*; /** * */ public class AbstractLoggerTest extends AbstractLogger { private static class LogEvent { String markerName; Message data; Throwable t; public LogEvent(final String markerName, final Message data, final Throwable t) { this.markerName = markerName; this.data = data; this.t = t; } } private static final long serialVersionUID = 1L; private static Level currentLevel; private LogEvent currentEvent; private static Throwable t = new UnsupportedOperationException("Test"); private static Class<AbstractLogger> obj = AbstractLogger.class; private static String pattern = "{}, {}"; private static String p1 = "Long Beach"; private static String p2 = "California"; private static Message simple = new SimpleMessage("Hello"); private static Message object = new ObjectMessage(obj); private static Message param = new ParameterizedMessage(pattern, p1, 
p2); private static String marker = "TEST"; private static LogEvent[] events = new LogEvent[] { new LogEvent(null, simple, null), new LogEvent(marker, simple, null), new LogEvent(null, simple, t), new LogEvent(marker, simple, t), new LogEvent(null, object, null), new LogEvent(marker, object, null), new LogEvent(null, object, t), new LogEvent(marker, object, t), new LogEvent(null, param, null), new LogEvent(marker, param, null), new LogEvent(null, simple, null), new LogEvent(null, simple, t), new LogEvent(marker, simple, null), new LogEvent(marker, simple, t), new LogEvent(marker, simple, null), }; @Override public Level getLevel() { return currentLevel; } @Override public boolean isEnabled(final Level level, final Marker marker, final Message data, final Throwable t) { assertTrue("Incorrect Level. Expected " + currentLevel + ", actual " + level, level.equals(currentLevel)); if (marker == null) { if (currentEvent.markerName != null) { fail("Incorrect marker. Expected " + currentEvent.markerName + ", actual is null"); } } else { if (currentEvent.markerName == null) { fail("Incorrect marker. Expected null. Actual is " + marker.getName()); } else { assertTrue("Incorrect marker. Expected " + currentEvent.markerName + ", actual " + marker.getName(), currentEvent.markerName.equals(marker.getName())); } } if (data == null) { if (currentEvent.data != null) { fail("Incorrect message. Expected " + currentEvent.data + ", actual is null"); } } else { if (currentEvent.data == null) { fail("Incorrect message. Expected null. Actual is " + data.getFormattedMessage()); } else { assertTrue("Incorrect message type. Expected " + currentEvent.data + ", actual " + data, data.getClass().isAssignableFrom(currentEvent.data.getClass())); assertTrue("Incorrect message. 
Expected " + currentEvent.data.getFormattedMessage() + ", actual " + data.getFormattedMessage(), currentEvent.data.getFormattedMessage().equals(data.getFormattedMessage())); } } if (t == null) { if (currentEvent.t != null) { fail("Incorrect Throwable. Expected " + currentEvent.t + ", actual is null"); } } else { if (currentEvent.t == null) { fail("Incorrect Throwable. Expected null. Actual is " + t); } else { assertTrue("Incorrect Throwable. Expected " + currentEvent.t + ", actual " + t, currentEvent.t.equals(t)); } } return true; } @Override public boolean isEnabled(final Level level, final Marker marker, final Object data, final Throwable t) { return isEnabled(level, marker, new ObjectMessage(data), t); } @Override public boolean isEnabled(final Level level, final Marker marker, final String data) { return isEnabled(level, marker, new SimpleMessage(data), null); } @Override public boolean isEnabled(final Level level, final Marker marker, final String data, final Object... p1) { return isEnabled(level, marker, new ParameterizedMessage(data, p1), null); } @Override public boolean isEnabled(final Level level, final Marker marker, final String data, final Throwable t) { return isEnabled(level, marker, new SimpleMessage(data), t); } @Override public void logMessage(final String fqcn, final Level level, final Marker marker, final Message data, final Throwable t) { assertTrue("Incorrect Level. Expected " + currentLevel + ", actual " + level, level.equals(currentLevel)); if (marker == null) { if (currentEvent.markerName != null) { fail("Incorrect marker. Expected " + currentEvent.markerName + ", actual is null"); } } else { if (currentEvent.markerName == null) { fail("Incorrect marker. Expected null. Actual is " + marker.getName()); } else { assertTrue("Incorrect marker. Expected " + currentEvent.markerName + ", actual " + marker.getName(), currentEvent.markerName.equals(marker.getName())); } } if (data == null) { if (currentEvent.data != null) { fail("Incorrect message. 
Expected " + currentEvent.data + ", actual is null"); } } else { if (currentEvent.data == null) { fail("Incorrect message. Expected null. Actual is " + data.getFormattedMessage()); } else { assertTrue("Incorrect message type. Expected " + currentEvent.data + ", actual " + data, data.getClass().isAssignableFrom(currentEvent.data.getClass())); assertTrue("Incorrect message. Expected " + currentEvent.data.getFormattedMessage() + ", actual " + data.getFormattedMessage(), currentEvent.data.getFormattedMessage().equals(data.getFormattedMessage())); } } if (t == null) { if (currentEvent.t != null) { fail("Incorrect Throwable. Expected " + currentEvent.t + ", actual is null"); } } else { if (currentEvent.t == null) { fail("Incorrect Throwable. Expected null. Actual is " + t); } else { assertTrue("Incorrect Throwable. Expected " + currentEvent.t + ", actual " + t, currentEvent.t.equals(t)); } } } @Test public void testDebug() { currentLevel = Level.DEBUG; currentEvent = events[0]; debug("Hello"); debug(null, "Hello"); currentEvent = events[1]; debug(MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; debug("Hello", t); debug(null, "Hello", t); currentEvent = events[3]; debug(MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; debug(obj); currentEvent = events[5]; debug(MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; debug(obj, t); debug(null, obj, t); currentEvent = events[7]; debug(MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; debug(pattern, p1, p2); currentEvent = events[9]; debug(MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; debug(simple); debug(null, simple); debug(null, simple, null); currentEvent = events[11]; debug(simple, t); debug(null, simple, t); currentEvent = events[12]; debug(MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; debug(MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; 
debug(MarkerManager.getMarker("TEST"), simple); } @Test public void testError() { currentLevel = Level.ERROR; currentEvent = events[0]; error("Hello"); error(null, "Hello"); currentEvent = events[1]; error(MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; error("Hello", t); error(null, "Hello", t); currentEvent = events[3]; error(MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; error(obj); currentEvent = events[5]; error(MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; error(obj, t); error(null, obj, t); currentEvent = events[7]; error(MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; error(pattern, p1, p2); currentEvent = events[9]; error(MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; error(simple); error(null, simple); error(null, simple, null); currentEvent = events[11]; error(simple, t); error(null, simple, t); currentEvent = events[12]; error(MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; error(MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; error(MarkerManager.getMarker("TEST"), simple); } @Test public void testFatal() { currentLevel = Level.FATAL; currentEvent = events[0]; fatal("Hello"); fatal(null, "Hello"); currentEvent = events[1]; fatal(MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; fatal("Hello", t); fatal(null, "Hello", t); currentEvent = events[3]; fatal(MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; fatal(obj); currentEvent = events[5]; fatal(MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; fatal(obj, t); fatal(null, obj, t); currentEvent = events[7]; fatal(MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; fatal(pattern, p1, p2); currentEvent = events[9]; fatal(MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; fatal(simple); fatal(null, simple); fatal(null, simple, null); currentEvent = 
events[11]; fatal(simple, t); fatal(null, simple, t); currentEvent = events[12]; fatal(MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; fatal(MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; fatal(MarkerManager.getMarker("TEST"), simple); } @Test public void testInfo() { currentLevel = Level.INFO; currentEvent = events[0]; info("Hello"); info(null, "Hello"); currentEvent = events[1]; info(MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; info("Hello", t); info(null, "Hello", t); currentEvent = events[3]; info(MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; info(obj); currentEvent = events[5]; info(MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; info(obj, t); info(null, obj, t); currentEvent = events[7]; info(MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; info(pattern, p1, p2); currentEvent = events[9]; info(MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; info(simple); info(null, simple); info(null, simple, null); currentEvent = events[11]; info(simple, t); info(null, simple, t); currentEvent = events[12]; info(MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; info(MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; info(MarkerManager.getMarker("TEST"), simple); } @Test public void testLogDebug() { currentLevel = Level.DEBUG; currentEvent = events[0]; log(Level.DEBUG, "Hello"); log(Level.DEBUG, null, "Hello"); currentEvent = events[1]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; log(Level.DEBUG, "Hello", t); log(Level.DEBUG, null, "Hello", t); currentEvent = events[3]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; log(Level.DEBUG, obj); currentEvent = events[5]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; log(Level.DEBUG, obj, t); 
log(Level.DEBUG, null, obj, t); currentEvent = events[7]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; log(Level.DEBUG, pattern, p1, p2); currentEvent = events[9]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; log(Level.DEBUG, simple); log(Level.DEBUG, null, simple); log(Level.DEBUG, null, simple, null); currentEvent = events[11]; log(Level.DEBUG, simple, t); log(Level.DEBUG, null, simple, t); currentEvent = events[12]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; log(Level.DEBUG, MarkerManager.getMarker("TEST"), simple); } @Test public void testLogError() { currentLevel = Level.ERROR; currentEvent = events[0]; log(Level.ERROR, "Hello"); log(Level.ERROR, null, "Hello"); currentEvent = events[1]; log(Level.ERROR, MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; log(Level.ERROR, "Hello", t); log(Level.ERROR, null, "Hello", t); currentEvent = events[3]; log(Level.ERROR, MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; log(Level.ERROR, obj); currentEvent = events[5]; log(Level.ERROR, MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; log(Level.ERROR, obj, t); log(Level.ERROR, null, obj, t); currentEvent = events[7]; log(Level.ERROR, MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; log(Level.ERROR, pattern, p1, p2); currentEvent = events[9]; log(Level.ERROR, MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; log(Level.ERROR, simple); log(Level.ERROR, null, simple); log(Level.ERROR, null, simple, null); currentEvent = events[11]; log(Level.ERROR, simple, t); log(Level.ERROR, null, simple, t); currentEvent = events[12]; log(Level.ERROR, MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; log(Level.ERROR, 
MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; log(Level.ERROR, MarkerManager.getMarker("TEST"), simple); } @Test public void testLogFatal() { currentLevel = Level.FATAL; currentEvent = events[0]; log(Level.FATAL, "Hello"); log(Level.FATAL, null, "Hello"); currentEvent = events[1]; log(Level.FATAL, MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; log(Level.FATAL, "Hello", t); log(Level.FATAL, null, "Hello", t); currentEvent = events[3]; log(Level.FATAL, MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; log(Level.FATAL, obj); currentEvent = events[5]; log(Level.FATAL, MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; log(Level.FATAL, obj, t); log(Level.FATAL, null, obj, t); currentEvent = events[7]; log(Level.FATAL, MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; log(Level.FATAL, pattern, p1, p2); currentEvent = events[9]; log(Level.FATAL, MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; log(Level.FATAL, simple); log(Level.FATAL, null, simple); log(Level.FATAL, null, simple, null); currentEvent = events[11]; log(Level.FATAL, simple, t); log(Level.FATAL, null, simple, t); currentEvent = events[12]; log(Level.FATAL, MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; log(Level.FATAL, MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; log(Level.FATAL, MarkerManager.getMarker("TEST"), simple); } @Test public void testLogInfo() { currentLevel = Level.INFO; currentEvent = events[0]; log(Level.INFO, "Hello"); log(Level.INFO, null, "Hello"); currentEvent = events[1]; log(Level.INFO, MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; log(Level.INFO, "Hello", t); log(Level.INFO, null, "Hello", t); currentEvent = events[3]; log(Level.INFO, MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; log(Level.INFO, obj); currentEvent = events[5]; log(Level.INFO, 
MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; log(Level.INFO, obj, t); log(Level.INFO, null, obj, t); currentEvent = events[7]; log(Level.INFO, MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; log(Level.INFO, pattern, p1, p2); currentEvent = events[9]; log(Level.INFO, MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; log(Level.INFO, simple); log(Level.INFO, null, simple); log(Level.INFO, null, simple, null); currentEvent = events[11]; log(Level.INFO, simple, t); log(Level.INFO, null, simple, t); currentEvent = events[12]; log(Level.INFO, MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; log(Level.INFO, MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; log(Level.INFO, MarkerManager.getMarker("TEST"), simple); } @Test public void testLogTrace() { currentLevel = Level.TRACE; currentEvent = events[0]; log(Level.TRACE, "Hello"); log(Level.TRACE, null, "Hello"); currentEvent = events[1]; log(Level.TRACE, MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; log(Level.TRACE, "Hello", t); log(Level.TRACE, null, "Hello", t); currentEvent = events[3]; log(Level.TRACE, MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; log(Level.TRACE, obj); currentEvent = events[5]; log(Level.TRACE, MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; log(Level.TRACE, obj, t); log(Level.TRACE, null, obj, t); currentEvent = events[7]; log(Level.TRACE, MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; log(Level.TRACE, pattern, p1, p2); currentEvent = events[9]; log(Level.TRACE, MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; log(Level.TRACE, simple); log(Level.TRACE, null, simple); log(Level.TRACE, null, simple, null); currentEvent = events[11]; log(Level.TRACE, simple, t); log(Level.TRACE, null, simple, t); currentEvent = events[12]; log(Level.TRACE, MarkerManager.getMarker("TEST"), 
simple, null); currentEvent = events[13]; log(Level.TRACE, MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; log(Level.TRACE, MarkerManager.getMarker("TEST"), simple); } @Test public void testLogWarn() { currentLevel = Level.WARN; currentEvent = events[0]; log(Level.WARN, "Hello"); log(Level.WARN, null, "Hello"); currentEvent = events[1]; log(Level.WARN, MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; log(Level.WARN, "Hello", t); log(Level.WARN, null, "Hello", t); currentEvent = events[3]; log(Level.WARN, MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; log(Level.WARN, obj); currentEvent = events[5]; log(Level.WARN, MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; log(Level.WARN, obj, t); log(Level.WARN, null, obj, t); currentEvent = events[7]; log(Level.WARN, MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; log(Level.WARN, pattern, p1, p2); currentEvent = events[9]; log(Level.WARN, MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; log(Level.WARN, simple); log(Level.WARN, null, simple); log(Level.WARN, null, simple, null); currentEvent = events[11]; log(Level.WARN, simple, t); log(Level.WARN, null, simple, t); currentEvent = events[12]; log(Level.WARN, MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; log(Level.WARN, MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; log(Level.WARN, MarkerManager.getMarker("TEST"), simple); } @Test public void testTrace() { currentLevel = Level.TRACE; currentEvent = events[0]; trace("Hello"); trace(null, "Hello"); currentEvent = events[1]; trace(MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; trace("Hello", t); trace(null, "Hello", t); currentEvent = events[3]; trace(MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; trace(obj); currentEvent = events[5]; trace(MarkerManager.getMarker("TEST"), obj); currentEvent = 
events[6]; trace(obj, t); trace(null, obj, t); currentEvent = events[7]; trace(MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; trace(pattern, p1, p2); currentEvent = events[9]; trace(MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; trace(simple); trace(null, simple); trace(null, simple, null); currentEvent = events[11]; trace(simple, t); trace(null, simple, t); currentEvent = events[12]; trace(MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; trace(MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; trace(MarkerManager.getMarker("TEST"), simple); } @Test public void testWarn() { currentLevel = Level.WARN; currentEvent = events[0]; warn("Hello"); warn(null, "Hello"); currentEvent = events[1]; warn(MarkerManager.getMarker("TEST"), "Hello"); currentEvent = events[2]; warn("Hello", t); warn(null, "Hello", t); currentEvent = events[3]; warn(MarkerManager.getMarker("TEST"), "Hello", t); currentEvent = events[4]; warn(obj); currentEvent = events[5]; warn(MarkerManager.getMarker("TEST"), obj); currentEvent = events[6]; warn(obj, t); warn(null, obj, t); currentEvent = events[7]; warn(MarkerManager.getMarker("TEST"), obj, t); currentEvent = events[8]; warn(pattern, p1, p2); currentEvent = events[9]; warn(MarkerManager.getMarker("TEST"), pattern, p1, p2); currentEvent = events[10]; warn(simple); warn(null, simple); warn(null, simple, null); currentEvent = events[11]; warn(simple, t); warn(null, simple, t); currentEvent = events[12]; warn(MarkerManager.getMarker("TEST"), simple, null); currentEvent = events[13]; warn(MarkerManager.getMarker("TEST"), simple, t); currentEvent = events[14]; warn(MarkerManager.getMarker("TEST"), simple); } }
apache-2.0
erikdubbelboer/druid
processing/src/main/java/io/druid/segment/data/SingleValueIndexedIntsWriter.java
1515
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.segment.data; import io.druid.java.util.common.IAE; import java.io.IOException; public abstract class SingleValueIndexedIntsWriter implements IndexedIntsWriter { @Override public void add(Object obj) throws IOException { if (obj == null) { addValue(0); } else if (obj instanceof Integer) { addValue(((Number) obj).intValue()); } else if (obj instanceof int[]) { int[] vals = (int[]) obj; if (vals.length == 0) { addValue(0); } else { addValue(vals[0]); } } else { throw new IAE("Unsupported single value type: " + obj.getClass()); } } protected abstract void addValue(int val) throws IOException; }
apache-2.0
pleacu/jbpm
jbpm-services/jbpm-executor-cdi/src/test/java/org/jbpm/executor/cdi/ExecutorDatabaseProducer.java
1548
/* * Copyright 2013 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.executor.cdi; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.Produces; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.persistence.PersistenceUnit; import org.jbpm.shared.services.impl.TransactionalCommandService; @ApplicationScoped public class ExecutorDatabaseProducer { private EntityManagerFactory emf; @PersistenceUnit(unitName = "org.jbpm.executor") @ApplicationScoped @Produces public EntityManagerFactory getEntityManagerFactory() { if (this.emf == null) { // this needs to be here for non EE containers this.emf = Persistence.createEntityManagerFactory("org.jbpm.executor"); } return this.emf; } @Produces public TransactionalCommandService produceCommandService(EntityManagerFactory emf) { return new TransactionalCommandService(emf); } }
apache-2.0
smmribeiro/intellij-community
java/java-tests/testData/inspection/inefficientStreamCount/beforePeekFlatMapCount.java
257
// "Replace with 'Stream.mapToLong().sum()'" "true" import java.util.Collection; import java.util.List; class Test { void foo(List<List<String>> s) { long count = s.stream().peek(System.out::println).flatMap(Collection::stream).c<caret>ount(); } }
apache-2.0
zqq90/webit-editor
src/main/java/jsyntaxpane/components/Markers.java
4976
/*
 * Copyright 2008 Ayman Al-Sairafi ayman.alsairafi@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License
 * at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jsyntaxpane.components;

import jsyntaxpane.actions.*;
import java.awt.Color;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultHighlighter;
import javax.swing.text.Highlighter;
import javax.swing.text.JTextComponent;
import jsyntaxpane.SyntaxDocument;
import jsyntaxpane.Token;

/**
 * This class contains static utility methods to make highliting in text
 * components easier.
 *
 * @author Ayman Al-Sairafi
 */
public class Markers {

    // This subclass is used in our highlighting code so that our own
    // highlights can be told apart from (and removed independently of)
    // any other highlights installed on the component.
    public static class SimpleMarker extends DefaultHighlighter.DefaultHighlightPainter {

        public SimpleMarker(Color color) {
            super(color);
        }
    }

    /**
     * Removes only our private highlights
     * This is public so that we can remove the highlights when the editorKit
     * is unregistered.  SimpleMarker can be null, in which case all instances of
     * our Markers are removed.
     *
     * @param component the text component whose markers are to be removed
     * @param marker the SimpleMarker to remove, or null for all SimpleMarkers
     */
    public static void removeMarkers(JTextComponent component, SimpleMarker marker) {
        Highlighter hilite = component.getHighlighter();
        Highlighter.Highlight[] hilites = hilite.getHighlights();

        for (int i = 0; i < hilites.length; i++) {
            // Only touch highlights painted by our own SimpleMarker class.
            if (hilites[i].getPainter() instanceof SimpleMarker) {
                SimpleMarker hMarker = (SimpleMarker) hilites[i].getPainter();
                if (marker == null || hMarker.equals(marker)) {
                    hilite.removeHighlight(hilites[i]);
                }
            }
        }
    }

    /**
     * Remove all the markers from an JEditorPane
     * @param editorPane the pane to clear of our markers
     */
    public static void removeMarkers(JTextComponent editorPane) {
        removeMarkers(editorPane, null);
    }

    /**
     * add highlights for the given Token on the given pane
     * @param pane the component to mark
     * @param token token whose [start, end()) span is highlighted
     * @param marker painter to use
     */
    public static void markToken(JTextComponent pane, Token token, SimpleMarker marker) {
        markText(pane, token.start, token.end(), marker);
    }

    /**
     * add highlights for the given region on the given pane.
     * The current selection is left un-highlighted: if the region overlaps the
     * selection, only the parts outside the selection are painted.
     * @param pane the component to mark
     * @param start region start offset (inclusive)
     * @param end region end offset (exclusive)
     * @param marker painter to use
     */
    public static void markText(JTextComponent pane, int start, int end, SimpleMarker marker) {
        try {
            Highlighter hiliter = pane.getHighlighter();
            int selStart = pane.getSelectionStart();
            int selEnd = pane.getSelectionEnd();
            // if there is no selection or selection does not overlap.
            // FIX: the overlap test must compare start against selEnd (the
            // original compared against selStart, which wrongly skipped
            // highlighting regions that begin inside the selection's span).
            if (selStart == selEnd || end < selStart || start > selEnd) {
                hiliter.addHighlight(start, end, marker);
                return;
            }
            // selection starts within the highlight, highlight before selection
            if (selStart > start && selStart < end) {
                hiliter.addHighlight(start, selStart, marker);
            }
            // selection ends within the highlight, highlight remaining
            if (selEnd > start && selEnd < end) {
                hiliter.addHighlight(selEnd, end, marker);
            }
        } catch (BadLocationException ex) {
            // nothing we can do if the request is out of bound
            LOG.log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Mark all text in the document that matches the given pattern
     * @param pane control to use
     * @param pattern pattern to match
     * @param marker marker to use for highlighting
     */
    public static void markAll(JTextComponent pane, Pattern pattern, SimpleMarker marker) {
        SyntaxDocument sDoc = ActionUtils.getSyntaxDocument(pane);
        if (sDoc == null || pattern == null) {
            return;
        }
        Matcher matcher = sDoc.getMatcher(pattern);
        // we may not have any matcher (due to undo or something, so don't do anything.
        if (matcher == null) {
            return;
        }
        while (matcher.find()) {
            markText(pane, matcher.start(), matcher.end(), marker);
        }
    }

    private static final Logger LOG = Logger.getLogger(Markers.class.getName());
}
bsd-3-clause
timmolter/XChange
xchange-ccex/src/main/java/org/knowm/xchange/ccex/dto/account/CCEXBalance.java
1759
package org.knowm.xchange.ccex.dto.account;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;

/**
 * JSON-mapped C-CEX account balance for a single currency.
 * Field names intentionally mirror the exchange's capitalized JSON keys.
 */
public class CCEXBalance {

  private String Currency;
  private BigDecimal Balance;
  private BigDecimal Available;
  private BigDecimal Pending;
  private String CryptoAddress;

  /**
   * Constructed by Jackson from the exchange response.
   *
   * @param currency currency code
   * @param balance total balance
   * @param available balance available for trading/withdrawal
   * @param pending balance pending confirmation
   * @param cryptoAddress deposit address for the currency
   */
  public CCEXBalance(
      @JsonProperty("Currency") String currency,
      @JsonProperty("Balance") BigDecimal balance,
      @JsonProperty("Available") BigDecimal available,
      @JsonProperty("Pending") BigDecimal pending,
      @JsonProperty("CryptoAddress") String cryptoAddress) {
    super();
    this.Currency = currency;
    this.Balance = balance;
    this.Available = available;
    this.Pending = pending;
    this.CryptoAddress = cryptoAddress;
  }

  public String getCurrency() {
    return this.Currency;
  }

  public void setCurrency(String currency) {
    this.Currency = currency;
  }

  public BigDecimal getBalance() {
    return this.Balance;
  }

  public void setBalance(BigDecimal balance) {
    this.Balance = balance;
  }

  public BigDecimal getAvailable() {
    return this.Available;
  }

  public void setAvailable(BigDecimal available) {
    this.Available = available;
  }

  public BigDecimal getPending() {
    return this.Pending;
  }

  public void setPending(BigDecimal pending) {
    this.Pending = pending;
  }

  public String getCryptoAddress() {
    return this.CryptoAddress;
  }

  public void setCryptoAddress(String cryptoAddress) {
    this.CryptoAddress = cryptoAddress;
  }

  @Override
  public String toString() {
    return "CCEXBalance [Currency="
        + Currency
        + ", Balance="
        + Balance
        + ", Available="
        + Available
        + ", Pending="
        + Pending
        + ", CryptoAddress="
        + CryptoAddress
        + "]";
  }
}
mit
sgilda/windup
config/api/src/main/java/org/jboss/windup/config/phase/FinalizePhase.java
892
package org.jboss.windup.config.phase;

import org.jboss.windup.config.AbstractRuleProvider;
import org.ocpsoft.rewrite.config.Rule;

/**
 * Previous: {@link PostReportRenderingPhase}<br/>
 * Next: {@link PostFinalizePhase}
 *
 * <p>
 * Executed at the end of a run. {@link Rule}s placed in this phase clean up any resources
 * that {@link Rule}s from earlier {@link AbstractRuleProvider}s may have left open.
 * </p>
 *
 * @author <a href="mailto:jesse.sightler@gmail.com">Jesse Sightler</a>
 */
public class FinalizePhase extends RulePhase {

    public FinalizePhase() {
        // Identify this phase by its own class.
        super(FinalizePhase.class);
    }

    /** Runs after report rendering has finished. */
    @Override
    public Class<? extends RulePhase> getExecuteAfter() {
        return PostReportRenderingPhase.class;
    }

    /** No fixed successor; ordering is driven by getExecuteAfter() of later phases. */
    @Override
    public Class<? extends RulePhase> getExecuteBefore() {
        return null;
    }
}
epl-1.0
ferrybig/Enderstone
src/org/jnbt/FloatTag.java
2294
package org.jnbt;

/*
 * JNBT License
 *
 * Copyright (c) 2010 Graham Edgecombe
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *       * Redistributions of source code must retain the above copyright notice,
 *         this list of conditions and the following disclaimer.
 *
 *       * Redistributions in binary form must reproduce the above copyright
 *         notice, this list of conditions and the following disclaimer in the
 *         documentation and/or other materials provided with the distribution.
 *
 *       * Neither the name of the JNBT team nor the names of its
 *         contributors may be used to endorse or promote products derived from
 *         this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * The <code>TAG_Float</code> tag: an immutable tag holding one float value.
 *
 * @author Graham Edgecombe
 */
public final class FloatTag extends Tag {

    /** The wrapped float value. */
    private final float value;

    /**
     * Creates the tag.
     *
     * @param name The name.
     * @param value The value.
     */
    public FloatTag(String name, float value) {
        super(name);
        this.value = value;
    }

    @Override
    public Float getValue() {
        return value;
    }

    @Override
    public String toString() {
        final String name = getName();
        // Unnamed tags (null or empty name) omit the ("name") suffix.
        final String suffix = (name == null || name.isEmpty()) ? "" : "(\"" + name + "\")";
        return "TAG_Float" + suffix + ": " + value;
    }
}
gpl-3.0
kwf2030/doclava
src/main/java/com/google/doclava/ParsedTagInfo.java
1692
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *          http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.doclava;

import java.util.ArrayList;

/**
 * A {@link TagInfo} whose text is itself a doc comment, parsed lazily into
 * nested tags on first access.
 */
public class ParsedTagInfo extends TagInfo {
  // Container the comment text is resolved against (for link resolution etc.).
  private ContainerInfo mContainer;
  private String mCommentText;
  // Lazily built from mCommentText; null until commentTags() is first called.
  private Comment mComment;

  ParsedTagInfo(String name, String kind, String text, ContainerInfo base, SourcePositionInfo sp) {
    super(name, kind, text, SourcePositionInfo.findBeginning(sp, text));
    mContainer = base;
    mCommentText = text;
  }

  /** Parses the comment text on first call and returns its nested tags. */
  public TagInfo[] commentTags() {
    if (mComment == null) {
      mComment = new Comment(mCommentText, mContainer, position());
    }
    return mComment.tags();
  }

  /** Replaces the raw comment text; takes effect only before the first parse. */
  protected void setCommentText(String comment) {
    mCommentText = comment;
  }

  /**
   * Flattens the comment tags of every given ParsedTagInfo into one array,
   * preserving order.
   */
  public static <T extends ParsedTagInfo> TagInfo[] joinTags(T[] tags) {
    ArrayList<TagInfo> list = new ArrayList<TagInfo>();
    for (T tag : tags) {
      for (TagInfo inner : tag.commentTags()) {
        list.add(inner);
      }
    }
    return list.toArray(new TagInfo[list.size()]);
  }
}
apache-2.0
aditya1105/gobblin
gobblin-data-management/src/main/java/org/apache/gobblin/data/management/retention/action/AccessControlAction.java
6736
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.gobblin.data.management.retention.action;

import java.io.IOException;
import java.util.List;

import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.typesafe.config.Config;

import org.apache.gobblin.data.management.policy.VersionSelectionPolicy;
import org.apache.gobblin.data.management.version.DatasetVersion;
import org.apache.gobblin.data.management.version.FileStatusAware;
import org.apache.gobblin.data.management.version.FileSystemDatasetVersion;
import org.apache.gobblin.util.ConfigUtils;

/**
 * A {@link RetentionAction} that is used to change the permissions/owner/group of a
 * {@link FileSystemDatasetVersion}
 */
@Slf4j
public class AccessControlAction extends RetentionAction {

  /**
   * Optional - The permission mode to set on selected versions either in octal or symbolic format.
   * E.g 750
   */
  private static final String MODE_KEY = "mode";
  /**
   * Optional - The owner to set on selected versions
   */
  private static final String OWNER_KEY = "owner";
  /**
   * Optional - The group to set on selected versions
   */
  private static final String GROUP_KEY = "group";

  // Each is absent when the corresponding key is not present in the action config.
  private final Optional<FsPermission> permission;
  private final Optional<String> owner;
  private final Optional<String> group;

  @VisibleForTesting
  @Getter
  private final VersionSelectionPolicy<DatasetVersion> selectionPolicy;

  @VisibleForTesting
  AccessControlAction(Config actionConfig, FileSystem fs, Config jobConfig) {
    super(actionConfig, fs, jobConfig);
    this.permission =
        actionConfig.hasPath(MODE_KEY) ? Optional.of(new FsPermission(actionConfig.getString(MODE_KEY)))
            : Optional.<FsPermission> absent();
    this.owner = Optional.fromNullable(ConfigUtils.getString(actionConfig, OWNER_KEY, null));
    this.group = Optional.fromNullable(ConfigUtils.getString(actionConfig, GROUP_KEY, null));
    this.selectionPolicy = createSelectionPolicy(actionConfig, jobConfig);
  }

  /**
   * Applies {@link #selectionPolicy} on <code>allVersions</code> and modifies permission/owner to
   * the selected {@link DatasetVersion}s where necessary.
   * <p>
   * This action only available for {@link FileSystemDatasetVersion}. It simply skips the operation
   * if a different type of {@link DatasetVersion} is passed.
   * </p>
   * {@inheritDoc}
   * @see org.apache.gobblin.data.management.retention.action.RetentionAction#execute(java.util.List)
   */
  @Override
  public void execute(List<DatasetVersion> allVersions) throws IOException {
    // Select version on which access control actions need to performed
    for (DatasetVersion datasetVersion : this.selectionPolicy.listSelectedVersions(allVersions)) {
      executeOnVersion(datasetVersion);
    }
  }

  private void executeOnVersion(DatasetVersion datasetVersion) throws IOException {
    // Perform action if it is a FileSystemDatasetVersion
    if (datasetVersion instanceof FileSystemDatasetVersion) {
      FileSystemDatasetVersion fsDatasetVersion = (FileSystemDatasetVersion) datasetVersion;
      // If the version is filestatus aware, use the filestatus to ignore permissions update when
      // the path already has the desired permissions
      if (datasetVersion instanceof FileStatusAware) {
        for (FileStatus fileStatus : ((FileStatusAware) datasetVersion).getFileStatuses()) {
          if (needsPermissionsUpdate(fileStatus) || needsOwnerUpdate(fileStatus) || needsGroupUpdate(fileStatus)) {
            updatePermissionsAndOwner(fileStatus.getPath());
          }
        }
      } else {
        for (Path path : fsDatasetVersion.getPaths()) {
          updatePermissionsAndOwner(path);
        }
      }
    }
  }

  private boolean needsPermissionsUpdate(FileStatus fileStatus) {
    return this.permission.isPresent() && !this.permission.get().equals(fileStatus.getPermission());
  }

  private boolean needsOwnerUpdate(FileStatus fileStatus) {
    return this.owner.isPresent() && !StringUtils.equals(owner.get(), fileStatus.getOwner());
  }

  private boolean needsGroupUpdate(FileStatus fileStatus) {
    return this.group.isPresent() && !StringUtils.equals(group.get(), fileStatus.getGroup());
  }

  /**
   * Applies the configured permission and owner/group changes to {@code path}.
   * Each operation is attempted independently and failures are logged; if any
   * operation fails, a RuntimeException is thrown after both have been tried so
   * one failure cannot silently mask the other.
   */
  private void updatePermissionsAndOwner(Path path) throws IOException {
    boolean atLeastOneOperationFailed = false;
    if (this.fs.exists(path)) {
      try {
        // Update permissions if set in config
        if (this.permission.isPresent()) {
          if (!this.isSimulateMode) {
            this.fs.setPermission(path, this.permission.get());
            log.debug("Set permissions for {} to {}", path, this.permission.get());
          } else {
            log.info("Simulating set permissions for {} to {}", path, this.permission.get());
          }
        }
      } catch (IOException e) {
        log.error(String.format("Setting permissions failed on %s", path), e);
        atLeastOneOperationFailed = true;
      }

      try {
        // Update owner and group if set in config.
        // FIX: previously this was not wrapped in try/catch, so an owner/group
        // failure propagated as a raw IOException and skipped the aggregate
        // error below (also dropping a recorded permission failure).
        if (this.owner.isPresent() || this.group.isPresent()) {
          if (!this.isSimulateMode) {
            this.fs.setOwner(path, this.owner.orNull(), this.group.orNull());
            log.debug("Set owner and group for {} to {}:{}", path, this.owner.orNull(), this.group.orNull());
          } else {
            log.info("Simulating set owner and group for {} to {}:{}", path, this.owner.orNull(), this.group.orNull());
          }
        }
      } catch (IOException e) {
        log.error(String.format("Setting owner and group failed on %s", path), e);
        atLeastOneOperationFailed = true;
      }

      if (atLeastOneOperationFailed) {
        throw new RuntimeException(String.format(
            "At least one failure happened while processing %s. Look for previous logs for failures", path));
      }
    }
  }
}
apache-2.0
akuznetsov-gridgain/ignite
modules/core/src/test/java/org/apache/ignite/spi/checkpoint/jdbc/JdbcCheckpointSpiCustomConfigSelfTest.java
1829
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.spi.checkpoint.jdbc;

import org.apache.ignite.spi.checkpoint.*;
import org.apache.ignite.testframework.junits.spi.*;
import org.hsqldb.jdbc.*;

/**
 * Grid jdbc checkpoint SPI custom config self test.
 */
@GridSpiTest(spi = JdbcCheckpointSpi.class, group = "Checkpoint SPI")
public class JdbcCheckpointSpiCustomConfigSelfTest extends
    GridCheckpointSpiAbstractTest<JdbcCheckpointSpi> {
    /** {@inheritDoc} */
    // Overrides every default table/column name to verify the SPI honors a
    // fully custom schema configuration against an in-memory HSQLDB instance.
    @Override protected void spiConfigure(JdbcCheckpointSpi spi) throws Exception {
        // In-memory database unique to this test class; lowercase class name
        // is HSQLDB's DataSource implementation.
        jdbcDataSource ds = new jdbcDataSource();

        ds.setDatabase("jdbc:hsqldb:mem:gg_test_" + getClass().getSimpleName());
        ds.setUser("sa");
        ds.setPassword("");

        spi.setDataSource(ds);
        spi.setCheckpointTableName("custom_config_checkpoints");
        spi.setKeyFieldName("key");
        spi.setValueFieldName("value");
        spi.setValueFieldType("longvarbinary");
        spi.setExpireDateFieldName("expire_date");

        super.spiConfigure(spi);
    }
}
apache-2.0
iamthearm/bazel
src/main/java/com/google/devtools/build/lib/analysis/config/ConfigurationFactory.java
5071
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.config;

import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.ConfigurationCollectionFactory;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.Fragment;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.util.Preconditions;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.Nullable;

/**
 * A factory class for {@link BuildConfiguration} instances. This is unfortunately more complex,
 * and should be simplified in the future, if
 * possible. Right now, creating a {@link BuildConfiguration} instance involves
 * creating the instance itself and the related configurations; the main method
 * is {@link #createConfigurations}.
 *
 * <p>Avoid calling into this class, and instead use the skyframe infrastructure to obtain
 * configuration instances.
 *
 * <p>Blaze currently relies on the fact that all {@link BuildConfiguration}
 * instances used in a build can be constructed ahead of time by this class.
 */
@ThreadCompatible // safe as long as separate instances are used
public final class ConfigurationFactory {
  // Factories for every configuration fragment this instance knows how to build.
  private final List<ConfigurationFragmentFactory> configurationFragmentFactories;
  // Delegate that knows how to assemble the full configuration collection.
  private final ConfigurationCollectionFactory configurationCollectionFactory;

  public ConfigurationFactory(
      ConfigurationCollectionFactory configurationCollectionFactory,
      ConfigurationFragmentFactory... fragmentFactories) {
    // Varargs convenience overload; delegates to the List-based constructor.
    this(configurationCollectionFactory, ImmutableList.copyOf(fragmentFactories));
  }

  public ConfigurationFactory(
      ConfigurationCollectionFactory configurationCollectionFactory,
      List<ConfigurationFragmentFactory> fragmentFactories) {
    this.configurationCollectionFactory =
        Preconditions.checkNotNull(configurationCollectionFactory);
    // Defensive immutable copy so later mutation of the caller's list has no effect.
    this.configurationFragmentFactories = ImmutableList.copyOf(fragmentFactories);
  }

  /**
   * Creates a set of build configurations with top-level configuration having the given options.
   *
   * <p>The rest of the configurations are created based on the set of transitions available.
   */
  @Nullable
  public BuildConfiguration createConfigurations(
      Cache<String, BuildConfiguration> cache,
      PackageProviderForConfigurations loadedPackageProvider,
      BuildOptions buildOptions,
      EventHandler errorEventListener)
      throws InvalidConfigurationException, InterruptedException {
    // Full collection creation is delegated; this factory is passed along so the
    // delegate can call back into getConfiguration() for individual instances.
    return configurationCollectionFactory.createConfigurations(this, cache,
        loadedPackageProvider, buildOptions, errorEventListener);
  }

  /**
   * Returns a {@link com.google.devtools.build.lib.analysis.config.BuildConfiguration} based on the
   * given set of build options.
   *
   * <p>If the configuration has already been created, re-uses it, otherwise, creates a new one.
   */
  @Nullable
  public BuildConfiguration getConfiguration(
      PackageProviderForConfigurations loadedPackageProvider,
      BuildOptions buildOptions,
      boolean actionsDisabled,
      Cache<String, BuildConfiguration> cache)
      throws InvalidConfigurationException, InterruptedException {
    // The options' cache key uniquely identifies a configuration.
    String cacheKey = buildOptions.computeCacheKey();
    BuildConfiguration result = cache.getIfPresent(cacheKey);
    if (result != null) {
      return result;
    }

    Map<Class<? extends Fragment>, Fragment> fragments = new HashMap<>();
    // Create configuration fragments
    for (ConfigurationFragmentFactory factory : configurationFragmentFactories) {
      Class<? extends Fragment> fragmentType = factory.creates();
      // May return null when the underlying skyframe values are not yet available.
      Fragment fragment = loadedPackageProvider.getFragment(buildOptions, fragmentType);
      if (fragment != null && fragments.get(fragment.getClass()) == null) {
        fragments.put(fragment.getClass(), fragment);
      }
    }
    BlazeDirectories directories = loadedPackageProvider.getDirectories();
    if (loadedPackageProvider.valuesMissing()) {
      // Skyframe restart protocol: null tells the caller to retry once values are computed.
      return null;
    }

    result = new BuildConfiguration(directories, fragments, buildOptions, actionsDisabled);
    cache.put(cacheKey, result);
    return result;
  }

  /** Returns the immutable list of fragment factories this instance was built with. */
  public List<ConfigurationFragmentFactory> getFactories() {
    return configurationFragmentFactories;
  }
}
apache-2.0
trekawek/jackrabbit-oak
oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobGCTest.java
2161
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.document.blob.ds;

import java.util.Date;

import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.MongoBlobGCTest;
import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;

/**
 * Test for MongoMK GC with {@link DataStoreBlobStore}
 */
public class MongoDataStoreBlobGCTest extends MongoBlobGCTest {
    // Timestamp captured before the connection is set up, available to subclasses.
    protected Date startDate;

    // Blob store under test, created fresh per test in setUpConnection().
    protected DataStoreBlobStore blobStore;

    // Skips the whole class when no data store is configured in the environment
    // (either the store is null or probing it throws).
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        try {
            Assume.assumeNotNull(DataStoreUtils.getBlobStore());
        } catch (Exception e) {
            Assume.assumeNoException(e);
        }
    }

    // Plugs this test's blob store into the DocumentMK builder used by the base class.
    @Override
    protected DocumentMK.Builder addToBuilder(DocumentMK.Builder mk) {
        return super.addToBuilder(mk).setBlobStore(blobStore);
    }

    @Before
    @Override
    public void setUpConnection() throws Exception {
        startDate = new Date();
        // Back the store with a temp folder so each test run starts clean.
        blobStore = DataStoreUtils.getBlobStore(folder.newFolder());
        super.setUpConnection();
    }
}
apache-2.0
apache/drill
exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java
2373
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.physical.impl.validate;

import java.util.List;

import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.ops.ExecutorFragmentContext;
import org.apache.drill.exec.physical.config.IteratorValidator;
import org.apache.drill.exec.physical.impl.BatchCreator;
import org.apache.drill.exec.record.RecordBatch;

import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;

/**
 * Creates {@link IteratorValidatorBatchIterator} wrappers around a single
 * upstream batch, optionally enabling vector validation when the session
 * option or config flag requests it.
 */
public class IteratorValidatorCreator implements BatchCreator<IteratorValidator>{
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(IteratorValidatorCreator.class);

  @Override
  public IteratorValidatorBatchIterator getBatch(ExecutorFragmentContext context, IteratorValidator config,
                                                 List<RecordBatch> children)
      throws ExecutionSetupException {
    // A validator wraps exactly one upstream operator.
    Preconditions.checkArgument(children.size() == 1);
    final RecordBatch child = children.iterator().next();
    final IteratorValidatorBatchIterator validator =
        new IteratorValidatorBatchIterator(child, config.isRepeatable);

    // Vector validation is on when either the session option or the boot config enables it.
    final boolean validateBatches =
        context.getOptions().getOption(ExecConstants.ENABLE_VECTOR_VALIDATOR)
            || context.getConfig().getBoolean(ExecConstants.ENABLE_VECTOR_VALIDATION);
    validator.enableBatchValidation(validateBatches);

    logger.trace("Iterator validation enabled for " + child.getClass().getSimpleName()
        + (validateBatches ? " with vector validation" : ""));
    return validator;
  }
}
apache-2.0
stuartwdouglas/undertow
core/src/main/java/io/undertow/predicate/IdempotentPredicate.java
2348
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.undertow.predicate;

import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import io.undertow.server.HttpServerExchange;
import io.undertow.util.HttpString;
import io.undertow.util.Methods;

/**
 * A predicate that returns true if the request is idempotent
 * according to the HTTP RFC (GET, HEAD, PUT, DELETE, OPTIONS).
 *
 * @author Stuart Douglas
 */
public class IdempotentPredicate implements Predicate {

    public static final IdempotentPredicate INSTANCE = new IdempotentPredicate();

    /** Immutable set of the HTTP methods the RFC defines as idempotent. */
    private static final Set<HttpString> METHODS;

    static {
        final Set<HttpString> idempotent = new HashSet<>();
        idempotent.add(Methods.GET);
        idempotent.add(Methods.HEAD);
        idempotent.add(Methods.PUT);
        idempotent.add(Methods.DELETE);
        idempotent.add(Methods.OPTIONS);
        METHODS = Collections.unmodifiableSet(idempotent);
    }

    @Override
    public boolean resolve(final HttpServerExchange value) {
        return METHODS.contains(value.getRequestMethod());
    }

    /** DSL builder for the parameterless "idempotent" predicate. */
    public static class Builder implements PredicateBuilder {

        @Override
        public String name() {
            return "idempotent";
        }

        @Override
        public Map<String, Class<?>> parameters() {
            // This predicate takes no parameters.
            return Collections.emptyMap();
        }

        @Override
        public Set<String> requiredParameters() {
            return Collections.emptySet();
        }

        @Override
        public String defaultParameter() {
            return null;
        }

        @Override
        public Predicate build(final Map<String, Object> config) {
            // Stateless, so the shared singleton is always returned.
            return INSTANCE;
        }
    }
}
apache-2.0
nikhilvibhav/camel
components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/MyCxfCustomerConverter.java
1868
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.cxf.converter;

import javax.xml.transform.TransformerException;

import org.w3c.dom.Element;

import org.apache.camel.Converter;
import org.apache.camel.component.cxf.CxfPayload;
import org.apache.camel.converter.jaxp.DomConverter;

// This converter is used to show how to override the CxfPayload default toString converter
@Converter
public final class MyCxfCustomerConverter {

    private MyCxfCustomerConverter() {
        //Helper class
    }

    /**
     * Serializes every body element of the payload to XML text and
     * concatenates the results; elements that fail DOM serialization fall
     * back to their toString() representation.
     */
    @Converter
    public static String cxfPayloadToString(final CxfPayload<?> payload) {
        final DomConverter converter = new DomConverter();
        final StringBuilder buf = new StringBuilder();
        for (final Object element : payload.getBody()) {
            String xml;
            try {
                xml = converter.toString((Element) element, null);
            } catch (TransformerException e) {
                // Best effort: keep going with the element's default representation.
                xml = element.toString();
            }
            buf.append(xml);
        }
        return buf.toString();
    }
}
apache-2.0
jruchcolo/rice-cd
rice-framework/krad-app-framework/src/main/java/org/kuali/rice/krad/util/BeanPropertyComparator.java
6807
/**
 * Copyright 2005-2015 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.krad.util;

import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.collections.comparators.ComparableComparator;
import org.kuali.rice.core.api.exception.KualiException;
import org.kuali.rice.core.api.util.type.TypeUtils;

import java.beans.PropertyDescriptor;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

/**
 * BeanPropertyComparator compares two beans by a sequence of property names, read
 * reflectively via commons-beanutils. Properties are compared in list order; the
 * first property on which the beans differ decides the result.
 */
public class BeanPropertyComparator implements Comparator, Serializable {
    private static final long serialVersionUID = -2675700473766186018L;

    // true when String properties are compared case-insensitively
    boolean ignoreCase;

    // ordered, immutable list of property names used for comparison
    private List propertyNames;
    // comparator used for String-typed properties (case-sensitivity depends on ignoreCase)
    private Comparator stringComparator;
    // comparator used for Boolean-typed properties (FALSE sorts before TRUE)
    private Comparator booleanComparator;
    // comparator used for every other property type (requires Comparable values)
    private Comparator genericComparator;

    /**
     * Constructs a PropertyComparator for comparing beans using the properties named in the given List.
     *
     * <p>Properties are compared in the order in which they are listed. Case is ignored
     * in String comparisons. Note: a null or empty list is rejected with an
     * IllegalArgumentException by the delegated constructor.</p>
     *
     * @param propertyNames List of property names (as Strings) used to compare beans
     */
    public BeanPropertyComparator(List propertyNames) {
        this(propertyNames, true);
    }

    /**
     * Constructs a PropertyComparator for comparing beans using the properties named in the given List.
     *
     * <p>Properties will be compared in the order in which they are listed. Case will be
     * ignored if ignoreCase is true.</p>
     *
     * @param propertyNames List of property names (as Strings) used to compare beans
     * @param ignoreCase if true, case will be ignored during String comparisons
     * @throws IllegalArgumentException if propertyNames is null or empty
     */
    public BeanPropertyComparator(List propertyNames, boolean ignoreCase) {
        if (propertyNames == null) {
            throw new IllegalArgumentException("invalid (null) propertyNames list");
        }
        if (propertyNames.size() == 0) {
            throw new IllegalArgumentException("invalid (empty) propertyNames list");
        }
        this.propertyNames = Collections.unmodifiableList(propertyNames);
        this.ignoreCase = ignoreCase;

        if (ignoreCase) {
            this.stringComparator = String.CASE_INSENSITIVE_ORDER;
        }
        else {
            this.stringComparator = ComparableComparator.getInstance();
        }

        // Boolean comparator ordering FALSE before TRUE; equal booleans compare as 0.
        this.booleanComparator = new Comparator() {
            public int compare(Object o1, Object o2) {
                int compared = 0;

                Boolean b1 = (Boolean) o1;
                Boolean b2 = (Boolean) o2;

                if (!b1.equals(b2)) {
                    if (b1.equals(Boolean.FALSE)) {
                        compared = -1;
                    }
                    else {
                        compared = 1;
                    }
                }

                return compared;
            }
        };

        this.genericComparator = ComparableComparator.getInstance();
    }

    /**
     * Compare two JavaBeans by the properties given to the constructor.
     *
     * @param o1 Object The first bean to get data from to compare against
     * @param o2 Object The second bean to get data from to compare
     * @return int negative or positive based on order
     * @throws BeanComparisonException if a property cannot be found or read on either bean
     */
    public int compare(Object o1, Object o2) {
        int compared = 0;

        try {
            // Walk the property list until a property orders the beans (compared != 0).
            for (Iterator i = propertyNames.iterator(); (compared == 0) && i.hasNext();) {
                String currentProperty = i.next().toString();

                // choose appropriate comparator based on the declared property type of o1
                Comparator currentComparator = null;
                try {
                    PropertyDescriptor propertyDescriptor = PropertyUtils.getPropertyDescriptor(o1, currentProperty);
                    Class propertyClass = propertyDescriptor.getPropertyType();
                    if (propertyClass.equals(String.class)) {
                        currentComparator = this.stringComparator;
                    }
                    else if (TypeUtils.isBooleanClass(propertyClass)) {
                        currentComparator = this.booleanComparator;
                    }
                    else {
                        currentComparator = this.genericComparator;
                    }
                }
                catch (NullPointerException e) {
                    // getPropertyDescriptor returns null for unknown properties; the NPE
                    // from dereferencing it is translated into a domain exception here.
                    throw new BeanComparisonException("unable to find property '" + o1.getClass().getName() + "." + currentProperty + "'", e);
                }

                // compare the values
                Object value1 = PropertyUtils.getProperty(o1, currentProperty);
                Object value2 = PropertyUtils.getProperty(o2, currentProperty);
                /* Fix for KULRICE-5170 : BeanPropertyComparator throws exception when a null value is found in sortable non-string data type column */
                // NOTE(review): these returns short-circuit the remaining properties — when both
                // values are null the beans are reported equal even if a later property differs.
                // This is the behavior introduced by the KULRICE-5170 fix; confirm before changing.
                if ( value1 == null && value2 == null)
                    return 0;
                else if ( value1 == null)
                    return -1;
                else if ( value2 == null )
                    return 1;
                /* End KULRICE-5170 Fix*/
                compared = currentComparator.compare(value1, value2);
            }
        }
        catch (IllegalAccessException e) {
            throw new BeanComparisonException("unable to compare property values", e);
        }
        catch (NoSuchMethodException e) {
            throw new BeanComparisonException("unable to compare property values", e);
        }
        catch (InvocationTargetException e) {
            throw new BeanComparisonException("unable to compare property values", e);
        }

        return compared;
    }

    /** Wraps reflection failures raised while comparing bean properties. */
    public static class BeanComparisonException extends KualiException {
        private static final long serialVersionUID = 2622379680100640029L;

        /**
         * @param message
         * @param t
         */
        public BeanComparisonException(String message, Throwable t) {
            super(message, t);
        }
    }
}
apache-2.0
apache/openejb
container/openejb-core/src/test/java/org/apache/openejb/core/mdb/MdbInvoker.java
5024
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.openejb.core.mdb;

import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Session;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.TreeMap;

/**
 * Test helper that turns JMS ObjectMessages into reflective method calls on a
 * target object and sends the result (or thrown cause) back on the message's
 * JMSReplyTo destination, correlated by JMSCorrelationID.
 */
public class MdbInvoker implements MessageListener {
    // method signature string (via MdbUtil.getSignature) -> Method, for dispatch
    private final Map<String, Method> signatures = new TreeMap<String, Method>();
    // object whose methods are invoked on behalf of incoming requests
    private final Object target;
    private Connection connection;
    private Session session;
    private ConnectionFactory connectionFactory;

    /**
     * Indexes all public methods of the target so incoming requests can be
     * dispatched by signature string.
     *
     * @param connectionFactory factory used to open a connection per request
     * @param target object to invoke methods on
     * @throws JMSException declared for callers; not thrown by this constructor itself
     */
    public MdbInvoker(ConnectionFactory connectionFactory, Object target) throws JMSException {
        this.target = target;
        this.connectionFactory = connectionFactory;
        for (Method method : target.getClass().getMethods()) {
            String signature = MdbUtil.getSignature(method);
            signatures.put(signature, method);
        }
    }

    /** Closes and clears the current session and connection (idempotent). */
    public synchronized void destroy() {
        MdbUtil.close(session);
        session = null;
        MdbUtil.close(connection);
        connection = null;
    }

    // NOTE(review): this always creates a fresh connection and session, overwriting
    // any existing ones without closing them first. It never returns an existing
    // session despite the null check at the call site. onMessage() calls destroy()
    // in its finally block, so in practice one connection exists per message —
    // confirm this per-message lifecycle is intended before reusing this class.
    private synchronized Session getSession() throws JMSException {
        connection = connectionFactory.createConnection();
        connection.start();
        session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        return session;
    }

    /**
     * Handles one request: deserializes the {"method": signature, "args": Object[]}
     * map, invokes the matching target method, and replies with a map containing
     * "return" (result or cause) and, on failure, "exception" = "true".
     */
    public void onMessage(Message message) {
        // silently ignore anything that is not an ObjectMessage
        if (!(message instanceof ObjectMessage)) return;
        try {
            Session session = getSession();
            if (session == null) throw new IllegalStateException("Invoker has been destroyed");
            // NOTE(review): message was already checked non-ObjectMessage above, so the
            // next two guards are unreachable; kept as defensive checks.
            if (message == null) throw new NullPointerException("request message is null");
            if (!(message instanceof ObjectMessage)) throw new IllegalArgumentException("Expected a ObjectMessage request but got a " + message.getClass().getName());
            ObjectMessage objectMessage = (ObjectMessage) message;
            Serializable object = objectMessage.getObject();
            if (object == null) throw new NullPointerException("object in ObjectMessage is null");
            if (!(object instanceof Map)) {
                if (message instanceof ObjectMessage) throw new IllegalArgumentException("Expected a Map contained in the ObjectMessage request but got a " + object.getClass().getName());
            }
            Map request = (Map) object;

            // look up the target method by its signature string
            String signature = (String) request.get("method");
            Method method = signatures.get(signature);
            Object[] args = (Object[]) request.get("args");

            boolean exception = false;
            Object result = null;
            try {
                result = method.invoke(target, args);
            } catch (IllegalAccessException e) {
                result = e;
                exception = true;
            } catch (InvocationTargetException e) {
                // report the underlying cause to the caller, not the reflection wrapper
                result = e.getCause();
                if (result == null) result = e;
                exception = true;
            }

            MessageProducer producer = null;
            try {
                // create response
                Map<String, Object> response = new TreeMap<String, Object>();
                if (exception) {
                    response.put("exception", "true");
                }
                response.put("return", result);

                // create response message
                ObjectMessage resMessage = session.createObjectMessage();
                resMessage.setJMSCorrelationID(objectMessage.getJMSCorrelationID());
                resMessage.setObject((Serializable) response);

                // send response message
                producer = session.createProducer(objectMessage.getJMSReplyTo());
                producer.send(resMessage);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                MdbUtil.close(producer);
                // tears down the connection created by getSession() for this message
                destroy();
            }
        } catch (Throwable e) {
            // test helper: log and swallow so the listener thread survives
            e.printStackTrace();
        }
    }
}
apache-2.0
dawidmalina/pinpoint
collector/src/main/java/com/navercorp/pinpoint/collector/cluster/route/DefaultRouteHandler.java
4382
/* * Copyright 2014 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.collector.cluster.route; import com.navercorp.pinpoint.collector.cluster.ClusterPointLocator; import com.navercorp.pinpoint.collector.cluster.TargetClusterPoint; import com.navercorp.pinpoint.collector.cluster.route.filter.RouteFilter; import com.navercorp.pinpoint.rpc.Future; import com.navercorp.pinpoint.rpc.ResponseMessage; import com.navercorp.pinpoint.thrift.dto.command.TCommandTransferResponse; import com.navercorp.pinpoint.thrift.dto.command.TRouteResult; import com.navercorp.pinpoint.thrift.io.TCommandTypeVersion; import org.apache.thrift.TBase; /** * @author koo.taejin * @author HyunGil Jeong */ public class DefaultRouteHandler extends AbstractRouteHandler<RequestEvent> { private final RouteFilterChain<RequestEvent> requestFilterChain; private final RouteFilterChain<ResponseEvent> responseFilterChain; public DefaultRouteHandler(ClusterPointLocator<TargetClusterPoint> targetClusterPointLocator, RouteFilterChain<RequestEvent> requestFilterChain, RouteFilterChain<ResponseEvent> responseFilterChain) { super(targetClusterPointLocator); this.requestFilterChain = requestFilterChain; this.responseFilterChain = responseFilterChain; } @Override public void addRequestFilter(RouteFilter<RequestEvent> filter) { this.requestFilterChain.addLast(filter); } @Override public void addResponseFilter(RouteFilter<ResponseEvent> filter) { this.responseFilterChain.addLast(filter); } 
@Override public TCommandTransferResponse onRoute(RequestEvent event) { requestFilterChain.doEvent(event); TCommandTransferResponse routeResult = onRoute0(event); responseFilterChain.doEvent(new ResponseEvent(event, event.getRequestId(), routeResult)); return routeResult; } private TCommandTransferResponse onRoute0(RequestEvent event) { TBase<?,?> requestObject = event.getRequestObject(); if (requestObject == null) { return createResponse(TRouteResult.EMPTY_REQUEST); } TargetClusterPoint clusterPoint = findClusterPoint(event.getDeliveryCommand()); if (clusterPoint == null) { return createResponse(TRouteResult.NOT_FOUND); } TCommandTypeVersion commandVersion = TCommandTypeVersion.getVersion(clusterPoint.gerVersion()); if (!commandVersion.isSupportCommand(requestObject)) { return createResponse(TRouteResult.NOT_SUPPORTED_REQUEST); } Future<ResponseMessage> future = clusterPoint.request(event.getDeliveryCommand().getPayload()); boolean isCompleted = future.await(); if (!isCompleted) { return createResponse(TRouteResult.TIMEOUT); } ResponseMessage responseMessage = future.getResult(); if (responseMessage == null) { return createResponse(TRouteResult.EMPTY_RESPONSE); } byte[] responsePayload = responseMessage.getMessage(); if (responsePayload == null || responsePayload.length == 0) { return createResponse(TRouteResult.EMPTY_RESPONSE, new byte[0]); } return createResponse(TRouteResult.OK, responsePayload); } private TCommandTransferResponse createResponse(TRouteResult result) { return createResponse(result, new byte[0]); } private TCommandTransferResponse createResponse(TRouteResult result, byte[] payload) { TCommandTransferResponse response = new TCommandTransferResponse(); response.setRouteResult(result); response.setPayload(payload); return response; } }
apache-2.0
nikhilvibhav/camel
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/search/SearchConsumerHandler.java
4705
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.twitter.search;

import java.util.Collections;
import java.util.List;

import org.apache.camel.Exchange;
import org.apache.camel.component.twitter.TwitterEndpoint;
import org.apache.camel.component.twitter.consumer.AbstractTwitterConsumerHandler;
import org.apache.camel.component.twitter.consumer.TwitterEventType;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import twitter4j.GeoLocation;
import twitter4j.Query;
import twitter4j.Query.Unit;
import twitter4j.QueryResult;
import twitter4j.Status;
import twitter4j.Twitter;
import twitter4j.TwitterException;

/**
 * Consumes search requests
 */
public class SearchConsumerHandler extends AbstractTwitterConsumerHandler {

    private static final Logger LOG = LoggerFactory.getLogger(SearchConsumerHandler.class);

    /** Search terms; may be null/blank, in which case polling searches without keywords. */
    private String keywords;

    public SearchConsumerHandler(TwitterEndpoint te, String keywords) {
        super(te);
        this.keywords = keywords;
    }

    @Override
    public List<Exchange> pollConsume() throws TwitterException {
        final String terms = this.keywords;

        final Query query;
        if (terms == null || terms.trim().length() == 0) {
            query = new Query();
            LOG.debug("Searching twitter without keywords.");
        } else {
            query = new Query(terms);
            LOG.debug("Searching twitter with keywords: {}", terms);
        }

        // only fetch tweets newer than the last one seen, when configured to do so
        if (endpoint.getProperties().isFilterOld()) {
            query.setSinceId(getLastId());
        }

        return search(query);
    }

    @Override
    public List<Exchange> directConsume() throws TwitterException {
        final String terms = this.keywords;
        // direct consumption requires keywords; nothing to search otherwise
        if (terms == null || terms.trim().length() == 0) {
            return Collections.emptyList();
        }
        final Query query = new Query(terms);
        LOG.debug("Searching twitter with keywords: {}", terms);
        return search(query);
    }

    /**
     * Runs the query, following up to the configured number of result pages,
     * and converts the collected statuses into exchanges.
     */
    private List<Exchange> search(Query query) throws TwitterException {
        Integer pages = 1;

        if (ObjectHelper.isNotEmpty(endpoint.getProperties().getLang())) {
            query.setLang(endpoint.getProperties().getLang());
        }

        if (ObjectHelper.isNotEmpty(endpoint.getProperties().getCount())) {
            query.setCount(endpoint.getProperties().getCount());
        }

        if (ObjectHelper.isNotEmpty(endpoint.getProperties().getNumberOfPages())) {
            pages = endpoint.getProperties().getNumberOfPages();
        }

        // geo search needs all three of latitude, longitude and radius
        if (ObjectHelper.isNotEmpty(endpoint.getProperties().getLatitude())
                && ObjectHelper.isNotEmpty(endpoint.getProperties().getLongitude())
                && ObjectHelper.isNotEmpty(endpoint.getProperties().getRadius())) {
            GeoLocation location = new GeoLocation(
                    endpoint.getProperties().getLatitude(),
                    endpoint.getProperties().getLongitude());
            query.setGeoCode(location,
                    endpoint.getProperties().getRadius(),
                    Unit.valueOf(endpoint.getProperties().getDistanceMetric()));
            LOG.debug("Searching with additional geolocation parameters.");
        }

        LOG.debug("Searching with {} pages.", pages);

        final Twitter twitter = getTwitter();
        QueryResult result = twitter.search(query);
        final List<Status> statuses = result.getTweets();

        // keep paging while more pages are requested and available
        int fetched = 1;
        while (fetched < pages && result.hasNext()) {
            result = twitter.search(result.nextQuery());
            statuses.addAll(result.getTweets());
            fetched++;
        }

        if (endpoint.getProperties().isFilterOld()) {
            for (Status status : statuses) {
                setLastIdIfGreater(status.getId());
            }
        }

        return TwitterEventType.STATUS.createExchangeList(endpoint, statuses);
    }
}
apache-2.0
zhiqinghuang/springfox
springfox-spring-web/src/test/java/springfox/documentation/spring/web/dummy/DummyClass.java
11454
/*
 *
 *  Copyright 2015 the original author or authors.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *
 */
package springfox.documentation.spring.web.dummy;

import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;
import io.swagger.annotations.AuthorizationScope;
import io.swagger.annotations.Extension;
import io.swagger.annotations.ExtensionProperty;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
import springfox.documentation.spring.web.dummy.DummyModels.Ignorable;
import springfox.documentation.spring.web.dummy.models.EnumType;
import springfox.documentation.spring.web.dummy.models.Example;
import springfox.documentation.spring.web.dummy.models.FoobarDto;
import springfox.documentation.spring.web.dummy.models.Treeish;

import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Test fixture exercised reflectively by springfox's unit tests. The method
 * bodies are intentionally empty or return null: only the signatures and the
 * swagger/Spring MVC annotations matter. Do not "clean up" annotation values
 * (including the deliberately invalid ones such as httpMethod = "RUBBISH" or
 * the "successfuly" typo) — tests assert on them verbatim.
 */
@RequestMapping(produces = {"application/json"}, consumes = {"application/json", "application/xml"})
public class DummyClass {

    // --- plain methods with varying parameter counts -----------------------

    @ApiParam
    public void annotatedWithApiParam() {
    }

    public void dummyMethod() {
    }

    public void methodWithOneArgs(int a) {
    }

    public void methodWithTwoArgs(int a, String b) {
    }

    public void methodWithNoArgs() {
    }

    // --- @ApiOperation attribute permutations ------------------------------

    @ApiOperation(value = "description", httpMethod = "GET")
    public void methodWithHttpGETMethod() {
    }

    @ApiOperation(value = "description", nickname = "unique")
    public void methodWithNickName() {
    }

    @ApiOperation(value = "description", httpMethod = "GET", hidden = true)
    public void methodThatIsHidden() {
    }

    // "RUBBISH" is an intentionally invalid HTTP method for negative tests
    @ApiOperation(value = "description", httpMethod = "RUBBISH")
    public void methodWithInvalidHttpMethod() {
    }

    @ApiOperation(value = "summary", httpMethod = "RUBBISH")
    public void methodWithSummary() {
    }

    @ApiOperation(value = "", notes = "some notes")
    public void methodWithNotes() {
    }

    @ApiOperation(value = "", nickname = "a nickname")
    public void methodWithNickname() {
    }

    @ApiOperation(value = "", position = 5)
    public void methodWithPosition() {
    }

    @ApiOperation(value = "", consumes = "application/xml")
    public void methodWithXmlConsumes() {
    }

    @ApiOperation(value = "", produces = "application/xml")
    public void methodWithXmlProduces() {
    }

    @ApiOperation(value = "", produces = "application/xml, application/json", consumes = "application/xml, " +
            "application/json")
    public void methodWithMultipleMediaTypes() {
    }

    @ApiOperation(value = "", produces = "application/xml", consumes = "application/xml")
    public void methodWithBothXmlMediaTypes() {
    }

    @ApiOperation(value = "", produces = "application/json", consumes = "application/xml")
    public void methodWithMediaTypeAndFile(MultipartFile multipartFile) {
    }

    @ApiOperation(value = "", response = DummyModels.FunkyBusiness.class)
    public void methodApiResponseClass() {
    }

    @ApiResponses({
            @ApiResponse(code = 201, response = Void.class, message = "Rule Scheduled successfuly"),
            @ApiResponse(code = 500, response = RestError.class, message = "Internal Server Error"),
            @ApiResponse(code = 406, response = RestError.class, message = "Not acceptable")})
    public void methodAnnotatedWithApiResponse() {
    }

    @ApiOperation(value = "methodWithExtensions", extensions = {
            @Extension(properties = @ExtensionProperty(name="x-test1", value="value1")),
            @Extension(name="test2", properties = @ExtensionProperty(name="name2", value="value2"))
        }
    )
    public void methodWithExtensions() {
    }

    @ApiOperation(value = "SomeVal", authorizations = @Authorization(value = "oauth2",
            scopes = {@AuthorizationScope(scope = "scope", description = "scope description")
    }))
    public void methodWithAuth() {
    }

    @ApiOperation(value = "")
    public DummyModels.FunkyBusiness methodWithAPiAnnotationButWithoutResponseClass() {
        return null;
    }

    @ApiOperation(value = "")
    public DummyModels.Paginated<BusinessType> methodWithGenericType() {
        return null;
    }

    // --- return-type resolution cases --------------------------------------

    public ResponseEntity<byte[]> methodWithGenericPrimitiveArray() {
        return null;
    }

    public ResponseEntity<DummyClass[]> methodWithGenericComplexArray() {
        return null;
    }

    public ResponseEntity<EnumType> methodWithEnumResponse() {
        return null;
    }

    @Deprecated
    public void methodWithDeprecated() {
    }

    // --- parameter-type resolution cases ------------------------------------

    public void methodWithServletRequest(ServletRequest req) {
    }

    public void methodWithBindingResult(BindingResult res) {
    }

    public void methodWithInteger(Integer integer) {
    }

    public void methodWithAnnotatedInteger(@Ignorable Integer integer) {
    }

    public void methodWithModelAttribute(@ModelAttribute Example example) {
    }

    public void methodWithoutModelAttribute(Example example) {
    }

    public void methodWithTreeishModelAttribute(@ModelAttribute Treeish example) {
    }

    // --- @RequestMapping path permutations ----------------------------------

    @RequestMapping("/businesses/{businessId}")
    public void methodWithSinglePathVariable(@PathVariable String businessId) {
    }

    @RequestMapping("/businesses/{businessId}")
    public void methodWithSingleEnum(BusinessType businessType) {
    }

    @RequestMapping("/businesses/{businessId}")
    public void methodWithSingleEnumArray(BusinessType[] businessTypes) {
    }

    @RequestMapping("/businesses/{businessId}/employees/{employeeId}/salary")
    public void methodWithRatherLongRequestPath() {
    }

    @RequestMapping(value = "/parameter-conditions", params = "test=testValue")
    public void methodWithParameterRequestCondition() {
    }

    // --- implicit parameter annotations -------------------------------------

    @ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
            value = "Authentication token")
    public void methodWithApiImplicitParam() {
    }

    @ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
            value = "Authentication token")
    public void methodWithApiImplicitParamAndInteger(Integer integer) {
    }

    @ApiImplicitParams({
            @ApiImplicitParam(name = "lang", dataType = "string", required = true, paramType = "query",
                    value = "Language", defaultValue = "EN", allowableValues = "EN,FR"),
            @ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
                    value = "Authentication token")
    })
    public void methodWithApiImplicitParams(Integer integer) {
    }

    // interface + implementing class: tests annotation inheritance resolution
    public interface ApiImplicitParamsInterface {
        @ApiImplicitParams({
                @ApiImplicitParam(name = "lang", dataType = "string", required = true, paramType = "query",
                        value = "Language", defaultValue = "EN", allowableValues = "EN,FR")
        })
        @ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
                value = "Authentication token")
        void methodWithApiImplicitParam();
    }

    public static class ApiImplicitParamsClass implements ApiImplicitParamsInterface {
        @Override
        public void methodWithApiImplicitParam() {
        }
    }

    // --- @ResponseBody model resolution cases --------------------------------

    @ResponseBody
    public DummyModels.BusinessModel methodWithConcreteResponseBody() {
        return null;
    }

    @ResponseBody
    public Map<String, DummyModels.BusinessModel> methodWithMapReturn() {
        return null;
    }

    @ResponseBody
    @ResponseStatus(value = HttpStatus.ACCEPTED, reason = "Accepted request")
    public DummyModels.BusinessModel methodWithResponseStatusAnnotation() {
        return null;
    }

    @ResponseBody
    @ResponseStatus(value = HttpStatus.NO_CONTENT)
    public void methodWithResponseStatusAnnotationAndEmptyReason() {
    }

    @ResponseBody
    public DummyModels.AnnotatedBusinessModel methodWithModelPropertyAnnotations() {
        return null;
    }

    @ResponseBody
    public DummyModels.NamedBusinessModel methodWithModelAnnotations() {
        return null;
    }

    @ResponseBody
    public List<DummyModels.BusinessModel> methodWithListOfBusinesses() {
        return null;
    }

    @ResponseBody
    public DummyModels.CorporationModel methodWithConcreteCorporationModel() {
        return null;
    }

    @ResponseBody
    public Date methodWithDateResponseBody() {
        return null;
    }

    // --- request body / part parameter cases ---------------------------------

    public void methodParameterWithRequestBodyAnnotation(
            @RequestBody DummyModels.BusinessModel model,
            HttpServletResponse response,
            DummyModels.AnnotatedBusinessModel annotatedBusinessModel) {
    }

    public void methodParameterWithRequestPartAnnotation(
            @RequestPart DummyModels.BusinessModel model,
            HttpServletResponse response,
            DummyModels.AnnotatedBusinessModel annotatedBusinessModel) {
    }

    public void methodParameterWithRequestPartAnnotationOnSimpleType(
            @RequestPart String model,
            HttpServletResponse response,
            DummyModels.AnnotatedBusinessModel annotatedBusinessModel) {
    }

    @ResponseBody
    public DummyModels.AnnotatedBusinessModel methodWithSameAnnotatedModelInReturnAndRequestBodyParam(
            @RequestBody DummyModels.AnnotatedBusinessModel model) {
        return null;
    }

    @ApiResponses({@ApiResponse(code = 413, message = "a message")})
    public void methodWithApiResponses() {
    }

    // class-level @ApiIgnore: everything inside must be skipped by the scanner
    @ApiIgnore
    public static class ApiIgnorableClass {
        @ApiIgnore
        public void dummyMethod() {
        }
    }

    @ResponseBody
    public DummyModels.ModelWithSerializeOnlyProperty methodWithSerializeOnlyPropInReturnAndRequestBodyParam(
            @RequestBody DummyModels.ModelWithSerializeOnlyProperty model) {
        return null;
    }

    @ResponseBody
    public FoobarDto methodToTestFoobarDto(@RequestBody FoobarDto model) {
        return null;
    }

    // enum used in several signatures above
    public enum BusinessType {
        PRODUCT(1),
        SERVICE(2);
        private int value;

        private BusinessType(int value) {
            this.value = value;
        }

        public int getValue() {
            return value;
        }
    }

    public class CustomClass {
    }

    // overloaded methods differing only in the subtype of the second parameter
    public class MethodsWithSameName {
        public ResponseEntity methodToTest(Integer integer, Parent child) {
            return null;
        }

        public void methodToTest(Integer integer, Child child) {
        }
    }

    class Parent {
    }

    class Child extends Parent {
    }
}
apache-2.0
carsonreinke/mozu-java-sdk
src/main/java/com/mozu/api/utils/Endpoints.java
1200
package com.mozu.api.utils;

/**
 * Relative URL templates for the Mozu REST API.
 *
 * <p>Templates containing {@code %s} placeholders are intended to be filled in
 * with {@link String#format(String, Object...)} (e.g. a tenant id or ticket).</p>
 *
 * <p>Fix: this is a pure constants holder, so the class is now {@code final}
 * with a private constructor to prevent accidental instantiation or
 * subclassing (Effective Java, "noninstantiable utility class").</p>
 */
public final class Endpoints {

    /** Obtain an application auth ticket. */
    public static final String AUTH_URL = "api/platform/applications/authtickets";
    /** Refresh an auth ticket; %s = refresh token. */
    public static final String AUTH_REFRESH_URL = "api/platform/applications/authtickets/refresh-ticket/%s";
    /** List tenants. */
    public static final String TENANT_END_POINT = "api/platform/tenants";
    /** List sites of a tenant; %s = tenant id. */
    public static final String SITES_END_POINT = "api/platform/tenants/%s/sites";
    /** Catalog attribute definitions. */
    public static final String ATTRIBUTE_END_POINT = "api/commerce/catalog/admin/attributedefinition/attributes";
    /** Vocabulary values of an attribute; %s = attribute FQN. */
    public static final String VOCABULARY_END_POINT = "api/commerce/catalog/admin/attributedefinition/attributes/%s/VocabularyValues";
    /** Product type definitions. */
    public static final String PRODUCTTYPE_END_POINT = "api/commerce/catalog/admin/attributedefinition/producttypes";
    /** Orders resource. */
    public static final String ORDER_END_POINT = "api/commerce/orders";
    /** Application status settings. */
    public static final String APPLICATIONSTATUS_END_POINT = "api/commerce/settings/applicationstatus";
    /** MZDB application-scoped data. */
    public static final String MZDB_APP_DATA_ENDPOINT = "api/platform/appdata";
    /** MZDB site-scoped data. */
    public static final String MZDB_SITE_DATA_ENDPOINT = "api/platform/sitedata";
    /** MZDB tenant-scoped data. */
    public static final String MZDB_TENANT_DATA_ENDPOINT = "api/platform/tenantdata";

    private Endpoints() {
        // constants holder; never instantiated
    }
}
mit
pedrosan7os/fenixedu-academic
src/main/java/org/fenixedu/academic/ui/faces/components/util/CalendarLink.java
5981
/**
 * Copyright © 2002 Instituto Superior Técnico
 *
 * This file is part of FenixEdu Academic.
 *
 * FenixEdu Academic is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * FenixEdu Academic is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
 */
package org.fenixedu.academic.ui.faces.components.util;

import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.struts.util.MessageResources;
import org.fenixedu.academic.domain.Exam;
import org.fenixedu.academic.domain.ExecutionCourse;
import org.fenixedu.academic.domain.Project;
import org.fenixedu.academic.domain.WrittenEvaluation;
import org.fenixedu.academic.domain.WrittenTest;
import org.fenixedu.academic.util.Bundle;
import org.fenixedu.academic.util.DateFormatUtil;

/**
 * A calendar entry (occurrence date + display label) that can optionally be
 * rendered as a link with query-string parameters appended via
 * {@link #giveLink(String)}.
 */
public class CalendarLink {

    /** Date on which the linked event occurs. */
    private Calendar objectOccurrence;
    /** Text shown for the entry in the calendar. */
    private String objectLinkLabel;
    /** Query-string parameters appended when the entry is rendered as a link. */
    private Map<String, String> linkParameters = new HashMap<String, String>();
    /** Whether the entry should be rendered as a link at all. */
    private boolean asLink;

    public CalendarLink(boolean asLink) {
        setAsLink(asLink);
    }

    public CalendarLink() {
        this(true);
    }

    public CalendarLink(final ExecutionCourse executionCourse, final WrittenEvaluation writtenEvaluation,
            final Locale locale) {
        setObjectOccurrence(writtenEvaluation.getDay());
        setObjectLinkLabel(constructCalendarPresentation(executionCourse, writtenEvaluation, locale));
    }

    public CalendarLink(final ExecutionCourse executionCourse, final Project project, final Date date, final String tail,
            final Locale locale) {
        setObjectOccurrence(date);
        setObjectLinkLabel(constructCalendarPresentation(executionCourse, project, date, tail, locale));
    }

    public void setObjectOccurrence(Calendar objectOccurrence) {
        this.objectOccurrence = objectOccurrence;
    }

    public void setObjectOccurrence(Date objectOccurrence) {
        final Calendar calendar = Calendar.getInstance();
        calendar.setTime(objectOccurrence);
        this.objectOccurrence = calendar;
    }

    public Calendar getObjectOccurrence() {
        return this.objectOccurrence;
    }

    public void setObjectLinkLabel(String objectLinkLabel) {
        this.objectLinkLabel = objectLinkLabel;
    }

    public String getObjectLinkLabel() {
        return this.objectLinkLabel;
    }

    public void setLinkParameters(Map<String, String> linkParameters) {
        this.linkParameters = linkParameters;
    }

    /**
     * Appends this entry's parameters to the given page URL as a query string,
     * using '?' or '&' depending on whether the URL already has a query part.
     *
     * <p>Fix: the local {@code StringBuilder} was previously also named
     * {@code linkParameters}, shadowing the field of the same name and making
     * the method needlessly hard to read; it is now named {@code url}.
     * NOTE(review): keys and values are appended verbatim — presumably they are
     * already URL-safe; confirm against callers before adding encoding.</p>
     *
     * @param editLinkPage base page URL, possibly already containing '?'
     * @return the URL with this entry's parameters appended
     */
    public String giveLink(String editLinkPage) {
        final StringBuilder url = new StringBuilder();
        url.append(editLinkPage);
        if (this.linkParameters != null && !this.linkParameters.isEmpty()) {
            url.append(editLinkPage.indexOf('?') > 0 ? '&' : '?');
            for (final Iterator<Entry<String, String>> iterator = this.linkParameters.entrySet().iterator(); iterator
                    .hasNext();) {
                final Entry<String, String> entry = iterator.next();
                url.append(entry.getKey());
                url.append('=');
                url.append(entry.getValue());
                if (iterator.hasNext()) {
                    url.append('&');
                }
            }
        }
        return url.toString();
    }

    public void addLinkParameter(final String key, final String value) {
        linkParameters.put(key, value);
    }

    private static final MessageResources messages = MessageResources.getMessageResources(Bundle.DEGREE);

    /**
     * Builds the label for a written evaluation: localized short name (test/exam),
     * course acronym, and the evaluation's time span.
     */
    private String constructCalendarPresentation(final ExecutionCourse executionCourse,
            final WrittenEvaluation writtenEvaluation, final Locale locale) {
        final StringBuilder stringBuilder = new StringBuilder();
        if (writtenEvaluation instanceof WrittenTest) {
            stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.test"));
        } else if (writtenEvaluation instanceof Exam) {
            stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.exam"));
        }
        stringBuilder.append(" ");
        stringBuilder.append(executionCourse.getSigla());
        stringBuilder.append(" (");
        stringBuilder.append(DateFormatUtil.format("HH:mm", writtenEvaluation.getBeginningDate()));
        stringBuilder.append("-");
        stringBuilder.append(DateFormatUtil.format("HH:mm", writtenEvaluation.getEndDate()));
        stringBuilder.append(")");
        return stringBuilder.toString();
    }

    /**
     * Builds the label for a project: localized short name, course acronym,
     * time of day, and the provided tail text.
     */
    private String constructCalendarPresentation(final ExecutionCourse executionCourse, final Project project,
            final Date time, final String tail, final Locale locale) {
        final StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.project"));
        stringBuilder.append(" ");
        stringBuilder.append(executionCourse.getSigla());
        stringBuilder.append(" (");
        stringBuilder.append(DateFormatUtil.format("HH:mm", time));
        stringBuilder.append(") ");
        stringBuilder.append(tail);
        return stringBuilder.toString();
    }

    public boolean isAsLink() {
        return asLink;
    }

    public void setAsLink(boolean asLink) {
        this.asLink = asLink;
    }
}
lgpl-3.0
StuartGuo/Bluetooth-LE-Library---Android
library/src/main/java/uk/co/alt236/bluetoothlelib/device/beacon/ibeacon/IBeaconConstants.java
198
package uk.co.alt236.bluetoothlelib.device.beacon.ibeacon; /** * */ public class IBeaconConstants { public static final byte[] MANUFACTURER_DATA_IBEACON_PREFIX = {0x4C, 0x00, 0x02, 0x15}; }
apache-2.0
smmribeiro/intellij-community
plugins/sh/gen/com/intellij/sh/psi/impl/ShUnaryExpressionImpl.java
1138
// This is a generated file. Not intended for manual editing. package com.intellij.sh.psi.impl; import java.util.List; import org.jetbrains.annotations.*; import com.intellij.lang.ASTNode; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.util.PsiTreeUtil; import static com.intellij.sh.ShTypes.*; import com.intellij.sh.psi.*; public class ShUnaryExpressionImpl extends ShExpressionImpl implements ShUnaryExpression { public ShUnaryExpressionImpl(ASTNode node) { super(node); } @Override public void accept(@NotNull ShVisitor visitor) { visitor.visitUnaryExpression(this); } @Override public void accept(@NotNull PsiElementVisitor visitor) { if (visitor instanceof ShVisitor) accept((ShVisitor)visitor); else super.accept(visitor); } @Override @Nullable public ShExpression getExpression() { return findChildByClass(ShExpression.class); } @Override @Nullable public PsiElement getMinus() { return findChildByType(MINUS); } @Override @Nullable public PsiElement getPlus() { return findChildByType(PLUS); } }
apache-2.0
wso2/product-das
modules/integration/tests-common/integration-test-utils/src/main/java/org/wso2/das/analytics/rest/beans/DrillDownPathBean.java
2162
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.das.analytics.rest.beans; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; /** * This class represents a facet object bean. facet object defines the hierarchical fieldName, * which can be drilled down. This can be used as a value in a record. * Example : * Assume a record represents a book. * Then the record field : value pairs will be, e.g. * Price : $50.00 * Author : firstName LastName * ISBN : 234325435445435436 * Published Date : "1987" , "March", "21" * * Here Publish Date will be a facet/categoryPath, since it can be drilled down to Year, then month and date * and categorizes by each level. * */ @XmlRootElement(name = "categoryPath") @XmlAccessorType(XmlAccessType.FIELD) public class DrillDownPathBean { @XmlElement(name = "path") private String[] path; @XmlElement(name = "fieldName") private String fieldName; /** * This constructor is for jax-rs json serialization/deserialization */ public DrillDownPathBean() { } public String[] getPath() { return path; } public String getFieldName() { return fieldName; } public void setPath(String[] path) { this.path = path; } public void setFieldName(String fieldName) { this.fieldName = fieldName; } }
apache-2.0
gh351135612/presto
presto-main/src/test/java/com/facebook/presto/sql/planner/assertions/LimitMatcher.java
1577
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner.assertions; import com.facebook.presto.Session; import com.facebook.presto.cost.PlanNodeCost; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.sql.planner.plan.LimitNode; import com.facebook.presto.sql.planner.plan.PlanNode; import static com.google.common.base.Preconditions.checkState; public class LimitMatcher implements Matcher { private final long limit; public LimitMatcher(long limit) { this.limit = limit; } @Override public boolean shapeMatches(PlanNode node) { if (!(node instanceof LimitNode)) { return false; } LimitNode limitNode = (LimitNode) node; return limitNode.getCount() == limit; } @Override public MatchResult detailMatches(PlanNode node, PlanNodeCost planNodeCost, Session session, Metadata metadata, SymbolAliases symbolAliases) { checkState(shapeMatches(node)); return MatchResult.match(); } }
apache-2.0
TomasHofman/undertow
core/src/main/java/io/undertow/util/Methods.java
6437
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.undertow.util;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Shared {@link HttpString} instances (and their raw string forms) for the
 * standard HTTP and WebDAV/DeltaV request methods.
 *
 * NOTE: If you add a new method here you must also add it to {@link io.undertow.server.protocol.http.HttpRequestParser}
 *
 * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
 */
public final class Methods {

    private Methods() {
    }

    public static final String OPTIONS_STRING = "OPTIONS";
    public static final String GET_STRING = "GET";
    public static final String HEAD_STRING = "HEAD";
    public static final String POST_STRING = "POST";
    public static final String PUT_STRING = "PUT";
    public static final String DELETE_STRING = "DELETE";
    public static final String TRACE_STRING = "TRACE";
    public static final String CONNECT_STRING = "CONNECT";
    public static final String PROPFIND_STRING = "PROPFIND";
    public static final String PROPPATCH_STRING = "PROPPATCH";
    public static final String MKCOL_STRING = "MKCOL";
    public static final String COPY_STRING = "COPY";
    public static final String MOVE_STRING = "MOVE";
    public static final String LOCK_STRING = "LOCK";
    public static final String UNLOCK_STRING = "UNLOCK";
    public static final String ACL_STRING = "ACL";
    public static final String REPORT_STRING = "REPORT";
    public static final String VERSION_CONTROL_STRING = "VERSION-CONTROL";
    public static final String CHECKIN_STRING = "CHECKIN";
    public static final String CHECKOUT_STRING = "CHECKOUT";
    public static final String UNCHECKOUT_STRING = "UNCHECKOUT";
    public static final String SEARCH_STRING = "SEARCH";
    public static final String MKWORKSPACE_STRING = "MKWORKSPACE";
    public static final String UPDATE_STRING = "UPDATE";
    public static final String LABEL_STRING = "LABEL";
    public static final String MERGE_STRING = "MERGE";
    // BUGFIX: RFC 3253 spells this method with a hyphen ("BASELINE-CONTROL"),
    // matching VERSION-CONTROL above; the previous value used an underscore,
    // which no client ever sends.
    public static final String BASELINE_CONTROL_STRING = "BASELINE-CONTROL";
    public static final String MKACTIVITY_STRING = "MKACTIVITY";

    public static final HttpString OPTIONS = new HttpString(OPTIONS_STRING);
    public static final HttpString GET = new HttpString(GET_STRING);
    public static final HttpString HEAD = new HttpString(HEAD_STRING);
    public static final HttpString POST = new HttpString(POST_STRING);
    public static final HttpString PUT = new HttpString(PUT_STRING);
    public static final HttpString DELETE = new HttpString(DELETE_STRING);
    public static final HttpString TRACE = new HttpString(TRACE_STRING);
    public static final HttpString CONNECT = new HttpString(CONNECT_STRING);
    public static final HttpString PROPFIND = new HttpString(PROPFIND_STRING);
    public static final HttpString PROPPATCH = new HttpString(PROPPATCH_STRING);
    public static final HttpString MKCOL = new HttpString(MKCOL_STRING);
    public static final HttpString COPY = new HttpString(COPY_STRING);
    public static final HttpString MOVE = new HttpString(MOVE_STRING);
    public static final HttpString LOCK = new HttpString(LOCK_STRING);
    public static final HttpString UNLOCK = new HttpString(UNLOCK_STRING);
    public static final HttpString ACL = new HttpString(ACL_STRING);
    public static final HttpString REPORT = new HttpString(REPORT_STRING);
    public static final HttpString VERSION_CONTROL = new HttpString(VERSION_CONTROL_STRING);
    public static final HttpString CHECKIN = new HttpString(CHECKIN_STRING);
    public static final HttpString CHECKOUT = new HttpString(CHECKOUT_STRING);
    public static final HttpString UNCHECKOUT = new HttpString(UNCHECKOUT_STRING);
    public static final HttpString SEARCH = new HttpString(SEARCH_STRING);
    public static final HttpString MKWORKSPACE = new HttpString(MKWORKSPACE_STRING);
    public static final HttpString UPDATE = new HttpString(UPDATE_STRING);
    public static final HttpString LABEL = new HttpString(LABEL_STRING);
    public static final HttpString MERGE = new HttpString(MERGE_STRING);
    public static final HttpString BASELINE_CONTROL = new HttpString(BASELINE_CONTROL_STRING);
    public static final HttpString MKACTIVITY = new HttpString(MKACTIVITY_STRING);

    // Canonical interning map: method string -> shared HttpString instance.
    private static final Map<String, HttpString> METHODS;

    static {
        Map<String, HttpString> methods = new HashMap<>();
        putString(methods, OPTIONS);
        putString(methods, GET);
        putString(methods, HEAD);
        putString(methods, POST);
        putString(methods, PUT);
        putString(methods, DELETE);
        putString(methods, TRACE);
        putString(methods, CONNECT);
        putString(methods, PROPFIND);
        putString(methods, PROPPATCH);
        putString(methods, MKCOL);
        putString(methods, COPY);
        putString(methods, MOVE);
        putString(methods, LOCK);
        putString(methods, UNLOCK);
        putString(methods, ACL);
        putString(methods, REPORT);
        putString(methods, VERSION_CONTROL);
        putString(methods, CHECKIN);
        putString(methods, CHECKOUT);
        putString(methods, UNCHECKOUT);
        putString(methods, SEARCH);
        putString(methods, MKWORKSPACE);
        putString(methods, UPDATE);
        putString(methods, LABEL);
        putString(methods, MERGE);
        putString(methods, BASELINE_CONTROL);
        putString(methods, MKACTIVITY);
        METHODS = Collections.unmodifiableMap(methods);
    }

    /** Registers a method under its own string form. */
    private static void putString(Map<String, HttpString> methods, HttpString method) {
        methods.put(method.toString(), method);
    }

    /**
     * Returns the shared {@link HttpString} for a known method, or a freshly
     * allocated one for any non-standard method string.
     *
     * @param method the raw method name
     * @return the canonical instance when known, otherwise a new HttpString
     */
    public static HttpString fromString(String method) {
        HttpString res = METHODS.get(method);
        if(res == null) {
            return new HttpString(method);
        }
        return res;
    }
}
apache-2.0
siosio/intellij-community
platform/lang-api/src/com/intellij/execution/configurations/VirtualConfigurationType.java
210
package com.intellij.execution.configurations;

/**
 * Marker interface for run configuration types that are managed entirely by the
 * platform or a plugin. A configuration of such a type can't be manually added
 * or removed by the user, and its template entry is hidden from the UI.
 */
public interface VirtualConfigurationType {
}
apache-2.0
psoreide/bnd
bndtools.core/src/bndtools/editor/model/package-info.java
115
// Exports this package from the bundle and pins its semantic version at 2.0.0
// for OSGi consumers.
@org.osgi.annotation.bundle.Export
@org.osgi.annotation.versioning.Version("2.0.0")
package bndtools.editor.model;
apache-2.0
siosio/intellij-community
plugins/InspectionGadgets/src/com/siyeh/ig/abstraction/MagicNumberInspection.java
7475
/*
 * Copyright 2003-2013 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.abstraction;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.fixes.IntroduceConstantFix;
import com.siyeh.ig.fixes.SuppressForTestsScopeFix;
import com.siyeh.ig.psiutils.ClassUtils;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.MethodUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;

/**
 * Inspection that reports "magic number" literals: numeric literals used
 * directly in code instead of being extracted to a named constant. Several
 * configurable exemptions (hashCode bodies, annotations, collection initial
 * capacities) plus hard-coded special cases are skipped.
 */
public class MagicNumberInspection extends BaseInspection {

  // Public fields are the persisted inspection settings (serialized by the platform).
  @SuppressWarnings("PublicField")
  public boolean ignoreInHashCode = true;

  @SuppressWarnings({"PublicField", "UnusedDeclaration"})
  public boolean ignoreInTestCode = false; // keep for compatibility

  @SuppressWarnings("PublicField")
  public boolean ignoreInAnnotations = true;

  @SuppressWarnings("PublicField")
  public boolean ignoreInitialCapacity = false;

  /**
   * Builds quick fixes for a reported literal: always offers "introduce
   * constant", plus a suppress-in-tests fix when one applies to the context.
   */
  @Override
  protected InspectionGadgetsFix @NotNull [] buildFixes(Object... infos) {
    final PsiElement context = (PsiElement)infos[0];
    final InspectionGadgetsFix fix = SuppressForTestsScopeFix.build(this, context);
    if (fix == null) {
      return new InspectionGadgetsFix[] {new IntroduceConstantFix()};
    }
    return new InspectionGadgetsFix[] {new IntroduceConstantFix(), fix};
  }

  @Override
  protected boolean buildQuickFixesOnlyForOnTheFlyErrors() {
    return true;
  }

  @Override
  @NotNull
  public String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("magic.number.problem.descriptor");
  }

  /** Options panel exposing the three user-configurable exemptions. */
  @Override
  public JComponent createOptionsPanel() {
    final MultipleCheckboxOptionsPanel panel = new MultipleCheckboxOptionsPanel(this);
    panel.addCheckbox(InspectionGadgetsBundle.message("inspection.option.ignore.in.hashcode"), "ignoreInHashCode");
    panel.addCheckbox(InspectionGadgetsBundle.message("inspection.option.ignore.in.annotations"), "ignoreInAnnotations");
    panel.addCheckbox(InspectionGadgetsBundle.message("inspection.option.ignore.as.initial.capacity"), "ignoreInitialCapacity");
    return panel;
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new MagicNumberVisitor();
  }

  /** Visitor that inspects each numeric literal and applies the exemption rules in order. */
  private class MagicNumberVisitor extends BaseInspectionVisitor {

    @Override
    public void visitLiteralExpression(@NotNull PsiLiteralExpression expression) {
      super.visitLiteralExpression(expression);
      final PsiType type = expression.getType();
      // Only primitive numeric literals are candidates; char literals are excluded.
      if (!ClassUtils.isPrimitiveNumericType(type) || PsiType.CHAR.equals(type)) {
        return;
      }
      if (isSpecialCaseLiteral(expression) || isFinalVariableInitialization(expression)) {
        return;
      }
      if (ignoreInHashCode) {
        // Stop the parent search at class/lambda boundaries so a literal in a
        // nested context is not attributed to an enclosing hashCode().
        final PsiMethod containingMethod = PsiTreeUtil.getParentOfType(expression, PsiMethod.class, true, PsiClass.class, PsiLambdaExpression.class);
        if (MethodUtils.isHashCode(containingMethod)) {
          return;
        }
      }
      if (ignoreInAnnotations) {
        final boolean insideAnnotation = AnnotationUtil.isInsideAnnotation(expression);
        if (insideAnnotation) {
          return;
        }
      }
      if (ignoreInitialCapacity && isInitialCapacity(expression)) {
        return;
      }
      // Literals inside a compile-time constant field are exempt (the field IS the named constant).
      final PsiField field = PsiTreeUtil.getParentOfType(expression, PsiField.class, true, PsiCallExpression.class);
      if (field != null && PsiUtil.isCompileTimeConstant(field)) {
        return;
      }
      // Highlight the whole prefix expression (e.g. -1) rather than just the literal.
      final PsiElement parent = expression.getParent();
      if (parent instanceof PsiPrefixExpression) {
        registerError(parent, parent);
      }
      else {
        registerError(expression, expression);
      }
    }

    /**
     * True when the literal is an argument to a constructor of a well-known
     * capacity-taking type (StringBuilder, Map, Collection, etc.).
     */
    private boolean isInitialCapacity(PsiLiteralExpression expression) {
      // Skip wrapping casts/parentheses to reach the argument list.
      final PsiElement element = PsiTreeUtil.skipParentsOfType(expression, PsiTypeCastExpression.class, PsiParenthesizedExpression.class);
      if (!(element instanceof PsiExpressionList)) {
        return false;
      }
      final PsiElement parent = element.getParent();
      if (!(parent instanceof PsiNewExpression)) {
        return false;
      }
      final PsiNewExpression newExpression = (PsiNewExpression)parent;
      return TypeUtils.expressionHasTypeOrSubtype(newExpression, CommonClassNames.JAVA_LANG_ABSTRACT_STRING_BUILDER, CommonClassNames.JAVA_UTIL_MAP, CommonClassNames.JAVA_UTIL_COLLECTION, "java.io.ByteArrayOutputStream", "java.awt.Dimension") != null;
    }

    /**
     * Hard-coded exemptions: small ints (0..10), 100, 1000; longs 0..2;
     * the doubles/floats 0.0 and 1.0.
     */
    private boolean isSpecialCaseLiteral(PsiLiteralExpression expression) {
      final Object object = ExpressionUtils.computeConstantExpression(expression);
      if (object instanceof Integer) {
        final int i = ((Integer)object).intValue();
        return i >= 0 && i <= 10 || i == 100 || i == 1000;
      }
      else if (object instanceof Long) {
        final long l = ((Long)object).longValue();
        return l >= 0L && l <= 2L;
      }
      else if (object instanceof Double) {
        final double d = ((Double)object).doubleValue();
        return d == 1.0 || d == 0.0;
      }
      else if (object instanceof Float) {
        final float f = ((Float)object).floatValue();
        return f == 1.0f || f == 0.0f;
      }
      return false;
    }

    /**
     * True when the literal initializes a final variable, either directly in a
     * declaration or via an assignment to a resolved final target.
     */
    public boolean isFinalVariableInitialization(PsiExpression expression) {
      final PsiElement parent = PsiTreeUtil.getParentOfType(expression, PsiVariable.class, PsiAssignmentExpression.class);
      final PsiVariable variable;
      if (!(parent instanceof PsiVariable)) {
        if (!(parent instanceof PsiAssignmentExpression)) {
          return false;
        }
        final PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parent;
        final PsiExpression lhs = assignmentExpression.getLExpression();
        if (!(lhs instanceof PsiReferenceExpression)) {
          return false;
        }
        final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)lhs;
        final PsiElement target = referenceExpression.resolve();
        if (!(target instanceof PsiVariable)) {
          return false;
        }
        variable = (PsiVariable)target;
      }
      else {
        variable = (PsiVariable)parent;
      }
      return variable.hasModifierProperty(PsiModifier.FINAL);
    }
  }
}
apache-2.0
packet-tracker/onos
providers/pcep/tunnel/src/test/java/org/onosproject/provider/pcep/tunnel/impl/PcepControllerAdapter.java
2267
/*
 * Copyright 2014 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.provider.pcep.tunnel.impl;

import org.onosproject.net.DeviceId;
import org.onosproject.pcep.api.PcepController;
import org.onosproject.pcep.api.PcepDpid;
import org.onosproject.pcep.api.PcepLinkListener;
import org.onosproject.pcep.api.PcepSwitch;
import org.onosproject.pcep.api.PcepSwitchListener;
import org.onosproject.pcep.api.PcepTunnel;
import org.onosproject.pcep.api.PcepTunnelListener;

/**
 * No-op {@link PcepController} stub for unit tests: every method either does
 * nothing or returns {@code null}. Test cases can subclass it and override
 * only the methods they care about.
 */
public class PcepControllerAdapter implements PcepController {

    @Override
    public Iterable<PcepSwitch> getSwitches() {
        return null;
    }

    @Override
    public PcepSwitch getSwitch(PcepDpid did) {
        return null;
    }

    @Override
    public void addListener(PcepSwitchListener listener) {
    }

    @Override
    public void removeListener(PcepSwitchListener listener) {
    }

    @Override
    public void addLinkListener(PcepLinkListener listener) {
    }

    @Override
    public void removeLinkListener(PcepLinkListener listener) {
    }

    @Override
    public void addTunnelListener(PcepTunnelListener listener) {
    }

    @Override
    public void removeTunnelListener(PcepTunnelListener listener) {
    }

    @Override
    public PcepTunnel applyTunnel(DeviceId srcDid, DeviceId dstDid, long srcPort, long dstPort, long bandwidth, String name) {
        return null;
    }

    @Override
    public Boolean deleteTunnel(String id) {
        return null;
    }

    @Override
    public Boolean updateTunnelBandwidth(String id, long bandwidth) {
        return null;
    }

    @Override
    public void getTunnelStatistics(String pcepTunnelId) {
    }
}
apache-2.0
smmribeiro/intellij-community
java/java-tests/testData/inspection/streamApiCallChains/beforeUnmodifiableListJava16.java
251
// "Replace 'collect(toUnmodifiableList())' with 'toList()'" "true" import java.util.List; import java.util.stream.*; class X { void test(Stream<String> stream) { List<String> list = stream.collect<caret>(Collectors.toUnmodifiableList()); } }
apache-2.0
likaiwalkman/spring-ldap
core/src/main/java/org/springframework/ldap/core/support/ContextMapperWithControls.java
1490
/*
 * Copyright 2005-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.ldap.core.support;

import org.springframework.ldap.core.ContextMapper;

import javax.naming.NamingException;
import javax.naming.ldap.HasControls;

/**
 * Extension of the {@link org.springframework.ldap.core.ContextMapper}
 * interface that additionally exposes the response controls returned with a
 * search result, so that a mapper implementation can inspect them while
 * mapping the context to an object of type T.
 *
 * @author Tim Terry
 * @author Ulrik Sandberg
 * @param <T> the type that
 * {@link #mapFromContextWithControls(Object, javax.naming.ldap.HasControls)}
 * maps each context to
 */
public interface ContextMapperWithControls<T> extends ContextMapper<T> {

    /**
     * Maps a found context to an object of type T, with access to the
     * response controls associated with the search result.
     *
     * @param ctx the context to map
     * @param hasControls provides the response controls for this result
     * @return the mapped object
     * @throws NamingException if the mapping fails for an LDAP-related reason
     */
    T mapFromContextWithControls(final Object ctx, final HasControls hasControls) throws NamingException;
}
apache-2.0
AndreasAbdi/jackrabbit-oak
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/EffectiveType.java
11952
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.nodetype;

import java.util.List;
import java.util.Set;

import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;

import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeState;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.addAll;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.contains;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Sets.newHashSet;
import static org.apache.jackrabbit.JcrConstants.JCR_DEFAULTPRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_MANDATORY;
import static org.apache.jackrabbit.JcrConstants.JCR_MIXINTYPES;
import static org.apache.jackrabbit.JcrConstants.JCR_NODETYPENAME;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_SAMENAMESIBLINGS;
import static org.apache.jackrabbit.JcrConstants.JCR_UUID;
import static org.apache.jackrabbit.oak.api.Type.UNDEFINED;
import static org.apache.jackrabbit.oak.api.Type.UNDEFINEDS;
import static org.apache.jackrabbit.oak.commons.PathUtils.dropIndexFromName;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_MANDATORY_CHILD_NODES;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_MANDATORY_PROPERTIES;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_CHILD_NODE_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_PROPERTY_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_CHILD_NODE_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_PROPERTY_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_SUPERTYPES;

/**
 * The effective type of a node, represented as the list of node type nodes
 * (primary type plus mixins) that apply to it. Lookups scan the list in order
 * and consult the precompiled rep:* indexes (mandatory items, named/residual
 * definitions, supertypes) stored on each type node.
 */
class EffectiveType {

    // Node type nodes in effect for the node; never null (checked in the constructor).
    private final List<NodeState> types;

    EffectiveType(@Nonnull List<NodeState> types) {
        this.types = checkNotNull(types);
    }

    /**
     * Checks whether this effective type contains the named type.
     *
     * @param name node type name
     * @return {@code true} if the named type is included,
     *         {@code false} otherwise
     */
    boolean isNodeType(@Nonnull String name) {
        for (NodeState type : types) {
            // A type matches either directly or through its precompiled supertype list.
            if (name.equals(type.getName(JCR_NODETYPENAME))
                    || contains(type.getNames(REP_SUPERTYPES), name)) {
                return true;
            }
        }
        return false;
    }

    /** Whether any included type declares a mandatory property with this name. */
    boolean isMandatoryProperty(@Nonnull String name) {
        return nameSetContains(REP_MANDATORY_PROPERTIES, name);
    }

    /** Union of all mandatory property names across the included types. */
    @Nonnull
    Set<String> getMandatoryProperties() {
        return getNameSet(REP_MANDATORY_PROPERTIES);
    }

    /** Whether any included type declares a mandatory child node with this name. */
    boolean isMandatoryChildNode(@Nonnull String name) {
        return nameSetContains(REP_MANDATORY_CHILD_NODES, name);
    }

    /** Union of all mandatory child node names across the included types. */
    @Nonnull
    Set<String> getMandatoryChildNodes() {
        return getNameSet(REP_MANDATORY_CHILD_NODES);
    }

    /**
     * Finds a matching definition for a property with the given name and type.
     *
     * @param property modified property
     * @return matching property definition, or {@code null}
     */
    @CheckForNull
    NodeState getDefinition(@Nonnull PropertyState property) {
        String propertyName = property.getName();
        Type<?> propertyType = property.getType();

        // Protected JCR property names are stored under escaped rep:* aliases
        // in the definition index.
        String escapedName;
        if (JCR_PRIMARYTYPE.equals(propertyName)) {
            escapedName = NodeTypeConstants.REP_PRIMARY_TYPE;
        } else if (JCR_MIXINTYPES.equals(propertyName)) {
            escapedName = NodeTypeConstants.REP_MIXIN_TYPES;
        } else if (JCR_UUID.equals(propertyName)) {
            escapedName = NodeTypeConstants.REP_UUID;
        } else {
            escapedName = propertyName;
        }

        String definedType = propertyType.toString();
        String undefinedType;
        if (propertyType.isArray()) {
            undefinedType = UNDEFINEDS.toString();
        } else {
            undefinedType = UNDEFINED.toString();
        }

        // Find matching named property definition
        for (NodeState type : types) {
            NodeState definitions = type
                    .getChildNode(REP_NAMED_PROPERTY_DEFINITIONS)
                    .getChildNode(escapedName);

            // Prefer the exact-type definition, then the UNDEFINED catch-all.
            NodeState definition = definitions.getChildNode(definedType);
            if (definition.exists()) {
                return definition;
            }

            definition = definitions.getChildNode(undefinedType);
            if (definition.exists()) {
                return definition;
            }

            // OAK-822: a mandatory definition always overrides residual ones
            // TODO: unnecessary if the OAK-713 fallback wasn't needed below
            for (ChildNodeEntry entry : definitions.getChildNodeEntries()) {
                definition = entry.getNodeState();
                if (definition.getBoolean(JCR_MANDATORY)) {
                    return definition;
                }
            }

            // TODO: Fall back to residual definitions until we have consensus on OAK-713
            // throw new ConstraintViolationException(
            //         "No matching definition found for property " + propertyName);
        }

        // Find matching residual property definition
        for (NodeState type : types) {
            NodeState residual = type.getChildNode(REP_RESIDUAL_PROPERTY_DEFINITIONS);
            NodeState definition = residual.getChildNode(definedType);
            if (!definition.exists()) {
                definition = residual.getChildNode(undefinedType);
            }
            if (definition.exists()) {
                return definition;
            }
        }

        return null;
    }

    /**
     * Finds a matching definition for a child node with the given name and
     * types.
     *
     * @param nameWithIndex child node name, possibly with an SNS index
     * @param effective effective types of the child node
     * @return {@code true} if there's a matching child node definition,
     *         {@code false} otherwise
     */
    boolean isValidChildNode(@Nonnull String nameWithIndex, @Nonnull EffectiveType effective) {
        String name = dropIndexFromName(nameWithIndex);
        // An SNS index was present iff stripping it changed the name.
        boolean sns = !name.equals(nameWithIndex);
        Set<String> typeNames = effective.getTypeNames();

        // Find matching named child node definition
        for (NodeState type : types) {
            NodeState definitions = type
                    .getChildNode(REP_NAMED_CHILD_NODE_DEFINITIONS)
                    .getChildNode(name);

            for (String typeName : typeNames) {
                NodeState definition = definitions.getChildNode(typeName);
                if (definition.exists() && snsMatch(sns, definition)) {
                    return true;
                }
            }

            // OAK-822: a mandatory definition always overrides alternatives
            // TODO: unnecessary if the OAK-713 fallback wasn't needed below
            for (ChildNodeEntry entry : definitions.getChildNodeEntries()) {
                NodeState definition = entry.getNodeState();
                if (definition.getBoolean(JCR_MANDATORY)) {
                    return false;
                }
            }

            // TODO: Fall back to residual definitions until we have consensus on OAK-713
            // throw new ConstraintViolationException(
            //         "Incorrect node type of child node " + nodeName);
        }

        // Find matching residual child node definition
        for (NodeState type : types) {
            NodeState residual = type.getChildNode(REP_RESIDUAL_CHILD_NODE_DEFINITIONS);
            for (String typeName : typeNames) {
                NodeState definition = residual.getChildNode(typeName);
                if (definition.exists() && snsMatch(sns, definition)) {
                    return true;
                }
            }
        }

        return false;
    }

    /**
     * Finds the default node type for a child node with the given name.
     *
     * @param nameWithIndex child node name, possibly with an SNS index
     * @return default type, or {@code null} if not found
     */
    @CheckForNull
    String getDefaultType(@Nonnull String nameWithIndex) {
        String name = dropIndexFromName(nameWithIndex);
        boolean sns = !name.equals(nameWithIndex);

        for (NodeState type : types) {
            NodeState named = type
                    .getChildNode(REP_NAMED_CHILD_NODE_DEFINITIONS)
                    .getChildNode(name);
            NodeState residual = type
                    .getChildNode(REP_RESIDUAL_CHILD_NODE_DEFINITIONS);

            // Named definitions take precedence over residual ones (concat order).
            for (ChildNodeEntry entry : concat(
                    named.getChildNodeEntries(),
                    residual.getChildNodeEntries())) {
                NodeState definition = entry.getNodeState();
                String defaultType = definition.getName(JCR_DEFAULTPRIMARYTYPE);
                if (defaultType != null && snsMatch(sns, definition)) {
                    return defaultType;
                }
            }
        }

        return null;
    }

    /** All type names in effect: each included type plus its supertypes. */
    @Nonnull
    Set<String> getTypeNames() {
        Set<String> names = newHashSet();
        for (NodeState type : types) {
            names.add(type.getName(JCR_NODETYPENAME));
            addAll(names, type.getNames(REP_SUPERTYPES));
        }
        return names;
    }

    //------------------------------------------------------------< Object >--

    @Override
    public String toString() {
        List<String> names = newArrayListWithCapacity(types.size());
        for (NodeState type : types) {
            names.add(type.getName(JCR_NODETYPENAME));
        }
        return names.toString();
    }

    //-----------------------------------------------------------< private >--

    /**
     * Depending on the given SNS flag, checks whether the given child node
     * definition allows same-name-siblings.
     *
     * @param sns SNS flag, {@code true} if processing an SNS node
     * @param definition child node definition
     */
    private boolean snsMatch(boolean sns, @Nonnull NodeState definition) {
        return !sns || definition.getBoolean(JCR_SAMENAMESIBLINGS);
    }

    /** Whether any included type lists the given name under the given rep:* property. */
    private boolean nameSetContains(@Nonnull String set, @Nonnull String name) {
        for (NodeState type : types) {
            if (contains(type.getNames(set), name)) {
                return true;
            }
        }
        return false;
    }

    /** Union of the given rep:* name property across all included types. */
    @Nonnull
    private Set<String> getNameSet(@Nonnull String set) {
        Set<String> names = newHashSet();
        for (NodeState type : types) {
            addAll(names, type.getNames(set));
        }
        return names;
    }
}
apache-2.0
siosio/intellij-community
plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/lang/psi/impl/synthetic/GrTraitField.java
2141
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.plugins.groovy.lang.psi.impl.synthetic; import com.intellij.openapi.diagnostic.Logger; import com.intellij.psi.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifierList; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition; import org.jetbrains.plugins.groovy.lang.psi.util.GrTraitUtil; import org.jetbrains.plugins.groovy.transformations.TransformationContext; public class GrTraitField extends GrLightField implements PsiMirrorElement { private static final Logger LOG = Logger.getInstance(GrTraitField.class); private final PsiField myField; public GrTraitField(@NotNull GrField field, GrTypeDefinition clazz, PsiSubstitutor substitutor, @Nullable TransformationContext context) { super(clazz, getNewNameForField(field), substitutor.substitute(field.getType()), field); GrLightModifierList modifierList = getModifierList(); for (String modifier : PsiModifier.MODIFIERS) { boolean hasModifierProperty; GrModifierList fieldModifierList = field.getModifierList(); if (context == null || fieldModifierList == null) { hasModifierProperty = field.hasModifierProperty(modifier); } else { hasModifierProperty = context.hasModifierProperty(fieldModifierList, modifier); } if (hasModifierProperty) { modifierList.addModifier(modifier); } } modifierList.copyAnnotations(field.getModifierList()); myField = field; } @NotNull private static String getNewNameForField(@NotNull PsiField field) { PsiClass containingClass = field.getContainingClass(); LOG.assertTrue(containingClass != null); return GrTraitUtil.getTraitFieldPrefix(containingClass) + field.getName(); } @NotNull @Override public PsiField getPrototype() { return 
myField; } }
apache-2.0
dmiszkiewicz/elasticsearch
src/main/java/org/elasticsearch/action/search/type/TransportSearchCountAction.java
3920
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.search.type;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.action.SearchServiceListener;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.fetch.FetchSearchResultProvider;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.threadpool.ThreadPool;

import static org.elasticsearch.action.search.type.TransportSearchHelper.buildScrollId;

/**
 * Transport action for the {@code count} search type: runs only the query
 * phase on every shard and merges the per-shard results without ever
 * executing a fetch phase, since no hits are returned.
 */
public class TransportSearchCountAction extends TransportSearchTypeAction {

    @Inject
    public TransportSearchCountAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                      SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController,
                                      ActionFilters actionFilters) {
        super(settings, threadPool, clusterService, searchService, searchPhaseController, actionFilters);
    }

    @Override
    protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // One AsyncAction instance drives the whole request lifecycle.
        new AsyncAction(searchRequest, listener).start();
    }

    /**
     * Single-phase async action: the "first phase" is the query phase and
     * the "second phase" merges results directly into a response.
     */
    private class AsyncAction extends BaseAsyncAction<QuerySearchResultProvider> {

        private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
            super(request, listener);
        }

        @Override
        protected String firstPhaseName() {
            return "query";
        }

        @Override
        protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request,
                                             SearchServiceListener<QuerySearchResultProvider> listener) {
            searchService.sendExecuteQuery(node, request, listener);
        }

        @Override
        protected void moveToSecondPhase() throws Exception {
            // no need to sort, since we know we have no hits back
            final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults,
                    (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty());
            // A scroll id is still produced when the request asks for one,
            // even though a count search carries no documents.
            String scrollId = null;
            if (request.scroll() != null) {
                scrollId = buildScrollId(request.searchType(), firstResults, null);
            }
            listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps,
                    successfulOps.get(), buildTookInMillis(), buildShardFailures()));
        }
    }
}
apache-2.0
pkdevbox/stratos
components/org.apache.stratos.common/src/main/java/org/apache/stratos/common/beans/topology/ClusterBean.java
3225
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.stratos.common.beans.topology;

import org.apache.stratos.common.beans.PropertyBean;

import javax.xml.bind.annotation.XmlRootElement;
import java.util.List;

/**
 * JAXB bean describing a cluster in the topology: its identity (alias,
 * service name, cluster id), members, tenant range, host names, LB flag,
 * properties and per-instance status.
 */
@XmlRootElement(name = "clusters")
public class ClusterBean {

    private String alias;
    private String serviceName;
    private String clusterId;
    private List<MemberBean> member;
    private String tenantRange;
    private List<String> hostNames;
    private boolean isLbCluster;
    private List<PropertyBean> property;
    private List<InstanceBean> instances;

    public List<InstanceBean> getInstances() {
        return instances;
    }

    public void setInstances(List<InstanceBean> instances) {
        this.instances = instances;
    }

    /**
     * Diagnostic representation of the cluster.
     * Fix: previously omitted the {@code alias} and {@code instances}
     * fields even though all other fields were printed.
     */
    @Override
    public String toString() {
        return "Cluster [alias=" + getAlias()
                + ", serviceName=" + getServiceName()
                + ", clusterId=" + getClusterId()
                + ", member=" + getMember()
                + ", tenantRange=" + getTenantRange()
                + ", hostNames=" + getHostNames()
                + ", isLbCluster=" + isLbCluster()
                + ", property=" + getProperty()
                + ", instances=" + getInstances() + "]";
    }

    public String getAlias() {
        return alias;
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }

    public String getServiceName() {
        return serviceName;
    }

    public void setServiceName(String serviceName) {
        this.serviceName = serviceName;
    }

    public String getClusterId() {
        return clusterId;
    }

    public void setClusterId(String clusterId) {
        this.clusterId = clusterId;
    }

    public List<MemberBean> getMember() {
        return member;
    }

    public void setMember(List<MemberBean> member) {
        this.member = member;
    }

    public String getTenantRange() {
        return tenantRange;
    }

    public void setTenantRange(String tenantRange) {
        this.tenantRange = tenantRange;
    }

    public List<String> getHostNames() {
        return hostNames;
    }

    public void setHostNames(List<String> hostNames) {
        this.hostNames = hostNames;
    }

    public boolean isLbCluster() {
        return isLbCluster;
    }

    public void setLbCluster(boolean isLbCluster) {
        this.isLbCluster = isLbCluster;
    }

    public List<PropertyBean> getProperty() {
        return property;
    }

    public void setProperty(List<PropertyBean> property) {
        this.property = property;
    }
}
apache-2.0
romartin/kie-wb-common
kie-wb-common-screens/kie-wb-common-datasource-mgmt/kie-wb-common-datasource-mgmt-client/src/main/java/org/kie/workbench/common/screens/datasource/management/client/dbexplorer/schemas/DatabaseSchemaExplorerView.java
1272
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.screens.datasource.management.client.dbexplorer.schemas; import com.google.gwt.view.client.AsyncDataProvider; import org.uberfire.client.mvp.UberElement; import org.uberfire.ext.widgets.common.client.common.HasBusyIndicator; public interface DatabaseSchemaExplorerView extends UberElement< DatabaseSchemaExplorerView.Presenter >, HasBusyIndicator { interface Presenter { void onOpen( DatabaseSchemaRow row ); } interface Handler { void onOpen( String schemaName ); } void setDataProvider( AsyncDataProvider< DatabaseSchemaRow > dataProvider ); void redraw( ); }
apache-2.0
knabar/openmicroscopy
components/insight/SRC/org/openmicroscopy/shoola/env/data/model/AdminObject.java
6106
/*
 *------------------------------------------------------------------------------
 *  Copyright (C) 2006-2010 University of Dundee. All rights reserved.
 *
 *
 * 	This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License along
 *  with this program; if not, write to the Free Software Foundation, Inc.,
 *  51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 *------------------------------------------------------------------------------
 */
package org.openmicroscopy.shoola.env.data.model;

import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

// NOTE(review): this is omero.IllegalArgumentException, which shadows
// java.lang.IllegalArgumentException in this file — presumably intentional.
import omero.IllegalArgumentException;

import org.openmicroscopy.shoola.env.data.login.UserCredentials;

import omero.gateway.model.ExperimenterData;
import omero.gateway.model.GroupData;

/**
 * Holds information about the group, users to handle.
 *
 * @author Jean-Marie Burel &nbsp;&nbsp;&nbsp;&nbsp;
 * <a href="mailto:j.burel@dundee.ac.uk">j.burel@dundee.ac.uk</a>
 * @author Donald MacDonald &nbsp;&nbsp;&nbsp;&nbsp;
 * <a href="mailto:donald@lifesci.dundee.ac.uk">donald@lifesci.dundee.ac.uk</a>
 * @version 3.0
 * @since 3.0-Beta4
 */
public class AdminObject {

    /** Indicates to create a group. */
    public static final int CREATE_GROUP = 0;

    /** Indicates to create an experimenter. */
    public static final int CREATE_EXPERIMENTER = 1;

    /** Indicates to update a group. */
    public static final int UPDATE_GROUP = 2;

    /** Indicates to update experimenter. */
    public static final int UPDATE_EXPERIMENTER = 3;

    /** Indicates to reset the password. */
    public static final int RESET_PASSWORD = 4;

    /** Indicates to add experimenters to group. */
    public static final int ADD_EXPERIMENTER_TO_GROUP = 5;

    /** Indicates to activate or de-activate the user. */
    public static final int ACTIVATE_USER = 6;

    /**
     * Validates the index.
     *
     * @param index The value to control.
     */
    private void checkIndex(int index) {
        switch (index) {
            case CREATE_EXPERIMENTER:
            case CREATE_GROUP:
            case UPDATE_GROUP:
            case UPDATE_EXPERIMENTER:
            case RESET_PASSWORD:
            case ADD_EXPERIMENTER_TO_GROUP:
            case ACTIVATE_USER:
                return;
            default:
                throw new IllegalArgumentException("Index not supported");
        }
    }

    /**
     * Can be the group to create or the group to add the experimenters to
     * depending on the index.
     */
    private GroupData group;

    /** The collection of groups to create. */
    private List<GroupData> groups;

    /**
     * Can be the owners of the group or the experimenters to create
     * depending on the index.
     */
    private Map<ExperimenterData, UserCredentials> experimenters;

    /** One of the constants defined by this class. */
    private int index;

    /** Indicates the permissions associated to the group. */
    private int permissions = -1;

    /**
     * Creates a new instance.
     *
     * @param group The group to handle.
     * @param experimenters The experimenters to handle.
     * @param index One of the constants defined by this class.
     */
    public AdminObject(GroupData group,
            Map<ExperimenterData, UserCredentials> experimenters, int index) {
        checkIndex(index);
        this.group = group;
        this.experimenters = experimenters;
        this.index = index;
        this.permissions = -1;
    }

    /**
     * Creates a new instance for adding the given experimenters to a group;
     * the index is always {@link #ADD_EXPERIMENTER_TO_GROUP}.
     *
     * @param group The group to handle.
     * @param values The experimenters to handle.
     */
    public AdminObject(GroupData group, Collection<ExperimenterData> values) {
        if (values != null) {
            // No credentials are needed when only adding members to a group,
            // so each experimenter maps to null.
            Iterator<ExperimenterData> i = values.iterator();
            experimenters = new HashMap<ExperimenterData, UserCredentials>();
            while (i.hasNext()) {
                experimenters.put(i.next(), null);
            }
        }
        this.group = group;
        this.index = ADD_EXPERIMENTER_TO_GROUP;
        this.permissions = -1;
    }

    /**
     * Creates a new instance without a group.
     *
     * @param experimenters The experimenters to handle.
     * @param index One of the constants defined by this class.
     */
    public AdminObject(Map<ExperimenterData, UserCredentials> experimenters,
            int index) {
        this(null, experimenters, index);
    }

    /**
     * Sets the permissions associated to the group.
     * Unknown values silently fall back to {@code PERMISSIONS_PRIVATE}.
     *
     * @param permissions The value to set. One of the constants defined
     *                    by this class.
     */
    public void setPermissions(int permissions) {
        switch (permissions) {
            case GroupData.PERMISSIONS_PRIVATE:
            case GroupData.PERMISSIONS_GROUP_READ:
            case GroupData.PERMISSIONS_GROUP_READ_LINK:
            case GroupData.PERMISSIONS_GROUP_READ_WRITE:
            case GroupData.PERMISSIONS_PUBLIC_READ:
            case GroupData.PERMISSIONS_PUBLIC_READ_WRITE:
                this.permissions = permissions;
                break;
            default:
                this.permissions = GroupData.PERMISSIONS_PRIVATE;
        }
    }

    /**
     * Returns the permissions associated to the group.
     *
     * @return See above.
     */
    public int getPermissions() { return permissions; }

    /**
     * Returns the experimenters to create.
     *
     * @return See above
     */
    public Map<ExperimenterData, UserCredentials> getExperimenters() {
        return experimenters;
    }

    /**
     * Returns the group to create or to add the experimenters to.
     *
     * @return See above.
     */
    public GroupData getGroup() { return group; }

    /**
     * Sets the groups.
     *
     * @param groups The value to handle.
     */
    public void setGroups(List<GroupData> groups) { this.groups = groups; }

    /**
     * Returns the groups to add the new users to.
     *
     * @return See above.
     */
    public List<GroupData> getGroups() { return groups; }

    /**
     * Returns one of the constants defined by this class.
     *
     * @return See above.
     */
    public int getIndex() { return index; }

}
gpl-2.0
JetBrains/jdk8u_hotspot
test/runtime/ClassUnload/p2/c2.java
1173
/* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package p2; public class c2 { int i; public void method2() { i = 5; System.out.println("c2 method2 called"); } }
gpl-2.0
nikhilvibhav/camel
tooling/camel-util-json/src/test/java/org/apache/camel/util/json/JsonSimpleOrderedTest.java
3327
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.util.json; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class JsonSimpleOrderedTest { @Test public void testOrdered() throws Exception { InputStream is = new FileInputStream("src/test/resources/bean.json"); String json = loadText(is); JsonObject output = Jsoner.deserialize(json, new JsonObject()); assertNotNull(output); // should preserve order Map<?, ?> map = output.getMap("component"); assertTrue(map instanceof LinkedHashMap); Iterator<?> it = map.keySet().iterator(); assertEquals("kind", it.next()); assertEquals("scheme", it.next()); assertEquals("syntax", it.next()); assertEquals("title", it.next()); assertEquals("description", it.next()); assertEquals("label", it.next()); assertEquals("deprecated", 
it.next()); assertEquals("deprecationNote", it.next()); assertEquals("async", it.next()); assertEquals("consumerOnly", it.next()); assertEquals("producerOnly", it.next()); assertEquals("lenientProperties", it.next()); assertEquals("javaType", it.next()); assertEquals("firstVersion", it.next()); assertEquals("groupId", it.next()); assertEquals("artifactId", it.next()); assertEquals("version", it.next()); assertFalse(it.hasNext()); } public static String loadText(InputStream in) throws IOException { StringBuilder builder = new StringBuilder(); InputStreamReader isr = new InputStreamReader(in); try { BufferedReader reader = new BufferedReader(isr); while (true) { String line = reader.readLine(); if (line == null) { line = builder.toString(); return line; } builder.append(line); builder.append("\n"); } } finally { isr.close(); in.close(); } } }
apache-2.0
jwagenleitner/incubator-groovy
src/test-resources/stubgenerator/traitStaticPropertiesStub/JavaXImpl.java
995
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package stubgenerator.traitStaticPropertiesStub; public class JavaXImpl extends GroovyXImpl { public static void main(String[] args) { new JavaXImpl(); } }
apache-2.0
lbndev/maven
maven-compat/src/main/java/org/apache/maven/artifact/manager/WagonManager.java
2151
package org.apache.maven.artifact.manager;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.List;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;
import org.apache.maven.wagon.authentication.AuthenticationInfo;
import org.apache.maven.wagon.proxy.ProxyInfo;

/**
 * Manages <a href="https://maven.apache.org/wagon">Wagon</a> related operations in Maven.
 *
 * @author <a href="michal.maczka@dimatics.com">Michal Maczka </a>
 */
@Deprecated
public interface WagonManager
    extends org.apache.maven.repository.legacy.WagonManager
{
    /**
     * This method is only here for backward compat (project-info-reports:dependencies);
     * the default implementation will return an empty AuthenticationInfo.
     *
     * @param id the server id to look up credentials for
     */
    AuthenticationInfo getAuthenticationInfo( String id );

    /**
     * Returns the proxy configured for the given protocol (e.g. {@code http}).
     */
    ProxyInfo getProxy( String protocol );

    /**
     * Downloads the given artifact from the given repository into the
     * artifact's local file.
     *
     * @throws TransferFailedException if the transfer fails
     * @throws ResourceDoesNotExistException if the artifact is not present remotely
     */
    void getArtifact( Artifact artifact, ArtifactRepository repository )
        throws TransferFailedException, ResourceDoesNotExistException;

    /**
     * Downloads the given artifact, trying the given repositories in order.
     *
     * @throws TransferFailedException if the transfer fails
     * @throws ResourceDoesNotExistException if no repository holds the artifact
     */
    void getArtifact( Artifact artifact, List<ArtifactRepository> remoteRepositories )
        throws TransferFailedException, ResourceDoesNotExistException;

    /**
     * Returns the mirror repository to use instead of the given repository,
     * per the user's mirror configuration.
     */
    ArtifactRepository getMirrorRepository( ArtifactRepository repository );
}
apache-2.0
medicayun/medicayundicom
dcm4chee-arc3-entities/trunk/src/main/java/org/dcm4chee/archive/entity/MPPS.java
6621
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is part of dcm4che, an implementation of DICOM(TM) in
 * Java(TM), hosted at http://sourceforge.net/projects/dcm4che.
 *
 * The Initial Developer of the Original Code is
 * Accurate Software Design, LLC.
 * Portions created by the Initial Developer are Copyright (C) 2006-2008
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 * See listed authors below.
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

package org.dcm4chee.archive.entity;

import java.io.Serializable;
import java.util.Date;
import java.util.Set;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
import org.dcm4che2.data.UID;
import org.dcm4chee.archive.common.PPSStatus;
import org.dcm4chee.archive.conf.AttributeFilter;
import org.dcm4chee.archive.util.DicomObjectUtils;

/**
 * JPA entity for a DICOM Modality Performed Procedure Step (MPPS).
 * Commonly queried attributes are denormalized into columns; the complete
 * DICOM dataset is stored encoded in {@link #encodedAttributes}.
 *
 * @author Damien Evans <damien.daddy@gmail.com>
 * @author Justin Falk <jfalkmu@gmail.com>
 * @author Gunter Zeilinger <gunterze@gmail.com>
 * @version $Revision$ $Date$
 * @since Feb 29, 2008
 */
@Entity
@Table(name = "mpps")
public class MPPS extends BaseEntity implements Serializable {

    private static final long serialVersionUID = -599495313070741738L;

    // Maintained by onPrePersist()/onPreUpdate(); lifecycle callbacks are
    // presumably wired up in orm.xml — confirm before relying on them.
    @Column(name = "created_time")
    private Date createdTime;

    @Column(name = "updated_time")
    private Date updatedTime;

    // SOP Instance UID uniquely identifies the MPPS object.
    @Column(name = "mpps_iuid", unique = true, nullable = false)
    private String sopInstanceUID;

    @Column(name = "pps_start")
    private Date startDateTime;

    @Column(name = "station_aet")
    private String performedStationAET;

    @Column(name = "modality")
    private String modality;

    @Column(name = "accession_no")
    private String accessionNumber;

    @Column(name = "mpps_status", nullable = false)
    private PPSStatus status;

    // JPA definition in orm.xml
    private byte[] encodedAttributes;

    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "drcode_fk")
    private Code discontinuationReasonCode;

    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "patient_fk")
    private Patient patient;

    @OneToMany(mappedBy = "modalityPerformedProcedureStep", fetch = FetchType.LAZY)
    private Set<Series> series;

    public Date getCreatedTime() {
        return createdTime;
    }

    public Date getUpdatedTime() {
        return updatedTime;
    }

    public String getSopInstanceUID() {
        return sopInstanceUID;
    }

    public Date getStartDateTime() {
        return startDateTime;
    }

    public String getPerformedStationAET() {
        return performedStationAET;
    }

    public String getModality() {
        return modality;
    }

    public String getAccessionNumber() {
        return accessionNumber;
    }

    public PPSStatus getStatus() {
        return status;
    }

    public byte[] getEncodedAttributes() {
        return encodedAttributes;
    }

    public Code getDiscontinuationReasonCode() {
        return discontinuationReasonCode;
    }

    public void setDiscontinuationReasonCode(Code discontinuationReasonCode) {
        this.discontinuationReasonCode = discontinuationReasonCode;
    }

    public Patient getPatient() {
        return patient;
    }

    public void setPatient(Patient patient) {
        this.patient = patient;
    }

    public Set<Series> getSeries() {
        return series;
    }

    public void setSeries(Set<Series> series) {
        this.series = series;
    }

    @Override
    public String toString() {
        return "MPPS[pk=" + pk + ", iuid=" + sopInstanceUID + ", status="
                + status + ", accno=" + accessionNumber + ", start="
                + startDateTime + ", mod=" + modality + ", aet="
                + performedStationAET + "]";
    }

    // Lifecycle callback: stamp creation time on first persist.
    public void onPrePersist() {
        createdTime = new Date();
    }

    // Lifecycle callback: refresh modification time on every update.
    public void onPreUpdate() {
        updatedTime = new Date();
    }

    /** Decodes and returns the stored DICOM dataset. */
    public DicomObject getAttributes() {
        return DicomObjectUtils.decode(encodedAttributes);
    }

    /**
     * Populates the denormalized columns from the given DICOM dataset and
     * stores the dataset itself (with patient attributes filtered out)
     * encoded with the Deflated Explicit VR Little Endian transfer syntax.
     */
    public void setAttributes(DicomObject attrs) {
        this.sopInstanceUID = attrs.getString(Tag.SOPInstanceUID);
        this.startDateTime = attrs.getDate(
                Tag.PerformedProcedureStepStartDate,
                Tag.PerformedProcedureStepStartTime);
        this.performedStationAET = attrs.getString(Tag.PerformedStationAETitle);
        this.modality = attrs.getString(Tag.Modality);
        // Prefer the accession number from the first item of the Scheduled
        // Step Attributes Sequence; fall back to the top-level attribute.
        this.accessionNumber = attrs.getString(new int[] {
                Tag.ScheduledStepAttributesSequence, 0, Tag.AccessionNumber });
        if (this.accessionNumber == null)
            this.accessionNumber = attrs.getString(Tag.AccessionNumber);
        // DICOM status values contain spaces (e.g. "IN PROGRESS"); the enum
        // constants use underscores.
        this.status = PPSStatus.valueOf(attrs.getString(
                Tag.PerformedProcedureStepStatus).replace(' ', '_'));
        this.encodedAttributes = DicomObjectUtils.encode(
                AttributeFilter.getExcludePatientAttributeFilter().filter(attrs),
                UID.DeflatedExplicitVRLittleEndian);
    }
}
apache-2.0
markusweimer/incubator-reef
lang/java/reef-wake/wake/src/main/java/org/apache/reef/wake/avro/impl/package-info.java
920
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * Implementations of serializer and derserializer interfaces. */ package org.apache.reef.wake.avro.impl;
apache-2.0
dennishuo/hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
10335
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;

import com.google.common.collect.Iterators;
import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.util.EnumSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.apache.hadoop.fs.StorageType.RAM_DISK;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * Tests for the behavior of lazy-persist (RAM_DISK) files: operations that
 * must be denied on them, how the NameNode handles lost replicas, and
 * concurrent read/write access.
 */
public class TestLazyPersistFiles extends LazyPersistTestCase {
  private static final int THREADPOOL_SIZE = 10;

  /**
   * Append to lazy persist file is denied.
   * @throws IOException
   */
  @Test
  public void testAppendIsDenied() throws IOException {
    getClusterBuilder().build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    Path path = new Path("/" + METHOD_NAME + ".dat");

    makeTestFile(path, BLOCK_SIZE, true);

    try {
      client.append(path.toString(), BUFFER_LENGTH,
          EnumSet.of(CreateFlag.APPEND), null, null).close();
      fail("Append to LazyPersist file did not fail as expected");
    } catch (Throwable t) {
      LOG.info("Got expected exception ", t);
    }
  }

  /**
   * Truncate to lazy persist file is denied.
   * @throws IOException
   */
  @Test
  public void testTruncateIsDenied() throws IOException {
    getClusterBuilder().build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    Path path = new Path("/" + METHOD_NAME + ".dat");

    makeTestFile(path, BLOCK_SIZE, true);

    try {
      client.truncate(path.toString(), BLOCK_SIZE/2);
      fail("Truncate to LazyPersist file did not fail as expected");
    } catch (Throwable t) {
      LOG.info("Got expected exception ", t);
    }
  }

  /**
   * If one or more replicas of a lazyPersist file are lost, then the file
   * must be discarded by the NN, instead of being kept around as a
   * 'corrupt' file.
   */
  @Test
  public void testCorruptFilesAreDiscarded()
      throws IOException, InterruptedException, TimeoutException {
    getClusterBuilder().setRamDiskReplicaCapacity(2).build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    Path path1 = new Path("/" + METHOD_NAME + ".01.dat");

    makeTestFile(path1, BLOCK_SIZE, true);
    ensureFileReplicasOnStorageType(path1, RAM_DISK);

    // Stop the DataNode and sleep for the time it takes the NN to
    // detect the DN as being dead.
    cluster.shutdownDataNodes();
    Thread.sleep(30000L);
    assertThat(cluster.getNamesystem().getNumDeadDataNodes(), is(1));

    // Next, wait for the redundancy monitor to mark the file as corrupt.
    Thread.sleep(2 * DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT * 1000);

    // Wait for the LazyPersistFileScrubber to run
    Thread.sleep(2 * LAZY_WRITE_FILE_SCRUBBER_INTERVAL_SEC * 1000);

    // Ensure that path1 does not exist anymore: the scrubber must have
    // deleted the file whose only replica was lost.
    // (Was `assert(!fs.exists(path1))` — a bare Java assert is a no-op
    // unless the JVM runs with -ea, so the check could silently pass.)
    Assert.assertFalse(fs.exists(path1));

    // We should have zero blocks that need replication, since the only
    // file with a lost replica was discarded rather than kept as corrupt.
    assertThat(cluster.getNameNode()
                      .getNamesystem()
                      .getBlockManager()
                      .getLowRedundancyBlocksCount(),
               is(0L));
  }

  /**
   * With the scrubber disabled, a lazyPersist file whose replicas were lost
   * must NOT be deleted by the NameNode.
   */
  @Test
  public void testDisableLazyPersistFileScrubber()
      throws IOException, InterruptedException, TimeoutException {
    getClusterBuilder().setRamDiskReplicaCapacity(2).disableScrubber().build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    Path path1 = new Path("/" + METHOD_NAME + ".01.dat");

    makeTestFile(path1, BLOCK_SIZE, true);
    ensureFileReplicasOnStorageType(path1, RAM_DISK);

    // Stop the DataNode and sleep for the time it takes the NN to
    // detect the DN as being dead.
    cluster.shutdownDataNodes();
    Thread.sleep(30000L);

    // Next, wait for the redundancy monitor to mark the file as corrupt.
    Thread.sleep(2 * DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT * 1000);

    // Wait for the LazyPersistFileScrubber to run
    Thread.sleep(2 * LAZY_WRITE_FILE_SCRUBBER_INTERVAL_SEC * 1000);

    // Ensure that path1 exist.
    Assert.assertTrue(fs.exists(path1));
  }

  /**
   * If NN restarted then lazyPersist files should not deleted
   */
  @Test
  public void testFileShouldNotDiscardedIfNNRestarted()
      throws IOException, InterruptedException, TimeoutException {
    getClusterBuilder().setRamDiskReplicaCapacity(2).build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    Path path1 = new Path("/" + METHOD_NAME + ".01.dat");

    makeTestFile(path1, BLOCK_SIZE, true);
    ensureFileReplicasOnStorageType(path1, RAM_DISK);

    cluster.shutdownDataNodes();

    cluster.restartNameNodes();

    // wait for the redundancy monitor to mark the file as corrupt.
    Thread.sleep(2 * DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT * 1000);

    Long corruptBlkCount = (long) Iterators.size(cluster.getNameNode()
        .getNamesystem().getBlockManager().getCorruptReplicaBlockIterator());

    // Check block detected as corrupted
    assertThat(corruptBlkCount, is(1L));

    // Ensure path1 exist.
    Assert.assertTrue(fs.exists(path1));
  }

  /**
   * Concurrent read from the same node and verify the contents.
   */
  @Test
  public void testConcurrentRead() throws Exception {
    getClusterBuilder().setRamDiskReplicaCapacity(2).build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    final Path path1 = new Path("/" + METHOD_NAME + ".dat");

    final int SEED = 0xFADED;
    final int NUM_TASKS = 5;
    makeRandomTestFile(path1, BLOCK_SIZE, true, SEED);
    ensureFileReplicasOnStorageType(path1, RAM_DISK);

    // Read from multiple clients concurrently; any verification failure is
    // recorded in testFailed since exceptions in threads are not propagated.
    final CountDownLatch latch = new CountDownLatch(NUM_TASKS);
    final AtomicBoolean testFailed = new AtomicBoolean(false);

    Runnable readerRunnable = new Runnable() {
      @Override
      public void run() {
        try {
          Assert.assertTrue(verifyReadRandomFile(path1, BLOCK_SIZE, SEED));
        } catch (Throwable e) {
          LOG.error("readerRunnable error", e);
          testFailed.set(true);
        } finally {
          latch.countDown();
        }
      }
    };

    Thread[] threads = new Thread[NUM_TASKS];
    for (int i = 0; i < NUM_TASKS; i++) {
      threads[i] = new Thread(readerRunnable);
      threads[i].start();
    }

    Thread.sleep(500);

    for (int i = 0; i < NUM_TASKS; i++) {
      Uninterruptibles.joinUninterruptibly(threads[i]);
    }
    Assert.assertFalse(testFailed.get());
  }

  /**
   * Concurrent write with eviction
   * RAM_DISK can hold 9 replicas
   * 4 threads each write 5 replicas
   * @throws IOException
   * @throws InterruptedException
   */
  @Test
  public void testConcurrentWrites()
      throws IOException, InterruptedException {
    getClusterBuilder().setRamDiskReplicaCapacity(9).build();
    final String METHOD_NAME = GenericTestUtils.getMethodName();
    final int SEED = 0xFADED;
    final int NUM_WRITERS = 4;
    final int NUM_WRITER_PATHS = 5;

    Path[][] paths = new Path[NUM_WRITERS][NUM_WRITER_PATHS];
    for (int i = 0; i < NUM_WRITERS; i++) {
      // NOTE: the original also re-allocated paths[i] here; the outer `new`
      // above already created the row arrays, so that was redundant.
      for (int j = 0; j < NUM_WRITER_PATHS; j++) {
        paths[i][j] =
            new Path("/" + METHOD_NAME + ".Writer" + i + ".File." + j + ".dat");
      }
    }

    final CountDownLatch latch = new CountDownLatch(NUM_WRITERS);
    final AtomicBoolean testFailed = new AtomicBoolean(false);

    ExecutorService executor = Executors.newFixedThreadPool(THREADPOOL_SIZE);
    for (int i = 0; i < NUM_WRITERS; i++) {
      Runnable writer = new WriterRunnable(i, paths[i], SEED, latch, testFailed);
      executor.execute(writer);
    }

    Thread.sleep(3 * LAZY_WRITER_INTERVAL_SEC * 1000);
    triggerBlockReport();

    // Stop executor from adding new tasks to finish existing threads in queue
    // (the original never shut the pool down, leaking its threads).
    executor.shutdown();
    latch.await();

    assertThat(testFailed.get(), is(false));
  }

  /**
   * Worker that writes a fixed set of random test files; records failure in
   * the shared flag and always counts down the latch when done.
   */
  class WriterRunnable implements Runnable {
    private final int id;
    private final Path paths[];
    private final int seed;
    private CountDownLatch latch;
    private AtomicBoolean bFail;

    public WriterRunnable(int threadIndex, Path[] paths,
                          int seed, CountDownLatch latch,
                          AtomicBoolean bFail) {
      id = threadIndex;
      this.paths = paths;
      this.seed = seed;
      this.latch = latch;
      this.bFail = bFail;
      System.out.println("Creating Writer: " + id);
    }

    public void run() {
      System.out.println("Writer " + id + " starting... ");
      int i = 0;
      try {
        for (i = 0; i < paths.length; i++) {
          makeRandomTestFile(paths[i], BLOCK_SIZE, true, seed);
          // eviction may fail when all blocks are not persisted yet.
          // ensureFileReplicasOnStorageType(paths[i], RAM_DISK);
        }
      } catch (IOException e) {
        bFail.set(true);
        LOG.error("Writer exception: writer id:" + id +
            " testfile: " + paths[i].toString() + " " + e);
      } finally {
        latch.countDown();
      }
    }
  }
}
apache-2.0
williamchengit/TestRepo
solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
8567
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.update.processor;

import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.ArrayList;

/**
 * Manages a chain of UpdateRequestProcessorFactories.
 * <p>
 * Chains can be configured via solrconfig.xml using the following syntax...
 * </p>
 * <pre class="prettyprint">
 * &lt;updateRequestProcessorChain name="key" default="true"&gt;
 *   &lt;processor class="package.Class1" /&gt;
 *   &lt;processor class="package.Class2" &gt;
 *     &lt;str name="someInitParam1"&gt;value&lt;/str&gt;
 *     &lt;int name="someInitParam2"&gt;42&lt;/int&gt;
 *   &lt;/processor&gt;
 *   &lt;processor class="solr.LogUpdateProcessorFactory" &gt;
 *     &lt;int name="maxNumToLog"&gt;100&lt;/int&gt;
 *   &lt;/processor&gt;
 *   &lt;processor class="solr.RunUpdateProcessorFactory" /&gt;
 * &lt;/updateRequestProcessorChain&gt;
 * </pre>
 * <p>
 * Multiple Chains can be defined, each with a distinct name.  The name of
 * a chain used to handle an update request may be specified using the request
 * param <code>update.chain</code>.  If no chain is explicitly selected
 * by name, then Solr will attempt to determine a default chain:
 * </p>
 * <ul>
 *  <li>A single configured chain may explicitly be declared with
 *      <code>default="true"</code> (see example above)</li>
 *  <li>If no chain is explicitly declared as the default, Solr will look for
 *      any chain that does not have a name, and treat it as the default</li>
 *  <li>As a last resort, Solr will create an implicit default chain
 *      consisting of:<ul>
 *        <li>{@link LogUpdateProcessorFactory}</li>
 *        <li>{@link DistributedUpdateProcessorFactory}</li>
 *        <li>{@link RunUpdateProcessorFactory}</li>
 *      </ul></li>
 * </ul>
 *
 * <p>
 * Almost all processor chains should end with an instance of
 * <code>RunUpdateProcessorFactory</code> unless the user is explicitly
 * executing the update commands in an alternative custom
 * <code>UpdateRequestProcessorFactory</code>.  If a chain includes
 * <code>RunUpdateProcessorFactory</code> but does not include a
 * <code>DistributingUpdateProcessorFactory</code>, it will be added
 * automatically by {@link #init init()}.
 * </p>
 *
 * @see UpdateRequestProcessorFactory
 * @see #init
 * @see #createProcessor
 * @since solr 1.3
 */
public final class UpdateRequestProcessorChain implements PluginInfoInitialized {
  public final static Logger log = LoggerFactory.getLogger(UpdateRequestProcessorChain.class);

  private UpdateRequestProcessorFactory[] chain;
  private final SolrCore solrCore;

  public UpdateRequestProcessorChain(SolrCore solrCore) {
    this.solrCore = solrCore;
  }

  /**
   * Initializes the chain using the factories specified by the <code>PluginInfo</code>.
   * if the chain includes the <code>RunUpdateProcessorFactory</code>, but
   * does not include an implementation of the
   * <code>DistributingUpdateProcessorFactory</code> interface, then an
   * instance of <code>DistributedUpdateProcessorFactory</code> will be
   * injected immediately prior to the <code>RunUpdateProcessorFactory</code>.
   *
   * @see DistributingUpdateProcessorFactory
   * @see RunUpdateProcessorFactory
   * @see DistributedUpdateProcessorFactory
   */
  @Override
  public void init(PluginInfo info) {
    final String infomsg = "updateRequestProcessorChain \"" +
        (null != info.name ? info.name : "") + "\"" +
        (info.isDefault() ? " (default)" : "");
    log.info("creating {}", infomsg);

    // wrap in an ArrayList so we know we can do fast index lookups
    // and that add(int,Object) is supported
    // (was a raw ArrayList; typed now so no unchecked conversion occurs)
    List<UpdateRequestProcessorFactory> list = new ArrayList<>(
        solrCore.initPlugins(info.getChildren("processor"),
                             UpdateRequestProcessorFactory.class, null));

    if (list.isEmpty()) {
      // NOTE(review): message grammar ("require") left byte-identical in
      // case existing tests match on the exact text.
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                              infomsg + " require at least one processor");
    }

    int numDistrib = 0;
    int runIndex = -1;
    // hi->lo incase multiple run instances, add before first one
    // (no idea why someone might use multiple run instances, but just in case)
    for (int i = list.size()-1; 0 <= i; i--) {
      UpdateRequestProcessorFactory factory = list.get(i);

      if (factory instanceof DistributingUpdateProcessorFactory) {
        numDistrib++;
      }
      if (factory instanceof RunUpdateProcessorFactory) {
        runIndex = i;
      }
    }
    if (1 < numDistrib) {
      // NOTE(review): "more then" typo left byte-identical (see above).
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                              infomsg + " may not contain more then one " +
                              "instance of DistributingUpdateProcessorFactory");
    }
    if (0 <= runIndex && 0 == numDistrib) {
      // by default, add distrib processor immediately before run
      DistributedUpdateProcessorFactory distrib
        = new DistributedUpdateProcessorFactory();
      distrib.init(new NamedList<>());
      list.add(runIndex, distrib);

      log.info("inserting DistributedUpdateProcessorFactory into {}", infomsg);
    }

    chain = list.toArray(new UpdateRequestProcessorFactory[list.size()]);
  }

  /**
   * Creates a chain backed directly by the specified array. Modifications to
   * the array will affect future calls to <code>createProcessor</code>
   */
  public UpdateRequestProcessorChain(UpdateRequestProcessorFactory[] chain,
                                     SolrCore solrCore) {
    this.chain = chain;
    this.solrCore =  solrCore;
  }

  /**
   * Uses the factories in this chain to creates a new
   * <code>UpdateRequestProcessor</code> instance specific for this request.
   * If the <code>DISTRIB_UPDATE_PARAM</code> is present in the request and is
   * non-blank, then any factory in this chain prior to the instance of
   * <code>{@link DistributingUpdateProcessorFactory}</code> will be skipped,
   * except for the log update processor factory.
   * (NOTE(review): the code below only checks the param for null, not for
   * blankness — confirm whether blank values should also trigger skipping.)
   *
   * @see UpdateRequestProcessorFactory#getInstance
   * @see DistributingUpdateProcessorFactory#DISTRIB_UPDATE_PARAM
   */
  public UpdateRequestProcessor createProcessor(SolrQueryRequest req,
                                                SolrQueryResponse rsp) {
    UpdateRequestProcessor processor = null;
    UpdateRequestProcessor last = null;

    final String distribPhase = req.getParams().get(DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM);
    final boolean skipToDistrib = distribPhase != null;

    boolean afterDistrib = true;  // we iterate backwards, so true to start

    // Build the processor chain back-to-front so each processor wraps the
    // one that runs after it.
    for (int i = chain.length-1; i>=0; i--) {
      UpdateRequestProcessorFactory factory = chain[i];

      if (skipToDistrib) {
        if (afterDistrib) {
          if (factory instanceof DistributingUpdateProcessorFactory) {
            afterDistrib = false;
          }
        } else if (!(factory instanceof UpdateRequestProcessorFactory.RunAlways)) {
          // skip anything that doesn't have the marker interface
          continue;
        }
      }

      processor = factory.getInstance(req, rsp, last);
      last = processor == null ? last : processor;
    }

    return last;
  }

  /**
   * Returns the underlying array of factories used in this chain.
   * Modifications to the array will affect future calls to
   * <code>createProcessor</code>
   */
  public UpdateRequestProcessorFactory[] getFactories() {
    return chain;
  }
}
apache-2.0
donNewtonAlpha/onos
apps/pim/src/main/java/org/onosproject/pim/cli/PimInterfacesListCommand.java
1782
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.pim.cli;

import org.apache.karaf.shell.commands.Command;
import org.onosproject.cli.AbstractShellCommand;
import org.onosproject.pim.impl.PimInterface;
import org.onosproject.pim.impl.PimInterfaceService;

import java.util.Set;

/**
 * Lists the interfaces where PIM is enabled.
 */
@Command(scope = "onos", name = "pim-interfaces",
        description = "Lists the interfaces where PIM is enabled")
public class PimInterfacesListCommand extends AbstractShellCommand {

    // Output line for one PIM-enabled interface.
    private static final String FORMAT =
            "interfaceName=%s, holdTime=%s, priority=%s, genId=%s";
    // Output line for one route carried by that interface.
    private static final String ROUTE_FORMAT = " %s";

    @Override
    protected void execute() {
        // Fetch every PIM-enabled interface and print one summary line for
        // each, followed by its routes, one per line.
        PimInterfaceService pimService = get(PimInterfaceService.class);
        Set<PimInterface> pimInterfaces = pimService.getPimInterfaces();

        for (PimInterface pimInterface : pimInterfaces) {
            print(FORMAT,
                  pimInterface.getInterface().name(),
                  pimInterface.getHoldtime(),
                  pimInterface.getPriority(),
                  pimInterface.getGenerationId());

            for (Object route : pimInterface.getRoutes()) {
                print(ROUTE_FORMAT, route);
            }
        }
    }
}
apache-2.0
hongyuhong/flink
flink-runtime/src/test/java/org/apache/flink/runtime/operators/hash/HashTableTest.java
12009
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.runtime.operators.hash;

import org.apache.flink.api.common.typeutils.GenericPairComparator;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.ByteValueSerializer;
import org.apache.flink.api.common.typeutils.base.LongComparator;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArrayComparator;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.runtime.TupleComparator;
import org.apache.flink.api.java.typeutils.runtime.TupleSerializer;
import org.apache.flink.api.java.typeutils.runtime.ValueComparator;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.types.ByteValue;
import org.apache.flink.util.MutableObjectIterator;

import org.junit.Test;
import org.junit.Assert;

import org.mockito.Mockito;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Tests for {@link MutableHashTable} covering memory-shortage corner cases
 * and spilling behavior.
 */
public class HashTableTest {

	private final TypeSerializer<Tuple2<Long, byte[]>> buildSerializer;
	private final TypeSerializer<Long> probeSerializer;

	private final TypeComparator<Tuple2<Long, byte[]>> buildComparator;
	private final TypeComparator<Long> probeComparator;

	private final TypePairComparator<Long, Tuple2<Long, byte[]>> pairComparator;

	public HashTableTest() {
		TypeSerializer<?>[] fieldSerializers = { LongSerializer.INSTANCE, BytePrimitiveArraySerializer.INSTANCE };
		@SuppressWarnings("unchecked")
		Class<Tuple2<Long, byte[]>> clazz = (Class<Tuple2<Long, byte[]>>) (Class<?>) Tuple2.class;
		this.buildSerializer = new TupleSerializer<Tuple2<Long, byte[]>>(clazz, fieldSerializers);

		this.probeSerializer = LongSerializer.INSTANCE;

		TypeComparator<?>[] comparators = { new LongComparator(true) };
		TypeSerializer<?>[] comparatorSerializers = { LongSerializer.INSTANCE };

		this.buildComparator = new TupleComparator<Tuple2<Long, byte[]>>(new int[] {0}, comparators, comparatorSerializers);

		this.probeComparator = new LongComparator(true);

		this.pairComparator = new TypePairComparator<Long, Tuple2<Long, byte[]>>() {

			private long ref;

			@Override
			public void setReference(Long reference) {
				ref = reference;
			}

			@Override
			public boolean equalToReference(Tuple2<Long, byte[]> candidate) {
				//noinspection UnnecessaryUnboxing
				return candidate.f0.longValue() == ref;
			}

			@Override
			public int compareToReference(Tuple2<Long, byte[]> candidate) {
				long x = ref;
				long y = candidate.f0;
				return (x < y) ? -1 : ((x == y) ? 0 : 1);
			}
		};
	}

	// ------------------------------------------------------------------------
	//  Tests
	// ------------------------------------------------------------------------

	/**
	 * This tests a combination of values that lead to a corner case situation where memory
	 * was missing and the computation deadlocked.
	 */
	@Test
	public void testBufferMissingForProbing() {

		final IOManager ioMan = new IOManagerAsync();

		try {
			final int pageSize = 32*1024;
			final int numSegments = 34;
			final int numRecords = 3400;
			final int recordLen = 270;

			final byte[] payload = new byte[recordLen - 8 - 4];

			List<MemorySegment> memory = getMemory(numSegments, pageSize);

			MutableHashTable<Tuple2<Long, byte[]>, Long> table = new MutableHashTable<>(
					buildSerializer, probeSerializer, buildComparator, probeComparator,
					pairComparator, memory, ioMan, 16, false);

			table.open(new TupleBytesIterator(payload, numRecords),
					new LongIterator(10000));

			try {
				while (table.nextRecord()) {
					MutableObjectIterator<Tuple2<Long, byte[]>> matches = table.getBuildSideIterator();
					while (matches.next() != null);
				}
			} catch (RuntimeException e) {
				// the expected failure mode is a recursion-limit error; anything
				// else is a real test failure
				if (!e.getMessage().contains("exceeded maximum number of recursions")) {
					e.printStackTrace();
					fail("Test failed with unexpected exception");
				}
			} finally {
				table.close();
			}

			checkNoTempFilesRemain(ioMan);
		} catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		} finally {
			ioMan.shutdown();
		}
	}

	/**
	 * This tests the case where no additional partition buffers are used at the point when spilling
	 * is triggered, testing that overflow bucket buffers are taken into account when deciding which
	 * partition to spill.
	 */
	@Test
	public void testSpillingFreesOnlyOverflowSegments() {
		final IOManager ioMan = new IOManagerAsync();

		final TypeSerializer<ByteValue> serializer = ByteValueSerializer.INSTANCE;
		final TypeComparator<ByteValue> buildComparator = new ValueComparator<>(true, ByteValue.class);
		final TypeComparator<ByteValue> probeComparator = new ValueComparator<>(true, ByteValue.class);

		@SuppressWarnings("unchecked")
		final TypePairComparator<ByteValue, ByteValue> pairComparator = Mockito.mock(TypePairComparator.class);

		try {
			final int pageSize = 32*1024;
			final int numSegments = 34;

			List<MemorySegment> memory = getMemory(numSegments, pageSize);

			MutableHashTable<ByteValue, ByteValue> table = new MutableHashTable<>(
					serializer, serializer, buildComparator, probeComparator,
					pairComparator, memory, ioMan, 1, false);

			table.open(new ByteValueIterator(100000000), new ByteValueIterator(1));

			table.close();

			checkNoTempFilesRemain(ioMan);
		} catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		} finally {
			ioMan.shutdown();
		}
	}

	/**
	 * Tests that the MutableHashTable spills its partitions when creating the initial table
	 * without overflow segments in the partitions. This means that the records are large.
	 */
	@Test
	public void testSpillingWhenBuildingTableWithoutOverflow() throws Exception {
		// FIX: the I/O manager was previously never shut down by this test,
		// leaking its writer/reader threads; release it in a finally block
		// like the other tests in this class do.
		final IOManager ioMan = new IOManagerAsync();

		try {
			final TypeSerializer<byte[]> serializer = BytePrimitiveArraySerializer.INSTANCE;
			final TypeComparator<byte[]> buildComparator = new BytePrimitiveArrayComparator(true);
			final TypeComparator<byte[]> probeComparator = new BytePrimitiveArrayComparator(true);

			@SuppressWarnings("unchecked")
			final TypePairComparator<byte[], byte[]> pairComparator = new GenericPairComparator<>(
				new BytePrimitiveArrayComparator(true), new BytePrimitiveArrayComparator(true));

			final int pageSize = 128;
			final int numSegments = 33;

			List<MemorySegment> memory = getMemory(numSegments, pageSize);

			MutableHashTable<byte[], byte[]> table = new MutableHashTable<byte[], byte[]>(
				serializer, serializer, buildComparator, probeComparator,
				pairComparator, memory, ioMan, 1, false);

			int numElements = 9;

			table.open(
				new CombiningIterator<byte[]>(
					new ByteArrayIterator(numElements, 128,(byte) 0),
					new ByteArrayIterator(numElements, 128,(byte) 1)),
				new CombiningIterator<byte[]>(
					new ByteArrayIterator(1, 128,(byte) 0),
					new ByteArrayIterator(1, 128,(byte) 1)));

			while(table.nextRecord()) {
				MutableObjectIterator<byte[]> iterator = table.getBuildSideIterator();

				int counter = 0;

				while(iterator.next() != null) {
					counter++;
				}

				// check that we retrieve all our elements
				Assert.assertEquals(numElements, counter);
			}

			table.close();
		} finally {
			ioMan.shutdown();
		}
	}

	// ------------------------------------------------------------------------
	//  Utilities
	// ------------------------------------------------------------------------

	/** Allocates {@code numSegments} unpooled memory segments of the given size. */
	private static List<MemorySegment> getMemory(int numSegments, int segmentSize) {
		ArrayList<MemorySegment> list = new ArrayList<MemorySegment>(numSegments);
		for (int i = 0; i < numSegments; i++) {
			list.add(MemorySegmentFactory.allocateUnpooledSegment(segmentSize));
		}
		return list;
	}

	/** Fails the test if any temp file is left behind in a spilling directory. */
	private static void checkNoTempFilesRemain(IOManager ioManager) {
		for (File dir : ioManager.getSpillingDirectories()) {
			// FIX: File.list() returns null on I/O error or if the directory
			// was already removed; previously this would NPE in the for loop.
			String[] files = dir.list();
			if (files == null) {
				continue;
			}
			for (String file : files) {
				if (file != null && !(file.equals(".") || file.equals(".."))) {
					fail("hash table did not clean up temp files. remaining file: " + file);
				}
			}
		}
	}

	/** Emits {@code numRecords} tuples of (42L, payload). */
	private static class TupleBytesIterator implements MutableObjectIterator<Tuple2<Long, byte[]>> {

		private final byte[] payload;
		private final int numRecords;

		private int count = 0;

		TupleBytesIterator(byte[] payload, int numRecords) {
			this.payload = payload;
			this.numRecords = numRecords;
		}

		@Override
		public Tuple2<Long, byte[]> next(Tuple2<Long, byte[]> reuse) {
			return next();
		}

		@Override
		public Tuple2<Long, byte[]> next() {
			if (count++ < numRecords) {
				return new Tuple2<>(42L, payload);
			} else {
				return null;
			}
		}
	}

	/** Emits {@code numRecords} byte arrays of a fixed length and fill value. */
	private static class ByteArrayIterator implements MutableObjectIterator<byte[]> {

		private final long numRecords;
		private long counter = 0;
		private final byte[] arrayValue;

		ByteArrayIterator(long numRecords, int length, byte value) {
			this.numRecords = numRecords;
			arrayValue = new byte[length];
			Arrays.fill(arrayValue, value);
		}

		@Override
		public byte[] next(byte[] array) {
			return next();
		}

		@Override
		public byte[] next() {
			if (counter++ < numRecords) {
				return arrayValue;
			} else {
				return null;
			}
		}
	}

	/** Emits the longs 0..numRecords-1. */
	private static class LongIterator implements MutableObjectIterator<Long> {

		private final long numRecords;
		private long value = 0;

		LongIterator(long numRecords) {
			this.numRecords = numRecords;
		}

		@Override
		public Long next(Long aLong) {
			return next();
		}

		@Override
		public Long next() {
			if (value < numRecords) {
				return value++;
			} else {
				return null;
			}
		}
	}

	/** Emits {@code numRecords} ByteValue(0) records. */
	private static class ByteValueIterator implements MutableObjectIterator<ByteValue> {

		private final long numRecords;
		private long value = 0;

		ByteValueIterator(long numRecords) {
			this.numRecords = numRecords;
		}

		@Override
		public ByteValue next(ByteValue aLong) {
			return next();
		}

		@Override
		public ByteValue next() {
			if (value++ < numRecords) {
				return new ByteValue((byte) 0);
			} else {
				return null;
			}
		}
	}

	/** Exhausts the left iterator, then the right one. */
	private static class CombiningIterator<T> implements MutableObjectIterator<T> {

		private final MutableObjectIterator<T> left;
		private final MutableObjectIterator<T> right;

		public CombiningIterator(MutableObjectIterator<T> left, MutableObjectIterator<T> right) {
			this.left = left;
			this.right = right;
		}

		@Override
		public T next(T reuse) throws IOException {
			T value = left.next(reuse);

			if (value == null) {
				return right.next(reuse);
			} else {
				return value;
			}
		}

		@Override
		public T next() throws IOException {
			T value = left.next();

			if (value == null) {
				return right.next();
			} else {
				return value;
			}
		}
	}
}
apache-2.0
jaohoang/android-styled-dialogs
library/src/main/java/com/avast/android/dialogs/core/BaseDialogBuilder.java
3904
package com.avast.android.dialogs.core;

import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;

/**
 * Internal base builder that holds common values for all dialog fragment builders.
 *
 * @author Tomas Vondracek
 */
public abstract class BaseDialogBuilder<T extends BaseDialogBuilder<T>> {

    /** Arguments key under which the request code is passed to the created fragment. */
    public final static String ARG_REQUEST_CODE = "request_code";
    /** Arguments key controlling whether a touch outside the dialog cancels it. */
    public final static String ARG_CANCELABLE_ON_TOUCH_OUTSIDE = "cancelable_oto";
    /** Fragment tag used when the caller does not provide one via {@link #setTag(String)}. */
    public final static String DEFAULT_TAG = "simple_dialog";
    /** Request code used when the caller does not provide one. */
    public final static int DEFAULT_REQUEST_CODE = -42;
    // FIX: these two were mutable (non-final) public statics. They are Bundle keys, used
    // exactly like the other ARG_* constants above, and a stray reassignment would silently
    // break theme propagation between builder and fragment — so they must be final.
    public final static String ARG_USE_DARK_THEME = "usedarktheme";
    public final static String ARG_USE_LIGHT_THEME = "uselighttheme";

    protected final Context mContext;
    protected final FragmentManager mFragmentManager;
    protected final Class<? extends BaseDialogFragment> mClass;

    private String mTag = DEFAULT_TAG;
    private int mRequestCode = DEFAULT_REQUEST_CODE;
    private Fragment mTargetFragment;
    private boolean mCancelable = true;
    private boolean mCancelableOnTouchOutside = true;
    private boolean mUseDarkTheme = false;
    private boolean mUseLightTheme = false;

    public BaseDialogBuilder(Context context, FragmentManager fragmentManager, Class<? extends BaseDialogFragment> clazz) {
        mFragmentManager = fragmentManager;
        // Keep only the application context so the builder cannot leak an Activity.
        mContext = context.getApplicationContext();
        mClass = clazz;
    }

    /** @return this builder typed as the concrete subclass (simulated self-type). */
    protected abstract T self();

    /** @return the arguments Bundle the concrete builder prepares for its fragment. */
    protected abstract Bundle prepareArguments();

    public T setCancelable(boolean cancelable) {
        mCancelable = cancelable;
        return self();
    }

    public T setCancelableOnTouchOutside(boolean cancelable) {
        mCancelableOnTouchOutside = cancelable;
        if (cancelable) {
            // A dialog that is cancelable on outside touch must be cancelable at all.
            mCancelable = cancelable;
        }
        return self();
    }

    public T setTargetFragment(Fragment fragment, int requestCode) {
        mTargetFragment = fragment;
        mRequestCode = requestCode;
        return self();
    }

    public T setRequestCode(int requestCode) {
        mRequestCode = requestCode;
        return self();
    }

    public T setTag(String tag) {
        mTag = tag;
        return self();
    }

    public T useDarkTheme() {
        mUseDarkTheme = true;
        return self();
    }

    public T useLightTheme() {
        mUseLightTheme = true;
        return self();
    }

    /**
     * Instantiates the dialog fragment and transfers all builder state into its arguments.
     * When a target fragment is set, the request code is delivered via
     * {@link Fragment#setTargetFragment(Fragment, int)} instead of the arguments Bundle.
     */
    private BaseDialogFragment create() {
        final Bundle args = prepareArguments();
        final BaseDialogFragment fragment = (BaseDialogFragment) Fragment.instantiate(mContext, mClass.getName(), args);
        args.putBoolean(ARG_CANCELABLE_ON_TOUCH_OUTSIDE, mCancelableOnTouchOutside);
        args.putBoolean(ARG_USE_DARK_THEME, mUseDarkTheme);
        args.putBoolean(ARG_USE_LIGHT_THEME, mUseLightTheme);
        if (mTargetFragment != null) {
            fragment.setTargetFragment(mTargetFragment, mRequestCode);
        } else {
            args.putInt(ARG_REQUEST_CODE, mRequestCode);
        }
        fragment.setCancelable(mCancelable);
        return fragment;
    }

    public DialogFragment show() {
        BaseDialogFragment fragment = create();
        fragment.show(mFragmentManager, mTag);
        return fragment;
    }

    /**
     * Like show() but allows the commit to be executed after an activity's state is saved. This
     * is dangerous because the commit can be lost if the activity needs to later be restored from
     * its state, so this should only be used for cases where it is okay for the UI state to change
     * unexpectedly on the user.
     */
    public DialogFragment showAllowingStateLoss() {
        BaseDialogFragment fragment = create();
        fragment.showAllowingStateLoss(mFragmentManager, mTag);
        return fragment;
    }
}
apache-2.0
janicduplessis/buck
src/com/facebook/buck/graph/DefaultDirectedAcyclicGraph.java
954
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.graph;

import com.google.common.base.Preconditions;

/**
 * A {@link DefaultTraversableGraph} whose acyclicity is verified once at construction time,
 * so holders of a reference to this type may rely on the graph being a DAG.
 */
public class DefaultDirectedAcyclicGraph<T> extends DefaultTraversableGraph<T>
    implements DirectedAcyclicGraph<T> {

  /**
   * @param graph the directed graph to wrap; must contain no cycles.
   * @throws IllegalArgumentException if {@code graph} contains a cycle.
   */
  public DefaultDirectedAcyclicGraph(MutableDirectedGraph<T> graph) {
    super(graph);
    // Fail fast: reject cyclic input here rather than letting a later traversal misbehave.
    Preconditions.checkArgument(super.isAcyclic());
  }
}
apache-2.0
damienmg/bazel
third_party/java/proguard/proguard5.3.3/src/proguard/gui/splash/Sprite.java
1374
/*
 * ProGuard -- shrinking, optimization, obfuscation, and preverification
 *             of Java bytecode.
 *
 * Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package proguard.gui.splash;

import java.awt.*;

/**
 * This interface describes objects that can paint themselves, possibly varying
 * as a function of time. Implementations are invoked repeatedly by the splash
 * animation with a monotonically increasing time value.
 *
 * @author Eric Lafortune
 */
public interface Sprite
{
    /**
     * Paints the object.
     *
     * @param graphics the Graphics to paint on.
     * @param time     the time since the start of the animation, expressed in
     *                 milliseconds.
     */
    public void paint(Graphics graphics, long time);
}
apache-2.0
vishnujayvel/QAGenerator
src/info/ephyra/answerselection/filters/WebTermImportanceFilter.java
30379
package info.ephyra.answerselection.filters;

import info.ephyra.io.Logger;
import info.ephyra.io.MsgPrinter;
import info.ephyra.nlp.NETagger;
import info.ephyra.nlp.OpenNLP;
import info.ephyra.nlp.SnowballStemmer;
import info.ephyra.nlp.StanfordNeTagger;
import info.ephyra.nlp.indices.WordFrequencies;
import info.ephyra.querygeneration.Query;
import info.ephyra.querygeneration.generators.BagOfWordsG;
import info.ephyra.questionanalysis.AnalyzedQuestion;
import info.ephyra.questionanalysis.KeywordExtractor;
import info.ephyra.questionanalysis.QuestionNormalizer;
import info.ephyra.search.Result;
import info.ephyra.trec.TREC13To16Parser;
import info.ephyra.trec.TRECTarget;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;

/**
 * <p>A web reinforcement approach that ranks answer candidates for definitional
 * questions. Several variations of the target of the question are generated and
 * are used to retrieve relevant text snippets from the web. The frequencies of
 * content words in these snippets are counted and the scores of the answers are
 * adjusted to assign higher scores to candidates that cover frequent keywords.
 * This approach is based on the assumption that terms that often cooccur with
 * the target provide relevant information on the target that should be covered
 * by the answers.</p>
 *
 * <p>Several instances of this web term importance filter have been implemented
 * that use different sources for text snippets.</p>
 *
 * <p>This class extends the class <code>Filter</code>.</p>
 *
 * @author Guido Sautter
 * @version 2008-02-15
 */
public abstract class WebTermImportanceFilter extends Filter {
    protected static final String person = "person";
    protected static final String organization = "organization";
    protected static final String location = "location";
    protected static final String event = "event";

    public static final int NO_NORMALIZATION = 0;
    public static final int LINEAR_LENGTH_NORMALIZATION = 1;
    public static final int SQUARE_ROOT_LENGTH_NORMALIZATION = 2;
    public static final int LOG_LENGTH_NORMALIZATION = 3;
    public static final int LOG_10_LENGTH_NORMALIZATION = 4;

    private final int normalizationMode;
    private final int tfNormalizationMode;
    private final boolean isCombined;

//  protected static final String WIKIPEDIA = "wikipedia";

    /**
     * @param normalizationMode   how candidate scores are normalized by sentence length
     *                            (one of the *_NORMALIZATION constants)
     * @param tfNormalizationMode how term counts are dampened by corpus term frequency
     * @param isCombined          whether this filter is combined with other scorers (keeps
     *                            0-scored results and adds position-dependent extra scores)
     */
    protected WebTermImportanceFilter(int normalizationMode, int tfNormalizationMode, boolean isCombined) {
        this.normalizationMode = normalizationMode;
        this.tfNormalizationMode = tfNormalizationMode;
        this.isCombined = isCombined;
    }

    /**
     * fetch the term frequencies in the top X result snippets of a web search
     * for some target
     *
     * @param targets an array of strings containing the targets
     * @return a HashMap mapping the terms in the web search results to their
     *         frequency in the snippets
     */
    public abstract HashMap<String, TermCounter> getTermCounters(String[] targets);

    /**
     * @author sautter
     *
     * Mutable integer class to avoid creating new objects all the time
     */
    protected class TermCounter {
        private int value = 0;

        /** Constructor */
        protected TermCounter() {}

        /**
         * Constructor
         * @param value the initial value
         */
        protected TermCounter(int value) {
            this.value = value;
        }

        /** @return the value of this TermCounter */
        public int getValue() {
            return this.value;
        }

        /** increment the value of this TermCounter by 1 */
        public void increment() {
            this.value++;
        }

        /** increment the value of this TermCounter by <code>inc</code>
         * @param inc */
        public void increment(int inc) {
            this.value += inc;
        }

        /** decrement the value of this TermCounter by 1 */
        public void decrement() {
            this.value--;
        }

        /** decrement the value of this TermCounter by <code>dec</code>
         * @param dec */
        public void decrement(int dec) {
            this.value -= dec;
        }

        /** multiply the value of this TermCounter times <code>fact</code>
         * @param fact */
        public void multiplyValue(int fact) {
            this.value *= fact;
        }

        /** divide the value of this TermCounter by <code>denom</code> (integer division)
         * @param denom */
        public void divideValue(int denom) {
            this.value /= denom;
        }
    }

    /**
     * produce the target variations for a given target
     *
     * @param target the original target String
     * @return an array of strings containing the variations of the target
     *         String, including the original target
     */
    public String[] getTargets(String target) {
        ArrayList<String> targets = new ArrayList<String>();
        targets.add(target);
        boolean isPerson = false;
        boolean brackets = false;

        // If target starts with "the", "a", or "an", remove it.
        if (target.startsWith("the ")) {
            targets.add(target.substring(4, target.length()));
        } else if (target.startsWith("an ")) {
            targets.add(target.substring(3, target.length()));
        } else if (target.startsWith("a ")) {
            targets.add(target.substring(2, target.length()));
        }

        String targetType = this.checkType(target);
        if (TEST_TARGET_GENERATION) {
            if (targetType == null) System.out.println(" target type could not be determined");
            else System.out.println(" target type is " + targetType);
        }

        if (person.equals(targetType)) {
            // (complete) target is of type Person, no further processing is necessary
            isPerson = true;

        // split parts in brackets from parts not in brackets:
        // "Norwegian Cruise Lines (NCL)" --> "Norwegian Cruise Lines" + "NCL"
        } else if (target.contains("(") && target.contains(")")) {
            int i1 = target.indexOf("(");
            int i2 = target.indexOf(")");
            String s1 = target.substring(0, i1 - 1);
            String s2 = target.substring(i1 + 1, i2);
//          Log.println("*** '"+s1+"' '"+s2+"'", true);
            targets.clear();
            targets.add(s1);
            targets.add(s2);
//          Log.println(" "+target+" contains brackets. No further processing necessary.", true);
            brackets = true;
        } else if (this.cutExtension(target, targets)) {
            // do nothing, it's in the cutExtensions method
        } else if (target.endsWith("University")) {
            // chop off "University"
            String toAdd = target.substring(0, target.length() - 11);
            targets.add(toAdd);
        } else if (target.endsWith("International")) {
            // chop off "International"
            String toAdd = target.substring(0, target.length() - 14);
            targets.add(toAdd);
        } else if (target.endsWith("Corporation")) {
            // chop off "Corporation"
            String toAdd = target.substring(0, target.length() - 12);
            targets.add(toAdd);
        } else {
            this.extractUpperCaseParts(targets);
            HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
            for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
                String item = iter.next();
                String type = this.checkType(item);
                if (person.equals(type)) {
                    // after removing the first NP, check again if target is
                    // Person (example: "philanthropist Alberto Vilar")
//                  Log.println(" "+item+" is Person. No further processing necessary.", true);
                    // attention, this also discards events containing person names!!!
                    // maybe remove this call
                    //targets.clear();
                    targets.add(item);
                }
            }
        }

        if (isPerson) {
            targets.add("\"" + target + "\"");
//          // own extension: add 'wikipedia' to target
//          targets.add(target + " " + WIKIPEDIA);
//          targets.add("\"" + target + "\" " + WIKIPEDIA);
        } else if (!brackets) { // maybe remove condition
            //targets = this.processLongTargets(targets);
            this.extractUpperCaseParts(targets);
            //targets = this.checkForEvent(targets); // described effect done in extractUpperCaseParts(), uses NLP stuff we don't have
            //targets = this.checkForDeterminer(targets); // bad thing, uses to many miraculous external classen we don't have
            //targets = this.removeAttachedPP(targets); // done in extractUpperCaseParts()
            //targets = this.cutFirstNpInNpSequence(targets);
            this.cutFirstNpInNpSequence(targets);
            //targets = this.removeNounAfterNounGroup(targets); // done in extractUpperCaseParts()
            // own extension: extract acronyms 'Basque ETA' --> 'ETA'
            this.extractAcronyms(targets);
            //targets = this.postProcess(targets);
            this.postProcess(targets);
        }

        HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
        for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
            String item = iter.next();
            String type = this.checkType(item);
            if (organization.equals(type)/* && !brackets*/) {
                targets.add("the " + item);
                if (!brackets) targets.add("the " + target);
            } else if (person.equals(type)) {
                targets.add("\"" + item + "\"");
//              // own extension: add 'wikipedia' to target
//              targets.add(item + " " + WIKIPEDIA);
//              targets.add("\"" + item + "\" " + WIKIPEDIA);
            }
            // own extension: add determiner to acronyms
            if (item.matches("([A-Z]){3,}")) targets.add("the " + item);
            else if (item.matches("([A-Z]\\.){2,}")) targets.add("the " + item);
        }

        // own extension: add quoted version of title case targets like 'The Daily Show'
        duplicateFreeTargets = new LinkedHashSet<String>(targets);
        for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
            String item = iter.next();
            if (item.matches("([A-Z][a-z]++)++")) {
                targets.add("\"" + item + "\"");
//              // own extension: add 'wikipedia' to target
//              targets.add(item + " " + WIKIPEDIA);
//              targets.add("\"" + item + "\" " + WIKIPEDIA);
            }
        }

        // own extension: always use quoted version of original target if it has more than one word
        String[] targetTokens = NETagger.tokenize(target);
        if (targetTokens.length > 1) {
            targets.add("\"" + target + "\"");
//          // own extension: add 'wikipedia' to target
//          targets.add(target + " " + WIKIPEDIA);
//          targets.add("\"" + target + "\" " + WIKIPEDIA);
        }

        duplicateFreeTargets = new LinkedHashSet<String>(targets);
        return duplicateFreeTargets.toArray(new String[duplicateFreeTargets.size()]);
    }

    /**
     * find the NE type of a target
     *
     * @param target the target String to check
     * @return the NE type of target, or null, if the type couldn't be determined
     */
    private String checkType(String target) {
        if (!StanfordNeTagger.isInitialized()) StanfordNeTagger.init();
        HashMap<String, String[]> nesByType = StanfordNeTagger.extractNEs(target);
        ArrayList<String> neTypes = new ArrayList<String>(nesByType.keySet());
        for (int t = 0; t < neTypes.size(); t++) {
            String type = neTypes.get(t);
            String[] nes = nesByType.get(type);
            for (int n = 0; n < nes.length; n++)
                if (nes[n].equals(target)) return type.replace("NE", "");
        }
        return null;
    }

    /**
     * cut tailing words like "University", "International", "Corporation":
     * "Microsoft Corporation" --> "Microsoft" and add the non-cut part to target list
     *
     * @param target the target String to cut
     * @param targets the target list to add the cut part to
     * @return true if a cut target was added, false otherwise
     */
    private boolean cutExtension(String target, ArrayList<String> targets) {
        // lazily build the lookup set on first use
        if (this.extensionList.isEmpty())
            for (int i = 0; i < extensions.length; i++)
                this.extensionList.add(extensions[i]);
        String[] targetTokens = target.split("\\s");
        String last = targetTokens[targetTokens.length - 1];
        if (this.extensionList.contains(last) && (targetTokens.length > 1)) {
            String cutTarget = targetTokens[0];
            for (int i = 1; i < (targetTokens.length - 1); i++)
                cutTarget += " " + targetTokens[i];
            targets.add(cutTarget);
            return true;
        }
        return false;
    }

    private HashSet<String> extensionList = new HashSet<String>();
    private static final String[] extensions = {
            "University",
            "Corporation",
            "International",
            // last year's winner's list ends here
            "Incorporated",
            "Inc.",
            "Comp.",
            "Corp.",
            "Co.",
            "Museum",
            "<to be extended>"
        };

    /** extract non lower case parts from the targets:
     * "the film 'Star Wars'" --> "'Star Wars'"
     * "1998 indictment and trial of Susan McDougal" --> "Susan McDougal"
     * "Miss Universe 2000 crowned" --> "Miss Universe 2000"
     * "Abraham from the bible" --> "Abraham"
     * "Gobi desert" --> "Gobi"
     *
     * @param targets the list of targets
     */
    private void extractUpperCaseParts(ArrayList<String> targets) {
        HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
        for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
            String target = iter.next();
            String[] targetTokens = target.split("\\s");
            String upperCasePart = null;
            int i = 0;
            while (i < targetTokens.length) {
                // find start of next upper case part
                while ((i < targetTokens.length) && !Character.isUpperCase(targetTokens[i].charAt(0))) i++;
                // start upper case part
                if (i < targetTokens.length) {
                    upperCasePart = targetTokens[i];
                    i++;
                }
                // collect non-lower-case part
                while ((i < targetTokens.length) && !Character.isLowerCase(targetTokens[i].charAt(0))) {
                    upperCasePart += " " + targetTokens[i];
                    i++;
                }
                if (upperCasePart != null) {
                    targets.add(upperCasePart);
                    upperCasePart = null;
                }
            }
        }
    }

    /** extract acronyms from the targets:
     * "Basque ETA" --> "ETA"
     *
     * @param targets the list of targets
     */
    private void extractAcronyms(ArrayList<String> targets) {
        HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
        for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
            String target = iter.next();
            String[] targetTokens = target.split("\\s");
            for (String t : targetTokens) {
                if (t.matches("([A-Z]){3,}")) {
                    targets.add(t);
                } else if (t.matches("([A-Z]\\.){2,}")) {
                    targets.add(t);
                }
            }
        }
    }

    /** remove first NP in a sequence of NPs:
     * "the film 'Star Wars'" --> "'Star Wars'"
     *
     * @param targets the list of targets
     */
    private void cutFirstNpInNpSequence(ArrayList<String> targets) {
        HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
        for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
            String target = iter.next();
            // tokenize and tag sentence
            String[] targetTokens = OpenNLP.tokenize(target);
            String[] posTags = OpenNLP.tagPos(targetTokens);
            String[] chunkTags = OpenNLP.tagChunks(targetTokens, posTags);
            String np = null;
            int i = 0;
            // find first NP
            while ((i < targetTokens.length) && !"B-NP".equals(chunkTags[i])) i++;
            // skip first NP
            i++;
            // find next NP
            while ((i < targetTokens.length) && !"B-NP".equals(chunkTags[i])) i++;
            // start NP
            if (i < targetTokens.length) {
                np = targetTokens[i];
                i++;
            }
            // add rest of NP
            while (i < targetTokens.length) {
                np += " " + targetTokens[i];
                i++;
            }
            if (np != null) targets.add(np);
        }
    }

    /** take care of remaining brackets
     *
     * @param targets the list of targets
     */
    private void postProcess(ArrayList<String> targets) {
        HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
        targets.clear();
        for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
            String target = iter.next().trim();
            boolean add = true;
            if (target.startsWith("(") && target.endsWith(")"))
                target = target.substring(1, target.length() - 1).trim();
            // drop targets with unbalanced brackets
            if (target.startsWith("(") != target.endsWith(")")) add = false;
            // own extension: cut leading and tailing apostrophes
            while (target.startsWith("'")) target = target.substring(1).trim();
            while (target.endsWith("'")) target = target.substring(0, (target.length() - 1)).trim();
            // own extension: cut leading single letters, but keep determiner "a"
            while (target.matches("[b-z]\\s.++")) target = target.substring(2);
            // own extension: filter one-char targets
            if (target.length() < 2) add = false;
            if (add) targets.add(target);
        }
    }

    /**
     * Increment the score of each result snippet for each word in it according
     * to the number of top-100 web search engine snippets containing this
     * particular word. This favors snippets that provide information given
     * frequently and thus likely to be more important with regard to the
     * target.
     *
     * @param results array of <code>Result</code> objects
     * @return extended array of <code>Result</code> objects
     */
    @SuppressWarnings("unchecked")
    public Result[] apply(Result[] results) {
        // catch empty result
        if (results.length == 0) return results;

        // produce target variations
        String target = results[0].getQuery().getOriginalQueryString();
        System.out.println("WebTermImportanceFilter:\n processing target '" + target + "'");

        HashMap<String, TermCounter> rawTermCounters = this.cacheLookup(target);

        // query generation test
        if (TEST_TARGET_GENERATION) {
            String[] targets = this.getTargets(target);
            System.out.println(" generated web serach Strings:");
            for (String t : targets) System.out.println(" - " + t);

            // query generation test only
            return results;

        // cache miss
        } else if (rawTermCounters == null) {
            String[] targets = this.getTargets(target);
            System.out.println(" web serach Strings are");
            for (String t : targets) System.out.println(" - " + t);
            rawTermCounters = this.getTermCounters(targets);
            this.cache(target, rawTermCounters);
        }

        // get target tokens
        HashSet<String> rawTargetTerms = new HashSet<String>();
        String[] targetTokens = OpenNLP.tokenize(target);
        for (String tt : targetTokens)
            if (Character.isLetterOrDigit(tt.charAt(0)))
                rawTargetTerms.add(tt);

        // stem terms, collect target terms
        HashMap<String, TermCounter> termCounters = new HashMap<String, TermCounter>();//this.getTermCounters(targets);
        HashSet<String> targetTerms = new HashSet<String>();
        ArrayList<String> rawTerms = new ArrayList<String>(rawTermCounters.keySet());
        for (String rawTerm : rawTerms) {
            String stemmedTerm = SnowballStemmer.stem(rawTerm.toLowerCase());
            if (!termCounters.containsKey(stemmedTerm))
                termCounters.put(stemmedTerm, new TermCounter());
            termCounters.get(stemmedTerm).increment(rawTermCounters.get(rawTerm).getValue());
            if (rawTargetTerms.contains(rawTerm))
                targetTerms.add(stemmedTerm);
        }

        // get overall recall (since 20070718)
        int termCount = this.getCountSum(termCounters);
        int termCountLog = ((termCount > 100) ? ((int) Math.log10(termCount)) : 2);
        System.out.println("WebTermImportanceFilter: termCountLog is " + termCountLog);

        // score results
        ArrayList<Result> resultList = new ArrayList<Result>();
        boolean goOn;
        do {
            goOn = false;
            ArrayList<Result> rawResults = new ArrayList<Result>();

            // score all results
            for (Result r : results) {
                if (r.getScore() != Float.NEGATIVE_INFINITY) {

                    // tokenize sentence
                    String[] sentence = NETagger.tokenize(r.getAnswer());
                    float importance = 0;

                    // scan sentence for terms from web result
                    for (int i = 0; i < sentence.length; i++) {
                        String term = sentence[i];
                        if ((term.length() > 1)/* && !StringUtils.isSubsetKeywords(term, r.getQuery().getAnalyzedQuestion().getQuestion()) && !FunctionWords.lookup(term)*/) {
                            term = SnowballStemmer.stem(term.toLowerCase());
                            TermCounter count = termCounters.get(term);
                            if (count != null) {
                                double tf; // 20070706
                                if (this.tfNormalizationMode == NO_NORMALIZATION) tf = 1;
                                else if (this.tfNormalizationMode == LOG_LENGTH_NORMALIZATION) {
                                    tf = WordFrequencies.lookup(sentence[i].toLowerCase());
                                    if (tf > Math.E) tf = Math.log(tf);
                                    else tf = 1;
                                // FIX: this branch previously tested LOG_LENGTH_NORMALIZATION a
                                // second time, making it unreachable and silently treating
                                // LOG_10_LENGTH_NORMALIZATION as NO_NORMALIZATION (tf = 1). The
                                // base-10 body (Math.log10, threshold 10) clearly belongs to
                                // LOG_10_LENGTH_NORMALIZATION.
                                } else if (this.tfNormalizationMode == LOG_10_LENGTH_NORMALIZATION) {
                                    tf = WordFrequencies.lookup(sentence[i].toLowerCase());
                                    if (tf > 10) tf = Math.log10(tf);
                                    else tf = 1;
                                } else tf = 1;
                                importance += (count.getValue() / tf);
                            }
                        }
                    }

                    // don't throw out 0-scored results for combining approaches
                    if (this.isCombined || (importance > 0)) {
                        if (this.normalizationMode == NO_NORMALIZATION) r.setScore(importance);
                        else if (this.normalizationMode == LINEAR_LENGTH_NORMALIZATION) r.setScore(importance / sentence.length); // try normalized score
                        else if (this.normalizationMode == SQUARE_ROOT_LENGTH_NORMALIZATION) r.setScore(importance / ((float) Math.sqrt(sentence.length))); // try normalized score
                        else if (this.normalizationMode == LOG_LENGTH_NORMALIZATION) r.setScore(importance / (1 + ((float) Math.log(sentence.length)))); // try normalized score
                        else if (this.normalizationMode == LOG_10_LENGTH_NORMALIZATION) r.setScore(importance / (1 + ((float) Math.log10(sentence.length)))); // try normalized score
                        rawResults.add(r);
                    }
                }
            }

            if (rawResults.size() != 0) {
                // find top result
                Collections.sort(rawResults);
                Collections.reverse(rawResults);
                Result top = rawResults.remove(0);
                resultList.add(top);

                // decrement scores of top result terms so later picks favor new information
                String[] sentence = NETagger.tokenize(top.getAnswer());
                for (int i = 0; i < sentence.length; i++) {
                    String term = SnowballStemmer.stem(sentence[i].toLowerCase());
                    TermCounter count = termCounters.get(term);
                    if (count != null) {
//                      if (targetTerms.contains(term)) count.divideValue(2);
//                      else count.divideValue(5);
//                      if (targetTerms.contains(term)) count.divideValue(2);
//                      else count.divideValue(3);
//                      if (targetTerms.contains(term)) count.divideValue(2);
//                      else count.divideValue(2);
                        // 20070718
                        if (targetTerms.contains(term)) count.divideValue(2);
                        else count.divideValue(termCountLog);
                        if (count.getValue() == 0) termCounters.remove(term);
                    }
                }

                // prepare remaining results for next round
                results = rawResults.toArray(new Result[rawResults.size()]);
                goOn = true;
            }
        } while (goOn);

        Collections.sort(resultList);
        Collections.reverse(resultList);

        // set position-dependent extra score for combining approaches
        if (this.isCombined) {
            float eScore = 100;
            for (Result r : resultList) {
                r.addExtraScore((this.getClass().getName() + this.normalizationMode), eScore);
                eScore *= 0.9f;
            }
        }

        return resultList.toArray(new Result[resultList.size()]);
    }

//  private static String lastTarget = null;
//  private static String lastCacherClassName = null;
//  private static HashMap<String, TermCounter> lastTargetTermCounters = null;

    /** One cached web-lookup result (keyed by filter class name in the cache map below). */
    private static class CacheEntry {
        String target;
        HashMap<String, TermCounter> termCounters;
        public CacheEntry(String target, HashMap<String, TermCounter> termCounters) {
            this.target = target;
            this.termCounters = termCounters;
        }
    }

    // one entry per concrete filter class; holds only the most recent target
    private static HashMap<String, CacheEntry> cache = new HashMap<String, CacheEntry>();

    private void cache(String target, HashMap<String, TermCounter> termCounters) {
        String className = this.getClass().getName();
        System.out.println("WebTermImportanceFilter: caching web lookup result for target '" + target + "' from class '" + className + "'");
        CacheEntry ce = new CacheEntry(target, termCounters);
        cache.put(className, ce);
//      lastTarget = target;
//      lastCacherClassName = className;
//      lastTargetTermCounters = termCounters;
    }

    private HashMap<String, TermCounter> cacheLookup(String target) {
        String className = this.getClass().getName();
        System.out.println("WebTermImportanceFilter: doing cache lookup result for target '" + target + "', class '" + className + "'");
        CacheEntry ce = cache.get(className);
        if (ce == null) {
            System.out.println(" --> cache miss, no entry for '" + className + "' so far");
            return null;
        } else if (target.equals(ce.target)) {
            System.out.println(" --> cache hit");
            return ce.termCounters;
        } else {
            System.out.println(" --> cache miss, last target for '" + className + "' is '" + ce.target + "'");
            return null;
        }
    }

    /** add all the term counters in source to target (perform a union of the key sets, summing up the counters)
     * @param source
     * @param target
     */
    protected void addTermCounters(HashMap<String, TermCounter> source, HashMap<String, TermCounter> target) {
        for (Iterator<String> keys = source.keySet().iterator(); keys.hasNext();) {
            String key = keys.next();
            int count = source.get(key).getValue();
            if (!target.containsKey(key)) target.put(key, new TermCounter());
            target.get(key).increment(count);
        }
    }

    /** get the maximum count out of a set of counters
     * @param counters
     */
    protected int getMaxCount(HashMap<String, TermCounter> counters) {
        int max = 0;
        for (Iterator<String> keys = counters.keySet().iterator(); keys.hasNext();)
            max = Math.max(max, counters.get(keys.next()).getValue());
        return max;
    }

    /** get the sum of a set of counters
     * @param counters
     */
    protected int getCountSum(HashMap<String, TermCounter> counters) {
        int sum = 0;
        for (Iterator<String> keys = counters.keySet().iterator(); keys.hasNext();)
            sum += counters.get(keys.next()).getValue();
        return sum;
    }

    /** get the sum of a set of counters, each one minus the count in another set of counters
     * @param counters
     * @param compare
     */
    protected int sumDiff(HashMap<String, TermCounter> counters, HashMap<String, TermCounter> compare) {
        int diffSum = 0;
        for (Iterator<String> keys = counters.keySet().iterator(); keys.hasNext();) {
            String key = keys.next();
            int count = counters.get(key).getValue();
            int comp = (compare.containsKey(key) ? compare.get(key).getValue() : 0);
            diffSum += Math.max((count - comp), 0);
        }
        return diffSum;
    }

    // when true, apply() only prints the generated search strings and returns unchanged results
    protected static boolean TEST_TARGET_GENERATION = false;

    public static void main(String[] args) {
        TEST_TARGET_GENERATION = true;

        MsgPrinter.enableStatusMsgs(true);
        MsgPrinter.enableErrorMsgs(true);

        // create tokenizer
        MsgPrinter.printStatusMsg("Creating tokenizer...");
        if (!OpenNLP.createTokenizer("res/nlp/tokenizer/opennlp/EnglishTok.bin.gz"))
            MsgPrinter.printErrorMsg("Could not create tokenizer.");
//      LingPipe.createTokenizer();

        // create sentence detector
//      MsgPrinter.printStatusMsg("Creating sentence detector...");
//      if (!OpenNLP.createSentenceDetector("res/nlp/sentencedetector/opennlp/EnglishSD.bin.gz"))
//          MsgPrinter.printErrorMsg("Could not create sentence detector.");
//      LingPipe.createSentenceDetector();

        // create stemmer
        MsgPrinter.printStatusMsg("Creating stemmer...");
        SnowballStemmer.create();

        // create part of speech tagger
        MsgPrinter.printStatusMsg("Creating POS tagger...");
        if (!OpenNLP.createPosTagger("res/nlp/postagger/opennlp/tag.bin.gz", "res/nlp/postagger/opennlp/tagdict"))
            MsgPrinter.printErrorMsg("Could not create OpenNLP POS tagger.");
//      if (!StanfordPosTagger.init("res/nlp/postagger/stanford/" +
//              "train-wsj-0-18.holder"))
//          MsgPrinter.printErrorMsg("Could not create Stanford POS tagger.");

        // create chunker
        MsgPrinter.printStatusMsg("Creating chunker...");
        if (!OpenNLP.createChunker("res/nlp/phrasechunker/opennlp/" + "EnglishChunk.bin.gz"))
            MsgPrinter.printErrorMsg("Could not create chunker.");

        // create named entity taggers
        MsgPrinter.printStatusMsg("Creating NE taggers...");
        NETagger.loadListTaggers("res/nlp/netagger/lists/");
        NETagger.loadRegExTaggers("res/nlp/netagger/patterns.lst");
        MsgPrinter.printStatusMsg(" ...loading models");
//      if (!NETagger.loadNameFinders("res/nlp/netagger/opennlp/"))
//          MsgPrinter.printErrorMsg("Could not create OpenNLP NE tagger.");
        if (!StanfordNeTagger.isInitialized() && !StanfordNeTagger.init())
            MsgPrinter.printErrorMsg("Could not create Stanford NE tagger.");
        MsgPrinter.printStatusMsg(" ...done");

        WebTermImportanceFilter wtif = new TargetGeneratorTest(NO_NORMALIZATION);
        TRECTarget[] targets = TREC13To16Parser.loadTargets(args[0]);

        for (TRECTarget target : targets) {
            String question = target.getTargetDesc();

            // query generation
            MsgPrinter.printGeneratingQueries();
            String qn = QuestionNormalizer.normalize(question);
            MsgPrinter.printNormalization(qn); // print normalized question string
            Logger.logNormalization(qn); // log normalized question string
            String[] kws = KeywordExtractor.getKeywords(qn);
            AnalyzedQuestion aq = new AnalyzedQuestion(question);
            aq.setKeywords(kws);
            aq.setFactoid(false);

            Query[] queries = new BagOfWordsG().generateQueries(aq);
            for (int q = 0; q < queries.length; q++)
                queries[q].setOriginalQueryString(question);

            Result[] results = new Result[1];
            results[0] = new Result("This would be the answer", queries[0]);

            wtif.apply(results);
        }
    }

    /** Minimal concrete subclass used by main() to exercise target generation only. */
    private static class TargetGeneratorTest extends WebTermImportanceFilter {
        TargetGeneratorTest(int normalizationMode) {
            super(normalizationMode, normalizationMode, false);
        }
        public HashMap<String, TermCounter> getTermCounters(String[] targets) {
            return new HashMap<String, TermCounter>();
        }
    }
}
gpl-3.0
dslomov/bazel
third_party/java/proguard/proguard5.3.3/src/proguard/classfile/attribute/annotation/target/visitor/TargetInfoVisitor.java
3983
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.classfile.attribute.annotation.target.visitor; import proguard.classfile.*; import proguard.classfile.attribute.CodeAttribute; import proguard.classfile.attribute.annotation.*; import proguard.classfile.attribute.annotation.target.*; /** * This interface specifies the methods for a visitor of <code>TargetInfo</code> * objects. 
 *
 * @author Eric Lafortune
 */
public interface TargetInfoVisitor
{
    /** Visits a type parameter target of a type annotation on a class. */
    public void visitTypeParameterTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo);

    /** Visits a type parameter target of a type annotation on a method. */
    public void visitTypeParameterTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo);

    /** Visits a supertype target (extends/implements clause) of a class type annotation. */
    public void visitSuperTypeTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, SuperTypeTargetInfo superTypeTargetInfo);

    /** Visits a type parameter bound target of a type annotation on a class. */
    public void visitTypeParameterBoundTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo);

    /** Visits a type parameter bound target of a type annotation on a field. */
    public void visitTypeParameterBoundTargetInfo(Clazz clazz, Field field, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo);

    /** Visits a type parameter bound target of a type annotation on a method. */
    public void visitTypeParameterBoundTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo);

    /** Visits an empty target of a type annotation on a field. */
    public void visitEmptyTargetInfo(Clazz clazz, Field field, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo);

    /** Visits an empty target of a type annotation on a method. */
    public void visitEmptyTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo);

    /** Visits a formal parameter target of a type annotation on a method. */
    public void visitFormalParameterTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, FormalParameterTargetInfo formalParameterTargetInfo);

    /** Visits a throws clause target of a type annotation on a method. */
    public void visitThrowsTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, ThrowsTargetInfo throwsTargetInfo);

    /** Visits a local variable target of a type annotation inside a code attribute. */
    public void visitLocalVariableTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, LocalVariableTargetInfo localVariableTargetInfo);

    /** Visits an exception handler (catch) target of a type annotation inside a code attribute. */
    public void visitCatchTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, CatchTargetInfo catchTargetInfo);

    /** Visits a bytecode offset target of a type annotation inside a code attribute. */
    public void visitOffsetTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, OffsetTargetInfo offsetTargetInfo);

    /** Visits a type argument target of a type annotation inside a code attribute. */
    public void visitTypeArgumentTargetInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, TypeArgumentTargetInfo typeArgumentTargetInfo);
}
apache-2.0
dennishuo/hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java
18966
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.mapred.IFile.Reader; import org.apache.hadoop.mapred.IFile.Writer; import org.apache.hadoop.mapred.Merger.Segment; import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.CryptoUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <code>BackupStore</code> is an utility class that is used to support * the mark-reset functionality of values 
iterator * * <p>It has two caches - a memory cache and a file cache where values are * stored as they are iterated, after a mark. On reset, values are retrieved * from these caches. Framework moves from the memory cache to the * file cache when the memory cache becomes full. * */ @InterfaceAudience.Private @InterfaceStability.Unstable public class BackupStore<K,V> { private static final Logger LOG = LoggerFactory.getLogger(BackupStore.class.getName()); private static final int MAX_VINT_SIZE = 9; private static final int EOF_MARKER_SIZE = 2 * MAX_VINT_SIZE; private final TaskAttemptID tid; private MemoryCache memCache; private FileCache fileCache; List<Segment<K,V>> segmentList = new LinkedList<Segment<K,V>>(); private int readSegmentIndex = 0; private int firstSegmentOffset = 0; private int currentKVOffset = 0; private int nextKVOffset = -1; private DataInputBuffer currentKey = null; private DataInputBuffer currentValue = new DataInputBuffer(); private DataInputBuffer currentDiskValue = new DataInputBuffer(); private boolean hasMore = false; private boolean inReset = false; private boolean clearMarkFlag = false; private boolean lastSegmentEOF = false; private Configuration conf; public BackupStore(Configuration conf, TaskAttemptID taskid) throws IOException { final float bufferPercent = conf.getFloat(JobContext.REDUCE_MARKRESET_BUFFER_PERCENT, 0f); if (bufferPercent > 1.0 || bufferPercent < 0.0) { throw new IOException(JobContext.REDUCE_MARKRESET_BUFFER_PERCENT + bufferPercent); } int maxSize = (int)Math.min( Runtime.getRuntime().maxMemory() * bufferPercent, Integer.MAX_VALUE); // Support an absolute size also. int tmp = conf.getInt(JobContext.REDUCE_MARKRESET_BUFFER_SIZE, 0); if (tmp > 0) { maxSize = tmp; } memCache = new MemoryCache(maxSize); fileCache = new FileCache(conf); tid = taskid; this.conf = conf; LOG.info("Created a new BackupStore with a memory of " + maxSize); } /** * Write the given K,V to the cache. 
* Write to memcache if space is available, else write to the filecache * @param key * @param value * @throws IOException */ public void write(DataInputBuffer key, DataInputBuffer value) throws IOException { assert (key != null && value != null); if (fileCache.isActive()) { fileCache.write(key, value); return; } if (memCache.reserveSpace(key, value)) { memCache.write(key, value); } else { fileCache.activate(); fileCache.write(key, value); } } public void mark() throws IOException { // We read one KV pair in advance in hasNext. // If hasNext has read the next KV pair from a new segment, but the // user has not called next() for that KV, then reset the readSegmentIndex // to the previous segment if (nextKVOffset == 0) { assert (readSegmentIndex != 0); assert (currentKVOffset != 0); readSegmentIndex --; } // just drop segments before the current active segment int i = 0; Iterator<Segment<K,V>> itr = segmentList.iterator(); while (itr.hasNext()) { Segment<K,V> s = itr.next(); if (i == readSegmentIndex) { break; } s.close(); itr.remove(); i++; LOG.debug("Dropping a segment"); } // FirstSegmentOffset is the offset in the current segment from where we // need to start reading on the next reset firstSegmentOffset = currentKVOffset; readSegmentIndex = 0; LOG.debug("Setting the FirsSegmentOffset to " + currentKVOffset); } public void reset() throws IOException { // Create a new segment for the previously written records only if we // are not already in the reset mode if (!inReset) { if (fileCache.isActive) { fileCache.createInDiskSegment(); } else { memCache.createInMemorySegment(); } } inReset = true; // Reset the segments to the correct position from where the next read // should begin. for (int i = 0; i < segmentList.size(); i++) { Segment<K,V> s = segmentList.get(i); if (s.inMemory()) { int offset = (i == 0) ? 
firstSegmentOffset : 0; s.getReader().reset(offset); } else { s.closeReader(); if (i == 0) { s.reinitReader(firstSegmentOffset); s.getReader().disableChecksumValidation(); } } } currentKVOffset = firstSegmentOffset; nextKVOffset = -1; readSegmentIndex = 0; hasMore = false; lastSegmentEOF = false; LOG.debug("Reset - First segment offset is " + firstSegmentOffset + " Segment List Size is " + segmentList.size()); } public boolean hasNext() throws IOException { if (lastSegmentEOF) { return false; } // We read the next KV from the cache to decide if there is any left. // Since hasNext can be called several times before the actual call to // next(), we use hasMore to avoid extra reads. hasMore is set to false // when the user actually consumes this record in next() if (hasMore) { return true; } Segment<K,V> seg = segmentList.get(readSegmentIndex); // Mark the current position. This would be set to currentKVOffset // when the user consumes this record in next(). nextKVOffset = (int) seg.getActualPosition(); if (seg.nextRawKey()) { currentKey = seg.getKey(); seg.getValue(currentValue); hasMore = true; return true; } else { if (!seg.inMemory()) { seg.closeReader(); } } // If this is the last segment, mark the lastSegmentEOF flag and return if (readSegmentIndex == segmentList.size() - 1) { nextKVOffset = -1; lastSegmentEOF = true; return false; } nextKVOffset = 0; readSegmentIndex ++; Segment<K,V> nextSegment = segmentList.get(readSegmentIndex); // We possibly are moving from a memory segment to a disk segment. // Reset so that we do not corrupt the in-memory segment buffer. 
// See HADOOP-5494 if (!nextSegment.inMemory()) { currentValue.reset(currentDiskValue.getData(), currentDiskValue.getLength()); nextSegment.init(null); } if (nextSegment.nextRawKey()) { currentKey = nextSegment.getKey(); nextSegment.getValue(currentValue); hasMore = true; return true; } else { throw new IOException("New segment did not have even one K/V"); } } public void next() throws IOException { if (!hasNext()) { throw new NoSuchElementException("iterate past last value"); } // Reset hasMore. See comment in hasNext() hasMore = false; currentKVOffset = nextKVOffset; nextKVOffset = -1; } public DataInputBuffer nextValue() { return currentValue; } public DataInputBuffer nextKey() { return currentKey; } public void reinitialize() throws IOException { if (segmentList.size() != 0) { clearSegmentList(); } memCache.reinitialize(true); fileCache.reinitialize(); readSegmentIndex = firstSegmentOffset = 0; currentKVOffset = 0; nextKVOffset = -1; hasMore = inReset = clearMarkFlag = false; } /** * This function is called the ValuesIterator when a mark is called * outside of a reset zone. */ public void exitResetMode() throws IOException { inReset = false; if (clearMarkFlag ) { // If a flag was set to clear mark, do the reinit now. 
// See clearMark() reinitialize(); return; } if (!fileCache.isActive) { memCache.reinitialize(false); } } /** For writing the first key and value bytes directly from the * value iterators, pass the current underlying output stream * @param length The length of the impending write */ public DataOutputStream getOutputStream(int length) throws IOException { if (memCache.reserveSpace(length)) { return memCache.dataOut; } else { fileCache.activate(); return fileCache.writer.getOutputStream(); } } /** This method is called by the valueIterators after writing the first * key and value bytes to the BackupStore * @param length */ public void updateCounters(int length) { if (fileCache.isActive) { fileCache.writer.updateCountersForExternalAppend(length); } else { memCache.usedSize += length; } } public void clearMark() throws IOException { if (inReset) { // If we are in the reset mode, we just mark a flag and come out // The actual re initialization would be done when we exit the reset // mode clearMarkFlag = true; } else { reinitialize(); } } private void clearSegmentList() throws IOException { for (Segment<K,V> segment: segmentList) { long len = segment.getLength(); segment.close(); if (segment.inMemory()) { memCache.unreserve(len); } } segmentList.clear(); } class MemoryCache { private DataOutputBuffer dataOut; private int blockSize; private int usedSize; private final BackupRamManager ramManager; // Memory cache is made up of blocks. private int defaultBlockSize = 1024 * 1024; public MemoryCache(int maxSize) { ramManager = new BackupRamManager(maxSize); if (maxSize < defaultBlockSize) { defaultBlockSize = maxSize; } } public void unreserve(long len) { ramManager.unreserve((int)len); } /** * Re-initialize the memory cache. * * @param clearAll If true, re-initialize the ramManager also. 
*/ void reinitialize(boolean clearAll) { if (clearAll) { ramManager.reinitialize(); } int allocatedSize = createNewMemoryBlock(defaultBlockSize, defaultBlockSize); assert(allocatedSize == defaultBlockSize || allocatedSize == 0); LOG.debug("Created a new mem block of " + allocatedSize); } private int createNewMemoryBlock(int requestedSize, int minSize) { int allocatedSize = ramManager.reserve(requestedSize, minSize); usedSize = 0; if (allocatedSize == 0) { dataOut = null; blockSize = 0; } else { dataOut = new DataOutputBuffer(allocatedSize); blockSize = allocatedSize; } return allocatedSize; } /** * This method determines if there is enough space left in the * memory cache to write to the requested length + space for * subsequent EOF makers. * @param length * @return true if enough space is available */ boolean reserveSpace(int length) throws IOException { int availableSize = blockSize - usedSize; if (availableSize >= length + EOF_MARKER_SIZE) { return true; } // Not enough available. Close this block assert (!inReset); createInMemorySegment(); // Create a new block int tmp = Math.max(length + EOF_MARKER_SIZE, defaultBlockSize); availableSize = createNewMemoryBlock(tmp, (length + EOF_MARKER_SIZE)); return (availableSize == 0) ? 
false : true; } boolean reserveSpace(DataInputBuffer key, DataInputBuffer value) throws IOException { int keyLength = key.getLength() - key.getPosition(); int valueLength = value.getLength() - value.getPosition(); int requestedSize = keyLength + valueLength + WritableUtils.getVIntSize(keyLength) + WritableUtils.getVIntSize(valueLength); return reserveSpace(requestedSize); } /** * Write the key and value to the cache in the IFile format * @param key * @param value * @throws IOException */ public void write(DataInputBuffer key, DataInputBuffer value) throws IOException { int keyLength = key.getLength() - key.getPosition(); int valueLength = value.getLength() - value.getPosition(); WritableUtils.writeVInt(dataOut, keyLength); WritableUtils.writeVInt(dataOut, valueLength); dataOut.write(key.getData(), key.getPosition(), keyLength); dataOut.write(value.getData(), value.getPosition(), valueLength); usedSize += keyLength + valueLength + WritableUtils.getVIntSize(keyLength) + WritableUtils.getVIntSize(valueLength); LOG.debug("ID: " + segmentList.size() + " WRITE TO MEM"); } /** * This method creates a memory segment from the existing buffer * @throws IOException */ void createInMemorySegment () throws IOException { // If nothing was written in this block because the record size // was greater than the allocated block size, just return. if (usedSize == 0) { ramManager.unreserve(blockSize); return; } // spaceAvailable would have ensured that there is enough space // left for the EOF markers. 
assert ((blockSize - usedSize) >= EOF_MARKER_SIZE); WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER); WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER); usedSize += EOF_MARKER_SIZE; ramManager.unreserve(blockSize - usedSize); Reader<K, V> reader = new org.apache.hadoop.mapreduce.task.reduce.InMemoryReader<K, V>(null, (org.apache.hadoop.mapred.TaskAttemptID) tid, dataOut.getData(), 0, usedSize, conf); Segment<K, V> segment = new Segment<K, V>(reader, false); segmentList.add(segment); LOG.debug("Added Memory Segment to List. List Size is " + segmentList.size()); } } class FileCache { private LocalDirAllocator lDirAlloc; private final Configuration conf; private final FileSystem fs; private boolean isActive = false; private Path file = null; private IFile.Writer<K,V> writer = null; private int spillNumber = 0; public FileCache(Configuration conf) throws IOException { this.conf = conf; this.fs = FileSystem.getLocal(conf); this.lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR); } void write(DataInputBuffer key, DataInputBuffer value) throws IOException { if (writer == null) { // If spillNumber is 0, we should have called activate and not // come here at all assert (spillNumber != 0); writer = createSpillFile(); } writer.append(key, value); LOG.debug("ID: " + segmentList.size() + " WRITE TO DISK"); } void reinitialize() { spillNumber = 0; writer = null; isActive = false; } void activate() throws IOException { isActive = true; writer = createSpillFile(); } void createInDiskSegment() throws IOException { assert (writer != null); writer.close(); Segment<K,V> s = new Segment<K, V>(conf, fs, file, null, true); writer = null; segmentList.add(s); LOG.debug("Disk Segment added to List. 
Size is " + segmentList.size()); } boolean isActive() { return isActive; } private Writer<K,V> createSpillFile() throws IOException { Path tmp = new Path(MRJobConfig.OUTPUT + "/backup_" + tid.getId() + "_" + (spillNumber++) + ".out"); LOG.info("Created file: " + tmp); file = lDirAlloc.getLocalPathForWrite(tmp.toUri().getPath(), -1, conf); FSDataOutputStream out = fs.create(file); out = CryptoUtils.wrapIfNecessary(conf, out); return new Writer<K, V>(conf, out, null, null, null, null, true); } } static class BackupRamManager implements RamManager { private int availableSize = 0; private final int maxSize; public BackupRamManager(int size) { availableSize = maxSize = size; } public boolean reserve(int requestedSize, InputStream in) { // Not used LOG.warn("Reserve(int, InputStream) not supported by BackupRamManager"); return false; } int reserve(int requestedSize) { if (availableSize == 0) { return 0; } int reservedSize = Math.min(requestedSize, availableSize); availableSize -= reservedSize; LOG.debug("Reserving: " + reservedSize + " Requested: " + requestedSize); return reservedSize; } int reserve(int requestedSize, int minSize) { if (availableSize < minSize) { LOG.debug("No space available. Available: " + availableSize + " MinSize: " + minSize); return 0; } else { return reserve(requestedSize); } } public void unreserve(int requestedSize) { availableSize += requestedSize; LOG.debug("Unreserving: " + requestedSize + ". Available: " + availableSize); } void reinitialize() { availableSize = maxSize; } } }
apache-2.0
stresler/cattle
code/implementation/host-only-network/src/main/java/io/cattle/platform/networking/host/dao/impl/HostOnlyDaoImpl.java
3937
package io.cattle.platform.networking.host.dao.impl;

import static io.cattle.platform.core.model.tables.HostTable.*;
import static io.cattle.platform.core.model.tables.HostVnetMapTable.*;
import static io.cattle.platform.core.model.tables.SubnetVnetMapTable.*;
import static io.cattle.platform.core.model.tables.VnetTable.*;

import java.util.List;

import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.HostVnetMap;
import io.cattle.platform.core.model.Network;
import io.cattle.platform.core.model.Subnet;
import io.cattle.platform.core.model.SubnetVnetMap;
import io.cattle.platform.core.model.Vnet;
import io.cattle.platform.core.model.tables.records.VnetRecord;
import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao;
import io.cattle.platform.networking.host.contants.HostOnlyConstants;
import io.cattle.platform.networking.host.dao.HostOnlyDao;
import io.cattle.platform.object.ObjectManager;

import javax.inject.Inject;

import org.jooq.Record;

/**
 * jOOQ-backed DAO for host-only networking: looks up, creates, and maps
 * virtual networks (vnets) relative to hosts, subnets, and networks.
 */
public class HostOnlyDaoImpl extends AbstractJooqDao implements HostOnlyDao {

    ObjectManager objectManager;

    /**
     * Finds the (non-removed) vnet of the given network that is mapped to the
     * given host. When the host belongs to a physical host, the lookup matches
     * any host on the same physical host; otherwise it matches the host id
     * directly.
     *
     * @return the vnet, or null when none is mapped
     */
    @Override
    public Vnet getVnetForHost(Network network, Host host) {
        Long physicalHostId = host.getPhysicalHostId();
        Record record = null;

        if ( physicalHostId == null ) {
            // Direct host match: vnet -> host_vnet_map -> host id.
            record = create()
                    .select(VNET.fields())
                    .from(VNET)
                    .join(HOST_VNET_MAP)
                        .on(HOST_VNET_MAP.VNET_ID.eq(VNET.ID))
                    .where(VNET.NETWORK_ID.eq(network.getId())
                            .and(HOST_VNET_MAP.HOST_ID.eq(host.getId()))
                            .and(HOST_VNET_MAP.REMOVED.isNull()))
                    .fetchAny();
        } else {
            // Physical-host match: additionally join HOST and compare the
            // physical host id, so co-located hosts share the vnet.
            record = create()
                    .select(VNET.fields())
                    .from(VNET)
                    .join(HOST_VNET_MAP)
                        .on(HOST_VNET_MAP.VNET_ID.eq(VNET.ID))
                    .join(HOST)
                        .on(HOST_VNET_MAP.HOST_ID.eq(HOST.ID))
                    .where(VNET.NETWORK_ID.eq(network.getId())
                            .and(HOST.PHYSICAL_HOST_ID.eq(physicalHostId))
                            .and(HOST_VNET_MAP.REMOVED.isNull()))
                    .fetchAny();
        }

        return record == null ? null : record.into(VnetRecord.class);
    }

    /**
     * Creates a new vnet for the network, maps it to the host, and (when a
     * subnet is given) maps it to the subnet as well.
     *
     * @param uri vnet URI; defaults to
     *            {@link HostOnlyConstants#DEFAULT_HOST_SUBNET_URI} when null
     */
    @Override
    public Vnet createVnetForHost(Network network, Host host, Subnet subnet, String uri) {
        if ( uri == null ) {
            uri = HostOnlyConstants.DEFAULT_HOST_SUBNET_URI;
        }

        Vnet vnet = objectManager.create(Vnet.class,
                VNET.URI, uri,
                VNET.ACCOUNT_ID, network.getAccountId(),
                VNET.NETWORK_ID, network.getId());

        objectManager.create(HostVnetMap.class,
                HOST_VNET_MAP.VNET_ID, vnet.getId(),
                HOST_VNET_MAP.HOST_ID, host.getId());

        if ( subnet != null ) {
            objectManager.create(SubnetVnetMap.class,
                    SUBNET_VNET_MAP.VNET_ID, vnet.getId(),
                    SUBNET_VNET_MAP.SUBNET_ID, subnet.getId());
        }

        return vnet;
    }

    /**
     * Idempotently maps a vnet to a host: returns the existing mapping when
     * one is already present, otherwise creates and returns a new one.
     */
    @Override
    public HostVnetMap mapVnetToHost(Vnet vnet, Host host) {
        List<HostVnetMap> maps = objectManager.find(HostVnetMap.class,
                HOST_VNET_MAP.VNET_ID, vnet.getId(),
                HOST_VNET_MAP.HOST_ID, host.getId());

        if ( maps.size() > 0 ) {
            return maps.get(0);
        }

        return objectManager.create(HostVnetMap.class,
                HOST_VNET_MAP.VNET_ID, vnet.getId(),
                HOST_VNET_MAP.HOST_ID, host.getId());
    }

    public ObjectManager getObjectManager() {
        return objectManager;
    }

    // Injected by the DI container.
    @Inject
    public void setObjectManager(ObjectManager objectManager) {
        this.objectManager = objectManager;
    }
}
apache-2.0
Kerr1Gan/ShareBox
ijkplayer-java/src/main/java/tv/danmaku/ijk/media/player/misc/IjkMediaFormat.java
10341
/*
 * Copyright (C) 2015 Bilibili
 * Copyright (C) 2015 Zhang Rui <bbcallen@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package tv.danmaku.ijk.media.player.misc;

import android.annotation.TargetApi;
import android.os.Build;
import android.text.TextUtils;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import tv.danmaku.ijk.media.player.IjkMediaMeta;

/**
 * {@link IMediaFormat} implementation backed by an {@link IjkMediaMeta.IjkStreamMeta}.
 * Raw metadata keys are delegated to the stream meta; the "-ui" keys below are
 * formatted into human-readable strings by a table of {@link Formatter}s.
 */
public class IjkMediaFormat implements IMediaFormat {
    // Common
    public static final String KEY_IJK_CODEC_LONG_NAME_UI = "ijk-codec-long-name-ui";
    public static final String KEY_IJK_CODEC_NAME_UI = "ijk-codec-name-ui";
    public static final String KEY_IJK_BIT_RATE_UI = "ijk-bit-rate-ui";

    // Video
    public static final String KEY_IJK_CODEC_PROFILE_LEVEL_UI = "ijk-profile-level-ui";
    public static final String KEY_IJK_CODEC_PIXEL_FORMAT_UI = "ijk-pixel-format-ui";
    public static final String KEY_IJK_RESOLUTION_UI = "ijk-resolution-ui";
    public static final String KEY_IJK_FRAME_RATE_UI = "ijk-frame-rate-ui";

    // Audio
    public static final String KEY_IJK_SAMPLE_RATE_UI = "ijk-sample-rate-ui";
    public static final String KEY_IJK_CHANNEL_UI = "ijk-channel-ui";

    // Codec
    public static final String CODEC_NAME_H264 = "h264";

    // Underlying per-stream metadata; may be null (accessors then return 0/null).
    public final IjkMediaMeta.IjkStreamMeta mMediaFormat;

    public IjkMediaFormat(IjkMediaMeta.IjkStreamMeta streamMeta) {
        mMediaFormat = streamMeta;
    }

    /**
     * Returns the integer value for a raw metadata key, or 0 when the
     * underlying stream meta is absent.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public int getInteger(String name) {
        if (mMediaFormat == null)
            return 0;
        return mMediaFormat.getInt(name);
    }

    /**
     * Returns the value for {@code name}: "-ui" keys are rendered through the
     * formatter table, everything else is read from the stream meta directly.
     * Returns null when the stream meta is absent.
     */
    @Override
    public String getString(String name) {
        if (mMediaFormat == null)
            return null;

        if (sFormatterMap.containsKey(name)) {
            Formatter formatter = sFormatterMap.get(name);
            return formatter.format(this);
        }

        return mMediaFormat.getString(name);
    }

    //-------------------------
    // Formatter
    //-------------------------

    /**
     * Renders one "-ui" key from a given {@link IjkMediaFormat}; empty/null
     * results fall back to {@link #getDefaultString()} ("N/A").
     */
    private static abstract class Formatter {
        public String format(IjkMediaFormat mediaFormat) {
            String value = doFormat(mediaFormat);
            if (TextUtils.isEmpty(value))
                return getDefaultString();
            return value;
        }

        protected abstract String doFormat(IjkMediaFormat mediaFormat);

        @SuppressWarnings("SameReturnValue")
        protected String getDefaultString() {
            return "N/A";
        }
    }

    private static final Map<String, Formatter> sFormatterMap = new HashMap<String, Formatter>();

    // FIX: this table was previously populated from an *instance* initializer,
    // and the first two formatters read the enclosing instance's mMediaFormat.
    // Every construction therefore re-registered formatters bound to the newest
    // instance, so older instances reported another stream's codec name. The
    // table is now built once in a static initializer and every formatter uses
    // only the mediaFormat argument passed to doFormat().
    static {
        sFormatterMap.put(KEY_IJK_CODEC_LONG_NAME_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                return mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_LONG_NAME);
            }
        });
        sFormatterMap.put(KEY_IJK_CODEC_NAME_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                return mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_NAME);
            }
        });
        sFormatterMap.put(KEY_IJK_BIT_RATE_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int bitRate = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_BITRATE);
                if (bitRate <= 0) {
                    return null;
                } else if (bitRate < 1000) {
                    return String.format(Locale.US, "%d bit/s", bitRate);
                } else {
                    return String.format(Locale.US, "%d kb/s", bitRate / 1000);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_CODEC_PROFILE_LEVEL_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int profileIndex = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CODEC_PROFILE_ID);
                String profile;
                switch (profileIndex) {
                    case IjkMediaMeta.FF_PROFILE_H264_BASELINE:
                        profile = "Baseline";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_CONSTRAINED_BASELINE:
                        profile = "Constrained Baseline";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_MAIN:
                        profile = "Main";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_EXTENDED:
                        profile = "Extended";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH:
                        profile = "High";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_10:
                        profile = "High 10";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_10_INTRA:
                        profile = "High 10 Intra";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_422:
                        profile = "High 4:2:2";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_422_INTRA:
                        profile = "High 4:2:2 Intra";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_444:
                        profile = "High 4:4:4";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_444_PREDICTIVE:
                        profile = "High 4:4:4 Predictive";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_444_INTRA:
                        profile = "High 4:4:4 Intra";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_CAVLC_444:
                        profile = "CAVLC 4:4:4";
                        break;
                    default:
                        return null;
                }

                StringBuilder sb = new StringBuilder();
                sb.append(profile);

                // Append "Profile Level x.y" for H.264 streams only.
                String codecName = mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_NAME);
                if (!TextUtils.isEmpty(codecName) && codecName.equalsIgnoreCase(CODEC_NAME_H264)) {
                    int level = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CODEC_LEVEL);
                    if (level < 10)
                        return sb.toString();

                    sb.append(" Profile Level ");
                    sb.append((level / 10) % 10);
                    if ((level % 10) != 0) {
                        sb.append(".");
                        sb.append(level % 10);
                    }
                }

                return sb.toString();
            }
        });
        sFormatterMap.put(KEY_IJK_CODEC_PIXEL_FORMAT_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                return mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_PIXEL_FORMAT);
            }
        });
        sFormatterMap.put(KEY_IJK_RESOLUTION_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int width = mediaFormat.getInteger(KEY_WIDTH);
                int height = mediaFormat.getInteger(KEY_HEIGHT);
                int sarNum = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAR_NUM);
                int sarDen = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAR_DEN);

                if (width <= 0 || height <= 0) {
                    return null;
                } else if (sarNum <= 0 || sarDen <= 0) {
                    return String.format(Locale.US, "%d x %d", width, height);
                } else {
                    return String.format(Locale.US, "%d x %d [SAR %d:%d]", width,
                            height, sarNum, sarDen);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_FRAME_RATE_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int fpsNum = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_FPS_NUM);
                int fpsDen = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_FPS_DEN);
                if (fpsNum <= 0 || fpsDen <= 0) {
                    return null;
                } else {
                    return String.valueOf(((float) (fpsNum)) / fpsDen);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_SAMPLE_RATE_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int sampleRate = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAMPLE_RATE);
                if (sampleRate <= 0) {
                    return null;
                } else {
                    return String.format(Locale.US, "%d Hz", sampleRate);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_CHANNEL_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int channelLayout = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CHANNEL_LAYOUT);
                if (channelLayout <= 0) {
                    return null;
                } else {
                    if (channelLayout == IjkMediaMeta.AV_CH_LAYOUT_MONO) {
                        return "mono";
                    } else if (channelLayout == IjkMediaMeta.AV_CH_LAYOUT_STEREO) {
                        return "stereo";
                    } else {
                        // Unknown layout: show the raw bitmask in hex.
                        return String.format(Locale.US, "%x", channelLayout);
                    }
                }
            }
        });
    }
}
apache-2.0
udayinfy/vaadin
client/tests/src/com/vaadin/client/LocatorUtilTest.java
2206
/* * Copyright 2000-2014 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.client; import junit.framework.TestCase; import org.junit.Assert; import com.vaadin.client.componentlocator.LocatorUtil; /* * Test LocatorUtil.isUIElement() & isNotificaitonElement methods */ public class LocatorUtilTest extends TestCase { public void testIsUI1() { boolean isUI = LocatorUtil.isUIElement("com.vaadin.ui.UI"); Assert.assertTrue(isUI); } public void testIsUI2() { boolean isUI = LocatorUtil.isUIElement("/com.vaadin.ui.UI"); Assert.assertTrue(isUI); } public void testIsUI3() { boolean isUI = LocatorUtil .isUIElement("//com.vaadin.ui.UI[RandomString"); Assert.assertTrue(isUI); } public void testIsUI4() { boolean isUI = LocatorUtil.isUIElement("//com.vaadin.ui.UI[0]"); Assert.assertTrue(isUI); } public void testIsNotification1() { boolean isUI = LocatorUtil .isNotificationElement("com.vaadin.ui.VNotification"); Assert.assertTrue(isUI); } public void testIsNotification2() { boolean isUI = LocatorUtil .isNotificationElement("com.vaadin.ui.Notification"); Assert.assertTrue(isUI); } public void testIsNotification3() { boolean isUI = LocatorUtil .isNotificationElement("/com.vaadin.ui.VNotification["); Assert.assertTrue(isUI); } public void testIsNotification4() { boolean isUI = LocatorUtil .isNotificationElement("//com.vaadin.ui.VNotification[0]"); Assert.assertTrue(isUI); } }
apache-2.0
camilojd/elasticsearch
core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java
4660
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.suggest;

import org.apache.lucene.search.spell.DirectSpellChecker;
import org.apache.lucene.search.spell.StringDistance;
import org.apache.lucene.search.spell.SuggestMode;
import org.apache.lucene.util.automaton.LevenshteinAutomata;

/**
 * Mutable settings holder for configuring Lucene's {@link DirectSpellChecker}
 * from a suggest request. Each setting has a {@code DEFAULT_*} constant used to
 * initialize the corresponding instance field, and an overloaded fluent-style
 * getter/setter pair sharing the same name (no get/set prefixes).
 *
 * NOTE(review): the {@code DEFAULT_*} fields are public, static and non-final,
 * so they are technically mutable by any caller — consider making them
 * {@code final} (would break external reassignment, if any exists).
 */
public class DirectSpellcheckerSettings {

    // NB: If this changes, make sure to change the default in TermBuilderSuggester
    public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
    public static float DEFAULT_ACCURACY = 0.5f;
    public static SortBy DEFAULT_SORT = SortBy.SCORE;
    // NB: If this changes, make sure to change the default in TermBuilderSuggester
    public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
    // Maximum edit distance Lucene's Levenshtein automata support (currently 2).
    public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
    public static int DEFAULT_MAX_INSPECTIONS = 5;
    public static float DEFAULT_MAX_TERM_FREQ = 0.01f;
    public static int DEFAULT_PREFIX_LENGTH = 1;
    public static int DEFAULT_MIN_WORD_LENGTH = 4;
    public static float DEFAULT_MIN_DOC_FREQ = 0f;

    // Per-request values, all initialized from the defaults above.
    private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE;
    private float accuracy = DEFAULT_ACCURACY;
    private SortBy sort = DEFAULT_SORT;
    private StringDistance stringDistance = DEFAULT_STRING_DISTANCE;
    private int maxEdits = DEFAULT_MAX_EDITS;
    private int maxInspections = DEFAULT_MAX_INSPECTIONS;
    private float maxTermFreq = DEFAULT_MAX_TERM_FREQ;
    private int prefixLength = DEFAULT_PREFIX_LENGTH;
    private int minWordLength = DEFAULT_MIN_WORD_LENGTH;
    private float minDocFreq = DEFAULT_MIN_DOC_FREQ;

    public SuggestMode suggestMode() {
        return suggestMode;
    }

    public void suggestMode(SuggestMode suggestMode) {
        this.suggestMode = suggestMode;
    }

    public float accuracy() {
        return accuracy;
    }

    public void accuracy(float accuracy) {
        this.accuracy = accuracy;
    }

    public SortBy sort() {
        return sort;
    }

    public void sort(SortBy sort) {
        this.sort = sort;
    }

    public StringDistance stringDistance() {
        return stringDistance;
    }

    public void stringDistance(StringDistance distance) {
        this.stringDistance = distance;
    }

    public int maxEdits() {
        return maxEdits;
    }

    public void maxEdits(int maxEdits) {
        this.maxEdits = maxEdits;
    }

    public int maxInspections() {
        return maxInspections;
    }

    public void maxInspections(int maxInspections) {
        this.maxInspections = maxInspections;
    }

    public float maxTermFreq() {
        return maxTermFreq;
    }

    public void maxTermFreq(float maxTermFreq) {
        this.maxTermFreq = maxTermFreq;
    }

    public int prefixLength() {
        return prefixLength;
    }

    public void prefixLength(int prefixLength) {
        this.prefixLength = prefixLength;
    }

    public int minWordLength() {
        return minWordLength;
    }

    public void minWordLength(int minWordLength) {
        this.minWordLength = minWordLength;
    }

    public float minDocFreq() {
        return minDocFreq;
    }

    public void minDocFreq(float minDocFreq) {
        this.minDocFreq = minDocFreq;
    }

    @Override
    public String toString() {
        return "[" +
                "suggestMode=" + suggestMode +
                ",sort=" + sort +
                ",stringDistance=" + stringDistance +
                ",accuracy=" + accuracy +
                ",maxEdits=" + maxEdits +
                ",maxInspections=" + maxInspections +
                ",maxTermFreq=" + maxTermFreq +
                ",prefixLength=" + prefixLength +
                ",minWordLength=" + minWordLength +
                ",minDocFreq=" + minDocFreq +
                "]";
    }
}
apache-2.0
lwriemen/bridgepoint
src/org.xtuml.bp.ui.graphics/src/org/xtuml/bp/ui/graphics/actions/AddToLayerAction.java
5561
//========================================================================
//
//File: $RCSfile: AddToLayerAction.java,v $
//Version: $Revision: 1.4 $
//Modified: $Date: 2013/01/10 23:05:58 $
//
//Copyright (c) 2005-2014 Mentor Graphics Corporation. All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
package org.xtuml.bp.ui.graphics.actions;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.gef.GraphicalViewer;
import org.eclipse.jface.action.Action;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.common.ClassQueryInterface_c;
import org.xtuml.bp.core.common.Transaction;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.ui.canvas.Connector_c;
import org.xtuml.bp.ui.canvas.GraphicalElement_c;
import org.xtuml.bp.ui.canvas.Graphicalelementinlayer_c;
import org.xtuml.bp.ui.canvas.Layer_c;
import org.xtuml.bp.ui.canvas.Model_c;
import org.xtuml.bp.ui.canvas.Ooaofgraphics;
import org.xtuml.bp.ui.canvas.Shape_c;
import org.xtuml.bp.ui.graphics.editor.GraphicalEditor;
import org.xtuml.bp.ui.graphics.parts.ConnectorEditPart;
import org.xtuml.bp.ui.graphics.parts.ShapeEditPart;

/**
 * Action that adds the currently selected shapes/connectors of a graphical
 * editor to a named layer of the given model, inside a single transaction.
 * If the target layer is not visible, newly affected parts that belong to no
 * visible layer are de-selected.
 */
public class AddToLayerAction extends Action {

    /** Name of the layer elements will be added to. */
    private String layerName;
    /** Graphical model that owns the layer and the editor selection. */
    private Model_c model;

    /**
     * @param layerName name of the target layer (matched exactly against
     *            {@code Layer_c.getLayer_name()})
     * @param model graphical model whose editor selection is processed
     */
    public AddToLayerAction(String layerName, Model_c model) {
        this.layerName = layerName;
        this.model = model;
    }

    /** Returns true if {@code layer} is present in {@code layers}. */
    private static boolean containsLayer(Layer_c[] layers, Layer_c layer) {
        for (int i = 0; i < layers.length; i++) {
            if (layers[i] == layer) {
                return true;
            }
        }
        return false;
    }

    @Override
    public void run() {
        // Locate the target layer by name; do nothing if it does not exist.
        Layer_c layer = Layer_c.getOneGD_LAYOnR34(model,
                new ClassQueryInterface_c() {

                    @Override
                    public boolean evaluate(Object candidate) {
                        return ((Layer_c) candidate).getLayer_name().equals(
                                layerName);
                    }
                });
        if (layer != null) {
            Transaction transaction = null;
            TransactionManager manager = TransactionManager.getSingleton();
            try {
                transaction = manager.startTransaction(
                        "Add element(s) to layer", Ooaofgraphics
                                .getDefaultInstance());
                List<GraphicalEditPart> selection = new ArrayList<GraphicalEditPart>();
                GraphicalViewer viewer = GraphicalEditor.getEditor(model)
                        .getGraphicalViewer();
                for (Object selected : viewer.getSelectedEditParts()) {
                    selection.add((GraphicalEditPart) selected);
                }
                for (GraphicalEditPart part : selection) {
                    if (part instanceof ShapeEditPart
                            || part instanceof ConnectorEditPart) {
                        GraphicalElement_c elem = null;
                        Object partModel = part.getModel();
                        if (partModel instanceof Connector_c) {
                            elem = GraphicalElement_c
                                    .getOneGD_GEOnR2((Connector_c) partModel);
                        } else {
                            elem = GraphicalElement_c
                                    .getOneGD_GEOnR2((Shape_c) partModel);
                        }
                        if (elem != null) {
                            // Skip the add if this element already exists in
                            // the layer (directly or through inheritance); the
                            // tool allows this action when at least one
                            // selected element is not yet part of the layer.
                            //
                            // BUG FIX: the original code scanned the layer
                            // arrays with "continue" inside the inner for loop,
                            // which only continued that scan loop and never
                            // skipped anything — the element was always added.
                            boolean alreadyInLayer = containsLayer(
                                    Layer_c.getManyGD_LAYsOnR35(Graphicalelementinlayer_c
                                            .getManyGD_GLAYsOnR35(elem)), layer);
                            if (!alreadyInLayer
                                    && part instanceof ShapeEditPart) {
                                alreadyInLayer = containsLayer(
                                        ((ShapeEditPart) part)
                                                .getInheritedLayers(), layer);
                            }
                            if (!alreadyInLayer
                                    && part instanceof ConnectorEditPart) {
                                alreadyInLayer = containsLayer(
                                        ((ConnectorEditPart) part)
                                                .getInheritedLayers(), layer);
                            }
                            if (!alreadyInLayer) {
                                layer.Addelementtolayer(elem.getElementid());
                            }
                        }
                        if (!layer.getVisible()) {
                            // see if the part also belongs to any
                            // visible layers, otherwise de-select
                            Layer_c[] existingLayers = Layer_c
                                    .getManyGD_LAYsOnR35(Graphicalelementinlayer_c
                                            .getManyGD_GLAYsOnR35(elem));
                            boolean participatesInVisibleLayer = false;
                            for (int i = 0; i < existingLayers.length; i++) {
                                if (existingLayers[i].getVisible()) {
                                    participatesInVisibleLayer = true;
                                    break;
                                }
                            }
                            if (!participatesInVisibleLayer) {
                                viewer.deselect(part);
                            }
                        }
                    }
                }
                manager.endTransaction(transaction);
            } catch (Exception e) {
                // Roll back the transaction on any failure and log it.
                if (transaction != null) {
                    manager.cancelTransaction(transaction, e);
                }
                CorePlugin.logError("Unable to add element to layer.", e);
            }
        }
    }
}
apache-2.0
sheofir/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/DescribeSpotInstanceRequestsResult.java
6035
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;

// NOTE: This class follows the AWS SDK code-generation pattern (auto-construct
// list, fluent "with" mutators, generated equals/hashCode/toString/clone).
// Avoid hand-editing beyond documentation; regeneration may overwrite changes.
/**
 * <p>
 * Contains the output of DescribeSpotInstanceRequests.
 * </p>
 */
public class DescribeSpotInstanceRequestsResult implements Serializable, Cloneable {

    /**
     * One or more Spot Instance requests.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequests;

    /**
     * One or more Spot Instance requests.
     *
     * @return One or more Spot Instance requests.
     */
    public java.util.List<SpotInstanceRequest> getSpotInstanceRequests() {
        // Lazily create an empty auto-construct list so callers never see null.
        if (spotInstanceRequests == null) {
            spotInstanceRequests = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>();
            spotInstanceRequests.setAutoConstruct(true);
        }
        return spotInstanceRequests;
    }

    /**
     * One or more Spot Instance requests.
     *
     * @param spotInstanceRequests One or more Spot Instance requests.
     */
    public void setSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
        if (spotInstanceRequests == null) {
            this.spotInstanceRequests = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection has no effect.
        com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
        spotInstanceRequestsCopy.addAll(spotInstanceRequests);
        this.spotInstanceRequests = spotInstanceRequestsCopy;
    }

    /**
     * One or more Spot Instance requests.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSpotInstanceRequests(java.util.Collection)} or
     * {@link #withSpotInstanceRequests(java.util.Collection)} if you want to
     * override the existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param spotInstanceRequests One or more Spot Instance requests.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(SpotInstanceRequest... spotInstanceRequests) {
        // Presize on first use; getSpotInstanceRequests() never returns null here.
        if (getSpotInstanceRequests() == null) setSpotInstanceRequests(new java.util.ArrayList<SpotInstanceRequest>(spotInstanceRequests.length));
        for (SpotInstanceRequest value : spotInstanceRequests) {
            getSpotInstanceRequests().add(value);
        }
        return this;
    }

    /**
     * One or more Spot Instance requests.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param spotInstanceRequests One or more Spot Instance requests.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
        if (spotInstanceRequests == null) {
            this.spotInstanceRequests = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
            spotInstanceRequestsCopy.addAll(spotInstanceRequests);
            this.spotInstanceRequests = spotInstanceRequestsCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSpotInstanceRequests() != null) sb.append("SpotInstanceRequests: " + getSpotInstanceRequests() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getSpotInstanceRequests() == null) ? 0 : getSpotInstanceRequests().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof DescribeSpotInstanceRequestsResult == false) return false;
        DescribeSpotInstanceRequestsResult other = (DescribeSpotInstanceRequestsResult)obj;

        if (other.getSpotInstanceRequests() == null ^ this.getSpotInstanceRequests() == null) return false;
        if (other.getSpotInstanceRequests() != null && other.getSpotInstanceRequests().equals(this.getSpotInstanceRequests()) == false) return false;
        return true;
    }

    @Override
    public DescribeSpotInstanceRequestsResult clone() {
        try {
            // Shallow clone; the contained list is shared with the original.
            return (DescribeSpotInstanceRequestsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }
}
apache-2.0
seanbright/asterisk-java
src/main/java/org/asteriskjava/manager/event/DongleCENDEvent.java
1118
package org.asteriskjava.manager.event;

/**
 * Manager event raised by the dongle channel driver when a call ends.
 * All properties arrive as raw strings from the manager protocol.
 * NOTE(review): field semantics are inferred from their names only
 * (end status, call-control cause, duration, call index) — confirm
 * against the channel driver's documentation.
 */
public class DongleCENDEvent extends ManagerEvent {

    private static final long serialVersionUID = 3257845467831284784L;

    private String device;
    private String endstatus;
    private String cccause;
    private String duration;
    private String callidx;

    public DongleCENDEvent(Object source) {
        super(source);
    }

    /** @return the dongle device identifier */
    public String getDevice() {
        return device;
    }

    public void setDevice(String device) {
        this.device = device;
    }

    /** @return the reported end status of the call */
    public String getEndstatus() {
        return endstatus;
    }

    public void setEndstatus(String endstatus) {
        this.endstatus = endstatus;
    }

    /** @return the call-control cause code, as a string */
    public String getCccause() {
        return cccause;
    }

    public void setCccause(String cccause) {
        this.cccause = cccause;
    }

    /** @return the call duration, as a string */
    public String getDuration() {
        return duration;
    }

    public void setDuration(String duration) {
        this.duration = duration;
    }

    /** @return the call index, as a string */
    public String getCallidx() {
        return callidx;
    }

    public void setCallidx(String callidx) {
        this.callidx = callidx;
    }
}
apache-2.0
smartan/lucene
src/test/java/org/apache/lucene/util/junitcompat/TestSystemPropertiesInvariantRule.java
4895
package org.apache.lucene.util.junitcompat;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.Properties;

import org.junit.*;
import org.junit.rules.TestRule;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;

import com.carrotsearch.randomizedtesting.rules.SystemPropertiesInvariantRule;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;

/**
 * Meta-tests: each nested test class deliberately leaks or mutates a system
 * property, is run in isolation via {@link JUnitCore#runClasses}, and the
 * outer test asserts that {@link SystemPropertiesInvariantRule} flags (or
 * ignores) the change as expected.
 *
 * @see SystemPropertiesRestoreRule
 * @see SystemPropertiesInvariantRule
 */
public class TestSystemPropertiesInvariantRule extends WithNestedTests {
  // Property key/value deliberately leaked by the nested test classes.
  public static final String PROP_KEY1 = "new-property-1";
  public static final String VALUE1 = "new-value-1";

  public TestSystemPropertiesInvariantRule() {
    super(true);
  }

  /** Base nested test with a single no-op test method. */
  public static class Base extends WithNestedTests.AbstractNestedTest {
    public void testEmpty() {}
  }

  /** Leaks a property from @BeforeClass — should be reported once. */
  public static class InBeforeClass extends Base {
    @BeforeClass
    public static void beforeClass() {
      System.setProperty(PROP_KEY1, VALUE1);
    }
  }

  /** Leaks a property from @AfterClass — should be reported once. */
  public static class InAfterClass extends Base {
    @AfterClass
    public static void afterClass() {
      System.setProperty(PROP_KEY1, VALUE1);
    }
  }

  /**
   * Leaks a property from each test method — should be reported per method.
   * The guard throw verifies the rule restored the property between methods.
   */
  public static class InTestMethod extends Base {
    public void testMethod1() {
      if (System.getProperty(PROP_KEY1) != null) {
        throw new RuntimeException("Shouldn't be here.");
      }
      System.setProperty(PROP_KEY1, VALUE1);
    }

    public void testMethod2() {
      testMethod1();
    }
  }

  /**
   * Stores a non-String value under the key. The invariant rule only tracks
   * String properties, so only the first method's guard throw ("Will pass")
   * is expected to surface as a failure.
   */
  public static class NonStringProperties extends Base {
    public void testMethod1() {
      if (System.getProperties().get(PROP_KEY1) != null) {
        throw new RuntimeException("Will pass.");
      }
      Properties properties = System.getProperties();
      properties.put(PROP_KEY1, new Object());
      Assert.assertTrue(System.getProperties().get(PROP_KEY1) != null);
    }

    public void testMethod2() {
      testMethod1();
    }

    @AfterClass
    public static void cleanup() {
      System.getProperties().remove(PROP_KEY1);
    }
  }

  /**
   * Uses the rule constructor that whitelists PROP_KEY1 — mutating that
   * property must not fail the test.
   */
  public static class IgnoredProperty {
    @Rule
    public TestRule invariant = new SystemPropertiesInvariantRule(PROP_KEY1);

    @Test
    public void testMethod1() {
      System.setProperty(PROP_KEY1, VALUE1);
    }
  }

  // Keep the outer JVM clean before and after every meta-test.
  @Before
  @After
  public void cleanup() {
    System.clearProperty(PROP_KEY1);
  }

  @Test
  public void testRuleInvariantBeforeClass() {
    Result runClasses = JUnitCore.runClasses(InBeforeClass.class);
    Assert.assertEquals(1, runClasses.getFailureCount());
    Assert.assertTrue(runClasses.getFailures().get(0).getMessage()
        .contains(PROP_KEY1));
    Assert.assertNull(System.getProperty(PROP_KEY1));
  }

  @Test
  public void testRuleInvariantAfterClass() {
    Result runClasses = JUnitCore.runClasses(InAfterClass.class);
    Assert.assertEquals(1, runClasses.getFailureCount());
    Assert.assertTrue(runClasses.getFailures().get(0).getMessage()
        .contains(PROP_KEY1));
    Assert.assertNull(System.getProperty(PROP_KEY1));
  }

  @Test
  public void testRuleInvariantInTestMethod() {
    // Two leaking methods => two failures, each mentioning the leaked key.
    Result runClasses = JUnitCore.runClasses(InTestMethod.class);
    Assert.assertEquals(2, runClasses.getFailureCount());
    for (Failure f : runClasses.getFailures()) {
      Assert.assertTrue(f.getMessage().contains(PROP_KEY1));
    }
    Assert.assertNull(System.getProperty(PROP_KEY1));
  }

  @Test
  public void testNonStringProperties() {
    Result runClasses = JUnitCore.runClasses(NonStringProperties.class);
    Assert.assertEquals(1, runClasses.getFailureCount());
    Assert.assertTrue(runClasses.getFailures().get(0).getMessage().contains("Will pass"));
    Assert.assertEquals(3, runClasses.getRunCount());
  }

  @Test
  public void testIgnoredProperty() {
    System.clearProperty(PROP_KEY1);
    try {
      // Whitelisted key: no failures, and the mutated value survives the run.
      Result runClasses = JUnitCore.runClasses(IgnoredProperty.class);
      Assert.assertEquals(0, runClasses.getFailureCount());
      Assert.assertEquals(VALUE1, System.getProperty(PROP_KEY1));
    } finally {
      System.clearProperty(PROP_KEY1);
    }
  }
}
apache-2.0
AkshitaKukreja30/checkstyle
src/test/resources/com/puppycrawl/tools/checkstyle/checks/whitespace/emptylineseparator/InputEmptyLineSeparatorPrePreviousLineEmptiness.java
146
package com.puppycrawl.tools.checkstyle.checks.whitespace.emptylineseparator; public class InputEmptyLineSeparatorPrePreviousLineEmptiness { }
lgpl-2.1
gfyoung/elasticsearch
qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractRollingTestCase.java
2348
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.upgrades;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.rest.ESRestTestCase;

/**
 * Base class for rolling-upgrade REST tests. The upgrade phase is selected by
 * the {@code tests.rest.suite} system property and exposed as
 * {@link #CLUSTER_TYPE}; indices and repositories are preserved across phases
 * so later phases can verify data written by earlier ones.
 */
public abstract class AbstractRollingTestCase extends ESRestTestCase {
    /** The phase of the rolling upgrade the current test run targets. */
    protected enum ClusterType {
        OLD,
        MIXED,
        UPGRADED;

        /**
         * Maps a {@code tests.rest.suite} value to a phase; any other value
         * is a test-setup error and trips an AssertionError.
         */
        public static ClusterType parse(String value) {
            switch (value) {
                case "old_cluster":
                    return OLD;
                case "mixed_cluster":
                    return MIXED;
                case "upgraded_cluster":
                    return UPGRADED;
                default:
                    throw new AssertionError("unknown cluster type: " + value);
            }
        }
    }

    protected static final ClusterType CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.suite"));

    // Keep indices between phases so post-upgrade tests can read pre-upgrade data.
    @Override
    protected final boolean preserveIndicesUponCompletion() {
        return true;
    }

    // Same rationale as above, for snapshot repositories.
    @Override
    protected final boolean preserveReposUponCompletion() {
        return true;
    }

    @Override
    protected final Settings restClientSettings() {
        return Settings.builder().put(super.restClientSettings())
            // increase the timeout here to 90 seconds to handle long waits for a green
            // cluster health. the waits for green need to be longer than a minute to
            // account for delayed shards
            .put(ESRestTestCase.CLIENT_RETRY_TIMEOUT, "90s")
            .put(ESRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s")
            .build();
    }
}
apache-2.0
jasonchaffee/apiman
gateway/engine/es/src/main/java/io/apiman/gateway/engine/es/beans/PrimitiveBean.java
1370
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apiman.gateway.engine.es.beans; /** * Used to store a primitive value into the shared state ES document. * * @author eric.wittmann@redhat.com */ public class PrimitiveBean { private String value; private String type; /** * Constructor. */ public PrimitiveBean() { } /** * @return the value */ public String getValue() { return value; } /** * @param value the value to set */ public void setValue(String value) { this.value = value; } /** * @return the type */ public String getType() { return type; } /** * @param type the type to set */ public void setType(String type) { this.type = type; } }
apache-2.0
kl0u/flink
flink-tests/src/test/java/org/apache/flink/test/operators/MapPartitionITCase.java
3931
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.test.operators;

import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.test.util.TestBaseUtils;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/** Integration tests for {@link MapPartitionFunction}. */
@SuppressWarnings("serial")
public class MapPartitionITCase extends JavaProgramTestBase {

    // Input pairs "k v", one per line; parsed into (String, String) tuples.
    private static final String IN =
            "1 1\n2 2\n2 8\n4 4\n4 4\n6 6\n7 7\n8 8\n"
                    + "1 1\n2 2\n2 2\n4 4\n4 4\n6 3\n5 9\n8 8\n1 1\n2 2\n2 2\n3 0\n4 4\n"
                    + "5 9\n7 7\n8 8\n1 1\n9 1\n5 9\n4 4\n4 4\n6 6\n7 7\n8 8\n";

    // Expected output: for each input pair with k+v < 10, the pair (v, k+10).
    private static final String RESULT =
            "1 11\n2 12\n4 14\n4 14\n1 11\n2 12\n2 12\n4 14\n4 14\n3 16\n1 11\n2 12\n2 12\n0 13\n4 14\n1 11\n4 14\n4 14\n";

    private List<Tuple2<String, String>> input = new ArrayList<>();
    private List<Tuple2<String, Integer>> expected = new ArrayList<>();
    private List<Tuple2<String, Integer>> result = new ArrayList<>();

    @Override
    protected void preSubmit() throws Exception {
        // create input
        for (String s : IN.split("\n")) {
            String[] fields = s.split(" ");
            input.add(new Tuple2<String, String>(fields[0], fields[1]));
        }

        // create expected
        for (String s : RESULT.split("\n")) {
            String[] fields = s.split(" ");
            expected.add(new Tuple2<String, Integer>(fields[0], Integer.parseInt(fields[1])));
        }
    }

    @Override
    protected void postSubmit() {
        // Order-insensitive comparison of produced vs. expected tuples.
        compareResultCollections(
                expected, result, new TestBaseUtils.TupleComparator<Tuple2<String, Integer>>());
    }

    @Override
    protected void testProgram() throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<String, String>> data = env.fromCollection(input);

        data.mapPartition(new TestMapPartition())
                .output(new LocalCollectionOutputFormat<Tuple2<String, Integer>>(result));

        env.execute();
    }

    /**
     * Emits (value, key+10) for every input pair whose key+value sum is
     * below 10; pairs summing to 10 or more are dropped.
     */
    private static class TestMapPartition
            implements MapPartitionFunction<Tuple2<String, String>, Tuple2<String, Integer>> {

        @Override
        public void mapPartition(
                Iterable<Tuple2<String, String>> values, Collector<Tuple2<String, Integer>> out) {
            for (Tuple2<String, String> value : values) {
                String keyString = value.f0;
                String valueString = value.f1;

                int keyInt = Integer.parseInt(keyString);
                int valueInt = Integer.parseInt(valueString);

                if (keyInt + valueInt < 10) {
                    out.collect(new Tuple2<String, Integer>(valueString, keyInt + 10));
                }
            }
        }
    }
}
apache-2.0
alibaba/otter
node/etl/src/main/java/com/alibaba/otter/node/etl/common/task/GlobalTask.java
6295
/*
 * Copyright (C) 2010-2101 Alibaba Group Holding Limited.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.otter.node.etl.common.task;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

import org.I0Itec.zkclient.exception.ZkInterruptedException;
import org.apache.commons.lang.ClassUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.otter.node.common.config.ConfigClientService;
import com.alibaba.otter.node.etl.common.jmx.StageAggregationCollector;
import com.alibaba.otter.node.etl.common.pipe.impl.RowDataPipeDelegate;
import com.alibaba.otter.shared.arbitrate.ArbitrateEventService;
import com.alibaba.otter.shared.arbitrate.model.TerminEventData;
import com.alibaba.otter.shared.arbitrate.model.TerminEventData.TerminType;
import com.alibaba.otter.shared.common.model.config.pipeline.Pipeline;

/**
 * Common parent thread for the mainstem, select, extract, transform and load
 * (S.E.T.L.) pipeline tasks. Provides the shared collaborators, the termin
 * (rollback/restart) event helpers, and the bookkeeping of in-flight process
 * futures.
 *
 * @author xiaoqing.zhouxq 2011-8-23 上午10:38:14
 */
public abstract class GlobalTask extends Thread {

    protected final Logger             logger  = LoggerFactory.getLogger(this.getClass());
    // Cooperative shutdown flag, flipped by shutdown(); volatile for visibility.
    protected volatile boolean         running = true;
    protected Pipeline                 pipeline;
    protected Long                     pipelineId;
    protected ArbitrateEventService    arbitrateEventService;
    protected RowDataPipeDelegate      rowDataPipeDelegate;
    protected ExecutorService          executorService;
    protected ConfigClientService      configClientService;
    protected StageAggregationCollector stageAggregationCollector;
    // Maps processId -> pending Future so shutdown() can cancel unfinished work.
    // NOTE(review): plain HashMap mutated by the task thread and read by
    // shutdown() from another thread — consider ConcurrentHashMap; left
    // unchanged here to avoid altering subclass-visible behavior.
    protected Map<Long, Future>        pendingFuture;

    public GlobalTask(Pipeline pipeline){
        this(pipeline.getId());
        this.pipeline = pipeline;
    }

    public GlobalTask(Long pipelineId){
        this.pipelineId = pipelineId;
        setName(createTaskName(pipelineId, ClassUtils.getShortClassName(this.getClass())));
        pendingFuture = new HashMap<Long, Future>();
    }

    /**
     * Stops the task: clears the running flag, interrupts the thread, and
     * cancels every still-pending process future.
     */
    public void shutdown() {
        running = false;
        interrupt();

        List<Future> cancelFutures = new ArrayList<Future>();
        for (Map.Entry<Long, Future> entry : pendingFuture.entrySet()) {
            if (!entry.getValue().isDone()) {
                logger.warn("WARN ## Task future processId[{}] canceled!", entry.getKey());
                cancelFutures.add(entry.getValue());
            }
        }

        for (Future future : cancelFutures) {
            future.cancel(true);
        }
        pendingFuture.clear();
    }

    protected void sendRollbackTermin(long pipelineId, Throwable exception) {
        sendRollbackTermin(pipelineId, ExceptionUtils.getFullStackTrace(exception));
    }

    /**
     * Publishes a ROLLBACK termin event carrying the given message, then backs
     * off for 3-6 seconds before returning so downstream work does not resume
     * immediately after an alarm.
     */
    protected void sendRollbackTermin(long pipelineId, String message) {
        TerminEventData errorEventData = new TerminEventData();
        errorEventData.setPipelineId(pipelineId);
        errorEventData.setType(TerminType.ROLLBACK);
        errorEventData.setCode("setl");
        errorEventData.setDesc(message);
        arbitrateEventService.terminEvent().single(errorEventData);
        // After sending the alarm, sleep for a while before continuing with
        // subsequent work.
        try {
            Thread.sleep(3000 + RandomUtils.nextInt(3000));
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt flag instead of swallowing it,
            // so an interrupt delivered by shutdown() during this back-off
            // is still observed by the caller (e.g. via isInterrupt()).
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Handles the "data is null" case by publishing a RESTART termin event,
     * causing the data to be re-sent.
     */
    protected void processMissData(long pipelineId, String message) {
        TerminEventData errorEventData = new TerminEventData();
        errorEventData.setPipelineId(pipelineId);
        errorEventData.setType(TerminType.RESTART);
        errorEventData.setCode("setl");
        errorEventData.setDesc(message);
        arbitrateEventService.terminEvent().single(errorEventData);
    }

    /** Builds the thread name, e.g. "pipelineId = 42,taskName = SelectTask". */
    protected String createTaskName(long pipelineId, String taskName) {
        return new StringBuilder().append("pipelineId = ").append(pipelineId).append(",taskName = ").append(taskName).toString();
    }

    protected boolean isProfiling() {
        return stageAggregationCollector.isProfiling();
    }

    /**
     * Returns true when the throwable represents a shutdown/interrupt rather
     * than a genuine failure: the task has been stopped, or the exception (or
     * its root cause) is an interruption.
     */
    protected boolean isInterrupt(Throwable e) {
        if (!running) {
            return true;
        }

        if (e instanceof InterruptedException || e instanceof ZkInterruptedException) {
            return true;
        }

        if (ExceptionUtils.getRootCause(e) instanceof InterruptedException) {
            return true;
        }

        return false;
    }

    /** Returns the pending process ids in ascending order. */
    public Collection<Long> getPendingProcess() {
        List<Long> result = new ArrayList<Long>(pendingFuture.keySet());
        Collections.sort(result);
        return result;
    }

    // ====================== setter / getter =========================

    public void setArbitrateEventService(ArbitrateEventService arbitrateEventService) {
        this.arbitrateEventService = arbitrateEventService;
    }

    public void setRowDataPipeDelegate(RowDataPipeDelegate rowDataPipeDelegate) {
        this.rowDataPipeDelegate = rowDataPipeDelegate;
    }

    public void setExecutorService(ExecutorService executorService) {
        this.executorService = executorService;
    }

    public void setConfigClientService(ConfigClientService configClientService) {
        this.configClientService = configClientService;
    }

    public void setStageAggregationCollector(StageAggregationCollector stageAggregationCollector) {
        this.stageAggregationCollector = stageAggregationCollector;
    }
}
apache-2.0
evanv/titan
titan-hadoop-parent/titan-hadoop-1/src/main/java/com/thinkaurelius/titan/hadoop/compat/h1/Hadoop1Compat.java
2628
package com.thinkaurelius.titan.hadoop.compat.h1;

import com.thinkaurelius.titan.graphdb.configuration.TitanConstants;
import com.thinkaurelius.titan.hadoop.config.job.JobClasspathConfigurer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;

import com.thinkaurelius.titan.hadoop.HadoopGraph;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompat;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompiler;

/**
 * {@link HadoopCompat} binding for the Hadoop 1.x API.
 *
 * <p>Centralizes the pieces that vary between Hadoop versions: the legacy
 * {@code mapred.*} configuration key names, direct construction of
 * {@link TaskAttemptContext}, and the job classpath configurers. All methods
 * are thin delegations with no state of their own.
 */
public class Hadoop1Compat implements HadoopCompat {

    // Legacy Hadoop-1 style configuration keys (note the "mapred." prefix)
    static final String CFG_SPECULATIVE_MAPS = "mapred.map.tasks.speculative.execution";
    static final String CFG_SPECULATIVE_REDUCES = "mapred.reduce.tasks.speculative.execution";
    static final String CFG_JOB_JAR = "mapred.jar";

    @Override
    public HadoopCompiler newCompiler(HadoopGraph g) {
        return new Hadoop1Compiler(g);
    }

    // In Hadoop 1, TaskAttemptContext can be constructed directly
    @Override
    public TaskAttemptContext newTask(Configuration c, TaskAttemptID t) {
        return new TaskAttemptContext(c, t);
    }

    @Override
    public String getSpeculativeMapConfigKey() {
        return CFG_SPECULATIVE_MAPS;
    }

    @Override
    public String getSpeculativeReduceConfigKey() {
        return CFG_SPECULATIVE_REDUCES;
    }

    @Override
    public String getMapredJarConfigKey() {
        return CFG_JOB_JAR;
    }

    @Override
    public void incrementContextCounter(TaskInputOutputContext context,
                                        Enum<?> counter, long incr) {
        context.getCounter(counter).increment(incr);
    }

    @Override
    public Configuration getContextConfiguration(TaskAttemptContext context) {
        return context.getConfiguration();
    }

    // Used by tests: reads a counter value back out of an MRUnit driver
    @Override
    public long getCounter(MapReduceDriver counters, Enum<?> e) {
        return counters.getCounters().findCounter(e).getValue();
    }

    @Override
    public JobClasspathConfigurer newMapredJarConfigurer(String mapredJarPath) {
        return new MapredJarConfigurer(mapredJarPath);
    }

    // The distributed-cache configurer is keyed to the versioned core jar name
    @Override
    public JobClasspathConfigurer newDistCacheConfigurer() {
        return new DistCacheConfigurer("titan-hadoop-core-" + TitanConstants.VERSION + ".jar");
    }

    @Override
    public Configuration getJobContextConfiguration(JobContext context) {
        return context.getConfiguration();
    }

    @Override
    public Configuration newImmutableConfiguration(Configuration base) {
        return new ImmutableConfiguration(base);
    }
}
apache-2.0
tillrohrmann/flink
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionGraphException.java
1338
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.executiongraph; /** Base class for exceptions occurring in the {@link ExecutionGraph}. */ public class ExecutionGraphException extends Exception { private static final long serialVersionUID = -8253451032797220657L; public ExecutionGraphException(String message) { super(message); } public ExecutionGraphException(String message, Throwable cause) { super(message, cause); } public ExecutionGraphException(Throwable cause) { super(cause); } }
apache-2.0
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/metrics/AggregatedJobMetricsHeaders.java
1648
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.rest.messages.job.metrics; /** Headers for aggregating job metrics. */ public class AggregatedJobMetricsHeaders extends AbstractAggregatedMetricsHeaders<AggregatedJobMetricsParameters> { private static final AggregatedJobMetricsHeaders INSTANCE = new AggregatedJobMetricsHeaders(); private AggregatedJobMetricsHeaders() {} @Override public String getTargetRestEndpointURL() { return "/jobs/metrics"; } @Override public AggregatedJobMetricsParameters getUnresolvedMessageParameters() { return new AggregatedJobMetricsParameters(); } public static AggregatedJobMetricsHeaders getInstance() { return INSTANCE; } @Override public String getDescription() { return "Provides access to aggregated job metrics."; } }
apache-2.0
wangcan2014/otter
node/etl/src/main/java/com/alibaba/otter/node/etl/conflict/exception/ConflictException.java
1785
/* * Copyright (C) 2010-2101 Alibaba Group Holding Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.otter.node.etl.conflict.exception; import org.apache.commons.lang.exception.NestableRuntimeException; /** * @author jianghang 2012-4-12 下午02:59:12 * @version 4.0.2 */ public class ConflictException extends NestableRuntimeException { private static final long serialVersionUID = -7288830284122672209L; private String errorCode; private String errorDesc; public ConflictException(String errorCode){ super(errorCode); } public ConflictException(String errorCode, Throwable cause){ super(errorCode, cause); } public ConflictException(String errorCode, String errorDesc){ super(errorCode + ":" + errorDesc); } public ConflictException(String errorCode, String errorDesc, Throwable cause){ super(errorCode + ":" + errorDesc, cause); } public ConflictException(Throwable cause){ super(cause); } public String getErrorCode() { return errorCode; } public String getErrorDesc() { return errorDesc; } @Override public Throwable fillInStackTrace() { return this; } }
apache-2.0
jhshin9/scouter
scouter.client/src/org/csstudio/swt/xygraph/util/SingleSourceHelperImpl.java
3045
package org.csstudio.swt.xygraph.util;

import org.csstudio.swt.xygraph.figures.XYGraph;
import org.eclipse.draw2d.FigureUtilities;
import org.eclipse.draw2d.SWTGraphics;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Transform;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;

/**
 * Desktop-SWT implementation of {@link SingleSourceHelper}: cursor creation,
 * vertical-text image rendering, graph snapshots and a save-file dialog.
 */
public class SingleSourceHelperImpl extends SingleSourceHelper {

    // The style parameter is unused here; the plain SWT Cursor ctor suffices.
    @Override
    protected Cursor createInternalCursor(Display display, ImageData imageData,
            int width, int height, int style) {
        return new Cursor(display, imageData, width, height);
    }

    /**
     * Renders the given text rotated 90 degrees into an image with a
     * transparent background.
     *
     * Approach: draw the rotated text onto a temporary image filled with a
     * sentinel color (240,240,240), then rebuild the image from its pixel
     * data with that sentinel registered as the transparent pixel.
     */
    @Override
    protected Image createInternalVerticalTextImage(String text, Font font,
            RGB color, boolean upToDown) {
        final Dimension titleSize = FigureUtilities.getTextExtents(text, font);

        // width/height are swapped because the text is drawn rotated
        final int w = titleSize.height;
        final int h = titleSize.width + 1;
        Image image = new Image(Display.getCurrent(), w, h);

        final GC gc = new GC(image);
        final Color titleColor = new Color(Display.getCurrent(), color);

        // sentinel background color that is later declared transparent
        RGB transparentRGB = new RGB(240, 240, 240);

        gc.setBackground(XYGraphMediaFactory.getInstance().getColor(
                transparentRGB));
        gc.fillRectangle(image.getBounds());
        gc.setForeground(titleColor);
        gc.setFont(font);
        final Transform tr = new Transform(Display.getCurrent());
        if (!upToDown) {
            // rotate counter-clockwise: translate down, then rotate -90
            tr.translate(0, h);
            tr.rotate(-90);
            gc.setTransform(tr);
        } else {
            // rotate clockwise: translate right, then rotate +90
            tr.translate(w, 0);
            tr.rotate(90);
            gc.setTransform(tr);
        }
        gc.drawText(text, 0, 0);

        // release native resources of the intermediate drawing pass
        tr.dispose();
        gc.dispose();
        final ImageData imageData = image.getImageData();
        image.dispose();
        titleColor.dispose();

        // rebuild the image with the sentinel color marked transparent
        imageData.transparentPixel = imageData.palette.getPixel(transparentRGB);
        image = new Image(Display.getCurrent(), imageData);
        return image;
    }

    /**
     * Paints the whole XYGraph into a new image with a 3-pixel margin on
     * every side (hence the +6 on each dimension).
     */
    @Override
    protected Image getInternalXYGraphSnapShot(XYGraph xyGraph) {
        Rectangle bounds = xyGraph.getBounds();
        Image image = new Image(null, bounds.width + 6, bounds.height + 6);
        GC gc = new GC(image);
        SWTGraphics graphics = new SWTGraphics(gc);
        // shift so the graph lands inside the 3px margin
        graphics.translate(-bounds.x + 3, -bounds.y + 3);
        graphics.setForegroundColor(xyGraph.getForegroundColor());
        graphics.setBackgroundColor(xyGraph.getBackgroundColor());
        xyGraph.paint(graphics);
        gc.dispose();
        return image;
    }

    /**
     * Opens a native save dialog filtered to PNG files and returns the chosen
     * path, or null if the user cancelled.
     * NOTE(review): uses the first shell of the default display as parent —
     * presumably the application's main window; verify for multi-shell apps.
     */
    @Override
    protected String getInternalImageSavePath() {
        FileDialog dialog = new FileDialog(Display.getDefault().getShells()[0],
                SWT.SAVE);
        dialog.setFilterNames(new String[] { "PNG Files", "All Files (*.*)" });
        dialog.setFilterExtensions(new String[] { "*.png", "*.*" }); // Windows
        String path = dialog.open();
        return path;
    }
}
apache-2.0
rokn/Count_Words_2015
testing/openjdk/jdk/src/share/classes/sun/java2d/opengl/OGLSurfaceData.java
24976
/*
 * Copyright (c) 2003, 2008, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.java2d.opengl;

import java.awt.AlphaComposite;
import java.awt.GraphicsEnvironment;
import java.awt.Rectangle;
import java.awt.Transparency;
import java.awt.image.ColorModel;
import java.awt.image.Raster;
import sun.awt.SunHints;
import sun.awt.image.PixelConverter;
import sun.java2d.pipe.hw.AccelSurface;
import sun.java2d.SunGraphics2D;
import sun.java2d.SurfaceData;
import sun.java2d.SurfaceDataProxy;
import sun.java2d.loops.CompositeType;
import sun.java2d.loops.GraphicsPrimitive;
import sun.java2d.loops.MaskFill;
import sun.java2d.loops.SurfaceType;
import sun.java2d.pipe.ParallelogramPipe;
import sun.java2d.pipe.PixelToParallelogramConverter;
import sun.java2d.pipe.RenderBuffer;
import sun.java2d.pipe.TextPipe;
import static sun.java2d.pipe.BufferedOpCodes.*;
import static sun.java2d.opengl.OGLContext.OGLContextCaps.*;

/**
 * This class describes an OpenGL "surface", that is, a region of pixels
 * managed via OpenGL.  An OGLSurfaceData can be tagged with one of three
 * different SurfaceType objects for the purpose of registering loops, etc.
 * This diagram shows the hierarchy of OGL SurfaceTypes:
 *
 *                    Any
 *                  /     \
 *      OpenGLSurface     OpenGLTexture
 *            |
 *     OpenGLSurfaceRTT
 *
 * OpenGLSurface
 * This kind of surface can be rendered to using OpenGL APIs.  It is also
 * possible to copy an OpenGLSurface to another OpenGLSurface (or to itself).
 * This is typically accomplished by calling MakeContextCurrent(dstSD, srcSD)
 * and then calling glCopyPixels() (although there are other techniques to
 * achieve the same goal).
 *
 * OpenGLTexture
 * This kind of surface cannot be rendered to using OpenGL (in the same sense
 * as in OpenGLSurface).  However, it is possible to upload a region of pixels
 * to an OpenGLTexture object via glTexSubImage2D().  One can also copy a
 * surface of type OpenGLTexture to an OpenGLSurface by binding the texture
 * to a quad and then rendering it to the destination surface (this process
 * is known as "texture mapping").
 *
 * OpenGLSurfaceRTT
 * This kind of surface can be thought of as a sort of hybrid between
 * OpenGLSurface and OpenGLTexture, in that one can render to this kind of
 * surface as if it were of type OpenGLSurface, but the process of copying
 * this kind of surface to another is more like an OpenGLTexture.  (Note that
 * "RTT" stands for "render-to-texture".)
 *
 * In addition to these SurfaceType variants, we have also defined some
 * constants that describe in more detail the type of underlying OpenGL
 * surface.  This table helps explain the relationships between those
 * "type" constants and their corresponding SurfaceType:
 *
 * OGL Type          Corresponding SurfaceType
 * --------          -------------------------
 * WINDOW            OpenGLSurface
 * PBUFFER           OpenGLSurface
 * TEXTURE           OpenGLTexture
 * FLIP_BACKBUFFER   OpenGLSurface
 * FBOBJECT          OpenGLSurfaceRTT
 */
public abstract class OGLSurfaceData extends SurfaceData
    implements AccelSurface
{

    /**
     * OGL-specific surface types
     *
     * @see sun.java2d.pipe.hw.AccelSurface
     */
    public static final int PBUFFER  = RT_PLAIN;
    public static final int FBOBJECT = RT_TEXTURE;

    /**
     * Pixel formats
     */
    public static final int PF_INT_ARGB        = 0;
    public static final int PF_INT_ARGB_PRE    = 1;
    public static final int PF_INT_RGB         = 2;
    public static final int PF_INT_RGBX        = 3;
    public static final int PF_INT_BGR         = 4;
    public static final int PF_INT_BGRX        = 5;
    public static final int PF_USHORT_565_RGB  = 6;
    public static final int PF_USHORT_555_RGB  = 7;
    public static final int PF_USHORT_555_RGBX = 8;
    public static final int PF_BYTE_GRAY       = 9;
    public static final int PF_USHORT_GRAY     = 10;
    public static final int PF_3BYTE_BGR       = 11;

    /**
     * SurfaceTypes
     */
    private static final String DESC_OPENGL_SURFACE = "OpenGL Surface";
    private static final String DESC_OPENGL_SURFACE_RTT =
        "OpenGL Surface (render-to-texture)";
    private static final String DESC_OPENGL_TEXTURE = "OpenGL Texture";

    static final SurfaceType OpenGLSurface =
        SurfaceType.Any.deriveSubType(DESC_OPENGL_SURFACE,
                                      PixelConverter.ArgbPre.instance);
    static final SurfaceType OpenGLSurfaceRTT =
        OpenGLSurface.deriveSubType(DESC_OPENGL_SURFACE_RTT);
    static final SurfaceType OpenGLTexture =
        SurfaceType.Any.deriveSubType(DESC_OPENGL_TEXTURE);

    /** This will be true if the fbobject system property has been enabled. */
    private static boolean isFBObjectEnabled;

    /** This will be true if the lcdshader system property has been enabled.*/
    private static boolean isLCDShaderEnabled;

    /** This will be true if the biopshader system property has been enabled.*/
    private static boolean isBIOpShaderEnabled;

    /** This will be true if the gradshader system property has been enabled.*/
    private static boolean isGradShaderEnabled;

    private OGLGraphicsConfig graphicsConfig;
    // one of the surface type constants above (WINDOW, PBUFFER, TEXTURE, ...)
    protected int type;
    // these fields are set from the native code when the surface is
    // initialized
    private int nativeWidth, nativeHeight;

    // shared rendering pipes, created once in the static initializer below
    protected static OGLRenderer oglRenderPipe;
    protected static PixelToParallelogramConverter oglTxRenderPipe;
    protected static ParallelogramPipe oglAAPgramPipe;
    protected static OGLTextRenderer oglTextPipe;
    protected static OGLDrawImage oglImagePipe;

    protected native boolean initTexture(long pData,
                                         boolean isOpaque, boolean texNonPow2,
                                         boolean texRect,
                                         int width, int height);

    protected native boolean initFBObject(long pData,
                                          boolean isOpaque, boolean texNonPow2,
                                          boolean texRect,
                                          int width, int height);

    protected native boolean initFlipBackbuffer(long pData);

    protected abstract boolean initPbuffer(long pData, long pConfigInfo,
                                           boolean isOpaque,
                                           int width, int height);

    private native int getTextureTarget(long pData);

    private native int getTextureID(long pData);

    static {
        if (!GraphicsEnvironment.isHeadless()) {
            // fbobject currently enabled by default; use "false" to disable
            String fbo = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.fbobject"));
            isFBObjectEnabled = !"false".equals(fbo);

            // lcdshader currently enabled by default; use "false" to disable
            String lcd = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.lcdshader"));
            isLCDShaderEnabled = !"false".equals(lcd);

            // biopshader currently enabled by default; use "false" to disable
            String biop = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.biopshader"));
            isBIOpShaderEnabled = !"false".equals(biop);

            // gradshader currently enabled by default; use "false" to disable
            String grad = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.gradshader"));
            isGradShaderEnabled = !"false".equals(grad);

            OGLRenderQueue rq = OGLRenderQueue.getInstance();
            oglImagePipe = new OGLDrawImage();
            oglTextPipe = new OGLTextRenderer(rq);
            oglRenderPipe = new OGLRenderer(rq);
            if (GraphicsPrimitive.tracingEnabled()) {
                oglTextPipe = oglTextPipe.traceWrap();
                //The wrapped oglRenderPipe will wrap the AA pipe as well...
                //oglAAPgramPipe = oglRenderPipe.traceWrap();
            }
            oglAAPgramPipe = oglRenderPipe.getAAParallelogramPipe();
            oglTxRenderPipe =
                new PixelToParallelogramConverter(oglRenderPipe,
                                                  oglRenderPipe,
                                                  1.0, 0.25, true);

            OGLBlitLoops.register();
            OGLMaskFill.register();
            OGLMaskBlit.register();
        }
    }

    protected OGLSurfaceData(OGLGraphicsConfig gc, ColorModel cm, int type) {
        super(getCustomSurfaceType(type), cm);
        this.graphicsConfig = gc;
        this.type = type;
        setBlitProxyKey(gc.getProxyKey());
    }

    @Override
    public SurfaceDataProxy makeProxyFor(SurfaceData srcData) {
        return OGLSurfaceDataProxy.createProxy(srcData, graphicsConfig);
    }

    /**
     * Returns the appropriate SurfaceType corresponding to the given OpenGL
     * surface type constant (e.g. TEXTURE -> OpenGLTexture).
     */
    private static SurfaceType getCustomSurfaceType(int oglType) {
        switch (oglType) {
        case TEXTURE:
            return OpenGLTexture;
        case FBOBJECT:
            return OpenGLSurfaceRTT;
        case PBUFFER:
        default:
            return OpenGLSurface;
        }
    }

    /**
     * Note: This should only be called from the QFT under the AWT lock.
     * This method is kept separate from the initSurface() method below just
     * to keep the code a bit cleaner.
     */
    private void initSurfaceNow(int width, int height) {
        boolean isOpaque = (getTransparency() == Transparency.OPAQUE);
        boolean success = false;

        switch (type) {
        case PBUFFER:
            success = initPbuffer(getNativeOps(),
                                  graphicsConfig.getNativeConfigInfo(),
                                  isOpaque,
                                  width, height);
            break;

        case TEXTURE:
            success = initTexture(getNativeOps(),
                                  isOpaque, isTexNonPow2Available(),
                                  isTexRectAvailable(),
                                  width, height);
            break;

        case FBOBJECT:
            success = initFBObject(getNativeOps(),
                                   isOpaque, isTexNonPow2Available(),
                                   isTexRectAvailable(),
                                   width, height);
            break;

        case FLIP_BACKBUFFER:
            success = initFlipBackbuffer(getNativeOps());
            break;

        default:
            break;
        }

        if (!success) {
            throw new OutOfMemoryError("can't create offscreen surface");
        }
    }

    /**
     * Initializes the appropriate OpenGL offscreen surface based on the value
     * of the type parameter.  If the surface creation fails for any reason,
     * an OutOfMemoryError will be thrown.
     */
    protected void initSurface(final int width, final int height) {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            switch (type) {
            case TEXTURE:
            case PBUFFER:
            case FBOBJECT:
                // need to make sure the context is current before
                // creating the texture (or pbuffer, or fbobject)
                OGLContext.setScratchSurface(graphicsConfig);
                break;
            default:
                break;
            }
            rq.flushAndInvokeNow(new Runnable() {
                public void run() {
                    initSurfaceNow(width, height);
                }
            });
        } finally {
            rq.unlock();
        }
    }

    /**
     * Returns the OGLContext for the GraphicsConfig associated with this
     * surface.
     */
    public final OGLContext getContext() {
        return graphicsConfig.getContext();
    }

    /**
     * Returns the OGLGraphicsConfig associated with this surface.
     */
    final OGLGraphicsConfig getOGLGraphicsConfig() {
        return graphicsConfig;
    }

    /**
     * Returns one of the surface type constants defined above.
     */
    public final int getType() {
        return type;
    }

    /**
     * If this surface is backed by a texture object, returns the target
     * for that texture (either GL_TEXTURE_2D or GL_TEXTURE_RECTANGLE_ARB).
     * Otherwise, this method will return zero.
     */
    public final int getTextureTarget() {
        return getTextureTarget(getNativeOps());
    }

    /**
     * If this surface is backed by a texture object, returns the texture ID
     * for that texture.
     * Otherwise, this method will return zero.
     */
    public final int getTextureID() {
        return getTextureID(getNativeOps());
    }

    /**
     * Returns native resource of specified {@code resType} associated with
     * this surface.
     *
     * Specifically, for {@code OGLSurfaceData} this method returns the
     * the following:
     * <pre>
     * TEXTURE              - texture id
     * </pre>
     *
     * Note: the resource returned by this method is only valid on the rendering
     * thread.
     *
     * @return native resource of specified type or 0L if
     * such resource doesn't exist or can not be retrieved.
     * @see sun.java2d.pipe.hw.AccelSurface#getNativeResource
     */
    public long getNativeResource(int resType) {
        if (resType == TEXTURE) {
            return getTextureID();
        }
        return 0L;
    }

    // Software pixel readback is not supported for OpenGL surfaces.
    public Raster getRaster(int x, int y, int w, int h) {
        throw new InternalError("not implemented yet");
    }

    /**
     * For now, we can only render LCD text if:
     *   - the fragment shader extension is available, and
     *   - blending is disabled, and
     *   - the source color is opaque
     *   - and the destination is opaque
     *
     * Eventually, we could enhance the native OGL text rendering code
     * and remove the above restrictions, but that would require significantly
     * more code just to support a few uncommon cases.
     */
    public boolean canRenderLCDText(SunGraphics2D sg2d) {
        return
            graphicsConfig.isCapPresent(CAPS_EXT_LCD_SHADER) &&
            sg2d.compositeState <= SunGraphics2D.COMP_ISCOPY &&
            sg2d.paintState <= SunGraphics2D.PAINT_OPAQUECOLOR &&
            sg2d.surfaceData.getTransparency() == Transparency.OPAQUE;
    }

    // Selects the text/draw/fill/shape/image pipes appropriate for the
    // current composite, paint, antialiasing and transform state of sg2d.
    public void validatePipe(SunGraphics2D sg2d) {
        TextPipe textpipe;
        boolean validated = false;

        // OGLTextRenderer handles both AA and non-AA text, but
        // only works with the following modes:
        // (Note: For LCD text we only enter this code path if
        // canRenderLCDText() has already validated that the mode is
        // CompositeType.SrcNoEa (opaque color), which will be subsumed
        // by the CompositeType.SrcNoEa (any color) test below.)
        if (/* CompositeType.SrcNoEa (any color) */
            (sg2d.compositeState <= sg2d.COMP_ISCOPY &&
             sg2d.paintState <= sg2d.PAINT_ALPHACOLOR)         ||
            /* CompositeType.SrcOver (any color) */
            (sg2d.compositeState == sg2d.COMP_ALPHA    &&
             sg2d.paintState <= sg2d.PAINT_ALPHACOLOR &&
             (((AlphaComposite)sg2d.composite).getRule() ==
              AlphaComposite.SRC_OVER))                         ||
            /* CompositeType.Xor (any color) */
            (sg2d.compositeState == sg2d.COMP_XOR &&
             sg2d.paintState <= sg2d.PAINT_ALPHACOLOR))
        {
            textpipe = oglTextPipe;
        } else {
            // do this to initialize textpipe correctly; we will attempt
            // to override the non-text pipes below
            super.validatePipe(sg2d);
            textpipe = sg2d.textpipe;
            validated = true;
        }

        PixelToParallelogramConverter txPipe = null;
        OGLRenderer nonTxPipe = null;

        if (sg2d.antialiasHint != SunHints.INTVAL_ANTIALIAS_ON) {
            if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
                if (sg2d.compositeState <= sg2d.COMP_XOR) {
                    txPipe = oglTxRenderPipe;
                    nonTxPipe = oglRenderPipe;
                }
            } else if (sg2d.compositeState <= sg2d.COMP_ALPHA) {
                if (OGLPaints.isValid(sg2d)) {
                    txPipe = oglTxRenderPipe;
                    nonTxPipe = oglRenderPipe;
                }
                // custom paints handled by super.validatePipe() below
            }
        } else {
            if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
                if (graphicsConfig.isCapPresent(CAPS_PS30) &&
                    (sg2d.imageComp == CompositeType.SrcOverNoEa ||
                     sg2d.imageComp == CompositeType.SrcOver))
                {
                    if (!validated) {
                        super.validatePipe(sg2d);
                        validated = true;
                    }
                    PixelToParallelogramConverter aaConverter =
                        new PixelToParallelogramConverter(sg2d.shapepipe,
                                                          oglAAPgramPipe,
                                                          1.0/8.0, 0.499,
                                                          false);
                    sg2d.drawpipe = aaConverter;
                    sg2d.fillpipe = aaConverter;
                    sg2d.shapepipe = aaConverter;
                } else if (sg2d.compositeState == sg2d.COMP_XOR) {
                    // install the solid pipes when AA and XOR are both enabled
                    txPipe = oglTxRenderPipe;
                    nonTxPipe = oglRenderPipe;
                }
            }
            // other cases handled by super.validatePipe() below
        }

        if (txPipe != null) {
            if (sg2d.transformState >= sg2d.TRANSFORM_TRANSLATESCALE) {
                sg2d.drawpipe = txPipe;
                sg2d.fillpipe = txPipe;
            } else if (sg2d.strokeState != sg2d.STROKE_THIN) {
                sg2d.drawpipe = txPipe;
                sg2d.fillpipe = nonTxPipe;
            } else {
                sg2d.drawpipe = nonTxPipe;
                sg2d.fillpipe = nonTxPipe;
            }
            // Note that we use the transforming pipe here because it
            // will examine the shape and possibly perform an optimized
            // operation if it can be simplified.  The simplifications
            // will be valid for all STROKE and TRANSFORM types.
            sg2d.shapepipe = txPipe;
        } else {
            if (!validated) {
                super.validatePipe(sg2d);
            }
        }

        // install the text pipe based on our earlier decision
        sg2d.textpipe = textpipe;

        // always override the image pipe with the specialized OGL pipe
        sg2d.imagepipe = oglImagePipe;
    }

    @Override
    protected MaskFill getMaskFill(SunGraphics2D sg2d) {
        if (sg2d.paintState > sg2d.PAINT_ALPHACOLOR) {
            /*
             * We can only accelerate non-Color MaskFill operations if
             * all of the following conditions hold true:
             *   - there is an implementation for the given paintState
             *   - the current Paint can be accelerated for this destination
             *   - multitexturing is available (since we need to modulate
             *     the alpha mask texture with the paint texture)
             *
             * In all other cases, we return null, in which case the
             * validation code will choose a more general software-based loop.
             */
            if (!OGLPaints.isValid(sg2d) ||
                !graphicsConfig.isCapPresent(CAPS_MULTITEXTURE))
            {
                return null;
            }
        }
        return super.getMaskFill(sg2d);
    }

    // Accelerated copyArea; only usable for simple transforms/composites,
    // otherwise returns false so the caller falls back to a generic path.
    public boolean copyArea(SunGraphics2D sg2d,
                            int x, int y, int w, int h, int dx, int dy)
    {
        if (sg2d.transformState < sg2d.TRANSFORM_TRANSLATESCALE &&
            sg2d.compositeState < sg2d.COMP_XOR)
        {
            x += sg2d.transX;
            y += sg2d.transY;

            oglRenderPipe.copyArea(sg2d, x, y, w, h, dx, dy);

            return true;
        }
        return false;
    }

    // Invalidates this surface and synchronously flushes its native
    // resources on the render queue.
    public void flush() {
        invalidate();
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            // make sure we have a current context before
            // disposing the native resources (e.g. texture object)
            OGLContext.setScratchSurface(graphicsConfig);

            RenderBuffer buf = rq.getBuffer();
            rq.ensureCapacityAndAlignment(12, 4);
            buf.putInt(FLUSH_SURFACE);
            buf.putLong(getNativeOps());

            // this call is expected to complete synchronously, so flush now
            rq.flushNow();
        } finally {
            rq.unlock();
        }
    }

    /**
     * Disposes the native resources associated with the given OGLSurfaceData
     * (referenced by the pData parameter).  This method is invoked from
     * the native Dispose() method from the Disposer thread when the
     * Java-level OGLSurfaceData object is about to go away.  Note that we
     * also pass a reference to the native GLX/WGLGraphicsConfigInfo
     * (pConfigInfo) for the purposes of making a context current.
     */
    static void dispose(long pData, long pConfigInfo) {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            // make sure we have a current context before
            // disposing the native resources (e.g. texture object)
            OGLContext.setScratchSurface(pConfigInfo);

            RenderBuffer buf = rq.getBuffer();
            rq.ensureCapacityAndAlignment(12, 4);
            buf.putInt(DISPOSE_SURFACE);
            buf.putLong(pData);

            // this call is expected to complete synchronously, so flush now
            rq.flushNow();
        } finally {
            rq.unlock();
        }
    }

    // Queues a buffer swap for the given native window and flushes
    // synchronously.
    static void swapBuffers(long window) {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            RenderBuffer buf = rq.getBuffer();
            rq.ensureCapacityAndAlignment(12, 4);
            buf.putInt(SWAP_BUFFERS);
            buf.putLong(window);
            rq.flushNow();
        } finally {
            rq.unlock();
        }
    }

    /**
     * Returns true if OpenGL textures can have non-power-of-two dimensions
     * when using the basic GL_TEXTURE_2D target.
     */
    boolean isTexNonPow2Available() {
        return graphicsConfig.isCapPresent(CAPS_TEXNONPOW2);
    }

    /**
     * Returns true if OpenGL textures can have non-power-of-two dimensions
     * when using the GL_TEXTURE_RECTANGLE_ARB target (only available when the
     * GL_ARB_texture_rectangle extension is present).
     */
    boolean isTexRectAvailable() {
        return graphicsConfig.isCapPresent(CAPS_EXT_TEXRECT);
    }

    // nativeWidth/nativeHeight are written by native code; take the queue
    // lock so we read a consistent pair.
    public Rectangle getNativeBounds() {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            return new Rectangle(nativeWidth, nativeHeight);
        } finally {
            rq.unlock();
        }
    }
}
mit
coding0011/elasticsearch
server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java
4508
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.fetch.subphase;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.script.FieldScript;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;

/**
 * Fetch sub-phase that evaluates the request's script fields for every hit
 * and attaches the resulting values as {@link DocumentField}s on the hit.
 */
public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {

    @Override
    public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
        // nothing to do when the request declares no script fields
        if (context.hasScriptFields() == false) {
            return;
        }

        hits = hits.clone(); // don't modify the incoming hits
        // sort by global docId so hits of the same index segment are adjacent,
        // letting us reuse the per-segment scripts created below
        Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));

        int lastReaderId = -1;          // segment whose scripts are cached in leafScripts
        FieldScript[] leafScripts = null;
        List<ScriptFieldsContext.ScriptField> scriptFields = context.scriptFields().fields();
        final IndexReader reader = context.searcher().getIndexReader();
        for (SearchHit hit : hits) {
            // locate the segment (leaf) that contains this hit
            int readerId = ReaderUtil.subIndex(hit.docId(), reader.leaves());
            LeafReaderContext leafReaderContext = reader.leaves().get(readerId);
            if (readerId != lastReaderId) {
                // entered a new segment: (re)build one script per script field
                leafScripts = createLeafScripts(leafReaderContext, scriptFields);
                lastReaderId = readerId;
            }
            // translate the global docId into a segment-local one
            int docId = hit.docId() - leafReaderContext.docBase;
            for (int i = 0; i < leafScripts.length; i++) {
                leafScripts[i].setDocument(docId);
                final Object value;
                try {
                    value = leafScripts[i].execute();
                    // guard against scripts returning self-referencing structures
                    CollectionUtils.ensureNoSelfReferences(value, "ScriptFieldsFetchSubPhase leaf script " + i);
                } catch (RuntimeException e) {
                    // a script field may opt in to silently skipping failures
                    if (scriptFields.get(i).ignoreException()) {
                        continue;
                    }
                    throw e;
                }
                if (hit.fieldsOrNull() == null) {
                    hit.fields(new HashMap<>(2));
                }
                String scriptFieldName = scriptFields.get(i).name();
                DocumentField hitField = hit.getFields().get(scriptFieldName);
                // only set the field the first time it is seen for this hit;
                // an already-present field is left untouched
                if (hitField == null) {
                    final List<Object> values;
                    if (value instanceof Collection) {
                        // copy so later script invocations can't alias the list
                        values = new ArrayList<>((Collection<?>) value);
                    } else {
                        values = Collections.singletonList(value);
                    }
                    hitField = new DocumentField(scriptFieldName, values);
                    hit.getFields().put(scriptFieldName, hitField);
                }
            }
        }
    }

    // Compiles one FieldScript instance per script field, bound to the given
    // segment. IOException during binding is surfaced as IllegalStateException.
    private FieldScript[] createLeafScripts(LeafReaderContext context,
                                            List<ScriptFieldsContext.ScriptField> scriptFields) {
        FieldScript[] scripts = new FieldScript[scriptFields.size()];
        for (int i = 0; i < scripts.length; i++) {
            try {
                scripts[i] = scriptFields.get(i).script().newInstance(context);
            } catch (IOException e1) {
                throw new IllegalStateException("Failed to load script " + scriptFields.get(i).name(), e1);
            }
        }
        return scripts;
    }
}
apache-2.0
goodwinnk/intellij-community
plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/lang/resolve/ast/builder/strategy/DefaultBuilderStrategySupport.java
8689
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.groovy.lang.resolve.ast.builder.strategy;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.light.LightMethodBuilder;
import com.intellij.psi.impl.light.LightPsiClassBuilder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.resolve.ast.builder.BuilderAnnotationContributor;
import org.jetbrains.plugins.groovy.lang.resolve.ast.builder.BuilderHelperLightPsiClass;
import org.jetbrains.plugins.groovy.transformations.TransformationContext;

import java.util.Objects;

import static org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil.createType;

/**
 * Implements Groovy's {@code @Builder(builderStrategy = DefaultStrategy)} AST
 * transformation for the IDE: synthesizes a light (non-physical) builder class,
 * a static {@code builder()} factory method, per-field setters and a build
 * method, so code completion/resolve see them without real code generation.
 *
 * <p>{@code BUILDER_FQN}, {@code ORIGIN_INFO}, {@code isApplicable},
 * {@code isIncludeSuperProperties} and {@code getFields} are presumably
 * inherited from {@link BuilderAnnotationContributor} — not visible here.</p>
 */
public class DefaultBuilderStrategySupport extends BuilderAnnotationContributor {

  /** Value of the annotation's {@code builderStrategy} attribute handled here. */
  public static final String DEFAULT_STRATEGY_NAME = "DefaultStrategy";

  @Override
  public void applyTransformation(@NotNull TransformationContext context) {
    new DefaultBuilderStrategyHandler(context).doProcess();
  }

  /** Stateful helper: processes one code class per invocation. */
  private static class DefaultBuilderStrategyHandler {

    private final @NotNull TransformationContext myContext;
    private final @NotNull GrTypeDefinition myContainingClass;

    private DefaultBuilderStrategyHandler(@NotNull TransformationContext context) {
      myContext = context;
      myContainingClass = context.getCodeClass();
    }

    public void doProcess() {
      // Class-level @Builder first, then method-level (constructors / static factories).
      processTypeDefinition();
      processMethods();
    }

    /** Handles {@code @Builder} placed directly on the class. */
    private void processTypeDefinition() {
      final PsiAnnotation builderAnno = PsiImplUtil.getAnnotation(myContainingClass, BUILDER_FQN);
      if (!isApplicable(builderAnno, DEFAULT_STRATEGY_NAME)) return;
      boolean includeSuper = isIncludeSuperProperties(builderAnno);
      final PsiClass builderClass = createBuilderClass(builderAnno, getFields(myContext, includeSuper));
      myContext.addMethod(createBuilderMethod(builderClass, builderAnno));
      myContext.addInnerClass(builderClass);
    }

    /** Builder whose build() returns the containing class itself. */
    @NotNull
    private LightPsiClassBuilder createBuilderClass(@NotNull final PsiAnnotation annotation, @NotNull PsiVariable[] setters) {
      return createBuilderClass(annotation, setters, null);
    }

    /**
     * Creates the light builder class: one setter per variable in {@code setters}
     * plus a build method.
     *
     * @param builtType what build() returns; {@code null} means the containing class
     */
    @NotNull
    private LightPsiClassBuilder createBuilderClass(@NotNull final PsiAnnotation annotation,
                                                    @NotNull PsiVariable[] setters,
                                                    @Nullable PsiType builtType) {
      final LightPsiClassBuilder builderClass = new BuilderHelperLightPsiClass(
        myContainingClass, getBuilderClassName(annotation, myContainingClass)
      );
      for (PsiVariable field : setters) {
        LightMethodBuilder setter = createFieldSetter(builderClass, field, annotation);
        builderClass.addMethod(setter);
      }
      final LightMethodBuilder buildMethod = createBuildMethod(
        annotation, builtType == null ? createType(myContainingClass) : builtType
      );
      return builderClass.addMethod(buildMethod);
    }

    /** Static factory method on the annotated class returning the builder. */
    @NotNull
    private LightMethodBuilder createBuilderMethod(@NotNull PsiClass builderClass, @NotNull PsiAnnotation annotation) {
      final LightMethodBuilder builderMethod = new LightMethodBuilder(myContext.getManager(), getBuilderMethodName(annotation));
      builderMethod.addModifier(PsiModifier.STATIC);
      builderMethod.setOriginInfo(ORIGIN_INFO);
      // Navigation jumps to the annotation that produced this synthetic method.
      builderMethod.setNavigationElement(annotation);
      builderMethod.setMethodReturnType(createType(builderClass));
      return builderMethod;
    }

    private void processMethods() {
      for (GrMethod method : myContext.getCodeClass().getCodeMethods()) {
        processMethod(method);
      }
    }

    /** Handles {@code @Builder} on a constructor or a static factory method. */
    private void processMethod(@NotNull GrMethod method) {
      final PsiAnnotation annotation = PsiImplUtil.getAnnotation(method, BUILDER_FQN);
      if (!isApplicable(annotation, DEFAULT_STRATEGY_NAME)) return;
      if (method.isConstructor()) {
        processConstructor(method, annotation);
      }
      else if (method.hasModifierProperty(PsiModifier.STATIC)) {
        processFactoryMethod(method, annotation);
      }
    }

    /** Constructor parameters become the builder's setters; build() yields the class. */
    private void processConstructor(@NotNull GrMethod method, PsiAnnotation annotation) {
      PsiClass builderClass = createBuilderClass(annotation, method.getParameters());
      myContext.addMethod(createBuilderMethod(builderClass, annotation));
      myContext.addInnerClass(builderClass);
    }

    /** Factory parameters become setters; build() yields the factory's return type. */
    private void processFactoryMethod(@NotNull GrMethod method, PsiAnnotation annotation) {
      PsiClass builderClass = createBuilderClass(annotation, method.getParameters(), method.getReturnType());
      myContext.addMethod(createBuilderMethod(builderClass, annotation));
      myContext.addInnerClass(builderClass);
    }

    /** {@code builderMethodName} attribute, defaulting to "builder". */
    @NotNull
    private static String getBuilderMethodName(@NotNull PsiAnnotation annotation) {
      final String builderMethodName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "builderMethodName");
      return StringUtil.isEmpty(builderMethodName) ? "builder" : builderMethodName;
    }
  }

  /** {@code builderClassName} attribute, defaulting to "&lt;ClassName&gt;Builder". */
  @NotNull
  public static String getBuilderClassName(@NotNull PsiAnnotation annotation, @NotNull GrTypeDefinition clazz) {
    final String builderClassName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "builderClassName");
    return builderClassName == null ? String.format("%s%s", clazz.getName(), "Builder") : builderClassName;
  }

  /** Light build method returning {@code builtType}; name taken from the annotation. */
  @NotNull
  public static LightMethodBuilder createBuildMethod(@NotNull PsiAnnotation annotation, @NotNull PsiType builtType) {
    final LightMethodBuilder buildMethod = new LightMethodBuilder(annotation.getManager(), getBuildMethodName(annotation));
    buildMethod.setOriginInfo(ORIGIN_INFO);
    buildMethod.setMethodReturnType(builtType);
    return buildMethod;
  }

  /** Setter named after the field (see {@link #getFieldMethodName}); navigates to the field. */
  @NotNull
  public static LightMethodBuilder createFieldSetter(@NotNull PsiClass builderClass,
                                                     @NotNull PsiVariable field,
                                                     @NotNull PsiAnnotation annotation) {
    String name = Objects.requireNonNull(field.getName());
    return createFieldSetter(builderClass, name, field.getType(), annotation, field);
  }

  /**
   * Light setter: public, single parameter of the field's type, returns the
   * builder itself (fluent chaining).
   */
  @NotNull
  public static LightMethodBuilder createFieldSetter(@NotNull PsiClass builderClass,
                                                     @NotNull String name,
                                                     @NotNull PsiType type,
                                                     @NotNull PsiAnnotation annotation,
                                                     @NotNull PsiElement navigationElement) {
    final LightMethodBuilder fieldSetter = new LightMethodBuilder(builderClass.getManager(), getFieldMethodName(annotation, name));
    fieldSetter.addModifier(PsiModifier.PUBLIC);
    fieldSetter.addParameter(name, type);
    fieldSetter.setContainingClass(builderClass);
    fieldSetter.setMethodReturnType(JavaPsiFacade.getElementFactory(builderClass.getProject()).createType(builderClass));
    fieldSetter.setNavigationElement(navigationElement);
    fieldSetter.setOriginInfo(ORIGIN_INFO);
    return fieldSetter;
  }

  /** Setter name: bare field name, or "&lt;prefix&gt;&lt;FieldName&gt;" when a prefix is set. */
  @NotNull
  public static String getFieldMethodName(@NotNull PsiAnnotation annotation, @NotNull String fieldName) {
    final String prefix = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "prefix");
    return StringUtil.isEmpty(prefix) ? fieldName : String.format("%s%s", prefix, StringUtil.capitalize(fieldName));
  }

  /** {@code buildMethodName} attribute, defaulting to "build". */
  @NotNull
  private static String getBuildMethodName(@NotNull PsiAnnotation annotation) {
    final String buildMethodName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "buildMethodName");
    return StringUtil.isEmpty(buildMethodName) ? "build" : buildMethodName;
  }
}
apache-2.0
thusithathilina/carbon-registry
components/registry/org.wso2.carbon.registry.ws.client/src/main/ws-test/org/wso2/carbon/registry/ws/client/test/security/ContinuousOperations.java
3380
/* * Copyright 2004,2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.registry.ws.client.test.security; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.ResourceImpl; import org.wso2.carbon.registry.core.utils.RegistryUtils; public class ContinuousOperations extends SecurityTestSetup { public ContinuousOperations(String text) { super(text); } public void testContinousDelete() throws Exception { int iterations = 100; for (int i = 0; i < iterations; i++) { Resource res1 = registry.newResource(); byte[] r1content = RegistryUtils.encodeString("R2 content"); res1.setContent(r1content); String path = "/con-delete/test/" + i + 1; registry.put(path, res1); Resource resource1 = registry.get(path); assertEquals("File content is not matching", RegistryUtils.decodeBytes((byte[]) resource1.getContent()), RegistryUtils.decodeBytes((byte[]) res1.getContent())); registry.delete(path); boolean value = false; if (registry.resourceExists(path)) { value = true; } assertFalse("Resoruce not found at the path", value); res1.discard(); resource1.discard(); Thread.sleep(100); } } public void testContinuousUpdate() throws Exception { int iterations = 100; for (int i = 0; i < iterations; i++) { Resource res1 = registry.newResource(); byte[] r1content = RegistryUtils.encodeString("R2 content"); res1.setContent(r1content); String path = 
"/con-delete/test-update/" + i + 1; registry.put(path, res1); Resource resource1 = registry.get(path); assertEquals("File content is not matching", RegistryUtils.decodeBytes((byte[]) resource1.getContent()), RegistryUtils.decodeBytes((byte[]) res1.getContent())); Resource resource = new ResourceImpl(); byte[] r1content1 = RegistryUtils.encodeString("R2 content updated"); resource.setContent(r1content1); resource.setProperty("abc", "abc"); registry.put(path, resource); Resource resource2 = registry.get(path); assertEquals("File content is not matching", RegistryUtils.decodeBytes((byte[]) resource.getContent()), RegistryUtils.decodeBytes((byte[]) resource2.getContent())); resource.discard(); res1.discard(); resource1.discard(); resource2.discard(); Thread.sleep(100); } } }
apache-2.0
Buzzardo/spring-boot
spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/jooq/AutoConfigureJooq.java
1395
/* * Copyright 2012-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.test.autoconfigure.jooq; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; /** * {@link ImportAutoConfiguration Auto-configuration imports} for typical jOOQ tests. Most * tests should consider using {@link JooqTest @JooqTest} rather than using this * annotation directly. * * @author Michael Simons * @since 2.0.0 */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @Documented @Inherited @ImportAutoConfiguration public @interface AutoConfigureJooq { }
apache-2.0
nightauer/quickdic-dictionary.dictionary
jars/icu4j-52_1/main/tests/framework/src/com/ibm/icu/dev/util/Visitor.java
4008
/*
 *******************************************************************************
 * Copyright (C) 2002-2012, International Business Machines Corporation and    *
 * others. All Rights Reserved.                                                *
 *******************************************************************************
 */
package com.ibm.icu.dev.util;

import java.util.Collection;
import java.util.Iterator;
import java.util.Map;

import com.ibm.icu.text.UnicodeSet;
import com.ibm.icu.text.UnicodeSetIterator;

/**
 * Generic visitor over heterogeneous containers (Collections, Maps, arrays,
 * UnicodeSets and plain objects). Subclasses supply the four hooks
 * ({@code doBefore}, {@code doBetween}, {@code doAfter}, {@code doSimpleAt});
 * the container walkers call them in the order:
 * doBefore(first), then for each item doAt(item) with doBetween(prev, next)
 * separating consecutive items, then doAfter(last).
 */
public abstract class Visitor {

    /** Dispatches to the container-specific walker, or doSimpleAt for scalars. */
    public void doAt(Object item) {
        if (item instanceof Collection) {
            doAt((Collection) item);
        } else if (item instanceof Map) {
            doAt((Map) item);
        } else if (item instanceof Object[]) {
            doAt((Object[]) item);
        } else if (item instanceof UnicodeSet) {
            doAt((UnicodeSet) item);
        } else {
            doSimpleAt(item);
        }
    }

    /** Number of elements a walker would visit; 1 for a scalar. */
    public int count(Object item) {
        if (item instanceof Collection) {
            return ((Collection) item).size();
        } else if (item instanceof Map) {
            return ((Map) item).size();
        } else if (item instanceof Object[]) {
            return ((Object[]) item).length;
        } else if (item instanceof UnicodeSet) {
            return ((UnicodeSet) item).size();
        } else {
            return 1;
        }
    }

    // The default implementation boxing.
    // Use valueOf caches instead of the deprecated wrapper constructors.
    public void doAt(int o) {
        doSimpleAt(Integer.valueOf(o));
    }

    public void doAt(double o) {
        doSimpleAt(Double.valueOf(o));
    }

    public void doAt(char o) {
        doSimpleAt(Character.valueOf(o));
    }

    // for subclassing

    protected void doAt(Collection c) {
        if (c.size() == 0) doBefore(c, null);
        Iterator it = c.iterator();
        boolean first = true;
        Object last = null;
        while (it.hasNext()) {
            Object item = it.next();
            if (first) {
                doBefore(c, item);
                first = false;
            } else {
                doBetween(c, last, item);
            }
            doAt(last = item);
        }
        doAfter(c, last);
    }

    /** A map is visited as its entry set. */
    protected void doAt(Map c) {
        doAt(c.entrySet());
    }

    protected void doAt(UnicodeSet c) {
        if (c.size() == 0) doBefore(c, null);
        UnicodeSetIterator it = new UnicodeSetIterator(c);
        boolean first = true;
        Object last = null;
        Object item;
        // Two range buffers are alternated so that 'last' still refers to the
        // previous range when doBetween is called.
        CodePointRange cpr0 = new CodePointRange();
        CodePointRange cpr1 = new CodePointRange();
        CodePointRange cpr;

        while (it.nextRange()) {
            if (it.codepoint == UnicodeSetIterator.IS_STRING) {
                item = it.string;
            } else {
                cpr = last == cpr0 ? cpr1 : cpr0; // make sure we don't override last
                cpr.codepoint = it.codepoint;
                cpr.codepointEnd = it.codepointEnd;
                item = cpr;
            }
            // BUG FIX: the first/between tests were inverted relative to
            // doAt(Collection): the original ran `if (!first) { doBefore; first = true; }`,
            // so doBefore never fired for the first range and doBetween was called
            // with last == null on the first iteration.
            if (first) {
                doBefore(c, item);
                first = false;
            } else {
                doBetween(c, last, item);
            }
            doAt(last = item);
        }
        doAfter(c, last);
    }

    protected void doAt(Object[] c) {
        doBefore(c, c.length == 0 ? null : c[0]);
        Object last = null;
        for (int i = 0; i < c.length; ++i) {
            if (i != 0) doBetween(c, last, c[i]);
            doAt(last = c[i]);
        }
        doAfter(c, last);
    }

    /** Inclusive code point range handed to doAt for UnicodeSet ranges. */
    public static class CodePointRange {
        public int codepoint, codepointEnd;
    }

    // ===== MUST BE OVERRIDDEN =====

    abstract protected void doBefore(Object container, Object item);

    abstract protected void doBetween(Object container, Object lastItem, Object nextItem);

    abstract protected void doAfter(Object container, Object item);

    abstract protected void doSimpleAt(Object o);
}
apache-2.0
bbrouwer/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/ErrorProperties.java
2080
/* * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.web; import org.springframework.beans.factory.annotation.Value; /** * Configuration properties for web error handling. * * @author Michael Stummvoll * @author Stephane Nicoll * @author Vedran Pavic * @since 1.3.0 */ public class ErrorProperties { /** * Path of the error controller. */ @Value("${error.path:/error}") private String path = "/error"; /** * Include the "exception" attribute. */ private boolean includeException; /** * When to include a "stacktrace" attribute. */ private IncludeStacktrace includeStacktrace = IncludeStacktrace.NEVER; public String getPath() { return this.path; } public void setPath(String path) { this.path = path; } public boolean isIncludeException() { return this.includeException; } public void setIncludeException(boolean includeException) { this.includeException = includeException; } public IncludeStacktrace getIncludeStacktrace() { return this.includeStacktrace; } public void setIncludeStacktrace(IncludeStacktrace includeStacktrace) { this.includeStacktrace = includeStacktrace; } /** * Include Stacktrace attribute options. */ public enum IncludeStacktrace { /** * Never add stacktrace information. */ NEVER, /** * Always add stacktrace information. */ ALWAYS, /** * Add stacktrace information when the "trace" request parameter is "true". */ ON_TRACE_PARAM } }
apache-2.0
wxcandy/Mahjong
org/mockito/asm/util/CheckFieldAdapter.java
2946
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2007 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.mockito.asm.util;

import org.mockito.asm.AnnotationVisitor;
import org.mockito.asm.Attribute;
import org.mockito.asm.FieldVisitor;

/**
 * A {@link FieldVisitor} that checks that its methods are properly used: it
 * validates arguments and rejects any visit call made after {@code visitEnd},
 * delegating every accepted call to a wrapped visitor.
 */
public class CheckFieldAdapter implements FieldVisitor {

    /** The visitor to which all checked calls are forwarded. */
    private final FieldVisitor fv;

    /** Becomes true once {@link #visitEnd} has been invoked. */
    private boolean visitEndCalled;

    public CheckFieldAdapter(final FieldVisitor fv) {
        this.fv = fv;
    }

    public AnnotationVisitor visitAnnotation(
        final String desc,
        final boolean visible)
    {
        assertNotEnded();
        // Validate the annotation's type descriptor before delegating.
        CheckMethodAdapter.checkDesc(desc, false);
        AnnotationVisitor delegate = fv.visitAnnotation(desc, visible);
        // Wrap so the returned annotation visitor is checked as well.
        return new CheckAnnotationAdapter(delegate);
    }

    public void visitAttribute(final Attribute attr) {
        assertNotEnded();
        if (attr == null) {
            throw new IllegalArgumentException("Invalid attribute (must not be null)");
        }
        fv.visitAttribute(attr);
    }

    public void visitEnd() {
        assertNotEnded();
        visitEndCalled = true;
        fv.visitEnd();
    }

    /** Rejects any visit after the field visit has been completed. */
    private void assertNotEnded() {
        if (visitEndCalled) {
            throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
        }
    }
}
mit
plumer/codana
tomcat_files/7.0.61/Constants (2).java
945
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.loader; public class Constants { public static final String Package = "org.apache.catalina.loader"; }
mit
NanYoMy/mybatis-generator
src/main/java/org/mybatis/generator/logging/AbstractLogFactory.java
836
/* * Copyright 2009 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mybatis.generator.logging; /** * Defines the interface for creating Log implementations. * * @author Jeff Butler * */ public interface AbstractLogFactory { Log getLog(Class<?> aClass); }
mit
Noctrunal/jcommune
jcommune-plugin-api/src/main/java/org/jtalks/jcommune/plugin/api/web/validation/validators/BbCodeAwareSizeValidator.java
4375
/**
 * Copyright (C) 2011  JTalks.org Team
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */
package org.jtalks.jcommune.plugin.api.web.validation.validators;

import org.jtalks.jcommune.plugin.api.service.PluginBbCodeService;
import org.jtalks.jcommune.plugin.api.web.validation.annotations.BbCodeAwareSize;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

/**
 * Extends default @Size annotation to ignore BB codes in string.
 * As for now, applicable to string values only.
 *
 * @author Evgeniy Naumenko
 */
public class BbCodeAwareSizeValidator implements ConstraintValidator<BbCodeAwareSize, String>,
        ApplicationContextAware {

    // HTML fragments left behind by BB-code stripping; both are normalized in
    // getDisplayedLength before the visible length is counted.
    public static final String NEW_LINE_HTML = "<br/>";
    // NOTE(review): the entity lacks the trailing ';' ("&quot" not "&quot;"), so
    // replacing it leaves a stray ';' in the measured text — confirm intended.
    public static final String QUOTE_HTML = "&quot";
    // Matches [list] blocks containing only whitespace and bare [*] markers.
    public static final String EMPTY_LIST_BB_REGEXP = "\\[list\\][\n\r\\s]*(\\[\\*\\][\n\r\\s]*)*\\[\\/list\\]";

    // Bounds copied from the @BbCodeAwareSize annotation in initialize().
    private int min;
    private int max;

    private ApplicationContext context;
    // Lazily re-resolved from the ApplicationContext if not injected (see getBBCodeService).
    private PluginBbCodeService bbCodeService;

    @Autowired
    public BbCodeAwareSizeValidator(PluginBbCodeService bbCodeService) {
        this.bbCodeService = bbCodeService;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void initialize(BbCodeAwareSize constraintAnnotation) {
        this.min = constraintAnnotation.min();
        this.max = constraintAnnotation.max();
    }

    /**
     * The database stores both bb codes and symbols visible for users.
     * Post length with bb codes can't be greater than max value.
     * {@inheritDoc}
     */
    @Override
    public boolean isValid(String value, ConstraintValidatorContext context) {
        if (value != null) {
            String emptyListRemoved = removeEmptyListBb(value);
            String trimed = removeBBCodes(emptyListRemoved).trim();
            int plainTextLength = getDisplayedLength(trimed);
            // min is checked against the visible (stripped) length, max against
            // the raw length including BB markup — see the javadoc above.
            return plainTextLength >= min && value.length() <= max;
        }
        // null values are rejected outright.
        return false;
    }

    /**
     * Removes all BB codes from the text given, simply cutting
     * out all [...]-style tags found
     *
     * @param source text to cleanup
     * @return plain text without BB tags
     */
    private String removeBBCodes(String source) {
        return getBBCodeService().stripBBCodes(source);
    }

    @Override
    public void setApplicationContext(ApplicationContext ac) throws BeansException {
        this.context = ac;
    }

    // Falls back to a context lookup when constructor injection did not run
    // (e.g. the validator was instantiated by the Bean Validation provider).
    private PluginBbCodeService getBBCodeService() {
        if (bbCodeService == null) {
            bbCodeService = this.context.getBean(PluginBbCodeService.class);
        }
        return bbCodeService;
    }

    /**
     * Calculate length of string which be displayed.
     * Needed because method <b>removeBBCodes</b> leaves "&quot" and "<br/>" symbols.
     *
     * @param s String to calculate length.
     * @return Length of string which be displayed.
     */
    private int getDisplayedLength(String s) {
        return s.replaceAll(QUOTE_HTML, "\"").replaceAll(NEW_LINE_HTML, "\n\r").length();
    }

    /**
     * Removes all empty lists from text. Needed because <b>removeBBCodes</b> deletes
     * bb codes for list but not deletes bb codes for list elements.
     *
     * @param text Text to remove empty lists.
     * @return Text without empty lists.
     */
    private String removeEmptyListBb(String text) {
        return text.replaceAll(EMPTY_LIST_BB_REGEXP, "");
    }
}
lgpl-2.1
jhiswin/idiil-closure-compiler
src/com/google/javascript/jscomp/ClosureOptimizePrimitives.java
3783
/*
 * Copyright 2011 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.collect.Lists;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.List;

/**
 * <p>Compiler pass that converts all calls to:
 *   goog.object.create(key1, val1, key2, val2, ...) where all of the keys
 *   are literals into object literals.</p>
 *
 * @author agrieve@google.com (Andrew Grieve)
 */
final class ClosureOptimizePrimitives implements CompilerPass {

  /** Reference to the JS compiler */
  private final AbstractCompiler compiler;

  /**
   * Identifies all calls to goog.object.create.
   * Matches both the collapsed ("goog$object$create") and dotted
   * ("goog.object.create") forms of the callee name.
   */
  private class FindObjectCreateCalls extends AbstractPostOrderCallback {
    List<Node> callNodes = Lists.newArrayList();

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isCall()) {
        String fnName = n.getFirstChild().getQualifiedName();
        if ("goog$object$create".equals(fnName)
            || "goog.object.create".equals(fnName)) {
          callNodes.add(n);
        }
      }
    }
  }

  /**
   * @param compiler The AbstractCompiler
   */
  ClosureOptimizePrimitives(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void process(Node externs, Node root) {
    // Collect first (post-order traversal), then rewrite, so the traversal
    // never sees a tree it is mutating.
    FindObjectCreateCalls pass = new FindObjectCreateCalls();
    NodeTraversal.traverse(compiler, root, pass);
    processObjectCreateCalls(pass.callNodes);
  }

  /**
   * Converts all of the given call nodes to object literals that are safe to
   * do so.
   */
  private void processObjectCreateCalls(List<Node> callNodes) {
    for (Node callNode : callNodes) {
      // First child is the callee; arguments start at its sibling.
      Node curParam = callNode.getFirstChild().getNext();
      if (canOptimizeObjectCreate(curParam)) {
        Node objNode = IR.objectlit().srcref(callNode);
        // Consume arguments pairwise: (key, value), (key, value), ...
        while (curParam != null) {
          Node keyNode = curParam;
          Node valueNode = curParam.getNext();
          curParam = valueNode.getNext();

          // Detach both nodes before re-parenting them under the literal.
          callNode.removeChild(keyNode);
          callNode.removeChild(valueNode);

          // Number keys are normalized to their string form.
          if (!keyNode.isString()) {
            keyNode = IR.string(NodeUtil.getStringValue(keyNode))
                .srcref(keyNode);
          }
          keyNode.setType(Token.STRING_KEY);
          // Quoting preserves goog.object.create semantics: keys must not be
          // renamed by property renaming.
          keyNode.setQuotedString();
          objNode.addChildToBack(IR.propdef(keyNode, valueNode));
        }
        callNode.getParent().replaceChild(callNode, objNode);
        compiler.reportCodeChange();
      }
    }
  }

  /**
   * Returns whether the given call to goog.object.create can be converted to an
   * object literal.
   */
  private boolean canOptimizeObjectCreate(Node firstParam) {
    Node curParam = firstParam;
    while (curParam != null) {
      // All keys must be strings or numbers.
      if (!curParam.isString() && !curParam.isNumber()) {
        return false;
      }
      curParam = curParam.getNext();

      // Check for an odd number of parameters.
      if (curParam == null) {
        return false;
      }
      curParam = curParam.getNext();
    }
    return true;
  }
}
apache-2.0
ZhangXFeng/hadoop
src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java
8019
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.hdfs.server.common.Storage;

/**
 *
 * CreateEditsLog
 *   Synopsis: CreateEditsLog -f numFiles StartingBlockId numBlocksPerFile
 *        [-r replicafactor] [-d editsLogDirectory]
 *             Default replication factor is 1
 *             Default edits log directory is /tmp/EditsLogOut
 *
 *   Create a name node's edits log in /tmp/EditsLogOut.
 *   The file /tmp/EditsLogOut/current/edits can be copied to a name node's
 *   dfs.namenode.name.dir/current direcotry and the name node can be started as usual.
 *
 *   The files are created in /createdViaInjectingInEditsLog
 *   The file names contain the starting and ending blockIds; hence once can
 *   create multiple edits logs using this command using non overlapping
 *   block ids and feed the files to a single name node.
 *
 *   See Also @link #DataNodeCluster for injecting a set of matching
 *   blocks created with this command into a set of simulated data nodes.
 *
 */
public class CreateEditsLog {
  static final String BASE_PATH = "/createdViaInjectingInEditsLog";
  static final String EDITS_DIR = "/tmp/EditsLogOut";
  // Output directory; overridable via the -d option.
  static String edits_dir = EDITS_DIR;
  static final public long BLOCK_GENERATION_STAMP =
    GenerationStamp.LAST_RESERVED_STAMP;

  /**
   * Appends {@code numFiles} file-create/close transaction pairs to the given
   * edits log, each file owning {@code blocksPerFile} blocks with consecutive
   * block ids starting at {@code startingBlockId}. Directories are rotated via
   * {@code nameGenerator} and logged as they change; the log is synced every
   * ~2000 blocks to bound memory usage.
   */
  static void addFiles(FSEditLog editLog, int numFiles, short replication,
                         int blocksPerFile, long startingBlockId, long blockSize,
                         FileNameGenerator nameGenerator) {

    PermissionStatus p = new PermissionStatus("joeDoe", "people",
                                      new FsPermission((short)0777));
    INodeId inodeId = new INodeId();
    INodeDirectory dirInode = new INodeDirectory(inodeId.nextValue(), null, p,
      0L);
    editLog.logMkDir(BASE_PATH, dirInode);
    // One shared block array is reused for every file; only the ids change.
    BlockInfo[] blocks = new BlockInfo[blocksPerFile];
    for (int iB = 0; iB < blocksPerFile; ++iB) {
      blocks[iB] = new BlockInfo(new Block(0, blockSize,
                                           BLOCK_GENERATION_STAMP),
                                 replication);
    }

    long currentBlockId = startingBlockId;
    long bidAtSync = startingBlockId;

    for (int iF = 0; iF < numFiles; iF++) {
      for (int iB = 0; iB < blocksPerFile; ++iB) {
         blocks[iB].setBlockId(currentBlockId++);
      }

      final INodeFile inode = new INodeFile(inodeId.nextValue(), null,
          p, 0L, 0L, blocks, replication, blockSize, (byte)0);
      inode.toUnderConstruction("", "");

     // Append path to filename with information about blockIDs
     String path = "_" + iF + "_B" + blocks[0].getBlockId() +
                   "_to_B" + blocks[blocksPerFile-1].getBlockId() + "_";
     String filePath = nameGenerator.getNextFileName("");
     filePath = filePath + path;
     // Log the new sub directory in edits
     if ((iF % nameGenerator.getFilesPerDirectory())  == 0) {
       String currentDir = nameGenerator.getCurrentDir();
       dirInode = new INodeDirectory(inodeId.nextValue(), null, p, 0L);
       editLog.logMkDir(currentDir, dirInode);
     }
     // An open (under-construction) entry followed by a close entry mirrors a
     // real client create/close sequence.
     INodeFile fileUc = new INodeFile(inodeId.nextValue(), null,
         p, 0L, 0L, BlockInfo.EMPTY_ARRAY, replication, blockSize, (byte)0);
     fileUc.toUnderConstruction("", "");
     editLog.logOpenFile(filePath, fileUc, false, false);
     editLog.logCloseFile(filePath, inode);

     if (currentBlockId - bidAtSync >= 2000) { // sync every 2K blocks
       editLog.logSync();
       bidAtSync = currentBlockId;
     }
    }
    System.out.println("Created edits log in directory " + edits_dir);
    System.out.println(" containing " +
       numFiles + " File-Creates, each file with " + blocksPerFile + " blocks");
    System.out.println(" blocks range: " +
       startingBlockId + " to " + (currentBlockId-1));
  }

  static final String usage = "Usage: createditlogs " +
  " -f  numFiles startingBlockIds NumBlocksPerFile  [-r replicafactor] " +
      "[-d editsLogDirectory]\n" +
      "      Default replication factor is 1\n" +
      "      Default edits log direcory is " + EDITS_DIR + "\n";

  static void printUsageExit() {
    System.out.println(usage);
    System.exit(-1);
  }

  static void printUsageExit(String err) {
    System.out.println(err);
    printUsageExit();
  }

  /**
   * Parses the command line and writes the requested edits log.
   *
   * @param args arguments
   * @throws IOException if the edits log cannot be created or written
   */
  public static void main(String[] args)  throws IOException {
    long startingBlockId = 1;
    int numFiles = 0;
    short replication = 1;
    int numBlocksPerFile = 0;
    long blockSize = 10;

    if (args.length == 0) {
      printUsageExit();
    }

    for (int i = 0; i < args.length; i++) { // parse command line
      if (args[i].equals("-h"))
        printUsageExit();
      if (args[i].equals("-f")) {
       if (i + 3 >= args.length || args[i+1].startsWith("-") ||
           args[i+2].startsWith("-") || args[i+3].startsWith("-")) {
          printUsageExit(
              "Missing num files, starting block and/or number of blocks");
       }
       numFiles = Integer.parseInt(args[++i]);
       startingBlockId = Integer.parseInt(args[++i]);
       numBlocksPerFile = Integer.parseInt(args[++i]);
       if (numFiles <=0 || numBlocksPerFile <= 0) {
         printUsageExit("numFiles and numBlocksPerFile most be greater than 0");
       }
      } else if (args[i].equals("-l")) {
        if (i + 1 >= args.length) {
          printUsageExit(
              "Missing block length");
        }
        blockSize = Long.parseLong(args[++i]);
      } else if (args[i].equals("-r")) {
        // BUG FIX: the condition used to be
        //   args[i].equals("-r") || args[i+1].startsWith("-")
        // which (a) misrouted any unrecognized flag followed by another option
        // into the -r branch instead of printing usage, and (b) could throw
        // ArrayIndexOutOfBoundsException when the unrecognized flag was the
        // last argument.
        if (i + 1 >= args.length) {
          printUsageExit(
              "Missing replication factor");
        }
        replication = Short.parseShort(args[++i]);
      } else if (args[i].equals("-d")) {
        if (i + 1 >= args.length || args[i+1].startsWith("-")) {
          printUsageExit("Missing edits logs directory");
        }
        edits_dir = args[++i];
      } else {
        printUsageExit();
      }
    }

    File editsLogDir = new File(edits_dir);
    File subStructureDir = new File(edits_dir + "/" +
        Storage.STORAGE_DIR_CURRENT);
    if ( !editsLogDir.exists() ) {
      if ( !editsLogDir.mkdir()) {
        System.out.println("cannot create " + edits_dir);
        System.exit(-1);
      }
    }
    if ( !subStructureDir.exists() ) {
      if ( !subStructureDir.mkdir()) {
        System.out.println("cannot create subdirs of " + edits_dir);
        System.exit(-1);
      }
    }

    FileNameGenerator nameGenerator = new FileNameGenerator(BASE_PATH, 100);

    FSEditLog editLog = FSImageTestUtil.createStandaloneEditLog(editsLogDir);
    editLog.openForWrite();
    addFiles(editLog, numFiles, replication, numBlocksPerFile, startingBlockId,
             blockSize, nameGenerator);
    editLog.logSync();
    editLog.close();
  }
}
apache-2.0
madhawa-gunasekara/carbon-commons
components/tenant-mgt-common/org.wso2.carbon.tenant.common/src/main/java/org/wso2/carbon/stratos/common/util/CloudServicesUtil.java
9738
package org.wso2.carbon.stratos.common.util;

import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.registry.core.Collection;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.stratos.common.config.CloudServiceConfig;
import org.wso2.carbon.stratos.common.config.CloudServicesDescConfig;
import org.wso2.carbon.stratos.common.config.PermissionConfig;
import org.wso2.carbon.stratos.common.constants.StratosConstants;
import org.wso2.carbon.stratos.common.internal.CloudCommonServiceComponent;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;

/**
 * Utility methods for activating/deactivating cloud services for a tenant and
 * for querying their activation state, backed by the governance and
 * configuration registries.
 */
public class CloudServicesUtil {

    private static final Log log = LogFactory.getLog(CloudServicesUtil.class);

    // TODO protect using Java security

    /**
     * Activates every cloud service marked as default-active in the
     * description config for the given tenant. A failure on any single
     * service aborts the whole operation.
     *
     * @param cloudServicesDesc parsed cloud-services descriptor
     * @param tenantId          tenant to activate the services for
     * @throws Exception if activating any default-active service fails
     */
    public static void activateAllServices(CloudServicesDescConfig cloudServicesDesc,
                                           int tenantId) throws Exception {
        java.util.Collection<CloudServiceConfig> cloudServiceConfigList =
                cloudServicesDesc.getCloudServiceConfigs().values();
        if (cloudServiceConfigList != null) {
            for (CloudServiceConfig cloudServiceConfig : cloudServiceConfigList) {
                if (cloudServiceConfig.isDefaultActive()) {
                    String cloudServiceName = cloudServiceConfig.getName();
                    try {
                        if (!CloudServicesUtil.isCloudServiceActive(cloudServiceName, tenantId)) {
                            CloudServicesUtil.setCloudServiceActive(true, cloudServiceName,
                                    tenantId,
                                    cloudServicesDesc.getCloudServiceConfigs().
                                            get(cloudServiceName));
                        }
                    } catch (Exception e) {
                        // BUGFIX: the two concatenated literals previously
                        // produced the garbled word "tenantcreation".
                        String msg = "Error in activating the cloud service at the tenant " +
                                "creation. tenant id: " + tenantId + ", service name: " +
                                cloudServiceName;
                        log.error(msg, e);
                        throw new UserStoreException(msg, e);
                    }
                }
            }
        }
    }

    /**
     * Activates the originally requested service (after validating its name)
     * plus the compulsory identity and governance services for the tenant.
     * Services already active are left untouched.
     *
     * @param cloudServicesDesc parsed cloud-services descriptor
     * @param originalService   name of the service the tenant signed up for
     * @param tenantId          tenant to activate the services for
     * @throws Exception if reading or updating the registry fails
     */
    public static void activateOriginalAndCompulsoryServices(
            CloudServicesDescConfig cloudServicesDesc, String originalService,
            int tenantId) throws Exception {
        Map<String, CloudServiceConfig> cloudServiceConfigs =
                cloudServicesDesc.getCloudServiceConfigs();
        if (CloudServicesUtil.isServiceNameValid(cloudServicesDesc, originalService)) {
            if (!CloudServicesUtil.isCloudServiceActive(originalService, tenantId)) {
                CloudServicesUtil.setCloudServiceActive(true, originalService, tenantId,
                        cloudServiceConfigs.get(originalService));
                log.info("Successfully activated the " + originalService +
                        " for the tenant " + tenantId);
            }
            // register the compulsory services
            if (!CloudServicesUtil.isCloudServiceActive(
                    StratosConstants.CLOUD_IDENTITY_SERVICE, tenantId)) {
                CloudServicesUtil.setCloudServiceActive(true,
                        StratosConstants.CLOUD_IDENTITY_SERVICE, tenantId,
                        cloudServiceConfigs.get(StratosConstants.CLOUD_IDENTITY_SERVICE));
            }
            if (!CloudServicesUtil.isCloudServiceActive(
                    StratosConstants.CLOUD_GOVERNANCE_SERVICE, tenantId)) {
                CloudServicesUtil.setCloudServiceActive(true,
                        StratosConstants.CLOUD_GOVERNANCE_SERVICE, tenantId,
                        cloudServiceConfigs.get(StratosConstants.CLOUD_GOVERNANCE_SERVICE));
            }
        } else {
            log.warn("Unable to activate the " + originalService +
                    " for the tenant " + tenantId);
        }
    }

    /**
     * Records the active/inactive state of a cloud service for a tenant in
     * the governance registry and creates (on activate) or deletes (on
     * deactivate) the service's permission collections in the tenant's
     * configuration registry. Services without a label are skipped.
     *
     * @param active             desired state
     * @param cloudServiceName   service whose state is being set
     * @param tenantId           tenant the state applies to
     * @param cloudServiceConfig configuration holding label and permissions
     * @throws Exception if the registry cannot be read or written
     */
    public static void setCloudServiceActive(boolean active, String cloudServiceName,
                                             int tenantId,
                                             CloudServiceConfig cloudServiceConfig)
            throws Exception {
        if (cloudServiceConfig.getLabel() == null) {
            // for the non-labled services, we are not setting/unsetting the
            // service active
            return;
        }

        UserRegistry govRegistry = CloudCommonServiceComponent.getGovernanceSystemRegistry(
                MultitenantConstants.SUPER_TENANT_ID);
        UserRegistry configRegistry =
                CloudCommonServiceComponent.getConfigSystemRegistry(tenantId);
        String cloudServiceInfoPath = StratosConstants.CLOUD_SERVICE_INFO_STORE_PATH +
                RegistryConstants.PATH_SEPARATOR + tenantId +
                RegistryConstants.PATH_SEPARATOR + cloudServiceName;
        Resource cloudServiceInfoResource;
        if (govRegistry.resourceExists(cloudServiceInfoPath)) {
            cloudServiceInfoResource = govRegistry.get(cloudServiceInfoPath);
        } else {
            cloudServiceInfoResource = govRegistry.newCollection();
        }
        cloudServiceInfoResource.setProperty(
                StratosConstants.CLOUD_SERVICE_IS_ACTIVE_PROP_KEY,
                active ? "true" : "false");
        govRegistry.put(cloudServiceInfoPath, cloudServiceInfoResource);

        // then we will copy the permissions
        List<PermissionConfig> permissionConfigs = cloudServiceConfig.getPermissionConfigs();
        for (PermissionConfig permissionConfig : permissionConfigs) {
            String path = permissionConfig.getPath();
            String name = permissionConfig.getName();
            if (active) {
                if (!configRegistry.resourceExists(path)) {
                    Collection collection = configRegistry.newCollection();
                    collection.setProperty(StratosConstants.DISPLAY_NAME, name);
                    configRegistry.put(path, collection);
                }
            } else {
                if (configRegistry.resourceExists(path)) {
                    configRegistry.delete(path);
                }
            }
        }
    }

    /**
     * Convenience overload that looks up the super-tenant governance
     * registry before delegating to
     * {@link #isCloudServiceActive(String, int, UserRegistry)}.
     */
    public static boolean isCloudServiceActive(String cloudServiceName,
                                               int tenantId) throws Exception {
        UserRegistry govRegistry = CloudCommonServiceComponent.getGovernanceSystemRegistry(
                MultitenantConstants.SUPER_TENANT_ID);
        return isCloudServiceActive(cloudServiceName, tenantId, govRegistry);
    }

    /**
     * Checks whether the given cloud service is marked active for the tenant
     * in the governance registry. The cloud manager service is always
     * reported as active.
     *
     * @return {@code true} if the service's registry record says active
     * @throws Exception if the registry cannot be read
     */
    public static boolean isCloudServiceActive(String cloudServiceName,
                                               int tenantId,
                                               UserRegistry govRegistry) throws Exception {
        // The cloud manager is always active
        if (StratosConstants.CLOUD_MANAGER_SERVICE.equals(cloudServiceName)) {
            return true;
        }
        String cloudServiceInfoPath = StratosConstants.CLOUD_SERVICE_INFO_STORE_PATH +
                RegistryConstants.PATH_SEPARATOR + tenantId +
                RegistryConstants.PATH_SEPARATOR + cloudServiceName;
        Resource cloudServiceInfoResource;
        if (govRegistry.resourceExists(cloudServiceInfoPath)) {
            cloudServiceInfoResource = govRegistry.get(cloudServiceInfoPath);
            String isActiveStr = cloudServiceInfoResource.getProperty(
                    StratosConstants.CLOUD_SERVICE_IS_ACTIVE_PROP_KEY);
            return "true".equals(isActiveStr);
        }
        return false;
    }

    /**
     * Returns whether the name refers to a service declared in the
     * descriptor. The cloud manager service is deliberately rejected here
     * (it is not user-activatable through this path).
     *
     * @return {@code true} for a non-null, declared, non-cloud-manager name
     */
    public static boolean isServiceNameValid(CloudServicesDescConfig cloudServicesDesc,
                                             String cloudServiceName) {
        if (cloudServiceName == null) {
            return false;
        }
        java.util.Collection<CloudServiceConfig> cloudServiceConfigList =
                cloudServicesDesc.getCloudServiceConfigs().values();
        if (cloudServiceName.equals(StratosConstants.CLOUD_MANAGER_SERVICE)) {
            return false;
        }
        for (CloudServiceConfig cloudServiceConfig : cloudServiceConfigList) {
            if (cloudServiceConfig.getName().equals(cloudServiceName)) {
                return true;
            }
        }
        return false;
    }
}
apache-2.0
spcui/autotest
frontend/client/src/autotest/afe/CheckBoxPanel.java
1139
package autotest.afe;

import java.util.ArrayList;
import java.util.List;

/**
 * Maintains an ordered collection of check boxes created through a
 * {@link Display}, and offers bulk queries and state changes over them.
 */
public class CheckBoxPanel {
    /** Factory abstraction the UI layer implements to create check boxes. */
    public static interface Display {
        public ICheckBox generateCheckBox(int index);
    }

    private List<ICheckBox> checkBoxes = new ArrayList<ICheckBox>();
    private Display display;

    /** Attaches the display used to create subsequent check boxes. */
    public void bindDisplay(Display display) {
        this.display = display;
    }

    /**
     * Creates a new check box whose index is the current collection size.
     * Note the box is NOT added to this panel; callers do that via
     * {@link #add}.
     */
    public ICheckBox generateCheckBox() {
        int nextIndex = checkBoxes.size();
        return display.generateCheckBox(nextIndex);
    }

    /** Registers a check box with this panel. */
    public void add(ICheckBox checkBox) {
        checkBoxes.add(checkBox);
    }

    /** Returns the subset of registered check boxes that are checked. */
    public List<ICheckBox> getChecked() {
        List<ICheckBox> checked = new ArrayList<ICheckBox>();
        for (ICheckBox candidate : checkBoxes) {
            if (candidate.getValue()) {
                checked.add(candidate);
            }
        }
        return checked;
    }

    /** Enables or disables every registered check box. */
    public void setEnabled(boolean enabled) {
        for (ICheckBox box : checkBoxes) {
            box.setEnabled(enabled);
        }
    }

    /** Unchecks every registered check box. */
    public void reset() {
        for (ICheckBox box : checkBoxes) {
            box.setValue(false);
        }
    }
}
gpl-2.0
nacc/autotest
frontend/client/src/autotest/tko/LabelField.java
472
package autotest.tko;

import autotest.common.Utils;

/**
 * Parameterized field backed by a label table, filtered on the label's id
 * column. A JSON null filter value matches rows without a label.
 */
public abstract class LabelField extends ParameterizedField {
    @Override
    public String getSqlCondition(String value) {
        // JSON null selects rows lacking the label; anything else selects
        // rows that have one.
        String nullCheck =
                value.equals(Utils.JSON_NULL) ? " IS NULL" : " IS NOT NULL";
        return getFilteringName() + nullCheck;
    }

    @Override
    public String getFilteringName() {
        return getQuotedSqlName() + ".id";
    }
}
gpl-2.0
asedunov/intellij-community
python/src/com/jetbrains/python/codeInsight/functionTypeComments/PyFunctionTypeAnnotationElementTypes.java
1200
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.codeInsight.functionTypeComments;

import com.jetbrains.python.codeInsight.functionTypeComments.psi.PyFunctionTypeAnnotation;
import com.jetbrains.python.codeInsight.functionTypeComments.psi.PyParameterTypeList;
import com.jetbrains.python.psi.PyElementType;

/**
 * PSI element types for the parse tree of Python function-type comments
 * (PEP 484 {@code # type: (...) -> ...} annotations).
 *
 * @author Mikhail Golubev
 */
public interface PyFunctionTypeAnnotationElementTypes {
  /** Element type for the whole annotation, backed by {@link PyFunctionTypeAnnotation}. */
  PyElementType FUNCTION_SIGNATURE = new PyElementType("FUNCTION_SIGNATURE", PyFunctionTypeAnnotation.class);
  /** Element type for the parenthesized parameter-type list, backed by {@link PyParameterTypeList}. */
  PyElementType PARAMETER_TYPE_LIST = new PyElementType("PARAMETER_TYPE_LIST", PyParameterTypeList.class);
}
apache-2.0