gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * WTFPL
 * Version 2, December 2004
 *
 * Copyright (C) sponge
 * Planet Earth
 *
 * See...
 *
 * http://sam.zoy.org/wtfpl/
 * and
 * http://en.wikipedia.org/wiki/WTFPL
 *
 * ...for any additional details and license questions.
 */
package net.i2p.BOB;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;

import net.i2p.I2PException;
import net.i2p.client.I2PClient;
import net.i2p.client.streaming.I2PServerSocket;
import net.i2p.client.streaming.I2PSocketManager;
import net.i2p.client.streaming.I2PSocketManagerFactory;
import net.i2p.util.Log;

/**
 * Multiplex listeners for TCP and I2P.
 *
 * Owns one tunnel's listening side: optionally a local ServerSocket
 * (TCP -> I2P) and/or an I2P server socket (I2P -> TCP), each serviced by a
 * listener thread inside a dedicated ThreadGroup. Lifecycle flags
 * ("STARTING", "RUNNING", "STOPPING") live in the shared NamedDB entries and
 * are always touched under the database/info write locks.
 *
 * @author sponge
 */
public class MUXlisten implements Runnable {

	private NamedDB database, info;
	// NOTE(review): declared as Logger while net.i2p.util.Log is the imported
	// type — confirm which logging class the project actually provides.
	private Logger _log;
	private I2PSocketManager socketManager;
	private ByteArrayInputStream prikey;
	private ThreadGroup tg;
	private String N; // tunnel NICKNAME, used for thread/group names
	private ServerSocket listener = null;
	private int backlog = 50; // should this be more? less?
	boolean go_out;  // tunnel has an OUTPORT (I2P -> TCP side)
	boolean come_in; // tunnel has an INPORT (TCP -> I2P side)
	private AtomicBoolean lock;  // coarse external mutex shared with the caller
	private AtomicBoolean lives; // true while the listener threads should run

	/**
	 * Constructor. Will fail if INPORT is occupied.
	 *
	 * Reads NICKNAME, KEYS, PROPERTIES, and the optional IN/OUT port settings
	 * from the tunnel's DB entry, binds the local ServerSocket (if any), and
	 * creates the I2P socket manager. On any failure the "STARTING" flag is
	 * cleared before the exception propagates.
	 *
	 * @param lock shared startup mutex, released by run()
	 * @param info DB entry for this tunnel
	 * @param database master database of tunnels
	 * @param _log logger
	 * @throws net.i2p.I2PException
	 * @throws java.io.IOException
	 */
	MUXlisten(AtomicBoolean lock, NamedDB database, NamedDB info, Logger _log) throws I2PException, IOException, RuntimeException {
		try {
			int port = 0;
			InetAddress host = null;
			this.lock = lock;
			this.tg = null;
			this.database = database;
			this.info = info;
			this._log = _log;
			lives = new AtomicBoolean(false);
			this.database.getWriteLock();
			this.info.getWriteLock();
			this.info.add("STARTING", Boolean.valueOf(true));
			this.info.releaseWriteLock();
			this.database.releaseWriteLock();
			this.database.getReadLock();
			this.info.getReadLock();
			N = this.info.get("NICKNAME").toString();
			prikey = new ByteArrayInputStream((byte[]) info.get("KEYS"));
			// Make a new copy so that anything else won't muck with our database.
			Properties R = (Properties) info.get("PROPERTIES");
			Properties Q = new Properties();
			Lifted.copyProperties(R, Q);
			this.database.releaseReadLock();
			this.info.releaseReadLock();
			this.database.getReadLock();
			this.info.getReadLock();
			this.go_out = info.exists("OUTPORT");
			this.come_in = info.exists("INPORT");
			if (this.come_in) {
				port = Integer.parseInt(info.get("INPORT").toString());
				host = InetAddress.getByName(info.get("INHOST").toString());
			}
			this.database.releaseReadLock();
			this.info.releaseReadLock();
			String i2cpHost = Q.getProperty(I2PClient.PROP_TCP_HOST, "127.0.0.1");
			int i2cpPort = 7654;
			String i2cpPortStr = Q.getProperty(I2PClient.PROP_TCP_PORT, "7654");
			try {
				i2cpPort = Integer.parseInt(i2cpPortStr);
			} catch (NumberFormatException nfe) {
				throw new IllegalArgumentException("Invalid I2CP port specified [" + i2cpPortStr + "]");
			}
			if (this.come_in) {
				// Bind now so an occupied INPORT fails fast, before the
				// socket manager is created.
				this.listener = new ServerSocket(port, backlog, host);
			}
			socketManager = I2PSocketManagerFactory.createManager(prikey, i2cpHost, i2cpPort, Q);
		} catch (IOException e) {
			// Something went bad.
			this.database.getWriteLock();
			this.info.getWriteLock();
			this.info.add("STARTING", Boolean.valueOf(false));
			this.info.releaseWriteLock();
			this.database.releaseWriteLock();
			// FIX: chain the original exception instead of discarding its
			// stack trace (the old code threw new IOException(e.toString())).
			IOException ioe = new IOException(e.toString());
			ioe.initCause(e);
			throw ioe;
		} catch (RuntimeException e) {
			// Something went bad.
			this.database.getWriteLock();
			this.info.getWriteLock();
			this.info.add("STARTING", Boolean.valueOf(false));
			this.info.releaseWriteLock();
			this.database.releaseWriteLock();
			throw new RuntimeException(e);
		} catch (Exception e) {
			// Something else went bad.
			this.database.getWriteLock();
			this.info.getWriteLock();
			this.info.add("STARTING", Boolean.valueOf(false));
			this.info.releaseWriteLock();
			this.database.releaseWriteLock();
			e.printStackTrace();
			throw new RuntimeException(e);
		}
	}

	/** Acquire the database read lock, then the tunnel-entry read lock. */
	private void rlock() throws Exception {
		database.getReadLock();
		info.getReadLock();
	}

	/** Release the read locks taken by {@link #rlock()}. */
	private void runlock() throws Exception {
		database.releaseReadLock();
		info.releaseReadLock();
	}

	/** Acquire the database write lock, then the tunnel-entry write lock. */
	private void wlock() throws Exception {
		database.getWriteLock();
		info.getWriteLock();
	}

	/** Release the write locks taken by {@link #wlock()}, in reverse order. */
	private void wunlock() throws Exception {
		info.releaseWriteLock();
		database.releaseWriteLock();
	}

	/**
	 * MUX sockets, fire off a thread to connect, get destination info, and do I/O.
	 *
	 * Marks the tunnel RUNNING, spawns the I2P and/or TCP listener threads in a
	 * fresh ThreadGroup, then polls the "STOPPING" flag once a second until asked
	 * to quit. The finally block tears everything down: closes sockets, destroys
	 * the socket manager, resets the lifecycle flags, and waits for all threads
	 * in the group to exit.
	 */
	public void run() {
		I2PServerSocket SS = null;
		Thread t = null;
		Thread q = null;
		try {
			try {
				wlock();
				try {
					info.add("RUNNING", Boolean.valueOf(true));
				} catch (Exception e) {
					lock.set(false);
					wunlock();
					return;
				}
			} catch (Exception e) {
				lock.set(false);
				return;
			}
			try {
				wunlock();
			} catch (Exception e) {
				lock.set(false);
				return;
			}
			lives.set(true);
			lock.set(false);
			quit:
			{
				try {
					tg = new ThreadGroup(N);
					{
						// toss the connections to a new threads.
						// will wrap with TCP and UDP when UDP works
						if (go_out) {
							// I2P -> TCP
							SS = socketManager.getServerSocket();
							I2Plistener conn = new I2Plistener(SS, socketManager, info, database, _log, lives);
							t = new Thread(tg, conn, "BOBI2Plistener " + N);
							t.start();
						}
						if (come_in) {
							// TCP -> I2P
							TCPlistener conn = new TCPlistener(listener, socketManager, info, database, _log, lives);
							q = new Thread(tg, conn, "BOBTCPlistener " + N);
							q.start();
						}
						try {
							wlock();
							try {
								info.add("STARTING", Boolean.valueOf(false));
							} catch (Exception e) {
								wunlock();
								break quit;
							}
						} catch (Exception e) {
							break quit;
						}
						try {
							wunlock();
						} catch (Exception e) {
							break quit;
						}
						// Poll for shutdown: spin until STOPPING is set or a
						// listener flips `lives` off.
						boolean spin = true;
						while (spin && lives.get()) {
							try {
								Thread.sleep(1000); //sleep for 1 second
							} catch (InterruptedException e) {
								break quit;
							}
							try {
								rlock();
								try {
									spin = info.get("STOPPING").equals(Boolean.FALSE);
								} catch (Exception e) {
									runlock();
									break quit;
								}
							} catch (Exception e) {
								break quit;
							}
							try {
								runlock();
							} catch (Exception e) {
								break quit;
							}
						}
					} // die
				} catch (Exception e) {
					break quit;
				}
			} // quit
		} finally {
			lives.set(false);
			// Some grace time.
			try {
				Thread.sleep(100);
			} catch (InterruptedException ex) {
			}
			try {
				wlock();
				try {
					info.add("STARTING", Boolean.valueOf(false));
					info.add("STOPPING", Boolean.valueOf(true));
					info.add("RUNNING", Boolean.valueOf(false));
				} catch (Exception e) {
					lock.set(false);
					wunlock();
					return;
				}
				wunlock();
			} catch (Exception e) {
			}
			// Start cleanup: busy-wait until we own the shared mutex.
			while (!lock.compareAndSet(false, true)) {
				// wait
			}
			if (SS != null) {
				try {
					SS.close();
				} catch (I2PException ex) {
				}
			}
			if (listener != null) {
				try {
					listener.close();
				} catch (IOException e) {
				}
			}
			// Some grace time.
			try {
				Thread.sleep(100);
			} catch (InterruptedException ex) {
			}
			// Hopefully nuke stuff here...
			{
				// FIX: tg is still null when run() bailed out before the
				// ThreadGroup was created (wlock failure above); the old
				// unconditional tg.getName() threw NPE from this finally
				// block and aborted the rest of cleanup.
				String groupName = (tg != null) ? tg.getName() : N;
				try {
					_log.warn("destroySocketManager " + groupName);
					socketManager.destroySocketManager();
					_log.warn("destroySocketManager Successful" + groupName);
				} catch (Exception e) {
					// nop
					_log.warn("destroySocketManager Failed" + groupName);
					_log.warn(e.toString());
				}
			}
			// zero out everything.
			try {
				wlock();
				try {
					info.add("STARTING", Boolean.valueOf(false));
					info.add("STOPPING", Boolean.valueOf(false));
					info.add("RUNNING", Boolean.valueOf(false));
				} catch (Exception e) {
					lock.set(false);
					wunlock();
					return;
				}
				wunlock();
			} catch (Exception e) {
			}
			lock.set(false);
			// Should we force waiting for all threads??
			// Wait around till all threads are collected.
			if (tg != null) {
				String groupName = tg.getName();
				_log.warn("BOB: MUXlisten: Starting thread collection for: " + groupName);
				if (tg.activeCount() + tg.activeGroupCount() != 0) {
					int foo = tg.activeCount() + tg.activeGroupCount();
					// Happily spin forever :-( until the group drains.
					while (foo != 0) {
						foo = tg.activeCount() + tg.activeGroupCount();
						try {
							Thread.sleep(100); //sleep for 100 ms (One tenth second)
						} catch (InterruptedException ex) {
							// nop
						}
					}
				}
				_log.warn("BOB: MUXlisten: Threads went away. Success: " + groupName);
				tg.destroy();
				// Zap reference to the ThreadGroup so the JVM can GC it.
				tg = null;
			}
			try {
				socketManager.destroySocketManager();
			} catch (Exception e) {
				// nop
			}
		}
	}

	// Debugging... None of this is normally used.
	/**
	 * Find the root thread group and print them all.
	 */
	private void visitAllThreads() {
		ThreadGroup root = Thread.currentThread().getThreadGroup().getParent();
		while (root.getParent() != null) {
			root = root.getParent();
		}
		// Visit each thread group
		visit(root, 0, root.getName());
	}

	/**
	 * Recursively visits all thread groups under `group' and dumps them.
	 * @param group ThreadGroup to visit
	 * @param level Current level
	 */
	private static void visit(ThreadGroup group, int level, String tn) {
		// Get threads in `group'; the *2 slack guards against the count
		// growing between activeCount() and enumerate().
		int numThreads = group.activeCount();
		Thread[] threads = new Thread[numThreads * 2];
		numThreads = group.enumerate(threads, false);
		String indent = "------------------------------------".substring(0, level) + "-> ";
		// Enumerate each thread in `group' and print it.
		for (int i = 0; i < numThreads; i++) {
			// Get thread
			Thread thread = threads[i];
			System.out.println("BOB: MUXlisten: " + tn + ": " + indent + thread.toString());
		}
		// Get thread subgroups of `group'
		int numGroups = group.activeGroupCount();
		ThreadGroup[] groups = new ThreadGroup[numGroups * 2];
		numGroups = group.enumerate(groups, false);
		// Recursively visit each subgroup
		for (int i = 0; i < numGroups; i++) {
			visit(groups[i], level + 1, groups[i].getName());
		}
	}
}
/*
   Copyright 2018 Ericsson AB.
   For a full list of individual contributors, please see the commit history.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package com.ericsson.eiffel.semantics.events;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

/**
 * Meta block of an EiffelTestCaseFinishedEvent: event id, type tag,
 * schema version, timestamp, plus optional tags, source, and security
 * information. Serialized/deserialized with Gson.
 */
public class EiffelTestCaseFinishedEventMeta implements Meta {

    /** (Required) */
    @SerializedName("id")
    @Expose
    private String id;

    /** (Required) */
    @SerializedName("type")
    @Expose
    private Type type;

    /** (Required) — defaults to schema version 1.1.0. */
    @SerializedName("version")
    @Expose
    private Version version = Version.fromValue("1.1.0");

    /** (Required) */
    @SerializedName("time")
    @Expose
    private Long time;

    @SerializedName("tags")
    @Expose
    private List<String> tags = new ArrayList<String>();

    @SerializedName("source")
    @Expose
    private Source source;

    @SerializedName("security")
    @Expose
    private Security security;

    /** (Required) */
    public String getId() {
        return id;
    }

    /** (Required) */
    public void setId(String id) {
        this.id = id;
    }

    /** (Required) */
    public Type getType() {
        return type;
    }

    /** (Required) */
    public void setType(Type type) {
        this.type = type;
    }

    /** (Required) */
    public Version getVersion() {
        return version;
    }

    /** (Required) */
    public void setVersion(Version version) {
        this.version = version;
    }

    /** (Required) */
    public Long getTime() {
        return time;
    }

    /** (Required) */
    public void setTime(Long time) {
        this.time = time;
    }

    public List<String> getTags() {
        return tags;
    }

    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    public Source getSource() {
        return source;
    }

    public void setSource(Source source) {
        this.source = source;
    }

    public Security getSecurity() {
        return security;
    }

    public void setSecurity(Security security) {
        this.security = security;
    }

    /** The single legal value of the meta "type" field. */
    public enum Type {

        @SerializedName("EiffelTestCaseFinishedEvent")
        EIFFEL_TEST_CASE_FINISHED_EVENT("EiffelTestCaseFinishedEvent");

        private final String value;

        private Type(String value) {
            this.value = value;
        }

        @Override
        public String toString() {
            return this.value;
        }

        public String value() {
            return this.value;
        }

        /**
         * Looks up the constant for the given wire string.
         *
         * @throws IllegalArgumentException if no constant matches
         */
        public static Type fromValue(String value) {
            for (Type candidate : values()) {
                if (candidate.value.equals(value)) {
                    return candidate;
                }
            }
            throw new IllegalArgumentException(value);
        }
    }

    /** The single supported schema version of this event type. */
    public enum Version {

        @SerializedName("1.1.0")
        _1_1_0("1.1.0");

        private final String value;

        private Version(String value) {
            this.value = value;
        }

        @Override
        public String toString() {
            return this.value;
        }

        public String value() {
            return this.value;
        }

        /**
         * Looks up the constant for the given version string.
         *
         * @throws IllegalArgumentException if no constant matches
         */
        public static Version fromValue(String value) {
            for (Version candidate : values()) {
                if (candidate.value.equals(value)) {
                    return candidate;
                }
            }
            throw new IllegalArgumentException(value);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package javax.mail;

import java.net.MalformedURLException;
import java.net.URL;

import junit.framework.TestCase;

/**
 * Tests for URLName parsing, construction, equality, and URL round-tripping.
 *
 * @version $Rev: 593290 $ $Date: 2007-11-08 21:18:29 +0100 (Do, 08. Nov 2007) $
 */
public class URLNameTest extends TestCase {

    public URLNameTest(String name) {
        super(name);
    }

    /** Asserts all seven component accessors of a URLName at once. */
    private static void checkParts(URLName name, String protocol, String host, int port,
                                   String file, String ref, String username, String password) {
        assertEquals(protocol, name.getProtocol());
        assertEquals(host, name.getHost());
        assertEquals(port, name.getPort());
        assertEquals(file, name.getFile());
        assertEquals(ref, name.getRef());
        assertEquals(username, name.getUsername());
        assertEquals(password, name.getPassword());
    }

    /** Asserts getURL() succeeds and equals the given URL string. */
    private static void assertRoundTrips(URLName name, String url) {
        try {
            assertEquals(new URL(url), name.getURL());
        } catch (MalformedURLException e) {
            fail();
        }
    }

    /** Asserts getURL() fails with MalformedURLException. */
    private static void assertNoURL(URLName name) {
        try {
            name.getURL();
            fail();
        } catch (MalformedURLException e) {
            // OK
        }
    }

    public void testURLNameString() {
        String s = "http://www.apache.org";
        URLName name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "http", "www.apache.org", -1, null, null, null, null);
        assertRoundTrips(name, s);

        s = "http://www.apache.org/file/file1#ref";
        name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/file1", "ref", null, null);
        assertRoundTrips(name, s);

        s = "http://www.apache.org/file/";
        name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/", null, null, null);
        assertRoundTrips(name, s);

        s = "http://john@www.apache.org/file/";
        name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/", null, "john", null);
        assertRoundTrips(name, s);

        s = "http://john:doe@www.apache.org/file/";
        name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/", null, "john", "doe");
        assertRoundTrips(name, s);

        // %40 in the user-info must be decoded by getUsername().
        s = "http://john%40gmail.com:doe@www.apache.org/file/";
        name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/", null, "john@gmail.com", "doe");
        assertRoundTrips(name, s);

        // A bare relative path has no protocol, so no URL can be built.
        name = new URLName("file/file2");
        checkParts(name, null, null, -1, "file/file2", null, null, null);
        assertNoURL(name);

        name = new URLName((String) null);
        checkParts(name, null, null, -1, null, null, null, null);
        assertNoURL(name);

        name = new URLName("");
        checkParts(name, null, null, -1, null, null, null, null);
        assertNoURL(name);
    }

    public void testURLNameAll() {
        URLName name = new URLName(null, null, -1, null, null, null);
        checkParts(name, null, null, -1, null, null, null, null);
        assertNoURL(name);

        // Empty strings normalize to null components.
        name = new URLName("", "", -1, "", "", "");
        checkParts(name, null, null, -1, null, null, null, null);
        assertNoURL(name);

        name = new URLName("http", "www.apache.org", -1, null, null, null);
        assertEquals("http://www.apache.org", name.toString());
        checkParts(name, "http", "www.apache.org", -1, null, null, null, null);
        assertRoundTrips(name, "http://www.apache.org");

        name = new URLName("http", "www.apache.org", 8080, "", "", "");
        assertEquals("http://www.apache.org:8080", name.toString());
        checkParts(name, "http", "www.apache.org", 8080, null, null, null, null);
        assertRoundTrips(name, "http://www.apache.org:8080");

        name = new URLName("http", "www.apache.org", -1, "file/file2", "", "");
        assertEquals("http://www.apache.org/file/file2", name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/file2", null, null, null);
        assertRoundTrips(name, "http://www.apache.org/file/file2");

        name = new URLName("http", "www.apache.org", -1, "file/file2", "john", "");
        assertEquals("http://john@www.apache.org/file/file2", name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/file2", null, "john", null);
        assertRoundTrips(name, "http://john@www.apache.org/file/file2");

        name = new URLName("http", "www.apache.org", -1, "file/file2", "john", "doe");
        assertEquals("http://john:doe@www.apache.org/file/file2", name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/file2", null, "john", "doe");
        assertRoundTrips(name, "http://john:doe@www.apache.org/file/file2");

        // "@" in the username is percent-encoded in the string form.
        name = new URLName("http", "www.apache.org", -1, "file/file2", "john@gmail.com", "doe");
        assertEquals("http://john%40gmail.com:doe@www.apache.org/file/file2", name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/file2", null, "john@gmail.com", "doe");
        assertRoundTrips(name, "http://john%40gmail.com:doe@www.apache.org/file/file2");

        // A password without a username is dropped entirely.
        name = new URLName("http", "www.apache.org", -1, "file/file2", "", "doe");
        assertEquals("http://www.apache.org/file/file2", name.toString());
        checkParts(name, "http", "www.apache.org", -1, "file/file2", null, null, null);
        assertRoundTrips(name, "http://www.apache.org/file/file2");
    }

    public void testURLNameURL() throws MalformedURLException {
        URL url = new URL("http://www.apache.org");
        URLName name = new URLName(url);
        checkParts(name, "http", "www.apache.org", -1, null, null, null, null);
        try {
            assertEquals(url, name.getURL());
        } catch (MalformedURLException e) {
            fail();
        }
    }

    public void testEquals() throws MalformedURLException {
        URLName name1 = new URLName("http://www.apache.org");
        assertEquals(name1, new URLName("http://www.apache.org"));
        assertEquals(name1, new URLName(new URL("http://www.apache.org")));
        assertEquals(name1, new URLName("http", "www.apache.org", -1, null, null, null));
        assertEquals(name1, new URLName("http://www.apache.org#foo")); // weird but ref is not part of the equals contract
        assertTrue(!name1.equals(new URLName("http://www.apache.org:8080")));
        assertTrue(!name1.equals(new URLName("http://cvs.apache.org")));
        assertTrue(!name1.equals(new URLName("https://www.apache.org")));

        name1 = new URLName("http://john:doe@www.apache.org");
        assertEquals(name1, new URLName(new URL("http://john:doe@www.apache.org")));
        assertEquals(name1, new URLName("http", "www.apache.org", -1, null, "john", "doe"));
        assertTrue(!name1.equals(new URLName("http://john:xxx@www.apache.org")));
        assertTrue(!name1.equals(new URLName("http://xxx:doe@www.apache.org")));
        assertTrue(!name1.equals(new URLName("http://www.apache.org")));

        assertEquals(new URLName("http://john@www.apache.org"),
                new URLName("http", "www.apache.org", -1, null, "john", null));
        assertEquals(new URLName("http://www.apache.org"),
                new URLName("http", "www.apache.org", -1, null, null, "doe"));
    }

    public void testHashCode() {
        // Equal names (ref ignored) must hash alike.
        URLName name1 = new URLName("http://www.apache.org/file");
        URLName name2 = new URLName("http://www.apache.org/file#ref");
        assertTrue(name1.equals(name2));
        assertTrue(name1.hashCode() == name2.hashCode());
    }

    public void testNullProtocol() {
        // A null protocol makes a URLName unequal even to itself.
        URLName name1 = new URLName(null, "www.apache.org", -1, null, null, null);
        assertTrue(!name1.equals(name1));
    }

    public void testOpaqueSchemes() {
        // not strictly opaque but no protocol handler installed
        String s = "foo://jdoe@apache.org/INBOX";
        URLName name = new URLName(s);
        assertEquals(s, name.toString());
        checkParts(name, "foo", "apache.org", -1, "INBOX", null, "jdoe", null);

        // TBD as I am not sure what other URL formats to use
    }
}
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2012 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package ti.modules.titanium.ui.widget; import java.lang.ref.WeakReference; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.kroll.common.Log; import org.appcelerator.titanium.TiApplication; import org.appcelerator.titanium.TiBaseActivity; import org.appcelerator.titanium.TiBaseActivity.DialogWrapper; import org.appcelerator.titanium.TiC; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.util.TiConvert; import org.appcelerator.titanium.view.TiUIView; import android.app.Activity; import android.app.AlertDialog; import android.app.AlertDialog.Builder; import android.content.DialogInterface; import android.content.DialogInterface.OnCancelListener; import android.support.v4.view.ViewCompat; import android.widget.ListView; public class TiUIDialog extends TiUIView { private static final String TAG = "TiUIDialog"; private static final int BUTTON_MASK = 0x10000000; protected Builder builder; protected TiUIView view; private DialogWrapper dialogWrapper; protected class ClickHandler implements DialogInterface.OnClickListener { private int result; public ClickHandler(int id) { this.result = id; } public void onClick(DialogInterface dialog, int which) { handleEvent(result); hide(null); } } public TiUIDialog(TiViewProxy proxy) { super(proxy); Log.d(TAG, "Creating a dialog", Log.DEBUG_MODE); createBuilder(); } private Activity getCurrentActivity() { Activity currentActivity = TiApplication.getInstance().getCurrentActivity(); if (currentActivity == null) { currentActivity = proxy.getActivity(); } return currentActivity; } private Builder getBuilder() { if (builder == null) { createBuilder(); } return builder; } @Override public void processProperties(KrollDict 
d) { String[] buttonText = null; if (d.containsKey(TiC.PROPERTY_TITLE)) { getBuilder().setTitle(d.getString(TiC.PROPERTY_TITLE)); } if (d.containsKey(TiC.PROPERTY_MESSAGE)) { getBuilder().setMessage(d.getString(TiC.PROPERTY_MESSAGE)); } if (d.containsKey(TiC.PROPERTY_BUTTON_NAMES)) { buttonText = d.getStringArray(TiC.PROPERTY_BUTTON_NAMES); } else if (d.containsKey(TiC.PROPERTY_OK)) { buttonText = new String[]{d.getString(TiC.PROPERTY_OK)}; } if (d.containsKeyAndNotNull(TiC.PROPERTY_ANDROID_VIEW)) { processView((TiViewProxy) proxy.getProperty(TiC.PROPERTY_ANDROID_VIEW)); } else if (d.containsKey(TiC.PROPERTY_OPTIONS)) { String[] optionText = d.getStringArray(TiC.PROPERTY_OPTIONS); int selectedIndex = d.containsKey(TiC.PROPERTY_SELECTED_INDEX) ? d.getInt(TiC.PROPERTY_SELECTED_INDEX) : -1; if(selectedIndex >= optionText.length){ Log.d(TAG, "Ooops invalid selected index specified: " + selectedIndex, Log.DEBUG_MODE); selectedIndex = -1; } processOptions(optionText, selectedIndex); } if (d.containsKey(TiC.PROPERTY_PERSISTENT)) { dialogWrapper.setPersistent(d.getBoolean(TiC.PROPERTY_PERSISTENT)); } if (buttonText != null) { processButtons(buttonText); } super.processProperties(d); } private void processOptions(String[] optionText,int selectedIndex) { getBuilder().setSingleChoiceItems(optionText, selectedIndex , new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { handleEvent(which); hide(null); } }); } private void processButtons(String[] buttonText) { getBuilder().setPositiveButton(null, null); getBuilder().setNegativeButton(null, null); getBuilder().setNeutralButton(null, null); getBuilder().setOnCancelListener(new OnCancelListener() { @Override public void onCancel(DialogInterface dialog) { dialog = null; if (view != null) { view.getProxy().releaseViews(); view = null; } } }); for (int id = 0; id < buttonText.length; id++) { String text = buttonText[id]; ClickHandler clicker = new ClickHandler(id | BUTTON_MASK); switch (id) 
		// Tail of the button-processing loop (the method header is above this chunk):
		// maps button index 0/1/2 onto the positive/neutral/negative slot of the
		// native AlertDialog.Builder. Android dialogs expose only three buttons,
		// so any further names are rejected with an error log.
		{
			case 0:
				getBuilder().setPositiveButton(text, clicker);
				break;
			case 1:
				getBuilder().setNeutralButton(text, clicker);
				break;
			case 2:
				getBuilder().setNegativeButton(text, clicker);
				break;
			default:
				Log.e(TAG, "Only 3 buttons are supported");
		}
	}
}

/**
 * Installs a Titanium view proxy as the dialog's custom content view.
 * Re-parents the proxy to the dialog's activity before realizing the native
 * view, then hands the native view to the AlertDialog.Builder.
 *
 * @param proxy the view proxy to embed; ignored when {@code null}
 */
private void processView(TiViewProxy proxy)
{
	if (proxy != null) {
		// reset the child view context to parent context
		proxy.setActivity(dialogWrapper.getActivity());
		view = proxy.getOrCreateView();
		getBuilder().setView(view.getNativeView());
	}
}

/**
 * Reacts to a single proxy property change at runtime.
 * Title/message mutate the live dialog in place; button names, ok text,
 * options, selected index and the custom android view all require the dialog
 * to be rebuilt, so the current one is dismissed and the builder re-primed.
 * Accessibility keys are forwarded to the dialog's ListView when present.
 *
 * @param key      name of the property that changed
 * @param oldValue previous value (only logged)
 * @param newValue new value to apply
 * @param proxy    owning proxy, used to read sibling properties
 */
@Override
public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy)
{
	Log.d(TAG, "Property: " + key + " old: " + oldValue + " new: " + newValue, Log.DEBUG_MODE);
	AlertDialog dialog = dialogWrapper.getDialog();
	if (key.equals(TiC.PROPERTY_TITLE)) {
		if (dialog != null) {
			dialog.setTitle((String) newValue);
		}
	} else if (key.equals(TiC.PROPERTY_MESSAGE)) {
		if (dialog != null) {
			dialog.setMessage((String) newValue);
		}
	} else if (key.equals(TiC.PROPERTY_BUTTON_NAMES)) {
		// Buttons cannot be changed on a live AlertDialog: drop it and rebuild.
		if (dialog != null) {
			dialog.dismiss();
			dialog = null;
		}
		processButtons(TiConvert.toStringArray((Object[]) newValue));
	} else if (key.equals(TiC.PROPERTY_OK) && !proxy.hasProperty(TiC.PROPERTY_BUTTON_NAMES)) {
		// 'ok' is only honored when no explicit buttonNames are set.
		if (dialog != null) {
			dialog.dismiss();
			dialog = null;
		}
		processButtons(new String[]{TiConvert.toString(newValue)});
	} else if (key.equals(TiC.PROPERTY_OPTIONS)) {
		if (dialog != null) {
			dialog.dismiss();
			dialog = null;
		}
		// A single-choice list replaces any custom content view.
		getBuilder().setView(null);
		int selectedIndex = -1;
		if (proxy.hasProperty(TiC.PROPERTY_SELECTED_INDEX)) {
			selectedIndex = TiConvert.toInt(proxy.getProperty(TiC.PROPERTY_SELECTED_INDEX));
		}
		processOptions(TiConvert.toStringArray((Object[]) newValue), selectedIndex);
	} else if (key.equals(TiC.PROPERTY_SELECTED_INDEX)) {
		if (dialog != null) {
			dialog.dismiss();
			dialog = null;
		}
		getBuilder().setView(null);
		// Rebuild the options list with the new selection, if options exist.
		if (proxy.hasProperty(TiC.PROPERTY_OPTIONS)) {
			processOptions(TiConvert.toStringArray((Object[]) proxy.getProperty(TiC.PROPERTY_OPTIONS)), TiConvert.toInt(newValue));
		}
	} else if (key.equals(TiC.PROPERTY_ANDROID_VIEW)) {
		if (dialog != null) {
			dialog.dismiss();
			dialog = null;
		}
		if (newValue != null) {
			processView((TiViewProxy) newValue);
		} else {
			proxy.setProperty(TiC.PROPERTY_ANDROID_VIEW, null);
		}
	} else if (key.equals(TiC.PROPERTY_PERSISTENT) && newValue != null) {
		dialogWrapper.setPersistent(TiConvert.toBoolean(newValue));
	} else if (key.indexOf("accessibility") == 0) {
		// Accessibility properties only apply once a dialog (with a list) exists.
		if (dialog != null) {
			ListView listView = dialog.getListView();
			if (listView != null) {
				if (key.equals(TiC.PROPERTY_ACCESSIBILITY_HIDDEN)) {
					int importance = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO;
					if (newValue != null && TiConvert.toBoolean(newValue)) {
						importance = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_NO;
					}
					ViewCompat.setImportantForAccessibility(listView, importance);
				} else {
					listView.setContentDescription(composeContentDescription());
				}
			}
		}
	} else {
		super.propertyChanged(key, oldValue, newValue, proxy);
	}
}

/**
 * Creates (on first use) and shows the native AlertDialog.
 * On the first call the dialog is built from the proxy's properties, wired
 * with a cancel listener that fires a click event with the 'cancel' index,
 * and given initial accessibility settings. The dialog is registered with its
 * TiBaseActivity so it can be cleaned up later (prevents a leak).
 *
 * @param options currently unused show options
 */
public void show(KrollDict options)
{
	AlertDialog dialog = dialogWrapper.getDialog();
	if (dialog == null) {
		if (dialogWrapper.getActivity() == null) {
			TiBaseActivity dialogActivity = (TiBaseActivity) getCurrentActivity();
			dialogWrapper.setActivity(new WeakReference<TiBaseActivity>(dialogActivity));
		}
		processProperties(proxy.getProperties());
		getBuilder().setOnCancelListener(new OnCancelListener() {
			@Override
			public void onCancel(DialogInterface dlg)
			{
				int cancelIndex = (proxy.hasProperty(TiC.PROPERTY_CANCEL)) ? TiConvert.toInt(proxy.getProperty(TiC.PROPERTY_CANCEL)) : -1;
				Log.d(TAG, "onCancelListener called. Sending index: " + cancelIndex, Log.DEBUG_MODE);
				handleEvent(cancelIndex);
				hide(null);
			}
		});
		dialog = getBuilder().create();
		// Initially apply accessibility properties here, the first time
		// the dialog actually becomes available. After this, propertyChanged
		// can also be used.
		ListView listView = dialog.getListView();
		if (listView != null) {
			listView.setContentDescription(composeContentDescription());
			int importance = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO;
			if (proxy != null) {
				Object propertyValue = proxy.getProperty(TiC.PROPERTY_ACCESSIBILITY_HIDDEN);
				if (propertyValue != null && TiConvert.toBoolean(propertyValue)) {
					importance = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_NO;
				}
			}
			ViewCompat.setImportantForAccessibility(listView, importance);
		}
		dialogWrapper.setDialog(dialog);
		// Builder is single-use; drop it so the next rebuild starts fresh.
		builder = null;
	}
	try {
		Activity dialogActivity = dialogWrapper.getActivity();
		if (dialogActivity != null && !dialogActivity.isFinishing()) {
			if (dialogActivity instanceof TiBaseActivity) {
				//add dialog to its activity so we can clean it up later to prevent memory leak.
				((TiBaseActivity) dialogActivity).addDialog(dialogWrapper);
				dialog.show();
			}
		} else {
			dialog = null;
			Log.w(TAG, "Dialog activity is destroyed, unable to show dialog with message: " + TiConvert.toString(proxy.getProperty(TiC.PROPERTY_MESSAGE)));
		}
	} catch (Throwable t) {
		// Activity may be torn down between the checks above and show().
		Log.w(TAG, "Context must have gone away: " + t.getMessage(), t);
	}
}

/**
 * Dismisses the dialog (if showing), unregisters it from its activity, and
 * releases any custom content view so it can be re-created on the next show.
 *
 * @param options currently unused hide options
 */
public void hide(KrollDict options)
{
	AlertDialog dialog = dialogWrapper.getDialog();
	if (dialog != null) {
		dialog.dismiss();
		dialogWrapper.getActivity().removeDialog(dialog);
	}
	if (view != null) {
		view.getProxy().releaseViews();
		view = null;
	}
}

/**
 * Lazily creates the AlertDialog.Builder bound to the current activity and a
 * DialogWrapper used for lifecycle cleanup. Logs an error if no activity is
 * available (the dialog cannot be built without one).
 */
private void createBuilder()
{
	Activity currentActivity = getCurrentActivity();
	if (currentActivity != null) {
		this.builder = new AlertDialog.Builder(currentActivity);
		this.builder.setCancelable(true);
		//Native dialogs are persistent by default.
		TiBaseActivity dialogActivity = (TiBaseActivity)currentActivity;
		dialogWrapper = dialogActivity.new DialogWrapper(null, true, new WeakReference<TiBaseActivity>(dialogActivity));
	} else {
		Log.e (TAG, "Unable to find an activity for dialog.");
	}
}

/**
 * Fires the 'click' event for a button or option selection.
 * Button ids carry BUTTON_MASK; for plain option selections the accepted
 * index is written back to the proxy's selectedIndex property. The event
 * payload reports the (unmasked) index, whether a button was pressed, and
 * whether the index matches the configured 'cancel' index.
 *
 * @param id raw index, possibly tagged with BUTTON_MASK
 */
public void handleEvent(int id)
{
	int cancelIndex = (proxy.hasProperty(TiC.PROPERTY_CANCEL)) ? TiConvert.toInt(proxy.getProperty(TiC.PROPERTY_CANCEL)) : -1;
	KrollDict data = new KrollDict();
	if ((id & BUTTON_MASK) != 0) {
		data.put(TiC.PROPERTY_BUTTON, true);
		id &= ~BUTTON_MASK;
	} else {
		data.put(TiC.PROPERTY_BUTTON, false);
		// If an option was selected and the user accepted it, update the proxy.
		if (proxy.hasProperty(TiC.PROPERTY_OPTIONS)) {
			proxy.setProperty(TiC.PROPERTY_SELECTED_INDEX, id);
		}
	}
	data.put(TiC.EVENT_PROPERTY_INDEX, id);
	data.put(TiC.PROPERTY_CANCEL, id == cancelIndex);
	fireEvent(TiC.EVENT_CLICK, data);
}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package app.metatron.discovery.domain.datasource.connection.jdbc; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; import org.junit.Test; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import java.util.HashMap; import java.util.List; import java.util.Map; import app.metatron.discovery.common.GlobalObjectMapper; import app.metatron.discovery.domain.dataconnection.DataConnection; import app.metatron.discovery.domain.dataconnection.DataConnectionHelper; import app.metatron.discovery.domain.dataconnection.dialect.HiveDialect; import app.metatron.discovery.extension.dataconnection.jdbc.dialect.JdbcDialect; /** * Created by kyungtaak on 2016. 6. 16.. 
*/ public class PrestoConnectionTest { private JdbcConnectionService jdbcConnectionService = new JdbcConnectionService(); private Pageable pageable = new PageRequest(0, 100); @Test public void checkPrestoConnection() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); // connection.setDatabase("polaris_datasources"); connection.setUsername("polaris"); connection.setPassword("polaris"); connection.setPort(8080); System.out.println(jdbcConnectionService.checkConnection(connection)); } @Test public void showPrestoDatabases() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setPort(8080); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); System.out.println(jdbcConnectionService.getDatabases(connection, null, pageable)); } @Test public void showPrestoTables() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("polaris"); connection.setPassword("polaris"); connection.setPort(8080); System.out.println(jdbcConnectionService.getTableNames(connection, "default", pageable)); } @Test public void searchPrestoTables() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); connection.setPort(8080); PageRequest pageRequest = new PageRequest(0, 20); String searchKeyword = ""; String schema = "default"; Map<String, Object> tableMap = jdbcConnectionService.getTables(connection, schema, searchKeyword, pageRequest); List<Map<String, Object>> tableList = (List) tableMap.get("tables"); Map<String, Object> pageInfo = (Map) tableMap.get("page"); System.out.println("pageInfo = " + pageInfo); for(Map<String, Object> tableMapObj : tableList){ System.out.println(tableMapObj); String tableName = (String) 
tableMapObj.get("name"); Assert.assertTrue(StringUtils.containsIgnoreCase(tableName, searchKeyword)); } } @Test public void searchPrestoSchemas() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); connection.setPort(8080); PageRequest pageRequest = new PageRequest(0, 20); String searchKeyword = ""; Map<String, Object> databaseList = jdbcConnectionService.getDatabases(connection, searchKeyword, pageRequest); System.out.println(databaseList); } @Test public void showTableColumnPresto() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); connection.setPort(8080); PageRequest pageRequest = new PageRequest(0, 20); String schemaName = "col_test"; String tableName = "aabldvcf283"; String columnNamePattern = ""; Map<String, Object> columnMaps = jdbcConnectionService.getTableColumns(connection, schemaName, tableName, columnNamePattern, pageRequest); List<Map> columnList = (List) columnMaps.get("columns"); Map<String, Object> pageInfo = (Map) columnMaps.get("page"); System.out.println("pageInfo = " + pageInfo); for(Map<String, Object> columnMap : columnList){ System.out.println(columnMap); } } @Test public void showTableInfoPresto() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); connection.setPort(8080); String schemaName = "default"; String tableName = "sales"; Map<String, Object> tableDescMap = jdbcConnectionService.showTableDescription(connection, schemaName, tableName); for(String key : tableDescMap.keySet()){ System.out.println(key + " = " + tableDescMap.get(key)); } } @Test public void changeDatabase() { DataConnection connection = new 
DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); connection.setPort(8080); JdbcDialect dialect = DataConnectionHelper.lookupDialect(connection); String webSocketId = "test1"; String database1 = "default"; String database2 = "test1"; // DataSource dataSource = WorkbenchDataSourceManager.createDataSourceInfo(connection, webSocketId, true). // getSingleConnectionDataSource(); // // JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); // // jdbcConnectionService.changeDatabase(connection, database1, dataSource); // List<Map<String, Object>> tables1 = jdbcTemplate.queryForList(dialect.getTableQuery(connection, null, null, null, null, null, null)); // // jdbcConnectionService.changeDatabase(connection, database2, dataSource); // // List<Map<String, Object>> tables2 = jdbcTemplate.queryForList(dialect.getTableQuery(connection, null, null, null, null, null, null)); // // System.out.println(tables1); // System.out.println(tables2); } @Test public void searchTableWithMetastoreInfo() { DataConnection connection = new DataConnection("PRESTO"); connection.setHostname("localhost"); connection.setCatalog("hive"); connection.setUsername("hive"); connection.setPassword("hive"); connection.setPort(8080); Map<String, String> propMap = new HashMap<>(); propMap.put(HiveDialect.PROPERTY_KEY_METASTORE_HOST, "localhost"); propMap.put(HiveDialect.PROPERTY_KEY_METASTORE_PORT, "3306"); propMap.put(HiveDialect.PROPERTY_KEY_METASTORE_SCHEMA, "hivemeta"); propMap.put(HiveDialect.PROPERTY_KEY_METASTORE_USERNAME, "hiveuser"); propMap.put(HiveDialect.PROPERTY_KEY_METASTORE_PASSWORD, "hive1234"); connection.setProperties(GlobalObjectMapper.writeValueAsString(propMap)); PageRequest pageRequest = new PageRequest(0, 20); String searchKeyword = ""; String schema = "default"; Map<String, Object> tableMap = jdbcConnectionService.getTables(connection, schema, searchKeyword, pageRequest); 
List<Map<String, Object>> tableList = (List) tableMap.get("tables"); Map<String, Object> pageInfo = (Map) tableMap.get("page"); System.out.println("pageInfo = " + pageInfo); for(Map<String, Object> tableMapObj : tableList){ System.out.println(tableMapObj); String tableName = (String) tableMapObj.get("name"); Assert.assertTrue(StringUtils.containsIgnoreCase(tableName, searchKeyword)); } } }
/*
 * $Id$
 *
 * SARL is an general-purpose agent programming language.
 * More details on http://www.sarl.io
 *
 * Copyright (C) 2014-2017 the original authors or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.sarl.lang.core;

import java.lang.ref.WeakReference;
import java.util.UUID;

import org.eclipse.xtext.xbase.lib.Inline;
import org.eclipse.xtext.xbase.lib.Pure;

import io.sarl.lang.util.ClearableReference;

/** This class represents a part of trait of an agent.
 *
 * @author $Author: srodriguez$
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
public abstract class AgentTrait extends AgentProtectedAPIObject {

	/** Weak link to the owning agent; weak so the trait never keeps the agent alive. */
	private WeakReference<Agent> agentRef;

	/** Opaque payload attached by the SRE; not serialized. */
	private transient Object sreSpecificData;

	/** Construct a trait owned by the given agent.
	 *
	 * @param agent - the owner of this trait.
	 */
	AgentTrait(Agent agent) {
		this.agentRef = new WeakReference<>(agent);
	}

	/** Construct an owner-less trait.
	 */
	AgentTrait() {
		this((Agent) null);
	}

	@Override
	@Pure
	protected String attributesToString() {
		// Equivalent to the StringBuilder form: "owner = " followed by the owner.
		return "owner = " + getOwner(); //$NON-NLS-1$
	}

	@Override
	@Pure
	public String toString() {
		final String body = attributesToString();
		return getClass().getSimpleName() + " [" + body + "]"; //$NON-NLS-1$ //$NON-NLS-2$
	}

	/** Set the agent that has this trait.
	 *
	 * @param agent - the owner of this trait.
	 */
	void setOwner(Agent agent) {
		this.agentRef = new WeakReference<>(agent);
	}

	/** Replies the agent that has this trait.
	 *
	 * @return the owner.
	 */
	@Pure
	protected Agent getOwner() {
		return this.agentRef.get();
	}

	/** Replies the identifier of the agent that has this trait.
	 *
	 * @return the UUID of the owner.
	 * @since 0.6
	 */
	@Pure
	@Inline("getOwner().getID()")
	protected final UUID getID() {
		return getOwner().getID();
	}

	@Override
	@Pure
	protected final <S extends Capacity> S getSkill(Class<S> capacity) {
		assert capacity != null;
		return $castSkill(capacity, $getSkill(capacity));
	}

	/** Cast the skill reference to the given capacity type.
	 *
	 * @param <S> the expected capacity type.
	 * @param capacity the expected capacity type.
	 * @param skillReference the skill reference.
	 * @return the skill casted to the given capacity.
	 */
	@Pure
	protected <S extends Capacity> S $castSkill(Class<S> capacity, ClearableReference<Skill> skillReference) {
		// A cleared reference (or a missing one) means the capacity is not implemented.
		final Skill referenced = skillReference == null ? null : skillReference.get();
		final S skill = capacity.cast(referenced);
		if (skill == null) {
			throw new UnimplementedCapacityException(capacity, getOwner().getID());
		}
		return skill;
	}

	@Override
	protected ClearableReference<Skill> $getSkill(Class<? extends Capacity> capacity) {
		final Agent owner = getOwner();
		if (owner == null) {
			throw new UnimplementedCapacityException(capacity, null);
		}
		return owner.$getSkill(capacity);
	}

	@Override
	@Inline("setSkill($2, $1)")
	protected <S extends Skill> void operator_mappedTo(Class<? extends Capacity> capacity, S skill) {
		setSkill(skill, capacity);
	}

	@Override
	@SafeVarargs
	protected final <S extends Skill> S setSkill(S skill, Class<? extends Capacity>... capacities) {
		final Agent owner = getOwner();
		// Without an owner there is nowhere to register the skill; echo it back.
		return owner == null ? skill : owner.setSkill(skill, capacities);
	}

	@Override
	protected <S extends Capacity> S clearSkill(Class<S> capacity) {
		final Agent owner = getOwner();
		return owner == null ? null : owner.clearSkill(capacity);
	}

	@Override
	@Pure
	protected boolean hasSkill(Class<? extends Capacity> capacity) {
		final Agent owner = getOwner();
		return owner != null && owner.hasSkill(capacity);
	}

	@Override
	@Pure
	protected boolean isMe(Address address) {
		final Agent owner = getOwner();
		return owner != null && owner.isMe(address);
	}

	@Override
	@Pure
	protected boolean isMe(UUID uID) {
		final Agent owner = getOwner();
		return owner != null && owner.isMe(uID);
	}

	@Override
	@Pure
	protected boolean isFromMe(Event event) {
		final Agent owner = getOwner();
		return owner != null && owner.isFromMe(event);
	}

	/** Replies the data associated to this agent trait by the SRE.
	 *
	 * @param <S> the type of the data.
	 * @param type the type of the data.
	 * @return the SRE-specific data.
	 * @since 0.5
	 */
	@Pure
	<S> S getSreSpecificData(Class<S> type) {
		return type.cast(this.sreSpecificData);
	}

	/** Change the data associated to this agent trait by the SRE.
	 *
	 * @param data the SRE-specific data.
	 * @since 0.5
	 */
	void setSreSpecificData(Object data) {
		this.sreSpecificData = data;
	}
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.tasks.vcs;

import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.committed.MockAbstractVcs;
import com.intellij.openapi.vcs.changes.ui.CommitChangeListDialog;
import com.intellij.openapi.vcs.impl.projectlevelman.AllVcses;
import com.intellij.tasks.*;
import com.intellij.tasks.impl.LocalTaskImpl;
import com.intellij.tasks.impl.TaskChangelistSupport;
import com.intellij.tasks.impl.TaskManagerImpl;
import com.intellij.testFramework.fixtures.CodeInsightFixtureTestCase;
import com.intellij.util.containers.ContainerUtil;
import icons.TasksIcons;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.util.Collections;
import java.util.Date;
import java.util.List;

/**
 * Integration tests for the task manager / VCS changelist association:
 * activating tasks, creating and removing changelists, and how context is
 * saved on commit. Runs against a mock VCS and a test task repository
 * (both wired up in {@link #setUp()}).
 *
 * @author Dmitry Avdeev
 * Date: 3/5/12
 */
public class TaskVcsTest extends CodeInsightFixtureTestCase {

  // Project-level changelist manager; refreshed explicitly in the tests.
  private ChangeListManagerImpl myChangeListManager;
  // Task manager under test.
  private TaskManagerImpl myTaskManager;

  // Fresh project: exactly one default task, active, bound to the single
  // default changelist (same id and same ChangeListInfo contents).
  public void testInitialState() {
    assertEquals(1, myTaskManager.getLocalTasks().size());
    final LocalTask defaultTask = myTaskManager.getLocalTasks().get(0);
    assertEquals(defaultTask, myTaskManager.getActiveTask());
    assertTrue(defaultTask.isDefault());

    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));
    assertEquals(defaultTask.getChangeLists().get(0).id, myChangeListManager.getChangeListsCopy().get(0).getId());
    assertEquals(defaultTask.getChangeLists().get(0), new ChangeListInfo(myChangeListManager.getChangeListsCopy().get(0)));
  }

  // Activating a remote task without creating a changelist keeps the default
  // changelist; once a changelist is created for it, switching back and forth
  // flips which changelist is the default while both task associations survive.
  public void testSwitchingTasks() throws Exception {
    final LocalTask defaultTask = myTaskManager.getLocalTasks().get(0);
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    myTaskManager.activateTask(task, false);

    assertEquals(2, myTaskManager.getLocalTasks().size());
    LocalTask localTask = myTaskManager.getActiveTask();
    assertEquals(task, localTask);

    // No changelist was created for the new task yet.
    assertEquals(0, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));

    myTaskManager.activateTask(defaultTask, false);

    assertEquals(0, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));

    activateAndCreateChangelist(localTask);

    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList activeChangeList = myChangeListManager.getDefaultChangeList();
    // With exactly two lists, "1 - indexOf(active)" picks the other one.
    LocalChangeList anotherChangeList = myChangeListManager.getChangeListsCopy().get(1 - myChangeListManager.getChangeListsCopy().indexOf(activeChangeList));

    assertNotNull(activeChangeList);
    assertEquals(localTask, myTaskManager.getAssociatedTask(activeChangeList));
    assertEquals("TEST-001 Summary", activeChangeList.getName());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, anotherChangeList.getName());

    myTaskManager.activateTask(defaultTask, false);
    myChangeListManager.waitUntilRefreshed();

    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    activeChangeList = myChangeListManager.getDefaultChangeList();
    anotherChangeList = myChangeListManager.getChangeListsCopy().get(1 - myChangeListManager.getChangeListsCopy().indexOf(activeChangeList));

    assertNotNull(activeChangeList);
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(activeChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, activeChangeList.getName());
    assertEquals(localTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  // Creating an extra changelist for the default task via the task manager
  // yields three lists; the new "Default (1)" list becomes the active default.
  public void testAddChangeListViaCreateChangeListAction() throws Exception {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());

    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    myTaskManager.createChangeList(defaultTask, "Default (1)");
    myChangeListManager.waitUntilRefreshed();

    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(2, defaultTask.getChangeLists().size());
    assertEquals(3, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList defaultChangeListActive = myChangeListManager.findChangeList("Default (1)");
    assertNotNull(defaultChangeListActive);
    assertTrue(defaultChangeListActive.isDefault());
    LocalChangeList defaultChangeListInactive = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeListInactive);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListActive));
    assertEquals("Default (1)", defaultChangeListActive.getName());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListInactive));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeListInactive.getName());

    assertEquals(anotherTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  // Removing a task's changelist through the VCS listener path leaves the
  // task locally known but with only the default changelist remaining.
  public void testRemoveChangelistViaVcsAction() throws Exception {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());

    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);

    LocalChangeList defaultChangeList = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeList);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);
    removeChangeList(anotherChangeList);

    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeList.getName());
  }

  // Helper: activate the task and lazily create its changelist on first use.
  private void activateAndCreateChangelist(Task task) {
    LocalTask localTask = myTaskManager.activateTask(task, false);
    if (localTask.getChangeLists().isEmpty()) {
      myTaskManager.createChangeList(localTask, myTaskManager.getChangelistName(localTask));
    }
  }

  // Same scenario as above but the extra list is created through the VCS-side
  // addChangeList action; here the original default list stays active.
  public void testAddChangeListViaVcsAction() throws Exception {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());

    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    addChangeList("Default (1)", "");

    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(2, defaultTask.getChangeLists().size());
    assertEquals(3, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList defaultChangeListActive = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeListActive);
    assertTrue(myChangeListManager.getDefaultListName(), defaultChangeListActive.isDefault());
    LocalChangeList defaultChangeListInactive = myChangeListManager.findChangeList("Default (1)");
    assertNotNull(defaultChangeListInactive);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListActive));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeListActive.getName());

    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListInactive));
    assertEquals("Default (1)", defaultChangeListInactive.getName());

    assertEquals(anotherTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  // With trackContextForNewChangelist on, adding a changelist creates a new
  // local task named after it.
  public void testTrackContext() {
    myTaskManager.getState().trackContextForNewChangelist = true;
    addChangeList("New Changelist", "");
    assertEquals(2, myTaskManager.getLocalTasks().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList newChangeList = myChangeListManager.findChangeList("New Changelist");
    assertNotNull(newChangeList);
    LocalTask newTask = myTaskManager.getAssociatedTask(newChangeList);
    assertNotNull(newTask);
    assertEquals("New Changelist", newTask.getSummary());

    // Restore the flag so later tests see the default behavior.
    myTaskManager.getState().trackContextForNewChangelist = false;
  }

  // The commit-message template {id} {summary} {number} {project} is expanded
  // into the changelist comment when the repository formats commit messages.
  public void testCreateComment() throws Exception {
    myRepository.setShouldFormatCommitMessage(true);
    myRepository.setCommitMessageFormat("{id} {summary} {number} {project}");
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    LocalTask localTask = myTaskManager.getActiveTask();
    assertNotNull(localTask);
    assertEquals("TEST-001", localTask.getId());

    List<ChangeListInfo> lists = localTask.getChangeLists();
    assertEquals(1, lists.size());
    assertEquals("TEST-001 Summary 001 TEST", lists.get(0).comment);
  }

  // Committing a changelist that already belongs to a task must not create an
  // extra task, and the task/changelist association must survive the commit.
  public void testSaveContextOnCommitForExistingTask() throws Exception {
    myTaskManager.getState().saveContextOnCommit = true;

    assertEquals(1, myTaskManager.getLocalTasks().size());

    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    assertEquals(1, myChangeListManager.getChangeListsCopy().size()); // default change list should be here
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();

    assertEquals(2, myTaskManager.getLocalTasks().size());
    List<LocalChangeList> copy = myChangeListManager.getChangeListsCopy();
    assertEquals(copy.toString(), 2, copy.size());
    LocalTask localTask = myTaskManager.getActiveTask();
    List<ChangeListInfo> changelists = localTask.getChangeLists();
    ChangeListInfo info = changelists.get(0);
    LocalChangeList changeList = myChangeListManager.getChangeList(info.id);
    assertNotNull(changeList);

    CommitChangeListDialog.commitChanges(getProject(), Collections.<Change>emptyList(), changeList, null, changeList.getName());
    assertEquals(2, myTaskManager.getLocalTasks().size()); // no extra task created
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    assertEquals(localTask, myTaskManager.getAssociatedTask(changeList)); // association should survive
  }

  // Committing a changelist that belongs to no task creates a new local task
  // named after the changelist when saveContextOnCommit is on.
  public void testSaveContextOnCommit() throws Exception {
    myTaskManager.getState().saveContextOnCommit = true;

    assertEquals(1, myTaskManager.getLocalTasks().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());

    LocalChangeList changeList = addChangeList("New Changelist", "");

    assertEquals(1, myTaskManager.getLocalTasks().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    CommitChangeListDialog.commitChanges(getProject(), Collections.<Change>emptyList(), changeList, null, changeList.getName());

    assertEquals(2, myTaskManager.getLocalTasks().size()); // extra task created
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());

    assertTrue(ContainerUtil.exists(myTaskManager.getLocalTasks(), task -> {
      return task.getSummary().equals("New Changelist");
    }));
  }

  // Helper: add a changelist the way the VCS UI does, including the task
  // changelist support hook (which may spawn a tracking task).
  private LocalChangeList addChangeList(String title, String comment) {
    final LocalChangeList list = myChangeListManager.addChangeList(title, comment);
    new TaskChangelistSupport(getProject(), myTaskManager).addControls(new JPanel(), null).consume(list);
    return list;
  }

  // Helper: remove a changelist and notify the task manager's VCS listener,
  // mirroring what happens on a real VCS-side removal.
  private void removeChangeList(LocalChangeList changeList) {
    myChangeListManager.removeChangeList(changeList);
    myTaskManager.getChangeListListener().changeListRemoved(changeList);
  }

  // Task ids with dashes split into project ("foo-bar") and number ("001");
  // the changelist name is "<id> <summary>".
  public void testProjectWithDash() throws Exception {
    LocalTaskImpl task = new LocalTaskImpl("foo-bar-001", "summary") {
      @Override
      public TaskRepository getRepository() {
        return myRepository;
      }

      @Override
      public boolean isIssue() {
        return true;
      }
    };
    assertEquals("foo-bar", task.getProject());
    assertEquals("001", task.getNumber());
    String name = myTaskManager.getChangelistName(task);
    assertEquals("foo-bar-001 summary", name);
  }

  // Ids without a separator have no project part; the whole id is the number.
  public void testIds() throws Exception {
    LocalTaskImpl task = new LocalTaskImpl("", "");
    assertEquals("", task.getNumber());
    assertEquals(null, task.getProject());

    task = new LocalTaskImpl("-", "");
    assertEquals("-", task.getNumber());
    assertEquals(null, task.getProject());

    task = new LocalTaskImpl("foo", "");
    assertEquals("foo", task.getNumber());
    assertEquals(null, task.getProject());

    task = new LocalTaskImpl("112", "");
    assertEquals("112", task.getNumber());
    assertEquals(null, task.getProject());
  }

  // Re-activating a locally closed task recreates its removed changelist.
  public void testRestoreChangelist() throws Exception {
    final LocalTaskImpl task = new LocalTaskImpl("foo", "bar");
    activateAndCreateChangelist(task);
    activateAndCreateChangelist(new LocalTaskImpl("next", ""));

    final String changelistName = myTaskManager.getChangelistName(task);
    myChangeListManager.removeChangeList(changelistName);

    myChangeListManager.invokeAfterUpdate(() -> {
      assertTrue(myTaskManager.isLocallyClosed(task));
      activateAndCreateChangelist(task);
      assertNotNull(myChangeListManager.findChangeList(changelistName));
    }, InvokeAfterUpdateMode.SYNCHRONOUS_NOT_CANCELLABLE, "foo", ModalityState.NON_MODAL);
  }

  // Branch name suggestions: issues use the id, non-issues use the summary
  // (truncated/hyphenated); the branchNameFormat state is honored.
  public void testSuggestBranchName() throws Exception {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    assertTrue(task.isIssue());
    assertEquals("TEST-001", myTaskManager.suggestBranchName(task));
    LocalTaskImpl simple = new LocalTaskImpl("1", "simple");
    assertEquals("simple", myTaskManager.suggestBranchName(simple));
    LocalTaskImpl strange = new LocalTaskImpl("1", "very long and strange summary");
    assertEquals("very-long", myTaskManager.suggestBranchName(strange));
    myTaskManager.getState().branchNameFormat = "{id} {summary}";
    LocalTaskImpl withIllegalSymbolsInIssue = new LocalTaskImpl("1", "contains Illegal$Symbols");
    withIllegalSymbolsInIssue.setIssue(true);
    assertEquals("1-contains-Illegal$Symbols", myTaskManager.suggestBranchName(withIllegalSymbolsInIssue));
  }

  // Test repository serving the single task TEST-001 (configured in setUp).
  private TestRepository myRepository;
  // Mock VCS registered for the whole project.
  private MockAbstractVcs myVcs;

  // Registers the mock VCS, grabs the managers, and populates the test
  // repository with one bug task "TEST-001 Summary".
  @Override
  public void setUp() throws Exception {
    super.setUp();
    myVcs = new MockAbstractVcs(getProject());
    AllVcses.getInstance(getProject()).registerManually(myVcs);
    myChangeListManager = (ChangeListManagerImpl)ChangeListManager.getInstance(getProject());
    myTaskManager = (TaskManagerImpl)TaskManager.getManager(getProject());
    ProjectLevelVcsManager.getInstance(getProject()).setDirectoryMapping("", myVcs.getName());
    ProjectLevelVcsManager.getInstance(getProject()).hasActiveVcss();
    myRepository = new TestRepository();
    myRepository.setTasks(new Task() {
      @NotNull
      @Override
      public String getId() {
        return "TEST-001";
      }

      @NotNull
      @Override
      public String getSummary() {
        return "Summary";
      }

      @Override
      public String getDescription() {
        return null;
      }

      @NotNull
      @Override
      public Comment[] getComments() {
        return new Comment[0];
      }

      @NotNull
      @Override
      public Icon getIcon() {
        return TasksIcons.Unknown;
      }

      @NotNull
      @Override
      public TaskType getType() {
        return TaskType.BUG;
      }

      @Override
      public Date getUpdated() {
        return null;
      }

      @Override
      public Date getCreated() {
        return null;
      }

      @Override
      public boolean isClosed() {
        return false;
      }

      @Override
      public boolean isIssue() {
        return true;
      }

      @Override
      public String getIssueUrl() {
        return null;
      }

      @Override
      public TaskRepository getRepository() {
        return myRepository;
      }
    });
    myTaskManager.setRepositories(Collections.singletonList(myRepository));
  }

  // Unregisters the repository and mock VCS, then nulls fields so the fixture
  // does not leak across tests.
  @Override
  protected void tearDown() throws Exception {
    try {
      myTaskManager.setRepositories(Collections.<TaskRepository>emptyList());
      AllVcses.getInstance(getProject()).unregisterManually(myVcs);
    }
    finally {
      myTaskManager = null;
      myVcs = null;
      myChangeListManager = null;
      super.tearDown();
    }
  }
}
/* * Copyright 2010 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.oauth.jsontoken; import static com.google.common.base.Preconditions.checkNotNull; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import java.security.SignatureException; import java.time.Duration; import java.time.Instant; import javax.annotation.Nullable; import net.oauth.jsontoken.crypto.AsciiStringSigner; import net.oauth.jsontoken.crypto.SignatureAlgorithm; import net.oauth.jsontoken.crypto.Signer; import net.oauth.jsontoken.exceptions.ErrorCode; import net.oauth.jsontoken.exceptions.InvalidJsonTokenException; import org.apache.commons.codec.binary.Base64; /** A JSON Token. 
*/ public class JsonToken { // header names public static final String ALGORITHM_HEADER = "alg"; public static final String KEY_ID_HEADER = "kid"; public static final String TYPE_HEADER = "typ"; // standard claim names (payload parameters) public static final String ISSUER = "iss"; public static final String ISSUED_AT = "iat"; public static final String EXPIRATION = "exp"; public static final String AUDIENCE = "aud"; // default encoding for all Json token public static final String BASE64URL_ENCODING = "base64url"; public static final Duration DEFAULT_LIFETIME = Duration.ofMinutes(2); protected final Clock clock; private final JsonObject header; private final JsonObject payload; private final String tokenString; // The following fields are only valid when signing the token. private final Signer signer; private String signature; private String baseString; /** * Public constructor, use empty data type. * * @param signer the signer that will sign the token. */ public JsonToken(Signer signer) { this(signer, new SystemClock()); } /** * Public constructor. * * @param signer the signer that will sign the token * @param clock a clock whose notion of current time will determine the not-before timestamp of * the token, if not explicitly set. */ public JsonToken(Signer signer, Clock clock) { this.signer = checkNotNull(signer); this.clock = checkNotNull(clock); this.header = createHeader(signer); this.payload = new JsonObject(); this.signature = null; this.baseString = null; this.tokenString = null; String issuer = signer.getIssuer(); if (issuer != null) { setParam(ISSUER, issuer); } } /** * Public constructor used when parsing a JsonToken {@link JsonToken} (as opposed to create a * token). This constructor takes Json payload and clock as parameters, set all other signing * related parameters to null. * * @param payload A payload JSON object. * @param clock a clock whose notion of current time will determine the not-before timestamp of * the token, if not explicitly set. 
* @param tokenString The original token string we parsed to get this payload. */ public JsonToken(JsonObject header, JsonObject payload, Clock clock, String tokenString) { this.header = header; this.payload = payload; this.clock = clock; this.baseString = null; this.signature = null; this.signer = null; this.tokenString = tokenString; } /** * Public constructor used when parsing a JsonToken {@link JsonToken} (as opposed to create a * token). This constructor takes Json payload as parameter, set all other signing related * parameters to null. * * @param payload A payload JSON object. */ public JsonToken(JsonObject payload) { this.header = null; this.payload = payload; this.baseString = null; this.tokenString = null; this.signature = null; this.signer = null; this.clock = null; } /** * Public constructor used when parsing a JsonToken {@link JsonToken} (as opposed to create a * token). This constructor takes Json payload and clock as parameters, set all other signing * related parameters to null. * * @param payload A payload JSON object. * @param clock a clock whose notion of current time will determine the not-before timestamp of * the token, if not explicitly set. */ public JsonToken(JsonObject payload, Clock clock) { this.header = null; this.payload = payload; this.clock = clock; this.baseString = null; this.tokenString = null; this.signature = null; this.signer = null; } /** * Returns the serialized representation of this token, i.e., * keyId.sig.base64(payload).base64(data_type).base64(encoding).base64(alg) * * <p>This is what a client (token issuer) would send to a token verifier over the wire. * * @throws SignatureException if the token can't be signed. */ public String serializeAndSign() throws SignatureException { String baseString = computeSignatureBaseString(); String sig = getSignature(); return JsonTokenUtil.toDotFormat(baseString, sig); } /** Returns a human-readable version of the token. 
*/ @Override public String toString() { return JsonTokenUtil.toJson(payload); } @Nullable public String getIssuer() { return getParamAsString(ISSUER); } @Nullable public Instant getIssuedAt() { return getParamAsInstant(ISSUED_AT); } /** * Sets the {@code iat} (issued at) timestamp parameter. * * <p><b>Note:</b> sub-second precision is truncated. */ public void setIssuedAt(Instant instant) { setParam(ISSUED_AT, instant.getEpochSecond()); } @Nullable public Instant getExpiration() { return getParamAsInstant(EXPIRATION); } /** * Sets the {@code exp} (expiration) timestamp parameter. * * <p><b>Note:</b> sub-second precision is truncated. */ public void setExpiration(Instant instant) { setParam(EXPIRATION, instant.getEpochSecond()); } @Nullable public String getAudience() { return getParamAsString(AUDIENCE); } public void setAudience(String audience) { setParam(AUDIENCE, audience); } public void setParam(String name, String value) { payload.addProperty(name, value); } public void setParam(String name, Number value) { payload.addProperty(name, value); } @Nullable public JsonPrimitive getParamAsPrimitive(String param) { JsonElement element = payload.get(param); if (element != null && element.isJsonPrimitive()) { return (JsonPrimitive) element; } return null; } @Nullable public JsonObject getPayloadAsJsonObject() { return payload; } @Nullable public String getKeyId() { if (header == null) { return null; } JsonElement keyIdName = header.get(KEY_ID_HEADER); return keyIdName != null ? 
keyIdName.getAsString() : null; } /** * @throws IllegalStateException if the header does not exist * @throws IllegalArgumentException if the signature algorithm is not supported */ public SignatureAlgorithm getSignatureAlgorithm() { if (header == null) { throw new IllegalStateException("JWT has no algorithm or header"); } JsonElement algorithmName = header.get(ALGORITHM_HEADER); if (algorithmName == null) { throw new IllegalStateException( "JWT header is missing the required '" + ALGORITHM_HEADER + "' parameter", new InvalidJsonTokenException(ErrorCode.BAD_HEADER)); } try { return SignatureAlgorithm.getFromJsonName(algorithmName.getAsString()); } catch (IllegalArgumentException e) { throw new IllegalArgumentException( e.getMessage(), new InvalidJsonTokenException(ErrorCode.UNSUPPORTED_ALGORITHM)); } } public String getTokenString() { return tokenString; } /** @throws IllegalStateException if the header does not exist */ public JsonObject getHeader() { if (header == null) { throw new IllegalStateException("JWT has no header"); } return header; } @Nullable private String getParamAsString(String param) { JsonPrimitive primitive = getParamAsPrimitive(param); return primitive == null ? 
null : primitive.getAsString(); } @Nullable private Instant getParamAsInstant(String param) { JsonPrimitive primitive = getParamAsPrimitive(param); if (primitive != null && (primitive.isNumber() || primitive.isString())) { try { // JWT represents time in seconds return Instant.ofEpochSecond(primitive.getAsLong()); } catch (NumberFormatException e) { return null; } } return null; } /** @throws IllegalStateException if the header does not exist */ protected String computeSignatureBaseString() { if (baseString != null && !baseString.isEmpty()) { return baseString; } baseString = JsonTokenUtil.toDotFormat( JsonTokenUtil.toBase64(getHeader()), JsonTokenUtil.toBase64(payload)); return baseString; } private static JsonObject createHeader(Signer signer) { JsonObject newHeader = new JsonObject(); SignatureAlgorithm signatureAlgorithm = signer.getSignatureAlgorithm(); if (signatureAlgorithm != null) { newHeader.addProperty(ALGORITHM_HEADER, signatureAlgorithm.getNameForJson()); } String keyId = signer.getKeyId(); if (keyId != null) { newHeader.addProperty(KEY_ID_HEADER, keyId); } return newHeader; } /** @throws SignatureException if the signer does not exist */ private String getSignature() throws SignatureException { if (signature != null && !signature.isEmpty()) { return signature; } if (signer == null) { throw new SignatureException( "can't sign JsonToken with signer", new InvalidJsonTokenException(ErrorCode.ILLEGAL_STATE)); } // now, generate the signature AsciiStringSigner asciiSigner = new AsciiStringSigner(signer); return Base64.encodeBase64URLSafeString(asciiSigner.sign(baseString)); } }
/* * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.drawee.generic; import javax.annotation.Nullable; import android.content.res.Resources; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ColorFilter; import android.graphics.Matrix; import android.graphics.PointF; import android.graphics.RectF; import android.graphics.drawable.Animatable; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import com.facebook.common.internal.Preconditions; import com.facebook.drawee.drawable.FadeDrawable; import com.facebook.drawee.drawable.ForwardingDrawable; import com.facebook.drawee.drawable.MatrixDrawable; import com.facebook.drawee.drawable.RoundedBitmapDrawable; import com.facebook.drawee.drawable.RoundedColorDrawable; import com.facebook.drawee.drawable.RoundedCornersDrawable; import com.facebook.drawee.drawable.ScaleTypeDrawable; import com.facebook.drawee.drawable.SettableDrawable; import com.facebook.drawee.drawable.VisibilityAwareDrawable; import com.facebook.drawee.drawable.VisibilityCallback; import com.facebook.drawee.interfaces.SettableDraweeHierarchy; import static com.facebook.drawee.drawable.ScalingUtils.ScaleType; /** * A SettableDraweeHierarchy that displays placeholder image until the actual image is set. * If provided, failure image will be used in case of failure (placeholder otherwise). * If provided, retry image will be used in case of failure when retrying is enabled. * If provided, progressbar will be displayed until fully loaded. * Each image can be displayed with a different scale type (or no scaling at all). 
* Fading between the layers is supported. * * <p> * Example hierarchy with placeholder, retry, failure and one actual image: * <pre> * o FadeDrawable (top level drawable) * | * +--o ScaleTypeDrawable * | | * | +--o Drawable (placeholder image) * | * +--o ScaleTypeDrawable * | | * | +--o SettableDrawable * | | * | +--o Drawable (actual image) * | * +--o ScaleTypeDrawable * | | * | +--o Drawable (retry image) * | * +--o ScaleTypeDrawable * | * +--o Drawable (failure image) * </pre> * * <p> * Note: * - ScaleType and Matrix transformations will be added only if specified. If both are unspecified, * then the branch for that image will be attached directly. * - It is not permitted to set both ScaleType transformation and Matrix transformation for the * same image. * - A Matrix transformation is only supported for actual image. * - All branches (placeholder, failure, retry, actual image, progressBar) are optional. * If some branch is not specified it won't be created. The exception is placeholder branch, * which will, if not specified, be created with a transparent drawable. * - If overlays and/or backgrounds are specified, they are added to the same fade drawable, and * are always displayed. * - Instance of some drawable should be used by only one DH. If more than one DH is being built * with the same builder, different drawable instances must be specified for each DH. 
*/
public class GenericDraweeHierarchy implements SettableDraweeHierarchy {

  /**
   * Top-level fade drawable. Reports no intrinsic size (so the view's layout params win) and
   * forwards visibility changes and draw calls to an optional {@link VisibilityCallback}.
   */
  private static class RootFadeDrawable extends FadeDrawable implements VisibilityAwareDrawable {

    @Nullable
    private VisibilityCallback mVisibilityCallback;

    public RootFadeDrawable(Drawable[] layers) {
      super(layers);
    }

    @Override
    public int getIntrinsicWidth() {
      return -1;
    }

    @Override
    public int getIntrinsicHeight() {
      return -1;
    }

    @Override
    public void setVisibilityCallback(@Nullable VisibilityCallback visibilityCallback) {
      mVisibilityCallback = visibilityCallback;
    }

    @Override
    public boolean setVisible(boolean visible, boolean restart) {
      // Notify the callback before delegating so it observes every visibility change.
      if (mVisibilityCallback != null) {
        mVisibilityCallback.onVisibilityChange(visible);
      }
      return super.setVisible(visible, restart);
    }

    @Override
    public void draw(Canvas canvas) {
      if (mVisibilityCallback != null) {
        mVisibilityCallback.onDraw();
      }
      super.draw(canvas);
    }
  }

  // Lazily created transparent placeholder, used when no placeholder image was supplied.
  private Drawable mEmptyPlaceholderDrawable;
  private final Drawable mEmptyActualImageDrawable = new ColorDrawable(Color.TRANSPARENT);
  private final Drawable mEmptyControllerOverlayDrawable = new ColorDrawable(Color.TRANSPARENT);

  private final Resources mResources;

  private final Drawable mTopLevelDrawable;
  private final FadeDrawable mFadeDrawable;
  private final SettableDrawable mActualImageSettableDrawable;

  // Layer indices into mFadeDrawable; -1 means the branch was not created.
  private final int mPlaceholderImageIndex;
  private final int mProgressBarImageIndex;
  private final int mActualImageIndex;
  private final int mRetryImageIndex;
  private final int mFailureImageIndex;
  private final int mControllerOverlayIndex;

  private RoundingParams mRoundingParams;

  /**
   * Builds the layer stack from the builder's configuration. Layer order (bottom to top):
   * backgrounds, placeholder, actual image, progress bar, retry, failure, overlays, controller
   * overlay. NOTE: the numLayers++ bookkeeping below must stay in lockstep with the order in
   * which the layers[] array is filled afterwards.
   */
  GenericDraweeHierarchy(GenericDraweeHierarchyBuilder builder) {
    mResources = builder.getResources();
    mRoundingParams = builder.getRoundingParams();

    int numLayers = 0;

    // backgrounds
    int numBackgrounds = (builder.getBackgrounds() != null) ? builder.getBackgrounds().size() : 0;
    int backgroundsIndex = numLayers;
    numLayers += numBackgrounds;

    // placeholder image branch
    Drawable placeholderImageBranch = builder.getPlaceholderImage();
    if (placeholderImageBranch == null) {
      placeholderImageBranch = getEmptyPlaceholderDrawable();
    }
    placeholderImageBranch = maybeApplyRounding(
        mRoundingParams,
        mResources,
        placeholderImageBranch);
    placeholderImageBranch = maybeWrapWithScaleType(
        placeholderImageBranch,
        builder.getPlaceholderImageScaleType());
    mPlaceholderImageIndex = numLayers++;

    // actual image branch
    Drawable actualImageBranch = null;
    mActualImageSettableDrawable = new SettableDrawable(mEmptyActualImageDrawable);
    actualImageBranch = mActualImageSettableDrawable;
    actualImageBranch = maybeWrapWithScaleType(
        actualImageBranch,
        builder.getActualImageScaleType(),
        builder.getActualImageFocusPoint());
    actualImageBranch = maybeWrapWithMatrix(
        actualImageBranch,
        builder.getActualImageMatrix());
    actualImageBranch.setColorFilter(builder.getActualImageColorFilter());
    mActualImageIndex = numLayers++;

    // progressBar image branch (optional)
    Drawable progressBarImageBranch = builder.getProgressBarImage();
    if (progressBarImageBranch != null) {
      progressBarImageBranch = maybeWrapWithScaleType(
          progressBarImageBranch,
          builder.getProgressBarImageScaleType());
      mProgressBarImageIndex = numLayers++;
    } else {
      mProgressBarImageIndex = -1;
    }

    // retry image branch (optional)
    Drawable retryImageBranch = builder.getRetryImage();
    if (retryImageBranch != null) {
      retryImageBranch = maybeWrapWithScaleType(
          retryImageBranch,
          builder.getRetryImageScaleType());
      mRetryImageIndex = numLayers++;
    } else {
      mRetryImageIndex = -1;
    }

    // failure image branch (optional)
    Drawable failureImageBranch = builder.getFailureImage();
    if (failureImageBranch != null) {
      failureImageBranch = maybeWrapWithScaleType(
          failureImageBranch,
          builder.getFailureImageScaleType());
      mFailureImageIndex = numLayers++;
    } else {
      mFailureImageIndex = -1;
    }

    // overlays
    int overlaysIndex = numLayers;
    int numOverlays =
        ((builder.getOverlays() != null) ? builder.getOverlays().size() : 0) +
        ((builder.getPressedStateOverlay() != null) ? 1 : 0);
    numLayers += numOverlays;

    // controller overlay
    mControllerOverlayIndex = numLayers++;

    // array of layers, filled in the same order the indices were assigned above
    Drawable[] layers = new Drawable[numLayers];
    if (numBackgrounds > 0) {
      int index = 0;
      for (Drawable background : builder.getBackgrounds()) {
        layers[backgroundsIndex + index++] = background;
      }
    }
    if (mPlaceholderImageIndex >= 0) {
      layers[mPlaceholderImageIndex] = placeholderImageBranch;
    }
    if (mActualImageIndex >= 0) {
      layers[mActualImageIndex] = actualImageBranch;
    }
    if (mProgressBarImageIndex >= 0) {
      layers[mProgressBarImageIndex] = progressBarImageBranch;
    }
    if (mRetryImageIndex >= 0) {
      layers[mRetryImageIndex] = retryImageBranch;
    }
    if (mFailureImageIndex >= 0) {
      layers[mFailureImageIndex] = failureImageBranch;
    }
    if (numOverlays > 0) {
      int index = 0;
      if (builder.getOverlays() != null) {
        for (Drawable overlay : builder.getOverlays()) {
          layers[overlaysIndex + index++] = overlay;
        }
      }
      if (builder.getPressedStateOverlay() != null) {
        // pressed-state overlay goes on top of the regular overlays
        layers[overlaysIndex + index++] = builder.getPressedStateOverlay();
      }
    }
    if (mControllerOverlayIndex >= 0) {
      layers[mControllerOverlayIndex] = mEmptyControllerOverlayDrawable;
    }

    Drawable root;

    // fade drawable composed of branches
    mFadeDrawable = new RootFadeDrawable(layers);
    mFadeDrawable.setTransitionDuration(builder.getFadeDuration());
    root = mFadeDrawable;

    // rounded corners drawable (optional)
    root = maybeWrapWithRoundedCorners(mRoundingParams, root);

    // top-level drawable
    mTopLevelDrawable = root;
    mTopLevelDrawable.mutate();

    resetFade();
  }

  /** Wraps with a ScaleTypeDrawable if a scale type was specified; no focus point. */
  private static Drawable maybeWrapWithScaleType(
      Drawable drawable,
      @Nullable ScaleType scaleType) {
    return maybeWrapWithScaleType(drawable, scaleType, null);
  }

  /**
   * Wraps with a ScaleTypeDrawable if a scale type was specified, applying the focus point when
   * given. Returns the drawable unchanged when scaleType is null.
   */
  private static Drawable maybeWrapWithScaleType(
      Drawable drawable,
      @Nullable ScaleType scaleType,
      @Nullable PointF focusPoint) {
    Preconditions.checkNotNull(drawable);
    if (scaleType == null) {
      return drawable;
    }
    ScaleTypeDrawable scaleTypeDrawable = new ScaleTypeDrawable(drawable, scaleType);
    if (focusPoint != null) {
      scaleTypeDrawable.setFocusPoint(focusPoint);
    }
    return scaleTypeDrawable;
  }

  /** Wraps with a MatrixDrawable if a matrix was specified; otherwise returns unchanged. */
  private static Drawable maybeWrapWithMatrix(
      Drawable drawable,
      @Nullable Matrix matrix) {
    Preconditions.checkNotNull(drawable);
    if (matrix == null) {
      return drawable;
    }
    return new MatrixDrawable(drawable, matrix);
  }

  /**
   * Wraps the whole hierarchy in a RoundedCornersDrawable when the rounding method is
   * OVERLAY_COLOR; otherwise returns the drawable unchanged.
   */
  private static Drawable maybeWrapWithRoundedCorners(
      @Nullable RoundingParams roundingParams,
      Drawable drawable) {
    if (roundingParams != null &&
        roundingParams.getRoundingMethod() == RoundingParams.RoundingMethod.OVERLAY_COLOR) {
      RoundedCornersDrawable roundedCornersDrawable = new RoundedCornersDrawable(drawable);
      roundedCornersDrawable.setCircle(roundingParams.getRoundAsCircle());
      roundedCornersDrawable.setRadii(roundingParams.getCornersRadii());
      roundedCornersDrawable.setOverlayColor(roundingParams.getOverlayColor());
      roundedCornersDrawable.setBorder(
          roundingParams.getBorderColor(),
          roundingParams.getBorderWidth());
      return roundedCornersDrawable;
    } else {
      return drawable;
    }
  }

  /**
   * For BITMAP_ONLY rounding, replaces a BitmapDrawable or ColorDrawable with its rounded
   * counterpart (rounded colors need API level HONEYCOMB+). Any other drawable type, or any
   * other rounding method, is returned unchanged.
   */
  private static Drawable maybeApplyRounding(
      @Nullable RoundingParams roundingParams,
      Resources resources,
      Drawable drawable) {
    if (roundingParams != null &&
        roundingParams.getRoundingMethod() == RoundingParams.RoundingMethod.BITMAP_ONLY) {
      if (drawable instanceof BitmapDrawable) {
        RoundedBitmapDrawable roundedBitmapDrawable =
            RoundedBitmapDrawable.fromBitmapDrawable(resources, (BitmapDrawable) drawable);
        roundedBitmapDrawable.setCircle(roundingParams.getRoundAsCircle());
        roundedBitmapDrawable.setCornerRadii(roundingParams.getCornersRadii());
        roundedBitmapDrawable.setBorder(
            roundingParams.getBorderColor(),
            roundingParams.getBorderWidth());
        return roundedBitmapDrawable;
      }
      if (drawable instanceof ColorDrawable &&
          Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        RoundedColorDrawable roundedColorDrawable =
            RoundedColorDrawable.fromColorDrawable((ColorDrawable) drawable);
        roundedColorDrawable.setCircle(roundingParams.getRoundAsCircle());
        roundedColorDrawable.setRadii(roundingParams.getCornersRadii());
        roundedColorDrawable.setBorder(
            roundingParams.getBorderColor(),
            roundingParams.getBorderWidth());
        return roundedColorDrawable;
      }
    }
    return drawable;
  }

  /** Puts the transparent placeholder back into the actual-image slot. */
  private void resetActualImages() {
    if (mActualImageSettableDrawable != null) {
      mActualImageSettableDrawable.setDrawable(mEmptyActualImageDrawable);
    }
  }

  /** Restores the initial fade state: placeholder (plus backgrounds/overlays) visible. */
  private void resetFade() {
    if (mFadeDrawable != null) {
      mFadeDrawable.beginBatchMode();
      // turn on all layers (backgrounds, branches, overlays)
      mFadeDrawable.fadeInAllLayers();
      // turn off branches (leaving backgrounds and overlays on)
      fadeOutBranches();
      // turn on placeholder
      fadeInLayer(mPlaceholderImageIndex);
      mFadeDrawable.finishTransitionImmediately();
      mFadeDrawable.endBatchMode();
    }
  }

  /** Fades out all image branches; backgrounds and overlays are unaffected. */
  private void fadeOutBranches() {
    fadeOutLayer(mPlaceholderImageIndex);
    fadeOutLayer(mActualImageIndex);
    fadeOutLayer(mProgressBarImageIndex);
    fadeOutLayer(mRetryImageIndex);
    fadeOutLayer(mFailureImageIndex);
  }

  /** Fades in the layer at {@code index}; no-op for -1 (branch not present). */
  private void fadeInLayer(int index) {
    if (index >= 0) {
      mFadeDrawable.fadeInLayer(index);
    }
  }

  /** Fades out the layer at {@code index}; no-op for -1 (branch not present). */
  private void fadeOutLayer(int index) {
    if (index >= 0) {
      mFadeDrawable.fadeOutLayer(index);
    }
  }

  /**
   * Shows/animates the progress bar while loading and hides it once (nearly) complete; also
   * pushes the progress into the drawable's level. No-op when no progress bar branch exists.
   */
  private void setProgress(float progress) {
    if (mProgressBarImageIndex < 0) {
      return;
    }
    Drawable progressBarDrawable = getLayerChildDrawable(mProgressBarImageIndex);

    // display progressbar when not fully loaded, hide otherwise
    if (progress >= 0.999f) {
      if (progressBarDrawable instanceof Animatable) {
        ((Animatable) progressBarDrawable).stop();
      }
      fadeOutLayer(mProgressBarImageIndex);
    } else {
      if (progressBarDrawable instanceof Animatable) {
        ((Animatable) progressBarDrawable).start();
      }
      fadeInLayer(mProgressBarImageIndex);
    }
    // set drawable level, scaled to [0, 10000] per drawable specification
    progressBarDrawable.setLevel(Math.round(progress * 10000));
  }

  // SettableDraweeHierarchy interface

  @Override
  public Drawable getTopLevelDrawable() {
    return mTopLevelDrawable;
  }

  @Override
  public void reset() {
    resetActualImages();
    resetFade();
  }

  @Override
  public void setImage(Drawable drawable, float progress, boolean immediate) {
    drawable = maybeApplyRounding(mRoundingParams, mResources, drawable);
    // mutate so shared constant state is not affected by color filters etc.
    drawable.mutate();
    mActualImageSettableDrawable.setDrawable(drawable);
    mFadeDrawable.beginBatchMode();
    fadeOutBranches();
    fadeInLayer(mActualImageIndex);
    setProgress(progress);
    if (immediate) {
      mFadeDrawable.finishTransitionImmediately();
    }
    mFadeDrawable.endBatchMode();
  }

  @Override
  public void setProgress(float progress, boolean immediate) {
    mFadeDrawable.beginBatchMode();
    setProgress(progress);
    if (immediate) {
      mFadeDrawable.finishTransitionImmediately();
    }
    mFadeDrawable.endBatchMode();
  }

  @Override
  public void setFailure(Throwable throwable) {
    mFadeDrawable.beginBatchMode();
    fadeOutBranches();
    // fall back to the placeholder when no failure image was configured
    if (mFailureImageIndex >= 0) {
      fadeInLayer(mFailureImageIndex);
    } else {
      fadeInLayer(mPlaceholderImageIndex);
    }
    mFadeDrawable.endBatchMode();
  }

  @Override
  public void setRetry(Throwable throwable) {
    mFadeDrawable.beginBatchMode();
    fadeOutBranches();
    // fall back to the placeholder when no retry image was configured
    if (mRetryImageIndex >= 0) {
      fadeInLayer(mRetryImageIndex);
    } else {
      fadeInLayer(mPlaceholderImageIndex);
    }
    mFadeDrawable.endBatchMode();
  }

  @Override
  public void setControllerOverlay(@Nullable Drawable drawable) {
    if (drawable == null) {
      drawable = mEmptyControllerOverlayDrawable;
    }
    mFadeDrawable.setDrawable(mControllerOverlayIndex, drawable);
  }

  // Helper methods for accessing layers

  /**
   * Gets the drawable at the specified index while skipping MatrixDrawable and ScaleTypeDrawable.
   *
   * <p> If <code>returnParent</code> is set, parent drawable will be returned instead. If
   * MatrixDrawable or ScaleTypeDrawable is found at that index, it will be returned as a parent.
   * Otherwise, the FadeDrawable will be returned as a parent.
   */
  private Drawable getLayerDrawable(int index, boolean returnParent) {
    Drawable parent = mFadeDrawable;
    Drawable child = mFadeDrawable.getDrawable(index);
    if (child instanceof MatrixDrawable) {
      parent = child;
      child = parent.getCurrent();
    }
    if (child instanceof ScaleTypeDrawable) {
      parent = child;
      child = parent.getCurrent();
    }
    return returnParent ? parent : child;
  }

  /**
   * Returns the ScaleTypeDrawable at the specified index, or null if not found.
   */
  private @Nullable ScaleTypeDrawable findLayerScaleTypeDrawable(int index) {
    Drawable drawable = mFadeDrawable.getDrawable(index);
    if (drawable instanceof MatrixDrawable) {
      drawable = drawable.getCurrent();
    }
    if (drawable instanceof ScaleTypeDrawable) {
      return (ScaleTypeDrawable) drawable;
    } else {
      return null;
    }
  }

  /**
   * Sets a child drawable at the specified index.
   *
   * <p> Note: This uses {@link #getLayerDrawable} to find the parent drawable. Given drawable is
   * then set as its child.
   */
  private void setLayerChildDrawable(int index, Drawable drawable) {
    Drawable parent = getLayerDrawable(index, true /* returnParent */);
    if (parent == mFadeDrawable) {
      mFadeDrawable.setDrawable(index, drawable);
    } else {
      ((ForwardingDrawable) parent).setCurrent(drawable);
    }
  }

  /**
   * Gets the child drawable at the specified index.
   */
  private Drawable getLayerChildDrawable(int index) {
    return getLayerDrawable(index, false /* returnParent */);
  }

  /** Lazily creates and returns the transparent placeholder drawable. */
  private Drawable getEmptyPlaceholderDrawable() {
    if (mEmptyPlaceholderDrawable == null) {
      mEmptyPlaceholderDrawable = new ColorDrawable(Color.TRANSPARENT);
    }
    return mEmptyPlaceholderDrawable;
  }

  // Mutability

  /**
   * Sets the actual image focus point.
   *
   * @throws UnsupportedOperationException if the actual image branch has no ScaleTypeDrawable
   */
  public void setActualImageFocusPoint(PointF focusPoint) {
    Preconditions.checkNotNull(focusPoint);
    ScaleTypeDrawable scaleTypeDrawable = findLayerScaleTypeDrawable(mActualImageIndex);
    if (scaleTypeDrawable == null) {
      throw new UnsupportedOperationException("ScaleTypeDrawable not found!");
    }
    scaleTypeDrawable.setFocusPoint(focusPoint);
  }

  /**
   * Sets the actual image scale type.
   *
   * @throws UnsupportedOperationException if the actual image branch has no ScaleTypeDrawable
   */
  public void setActualImageScaleType(ScaleType scaleType) {
    Preconditions.checkNotNull(scaleType);
    ScaleTypeDrawable scaleTypeDrawable = findLayerScaleTypeDrawable(mActualImageIndex);
    if (scaleTypeDrawable == null) {
      throw new UnsupportedOperationException("ScaleTypeDrawable not found!");
    }
    scaleTypeDrawable.setScaleType(scaleType);
  }

  /** Sets the color filter to be applied on the actual image. */
  public void setActualImageColorFilter(ColorFilter colorfilter) {
    mFadeDrawable.getDrawable(mActualImageIndex).setColorFilter(colorfilter);
  }

  /**
   * Gets the post-scaling bounds of the actual image.
   *
   * <p> Note: the returned bounds are not cropped.
   * @param outBounds rect to fill with bounds
   */
  public void getActualImageBounds(RectF outBounds) {
    mActualImageSettableDrawable.getTransformedBounds(outBounds);
  }

  /**
   * Sets a new placeholder drawable.
   *
   * <p>The placeholder scale type will not be changed.
   */
  public void setPlaceholderImage(Drawable drawable) {
    if (drawable == null) {
      drawable = getEmptyPlaceholderDrawable();
    }
    drawable = maybeApplyRounding(mRoundingParams, mResources, drawable);
    setLayerChildDrawable(mPlaceholderImageIndex, drawable);
  }

  /**
   * Sets a new placeholder drawable using the supplied resource ID.
   *
   * <p>The placeholder scale type will not be changed.
   * @param resourceId an identifier of an Android drawable or color resource.
   */
  public void setPlaceholderImage(int resourceId) {
    setPlaceholderImage(mResources.getDrawable(resourceId));
  }

  /**
   * Sets the rounding params. The rounding method itself must match the one set at construction
   * time; only the other rounding attributes may change.
   */
  public void setRoundingParams(RoundingParams roundingParams) {
    Preconditions.checkState(
        mRoundingParams != null &&
        roundingParams != null &&
        roundingParams.getRoundingMethod() == mRoundingParams.getRoundingMethod(),
        "Rounding method cannot be changed and it has to be set during construction time.");
    mRoundingParams = roundingParams;
    if (roundingParams.getRoundingMethod() == RoundingParams.RoundingMethod.OVERLAY_COLOR) {
      // with OVERLAY_COLOR, the top-level drawable is the RoundedCornersDrawable wrapper
      RoundedCornersDrawable roundedCornersDrawable = (RoundedCornersDrawable) mTopLevelDrawable;
      roundedCornersDrawable.setCircle(roundingParams.getRoundAsCircle());
      roundedCornersDrawable.setRadii(roundingParams.getCornersRadii());
      roundedCornersDrawable.setOverlayColor(roundingParams.getOverlayColor());
      roundedCornersDrawable.setBorder(
          roundingParams.getBorderColor(),
          roundingParams.getBorderWidth());
    }
  }

  /**
   * Gets the rounding params.
   * @return rounding params
   */
  public RoundingParams getRoundingParams() {
    return mRoundingParams;
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.util;

import com.facebook.presto.metadata.FunctionManager;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.type.Type;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.booleans.BooleanOpenHashSet;
import it.unimi.dsi.fastutil.doubles.DoubleHash;
import it.unimi.dsi.fastutil.doubles.DoubleOpenCustomHashSet;
import it.unimi.dsi.fastutil.longs.LongHash;
import it.unimi.dsi.fastutil.longs.LongOpenCustomHashSet;
import it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.util.Collection;
import java.util.Set;

import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static com.facebook.presto.spi.function.OperatorType.EQUAL;
import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypes;
import static com.google.common.base.Throwables.throwIfInstanceOf;
import static com.google.common.base.Verify.verify;
import static java.lang.Boolean.TRUE;
import static java.lang.Math.toIntExact;

/**
 * Converts generic {@code Set}s into specialized fastutil hash sets that hash and
 * compare elements using the Presto type's own HASH_CODE / EQUAL operators, so that
 * IN-list lookups generated by InCodeGenerator agree with SQL semantics.
 */
public final class FastutilSetHelper
{
    private FastutilSetHelper() {}

    /**
     * Wraps {@code set} in a fastutil set specialized for the Java representation of
     * {@code type} (long, double, boolean, or Object).
     *
     * @throws UnsupportedOperationException for primitive Java types other than
     *         long/double/boolean (no specialized set available)
     */
    @SuppressWarnings("unchecked")
    public static Set<?> toFastutilHashSet(Set<?> set, Type type, FunctionManager functionManager)
    {
        // 0.25 as the load factor is chosen because the argument set is assumed to be small (<10000),
        // and the return set is assumed to be read-heavy.
        // The performance of InCodeGenerator heavily depends on the load factor being small.
        Class<?> javaElementType = type.getJavaType();
        if (javaElementType == long.class) {
            return new LongOpenCustomHashSet((Collection<Long>) set, 0.25f, new LongStrategy(functionManager, type));
        }
        if (javaElementType == double.class) {
            return new DoubleOpenCustomHashSet((Collection<Double>) set, 0.25f, new DoubleStrategy(functionManager, type));
        }
        if (javaElementType == boolean.class) {
            // boolean needs no custom strategy: identity hashing/equality is already correct
            return new BooleanOpenHashSet((Collection<Boolean>) set, 0.25f);
        }
        if (!javaElementType.isPrimitive()) {
            return new ObjectOpenCustomHashSet(set, 0.25f, new ObjectStrategy(functionManager, type));
        }
        throw new UnsupportedOperationException("Unsupported native type in set: " + javaElementType + " with type " + type.getTypeSignature());
    }

    // Static entry points invoked from generated bytecode (InCodeGenerator); keep signatures stable.

    public static boolean in(boolean booleanValue, BooleanOpenHashSet set)
    {
        return set.contains(booleanValue);
    }

    public static boolean in(double doubleValue, DoubleOpenCustomHashSet set)
    {
        return set.contains(doubleValue);
    }

    public static boolean in(long longValue, LongOpenCustomHashSet set)
    {
        return set.contains(longValue);
    }

    public static boolean in(Object objectValue, ObjectOpenCustomHashSet<?> set)
    {
        return set.contains(objectValue);
    }

    /**
     * Hashes/compares primitive longs via the type's HASH_CODE and EQUAL operators.
     * The invokeExact call sites are signature-polymorphic: the exact static argument
     * and cast types are part of the semantics and must not be altered.
     */
    private static final class LongStrategy
            implements LongHash.Strategy
    {
        private final MethodHandle hashCodeHandle;
        private final MethodHandle equalsHandle;

        private LongStrategy(FunctionManager functionManager, Type type)
        {
            hashCodeHandle = functionManager.getBuiltInScalarFunctionImplementation(functionManager.resolveOperator(HASH_CODE, fromTypes(type))).getMethodHandle();
            equalsHandle = functionManager.getBuiltInScalarFunctionImplementation(functionManager.resolveOperator(EQUAL, fromTypes(type, type))).getMethodHandle();
        }

        @Override
        public int hashCode(long value)
        {
            try {
                // Long.hashCode folds the 64-bit operator hash down to an int
                return Long.hashCode((long) hashCodeHandle.invokeExact(value));
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }

        @Override
        public boolean equals(long a, long b)
        {
            try {
                Boolean result = (Boolean) equalsHandle.invokeExact(a, b);
                // FastutilHashSet is not intended be used for indeterminate values lookup
                verify(result != null, "result is null");
                return TRUE.equals(result);
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }
    }

    /**
     * Hashes/compares primitive doubles via the type's HASH_CODE and EQUAL operators.
     */
    private static final class DoubleStrategy
            implements DoubleHash.Strategy
    {
        private final MethodHandle hashCodeHandle;
        private final MethodHandle equalsHandle;

        private DoubleStrategy(FunctionManager functionManager, Type type)
        {
            hashCodeHandle = functionManager.getBuiltInScalarFunctionImplementation(functionManager.resolveOperator(HASH_CODE, fromTypes(type))).getMethodHandle();
            equalsHandle = functionManager.getBuiltInScalarFunctionImplementation(functionManager.resolveOperator(EQUAL, fromTypes(type, type))).getMethodHandle();
        }

        @Override
        public int hashCode(double value)
        {
            try {
                return Long.hashCode((long) hashCodeHandle.invokeExact(value));
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }

        @Override
        public boolean equals(double a, double b)
        {
            try {
                Boolean result = (Boolean) equalsHandle.invokeExact(a, b);
                // FastutilHashSet is not intended be used for indeterminate values lookup
                verify(result != null, "result is null");
                return TRUE.equals(result);
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }
    }

    /**
     * Hashes/compares boxed/slice values via the type's HASH_CODE and EQUAL operators.
     * The handles are adapted with asType so the invokeExact signatures take Object.
     */
    private static final class ObjectStrategy
            implements Hash.Strategy
    {
        private final MethodHandle hashCodeHandle;
        private final MethodHandle equalsHandle;

        private ObjectStrategy(FunctionManager functionManager, Type type)
        {
            hashCodeHandle = functionManager.getBuiltInScalarFunctionImplementation(functionManager.resolveOperator(HASH_CODE, fromTypes(type)))
                    .getMethodHandle()
                    .asType(MethodType.methodType(long.class, Object.class));
            equalsHandle = functionManager.getBuiltInScalarFunctionImplementation(functionManager.resolveOperator(EQUAL, fromTypes(type, type)))
                    .getMethodHandle()
                    .asType(MethodType.methodType(Boolean.class, Object.class, Object.class));
        }

        @Override
        public int hashCode(Object value)
        {
            try {
                // Long.hashCode already returns int; the former toIntExact wrapper was a no-op
                // and is dropped for consistency with LongStrategy/DoubleStrategy.
                return Long.hashCode((long) hashCodeHandle.invokeExact(value));
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }

        @Override
        public boolean equals(Object a, Object b)
        {
            try {
                Boolean result = (Boolean) equalsHandle.invokeExact(a, b);
                // FastutilHashSet is not intended be used for indeterminate values lookup
                verify(result != null, "result is null");
                return TRUE.equals(result);
            }
            catch (Throwable t) {
                throwIfInstanceOf(t, Error.class);
                throwIfInstanceOf(t, PrestoException.class);
                throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.schema;

import java.nio.ByteBuffer;
import java.util.*;
import java.util.stream.Collectors;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.config.*;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.functions.FunctionName;
import org.apache.cassandra.cql3.functions.UDAggregate;
import org.apache.cassandra.cql3.functions.UDFunction;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.compaction.AbstractCompactionStrategy;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.db.rows.RowIterator;
import org.apache.cassandra.db.rows.UnfilteredRowIterators;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.concurrent.OpOrder;

import static java.lang.String.format;
import static org.apache.cassandra.utils.ByteBufferUtil.bytes;
import static org.apache.cassandra.utils.FBUtilities.fromJsonMap;

/**
 * This majestic class performs migration from legacy (pre-3.0) system.schema_* schema tables to the new and glorious
 * system_schema keyspace.
 *
 * The goal is to not lose any information in the migration - including the timestamps.
 */
@SuppressWarnings("deprecation")
public final class LegacySchemaMigrator
{
    private LegacySchemaMigrator()
    {
    }

    private static final Logger logger = LoggerFactory.getLogger(LegacySchemaMigrator.class);

    static final List<CFMetaData> LegacySchemaTables =
        ImmutableList.of(SystemKeyspace.LegacyKeyspaces,
                         SystemKeyspace.LegacyColumnfamilies,
                         SystemKeyspace.LegacyColumns,
                         SystemKeyspace.LegacyTriggers,
                         SystemKeyspace.LegacyUsertypes,
                         SystemKeyspace.LegacyFunctions,
                         SystemKeyspace.LegacyAggregates);

    /**
     * Entry point: reads all keyspace metadata from the legacy tables, rewrites it into the
     * new system_schema keyspace, then truncates and unloads the legacy tables.
     * Aborts early (only unloading) if the legacy tables are empty.
     */
    public static void migrate()
    {
        // read metadata from the legacy schema tables
        Collection<Keyspace> keyspaces = readSchema();

        // if already upgraded, or starting a new 3.0 node, abort early
        if (keyspaces.isEmpty())
        {
            unloadLegacySchemaTables();
            return;
        }

        // write metadata to the new schema tables
        logger.info("Moving {} keyspaces from legacy schema tables to the new schema keyspace ({})",
                    keyspaces.size(),
                    SchemaKeyspace.NAME);
        keyspaces.forEach(LegacySchemaMigrator::storeKeyspaceInNewSchemaTables);

        // flush the new tables before truncating the old ones
        SchemaKeyspace.flush();

        // truncate the original tables (will be snapshotted now, and will have been snapshotted by pre-flight checks)
        logger.info("Truncating legacy schema tables");
        truncateLegacySchemaTables();

        // remove legacy schema tables from Schema, so that their presence doesn't give the users any wrong ideas
        unloadLegacySchemaTables();

        logger.info("Completed migration of legacy schema tables");
    }

    static void unloadLegacySchemaTables()
    {
        KeyspaceMetadata systemKeyspace = Schema.instance.getKSMetaData(SystemKeyspace.NAME);

        Tables systemTables = systemKeyspace.tables;
        for (CFMetaData table : LegacySchemaTables)
            systemTables = systemTables.without(table.cfName);

        LegacySchemaTables.forEach(Schema.instance::unload);

        Schema.instance.setKeyspaceMetadata(systemKeyspace.withSwapped(systemTables));
    }

    private static void truncateLegacySchemaTables()
    {
        LegacySchemaTables.forEach(table -> Schema.instance.getColumnFamilyStoreInstance(table.cfId).truncateBlocking());
    }

    private static void storeKeyspaceInNewSchemaTables(Keyspace keyspace)
    {
        logger.info("Migrating keyspace {}", keyspace);

        Mutation mutation = SchemaKeyspace.makeCreateKeyspaceMutation(keyspace.name, keyspace.params, keyspace.timestamp);
        for (Table table : keyspace.tables)
            SchemaKeyspace.addTableToSchemaMutation(table.metadata, table.timestamp, true, mutation);

        for (Type type : keyspace.types)
            SchemaKeyspace.addTypeToSchemaMutation(type.metadata, type.timestamp, mutation);

        for (Function function : keyspace.functions)
            SchemaKeyspace.addFunctionToSchemaMutation(function.metadata, function.timestamp, mutation);

        for (Aggregate aggregate : keyspace.aggregates)
            SchemaKeyspace.addAggregateToSchemaMutation(aggregate.metadata, aggregate.timestamp, mutation);

        mutation.apply();
    }

    /*
     * Read all keyspaces metadata (including nested tables, types, and functions), with their modification timestamps
     */
    private static Collection<Keyspace> readSchema()
    {
        String query = format("SELECT keyspace_name FROM %s.%s", SystemKeyspace.NAME, SystemKeyspace.LEGACY_KEYSPACES);
        Collection<String> keyspaceNames = new ArrayList<>();
        query(query).forEach(row -> keyspaceNames.add(row.getString("keyspace_name")));
        // system keyspaces are never migrated from the legacy tables
        keyspaceNames.removeAll(Schema.SYSTEM_KEYSPACE_NAMES);

        Collection<Keyspace> keyspaces = new ArrayList<>();
        keyspaceNames.forEach(name -> keyspaces.add(readKeyspace(name)));
        return keyspaces;
    }

    private static Keyspace readKeyspace(String keyspaceName)
    {
        long timestamp = readKeyspaceTimestamp(keyspaceName);
        KeyspaceParams params = readKeyspaceParams(keyspaceName);

        Collection<Table> tables = readTables(keyspaceName);
        Collection<Type> types = readTypes(keyspaceName);
        Collection<Function> functions = readFunctions(keyspaceName);
        Collection<Aggregate> aggregates = readAggregates(keyspaceName);

        return new Keyspace(timestamp, keyspaceName, params, tables, types, functions, aggregates);
    }

    /*
     * Reading keyspace params
     */

    private static long readKeyspaceTimestamp(String keyspaceName)
    {
        String query = format("SELECT writeTime(durable_writes) AS timestamp FROM %s.%s WHERE keyspace_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_KEYSPACES);
        return query(query, keyspaceName).one().getLong("timestamp");
    }

    private static KeyspaceParams readKeyspaceParams(String keyspaceName)
    {
        String query = format("SELECT * FROM %s.%s WHERE keyspace_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_KEYSPACES);
        UntypedResultSet.Row row = query(query, keyspaceName).one();

        boolean durableWrites = row.getBoolean("durable_writes");

        Map<String, String> replication = new HashMap<>();
        replication.putAll(fromJsonMap(row.getString("strategy_options")));
        replication.put(ReplicationParams.CLASS, row.getString("strategy_class"));

        return KeyspaceParams.create(durableWrites, replication);
    }

    /*
     * Reading tables
     */

    private static Collection<Table> readTables(String keyspaceName)
    {
        String query = format("SELECT columnfamily_name FROM %s.%s WHERE keyspace_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_COLUMNFAMILIES);
        Collection<String> tableNames = new ArrayList<>();
        query(query, keyspaceName).forEach(row -> tableNames.add(row.getString("columnfamily_name")));

        Collection<Table> tables = new ArrayList<>();
        tableNames.forEach(name -> tables.add(readTable(keyspaceName, name)));
        return tables;
    }

    private static Table readTable(String keyspaceName, String tableName)
    {
        long timestamp = readTableTimestamp(keyspaceName, tableName);
        CFMetaData metadata = readTableMetadata(keyspaceName, tableName);
        return new Table(timestamp, metadata);
    }

    private static long readTableTimestamp(String keyspaceName, String tableName)
    {
        String query = format("SELECT writeTime(type) AS timestamp FROM %s.%s WHERE keyspace_name = ? AND columnfamily_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_COLUMNFAMILIES);
        return query(query, keyspaceName, tableName).one().getLong("timestamp");
    }

    private static CFMetaData readTableMetadata(String keyspaceName, String tableName)
    {
        String tableQuery = format("SELECT * FROM %s.%s WHERE keyspace_name = ? AND columnfamily_name = ?",
                                   SystemKeyspace.NAME,
                                   SystemKeyspace.LEGACY_COLUMNFAMILIES);
        UntypedResultSet.Row tableRow = query(tableQuery, keyspaceName, tableName).one();

        String columnsQuery = format("SELECT * FROM %s.%s WHERE keyspace_name = ? AND columnfamily_name = ?",
                                     SystemKeyspace.NAME,
                                     SystemKeyspace.LEGACY_COLUMNS);
        UntypedResultSet columnRows = query(columnsQuery, keyspaceName, tableName);

        String triggersQuery = format("SELECT * FROM %s.%s WHERE keyspace_name = ? AND columnfamily_name = ?",
                                      SystemKeyspace.NAME,
                                      SystemKeyspace.LEGACY_TRIGGERS);
        UntypedResultSet triggerRows = query(triggersQuery, keyspaceName, tableName);

        return decodeTableMetadata(tableRow, columnRows, triggerRows);
    }

    /**
     * Reconstructs a 3.0 CFMetaData from the legacy table/columns/triggers rows,
     * upgrading pre-2.1 compact-table layouts on the fly where necessary.
     */
    private static CFMetaData decodeTableMetadata(UntypedResultSet.Row tableRow,
                                                  UntypedResultSet columnRows,
                                                  UntypedResultSet triggerRows)
    {
        String ksName = tableRow.getString("keyspace_name");
        String cfName = tableRow.getString("columnfamily_name");

        AbstractType<?> rawComparator = TypeParser.parse(tableRow.getString("comparator"));
        AbstractType<?> subComparator = tableRow.has("subcomparator") ? TypeParser.parse(tableRow.getString("subcomparator")) : null;

        boolean isSuper = "super".equals(tableRow.getString("type").toLowerCase());
        boolean isDense = tableRow.getBoolean("is_dense");
        boolean isCompound = rawComparator instanceof CompositeType;

        // We don't really use the default validator but as we have it for backward compatibility, we use it to know if it's a counter table
        AbstractType<?> defaultValidator = TypeParser.parse(tableRow.getString("default_validator"));
        boolean isCounter = defaultValidator instanceof CounterColumnType;

        /*
         * With CASSANDRA-5202 we stopped inferring the cf id from the combination of keyspace/table names,
         * and started storing the generated uuids in system.schema_columnfamilies.
         *
         * In 3.0 we SHOULD NOT see tables like that (2.0-created, non-upgraded).
         * But in the off-chance that we do, we generate the deterministic uuid here.
         */
        UUID cfId = tableRow.has("cf_id")
                  ? tableRow.getUUID("cf_id")
                  : CFMetaData.generateLegacyCfId(ksName, cfName);

        boolean isCQLTable = !isSuper && !isDense && isCompound;
        boolean isStaticCompactTable = !isDense && !isCompound;

        // Internally, compact tables have a specific layout, see CompactTables. But when upgrading from
        // previous versions, they may not have the expected schema, so detect if we need to upgrade and do
        // it in createColumnsFromColumnRows.
        // We can remove this once we don't support upgrade from versions < 3.0.
        boolean needsUpgrade = !isCQLTable && checkNeedsUpgrade(columnRows, isSuper, isStaticCompactTable);

        List<ColumnDefinition> columnDefs = createColumnsFromColumnRows(columnRows,
                                                                        ksName,
                                                                        cfName,
                                                                        rawComparator,
                                                                        subComparator,
                                                                        isSuper,
                                                                        isCQLTable,
                                                                        isStaticCompactTable,
                                                                        needsUpgrade);

        Indexes indexes = createIndexesFromColumnRows(columnRows,
                                                      ksName,
                                                      cfName,
                                                      rawComparator,
                                                      subComparator,
                                                      isSuper,
                                                      isCQLTable,
                                                      isStaticCompactTable,
                                                      needsUpgrade);

        if (needsUpgrade)
        {
            addDefinitionForUpgrade(columnDefs,
                                    ksName,
                                    cfName,
                                    isStaticCompactTable,
                                    isSuper,
                                    rawComparator,
                                    subComparator,
                                    defaultValidator);
        }

        CFMetaData cfm = CFMetaData.create(ksName,
                                           cfName,
                                           cfId,
                                           isDense,
                                           isCompound,
                                           isSuper,
                                           isCounter,
                                           false, // legacy schema did not contain views
                                           columnDefs,
                                           DatabaseDescriptor.getPartitioner());

        cfm.indexes(indexes);

        if (tableRow.has("dropped_columns"))
            addDroppedColumns(cfm, rawComparator, tableRow.getMap("dropped_columns", UTF8Type.instance, LongType.instance));

        return cfm.params(decodeTableParams(tableRow))
                  .triggers(createTriggersFromTriggerRows(triggerRows));
    }

    private static TableParams decodeTableParams(UntypedResultSet.Row row)
    {
        TableParams.Builder params = TableParams.builder();

        params.readRepairChance(row.getDouble("read_repair_chance"))
              .dcLocalReadRepairChance(row.getDouble("local_read_repair_chance"))
              .gcGraceSeconds(row.getInt("gc_grace_seconds"));

        if (row.has("comment"))
            params.comment(row.getString("comment"));

        if (row.has("memtable_flush_period_in_ms"))
            params.memtableFlushPeriodInMs(row.getInt("memtable_flush_period_in_ms"));

        params.caching(CachingParams.fromMap(fromJsonMap(row.getString("caching"))));

        if (row.has("default_time_to_live"))
            params.defaultTimeToLive(row.getInt("default_time_to_live"));

        if (row.has("speculative_retry"))
            params.speculativeRetry(SpeculativeRetryParam.fromString(row.getString("speculative_retry")));

        params.compression(CompressionParams.fromMap(fromJsonMap(row.getString("compression_parameters"))));

        params.compaction(compactionFromRow(row));

        if (row.has("min_index_interval"))
            params.minIndexInterval(row.getInt("min_index_interval"));

        if (row.has("max_index_interval"))
            params.maxIndexInterval(row.getInt("max_index_interval"));

        if (row.has("bloom_filter_fp_chance"))
            params.bloomFilterFpChance(row.getDouble("bloom_filter_fp_chance"));

        return params.build();
    }

    /*
     * The method is needed - to migrate max_compaction_threshold and min_compaction_threshold
     * to the compaction map, where they belong.
     *
     * We must use reflection to validate the options because not every compaction strategy respects and supports
     * the threshold params (LCS doesn't, STCS and DTCS do).
     */
    @SuppressWarnings("unchecked")
    private static CompactionParams compactionFromRow(UntypedResultSet.Row row)
    {
        Class<? extends AbstractCompactionStrategy> klass =
            CFMetaData.createCompactionStrategy(row.getString("compaction_strategy_class"));
        Map<String, String> options = fromJsonMap(row.getString("compaction_strategy_options"));

        int minThreshold = row.getInt("min_compaction_threshold");
        int maxThreshold = row.getInt("max_compaction_threshold");

        Map<String, String> optionsWithThresholds = new HashMap<>(options);
        optionsWithThresholds.putIfAbsent(CompactionParams.Option.MIN_THRESHOLD.toString(), Integer.toString(minThreshold));
        optionsWithThresholds.putIfAbsent(CompactionParams.Option.MAX_THRESHOLD.toString(), Integer.toString(maxThreshold));

        try
        {
            Map<String, String> unrecognizedOptions =
                (Map<String, String>) klass.getMethod("validateOptions", Map.class).invoke(null, optionsWithThresholds);

            // only keep the thresholds in the map if the strategy recognizes them
            if (unrecognizedOptions.isEmpty())
                options = optionsWithThresholds;
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        return CompactionParams.create(klass, options);
    }

    // Should only be called on compact tables
    private static boolean checkNeedsUpgrade(UntypedResultSet defs, boolean isSuper, boolean isStaticCompactTable)
    {
        if (isSuper)
        {
            // Check if we've added the "supercolumn map" column yet or not
            for (UntypedResultSet.Row row : defs)
                if (row.getString("column_name").isEmpty())
                    return false;
            return true;
        }

        // For static compact tables, we need to upgrade if the regular definitions haven't been converted to static yet,
        // i.e. if we don't have a static definition yet.
        if (isStaticCompactTable)
            return !hasKind(defs, ColumnDefinition.Kind.STATIC);

        // For dense compact tables, we need to upgrade if we don't have a compact value definition
        return !hasRegularColumns(defs);
    }

    private static boolean hasRegularColumns(UntypedResultSet columnRows)
    {
        for (UntypedResultSet.Row row : columnRows)
        {
            /*
             * We need to special case and ignore the empty compact column (pre-3.0, COMPACT STORAGE, primary-key only tables),
             * since deserializeKind() will otherwise just return a REGULAR.
             * We want the proper EmptyType regular column to be added by addDefinitionForUpgrade(), so we need
             * checkNeedsUpgrade() to return true in this case.
             * See CASSANDRA-9874.
             */
            if (isEmptyCompactValueColumn(row))
                return false;

            if (deserializeKind(row.getString("type")) == ColumnDefinition.Kind.REGULAR)
                return true;
        }

        return false;
    }

    private static boolean isEmptyCompactValueColumn(UntypedResultSet.Row row)
    {
        return "compact_value".equals(row.getString("type")) && row.getString("column_name").isEmpty();
    }

    private static void addDefinitionForUpgrade(List<ColumnDefinition> defs,
                                                String ksName,
                                                String cfName,
                                                boolean isStaticCompactTable,
                                                boolean isSuper,
                                                AbstractType<?> rawComparator,
                                                AbstractType<?> subComparator,
                                                AbstractType<?> defaultValidator)
    {
        CompactTables.DefaultNames names = CompactTables.defaultNameGenerator(defs);

        if (isSuper)
        {
            defs.add(ColumnDefinition.regularDef(ksName, cfName, CompactTables.SUPER_COLUMN_MAP_COLUMN_STR, MapType.getInstance(subComparator, defaultValidator, true)));
        }
        else if (isStaticCompactTable)
        {
            defs.add(ColumnDefinition.clusteringKeyDef(ksName, cfName, names.defaultClusteringName(), rawComparator, null));
            defs.add(ColumnDefinition.regularDef(ksName, cfName, names.defaultCompactValueName(), defaultValidator));
        }
        else
        {
            // For dense compact tables, we get here if we don't have a compact value column, in which case we should add it
            // (we use EmptyType to recognize that the compact value was not declared by the use (see CreateTableStatement too))
            defs.add(ColumnDefinition.regularDef(ksName, cfName, names.defaultCompactValueName(), EmptyType.instance));
        }
    }

    private static boolean hasKind(UntypedResultSet defs, ColumnDefinition.Kind kind)
    {
        for (UntypedResultSet.Row row : defs)
            if (deserializeKind(row.getString("type")) == kind)
                return true;

        return false;
    }

    /*
     * Prior to 3.0 we used to not store the type of the dropped columns, relying on all collection info being
     * present in the comparator, forever. That allowed us to perform certain validations in AlterTableStatement
     * (namely not allowing to re-add incompatible collection columns, with the same name, but a different type).
     *
     * In 3.0, we no longer preserve the original comparator, and reconstruct it from the columns instead. That means
     * that we should preserve the type of the dropped columns now, and, during migration, fetch the types from
     * the original comparator if necessary.
     */
    private static void addDroppedColumns(CFMetaData cfm, AbstractType<?> comparator, Map<String, Long> droppedTimes)
    {
        AbstractType<?> last = comparator.getComponents().get(comparator.componentsCount() - 1);
        Map<ByteBuffer, CollectionType> collections = last instanceof ColumnToCollectionType
                                                    ? ((ColumnToCollectionType) last).defined
                                                    : Collections.emptyMap();

        for (Map.Entry<String, Long> entry : droppedTimes.entrySet())
        {
            String name = entry.getKey();
            ByteBuffer nameBytes = UTF8Type.instance.decompose(name);
            long time = entry.getValue();

            // fall back to BytesType when the original type can no longer be determined
            AbstractType<?> type = collections.containsKey(nameBytes)
                                 ? collections.get(nameBytes)
                                 : BytesType.instance;

            cfm.getDroppedColumns().put(nameBytes, new CFMetaData.DroppedColumn(name, type, time));
        }
    }

    private static List<ColumnDefinition> createColumnsFromColumnRows(UntypedResultSet rows,
                                                                      String keyspace,
                                                                      String table,
                                                                      AbstractType<?> rawComparator,
                                                                      AbstractType<?> rawSubComparator,
                                                                      boolean isSuper,
                                                                      boolean isCQLTable,
                                                                      boolean isStaticCompactTable,
                                                                      boolean needsUpgrade)
    {
        List<ColumnDefinition> columns = new ArrayList<>();

        for (UntypedResultSet.Row row : rows)
        {
            // Skip the empty compact value column. Make addDefinitionForUpgrade() re-add the proper REGULAR one.
            if (isEmptyCompactValueColumn(row))
                continue;

            columns.add(createColumnFromColumnRow(row,
                                                  keyspace,
                                                  table,
                                                  rawComparator,
                                                  rawSubComparator,
                                                  isSuper,
                                                  isCQLTable,
                                                  isStaticCompactTable,
                                                  needsUpgrade));
        }

        return columns;
    }

    private static ColumnDefinition createColumnFromColumnRow(UntypedResultSet.Row row,
                                                              String keyspace,
                                                              String table,
                                                              AbstractType<?> rawComparator,
                                                              AbstractType<?> rawSubComparator,
                                                              boolean isSuper,
                                                              boolean isCQLTable,
                                                              boolean isStaticCompactTable,
                                                              boolean needsUpgrade)
    {
        ColumnDefinition.Kind kind = deserializeKind(row.getString("type"));
        if (needsUpgrade && isStaticCompactTable && kind == ColumnDefinition.Kind.REGULAR)
            kind = ColumnDefinition.Kind.STATIC;

        Integer componentIndex = null;
        // Note that the component_index is not useful for non-primary key parts (it never really in fact since there is
        // no particular ordering of non-PK columns, we only used to use it as a simplification but that's not needed
        // anymore)
        if (kind.isPrimaryKeyKind() && row.has("component_index"))
            componentIndex = row.getInt("component_index");

        // Note: we save the column name as string, but we should not assume that it is an UTF8 name, we
        // we need to use the comparator fromString method
        AbstractType<?> comparator = isCQLTable
                                   ? UTF8Type.instance
                                   : CompactTables.columnDefinitionComparator(kind, isSuper, rawComparator, rawSubComparator);
        ColumnIdentifier name = ColumnIdentifier.getInterned(comparator.fromString(row.getString("column_name")), comparator);

        AbstractType<?> validator = parseType(row.getString("validator"));

        return new ColumnDefinition(keyspace, table, name, validator, componentIndex, kind);
    }

    private static Indexes createIndexesFromColumnRows(UntypedResultSet rows,
                                                       String keyspace,
                                                       String table,
                                                       AbstractType<?> rawComparator,
                                                       AbstractType<?> rawSubComparator,
                                                       boolean isSuper,
                                                       boolean isCQLTable,
                                                       boolean isStaticCompactTable,
                                                       boolean needsUpgrade)
    {
        Indexes.Builder indexes = Indexes.builder();

        for (UntypedResultSet.Row row : rows)
        {
            IndexMetadata.IndexType indexType = null;
            if (row.has("index_type"))
                indexType = IndexMetadata.IndexType.valueOf(row.getString("index_type"));

            // rows without an index_type carry no index definition
            if (indexType == null)
                continue;

            Map<String, String> indexOptions = null;
            if (row.has("index_options"))
                indexOptions = fromJsonMap(row.getString("index_options"));

            String indexName = null;
            if (row.has("index_name"))
                indexName = row.getString("index_name");

            ColumnDefinition column = createColumnFromColumnRow(row,
                                                                keyspace,
                                                                table,
                                                                rawComparator,
                                                                rawSubComparator,
                                                                isSuper,
                                                                isCQLTable,
                                                                isStaticCompactTable,
                                                                needsUpgrade);

            indexes.add(IndexMetadata.legacyIndex(column, indexName, indexType, indexOptions));
        }

        return indexes.build();
    }

    private static ColumnDefinition.Kind deserializeKind(String kind)
    {
        if ("clustering_key".equalsIgnoreCase(kind))
            return ColumnDefinition.Kind.CLUSTERING;

        if ("compact_value".equalsIgnoreCase(kind))
            return ColumnDefinition.Kind.REGULAR;

        return Enum.valueOf(ColumnDefinition.Kind.class, kind.toUpperCase());
    }

    private static Triggers createTriggersFromTriggerRows(UntypedResultSet rows)
    {
        Triggers.Builder triggers = org.apache.cassandra.schema.Triggers.builder();
        rows.forEach(row -> triggers.add(createTriggerFromTriggerRow(row)));
        return triggers.build();
    }

    private static TriggerMetadata createTriggerFromTriggerRow(UntypedResultSet.Row row)
    {
        String name = row.getString("trigger_name");
        String classOption = row.getTextMap("trigger_options").get("class");
        return new TriggerMetadata(name, classOption);
    }

    /*
     * Reading user types
     */

    private static Collection<Type> readTypes(String keyspaceName)
    {
        String query = format("SELECT type_name FROM %s.%s WHERE keyspace_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_USERTYPES);
        Collection<String> typeNames = new ArrayList<>();
        query(query, keyspaceName).forEach(row -> typeNames.add(row.getString("type_name")));

        Collection<Type> types = new ArrayList<>();
        typeNames.forEach(name -> types.add(readType(keyspaceName, name)));
        return types;
    }

    private static Type readType(String keyspaceName, String typeName)
    {
        long timestamp = readTypeTimestamp(keyspaceName, typeName);
        UserType metadata = readTypeMetadata(keyspaceName, typeName);
        return new Type(timestamp, metadata);
    }

    /*
     * Unfortunately there is not a single REGULAR column in system.schema_usertypes, so annoyingly we cannot
     * use the writeTime() CQL function, and must resort to a lower level.
     */
    private static long readTypeTimestamp(String keyspaceName, String typeName)
    {
        ColumnFamilyStore store = org.apache.cassandra.db.Keyspace.open(SystemKeyspace.NAME)
                                                                  .getColumnFamilyStore(SystemKeyspace.LEGACY_USERTYPES);

        ClusteringComparator comparator = store.metadata.comparator;
        Slices slices = Slices.with(comparator, Slice.make(comparator, typeName));
        int nowInSec = FBUtilities.nowInSeconds();
        DecoratedKey key = store.metadata.decorateKey(AsciiType.instance.fromString(keyspaceName));
        SinglePartitionReadCommand command = SinglePartitionSliceCommand.create(store.metadata, nowInSec, key, slices);

        try (OpOrder.Group op = store.readOrdering.start();
             RowIterator partition = UnfilteredRowIterators.filter(command.queryMemtableAndDisk(store, op), nowInSec))
        {
            return partition.next().primaryKeyLivenessInfo().timestamp();
        }
    }

    private static UserType readTypeMetadata(String keyspaceName, String typeName)
    {
        String query = format("SELECT * FROM %s.%s WHERE keyspace_name = ? AND type_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_USERTYPES);
        UntypedResultSet.Row row = query(query, keyspaceName, typeName).one();

        List<ByteBuffer> names =
            row.getList("field_names", UTF8Type.instance)
               .stream()
               .map(ByteBufferUtil::bytes)
               .collect(Collectors.toList());

        List<AbstractType<?>> types =
            row.getList("field_types", UTF8Type.instance)
               .stream()
               .map(LegacySchemaMigrator::parseType)
               .collect(Collectors.toList());

        return new UserType(keyspaceName, bytes(typeName), names, types);
    }

    /*
     * Reading UDFs
     */

    private static Collection<Function> readFunctions(String keyspaceName)
    {
        String query = format("SELECT function_name, signature FROM %s.%s WHERE keyspace_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_FUNCTIONS);
        HashMultimap<String, List<String>> functionSignatures = HashMultimap.create();
        query(query, keyspaceName).forEach(row -> functionSignatures.put(row.getString("function_name"), row.getList("signature", UTF8Type.instance)));

        Collection<Function> functions = new ArrayList<>();
        functionSignatures.entries().forEach(pair -> functions.add(readFunction(keyspaceName, pair.getKey(), pair.getValue())));
        return functions;
    }

    private static Function readFunction(String keyspaceName, String functionName, List<String> signature)
    {
        long timestamp = readFunctionTimestamp(keyspaceName, functionName, signature);
        UDFunction metadata = readFunctionMetadata(keyspaceName, functionName, signature);
        return new Function(timestamp, metadata);
    }

    private static long readFunctionTimestamp(String keyspaceName, String functionName, List<String> signature)
    {
        String query = format("SELECT writeTime(return_type) AS timestamp " +
                              "FROM %s.%s " +
                              "WHERE keyspace_name = ? AND function_name = ? AND signature = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_FUNCTIONS);
        return query(query, keyspaceName, functionName, signature).one().getLong("timestamp");
    }

    private static UDFunction readFunctionMetadata(String keyspaceName, String functionName, List<String> signature)
    {
        String query = format("SELECT * FROM %s.%s WHERE keyspace_name = ? AND function_name = ? AND signature = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_FUNCTIONS);
        UntypedResultSet.Row row = query(query, keyspaceName, functionName, signature).one();

        FunctionName name = new FunctionName(keyspaceName, functionName);

        List<ColumnIdentifier> argNames = new ArrayList<>();
        if (row.has("argument_names"))
            for (String arg : row.getList("argument_names", UTF8Type.instance))
                argNames.add(new ColumnIdentifier(arg, true));

        List<AbstractType<?>> argTypes = new ArrayList<>();
        if (row.has("argument_types"))
            for (String type : row.getList("argument_types", UTF8Type.instance))
                argTypes.add(parseType(type));

        AbstractType<?> returnType = parseType(row.getString("return_type"));

        String language = row.getString("language");
        String body = row.getString("body");
        boolean calledOnNullInput = row.getBoolean("called_on_null_input");

        try
        {
            return UDFunction.create(name, argNames, argTypes, returnType, calledOnNullInput, language, body);
        }
        catch (InvalidRequestException e)
        {
            // keep a placeholder for functions that no longer compile, rather than losing them
            return UDFunction.createBrokenFunction(name, argNames, argTypes, returnType, calledOnNullInput, language, body, e);
        }
    }

    /*
     * Reading UDAs
     */

    private static Collection<Aggregate> readAggregates(String keyspaceName)
    {
        String query = format("SELECT aggregate_name, signature FROM %s.%s WHERE keyspace_name = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_AGGREGATES);
        HashMultimap<String, List<String>> aggregateSignatures = HashMultimap.create();
        query(query, keyspaceName).forEach(row -> aggregateSignatures.put(row.getString("aggregate_name"), row.getList("signature", UTF8Type.instance)));

        Collection<Aggregate> aggregates = new ArrayList<>();
        aggregateSignatures.entries().forEach(pair -> aggregates.add(readAggregate(keyspaceName, pair.getKey(), pair.getValue())));
        return aggregates;
    }

    private static Aggregate readAggregate(String keyspaceName, String aggregateName, List<String> signature)
    {
        long timestamp = readAggregateTimestamp(keyspaceName, aggregateName, signature);
        UDAggregate metadata = readAggregateMetadata(keyspaceName, aggregateName, signature);
        return new Aggregate(timestamp, metadata);
    }

    private static long readAggregateTimestamp(String keyspaceName, String aggregateName, List<String> signature)
    {
        String query = format("SELECT writeTime(return_type) AS timestamp " +
                              "FROM %s.%s " +
                              "WHERE keyspace_name = ? AND aggregate_name = ? AND signature = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_AGGREGATES);
        return query(query, keyspaceName, aggregateName, signature).one().getLong("timestamp");
    }

    private static UDAggregate readAggregateMetadata(String keyspaceName, String aggregateName, List<String> signature)
    {
        // FIX: the legacy aggregates table is keyed by "aggregate_name", not "function_name"
        // (see readAggregates/readAggregateTimestamp above, which already use "aggregate_name");
        // the previous "function_name = ?" predicate could never match a row.
        String query = format("SELECT * FROM %s.%s WHERE keyspace_name = ? AND aggregate_name = ? AND signature = ?",
                              SystemKeyspace.NAME,
                              SystemKeyspace.LEGACY_AGGREGATES);
        UntypedResultSet.Row row = query(query, keyspaceName, aggregateName, signature).one();

        FunctionName name = new FunctionName(keyspaceName, aggregateName);

        List<String> types = row.getList("argument_types", UTF8Type.instance);

        List<AbstractType<?>> argTypes = new ArrayList<>();
        if (types != null)
        {
            argTypes = new ArrayList<>(types.size());
            for (String type : types)
                argTypes.add(parseType(type));
        }

        AbstractType<?> returnType = parseType(row.getString("return_type"));

        FunctionName stateFunc = new FunctionName(keyspaceName, row.getString("state_func"));
        FunctionName finalFunc = row.has("final_func") ? new FunctionName(keyspaceName, row.getString("final_func")) : null;
        AbstractType<?> stateType = row.has("state_type") ? parseType(row.getString("state_type")) : null;
        ByteBuffer initcond = row.has("initcond") ? row.getBytes("initcond") : null;

        try
        {
            return UDAggregate.create(name, argTypes, returnType, stateFunc, finalFunc, stateType, initcond);
        }
        catch (InvalidRequestException reason)
        {
            return UDAggregate.createBroken(name, argTypes, returnType, initcond, reason);
        }
    }

    private static UntypedResultSet query(String query, Object... values)
    {
        return QueryProcessor.executeOnceInternal(query, values);
    }

    private static AbstractType<?> parseType(String str)
    {
        return TypeParser.parse(str);
    }

    // Simple immutable holders pairing each schema entity with its modification timestamp.
    // Note: these intentionally shadow similarly-named classes elsewhere (e.g. o.a.c.db.Keyspace),
    // which is why readTypeTimestamp uses the fully-qualified db.Keyspace.

    private static final class Keyspace
    {
        final long timestamp;
        final String name;
        final KeyspaceParams params;
        final Collection<Table> tables;
        final Collection<Type> types;
        final Collection<Function> functions;
        final Collection<Aggregate> aggregates;

        Keyspace(long timestamp,
                 String name,
                 KeyspaceParams params,
                 Collection<Table> tables,
                 Collection<Type> types,
                 Collection<Function> functions,
                 Collection<Aggregate> aggregates)
        {
            this.timestamp = timestamp;
            this.name = name;
            this.params = params;
            this.tables = tables;
            this.types = types;
            this.functions = functions;
            this.aggregates = aggregates;
        }
    }

    private static final class Table
    {
        final long timestamp;
        final CFMetaData metadata;

        Table(long timestamp, CFMetaData metadata)
        {
            this.timestamp = timestamp;
            this.metadata = metadata;
        }
    }

    private static final class Type
    {
        final long timestamp;
        final UserType metadata;

        Type(long timestamp, UserType metadata)
        {
            this.timestamp = timestamp;
            this.metadata = metadata;
        }
    }

    private static final class Function
    {
        final long timestamp;
        final UDFunction metadata;

        Function(long timestamp, UDFunction metadata)
        {
            this.timestamp = timestamp;
            this.metadata = metadata;
        }
    }

    private static final class Aggregate
    {
        final long timestamp;
        final UDAggregate metadata;

        Aggregate(long timestamp, UDAggregate metadata)
        {
            this.timestamp = timestamp;
            this.metadata = metadata;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.tools.pigstats;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceOper;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.NativeMapReduceOper;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.impl.PigContext;
import org.apache.pig.tools.pigstats.PigStats.JobGraph;

/**
 * A utility class for Pig Statistics
 */
public abstract class PigStatsUtil {

    // Prefix used to build per-store record counter names.
    public static final String MULTI_STORE_RECORD_COUNTER = "Output records in ";
    public static final String MULTI_STORE_COUNTER_GROUP = "MultiStoreCounters";
    public static final String TASK_COUNTER_GROUP = "org.apache.hadoop.mapred.Task$Counter";
    public static final String FS_COUNTER_GROUP = "FileSystemCounters";
    public static final String MAP_INPUT_RECORDS = "MAP_INPUT_RECORDS";
    public static final String MAP_OUTPUT_RECORDS = "MAP_OUTPUT_RECORDS";
    public static final String REDUCE_INPUT_RECORDS = "REDUCE_INPUT_RECORDS";
    public static final String REDUCE_OUTPUT_RECORDS = "REDUCE_OUTPUT_RECORDS";
    public static final String HDFS_BYTES_WRITTEN = "HDFS_BYTES_WRITTEN";
    public static final String HDFS_BYTES_READ = "HDFS_BYTES_READ";
    // Prefix used to build per-input record counter names.
    public static final String MULTI_INPUTS_RECORD_COUNTER = "Input records from ";
    public static final String MULTI_INPUTS_COUNTER_GROUP = "MultiInputCounters";

    private static final Log LOG = LogFactory.getLog(PigStatsUtil.class);

    // Restrict total string size of a counter name to 64 characters.
    // Leave 24 characters for prefix string.
    private static final int COUNTER_NAME_LIMIT = 40;

    private static final String SEPARATOR = "/";
    private static final String SEMICOLON = ";";

    // Matches trailing "tmpNNN" / "tmp-NNN" names produced for temporary files.
    // Compiled once and immutable; safe to share across threads.
    private static final Pattern TMP_FILE_PATTERN =
        Pattern.compile("tmp(-)?[\\d]{1,10}$");

    /**
     * Returns the count for the given counter name in the counter group
     * 'MultiStoreCounters'
     *
     * @param job the MR job
     * @param jobClient the Hadoop job client
     * @param counterName the counter name
     * @return the count of the given counter name, or -1 if it cannot
     *         be retrieved
     */
    @SuppressWarnings("deprecation")
    public static long getMultiStoreCount(Job job, JobClient jobClient,
            String counterName) {
        long value = -1;
        try {
            RunningJob rj = jobClient.getJob(job.getAssignedJobID());
            if (rj != null) {
                // getCounters() may return null (e.g. retired job); guard
                // against an NPE and fall through to the -1 sentinel.
                Counters counters = rj.getCounters();
                if (counters != null) {
                    Counters.Counter counter = counters.getGroup(
                            MULTI_STORE_COUNTER_GROUP).getCounterForName(counterName);
                    value = counter.getValue();
                }
            }
        } catch (IOException e) {
            LOG.warn("Failed to get the counter for " + counterName, e);
        }
        return value;
    }

    /**
     * Returns the counter name for the given {@link POStore}
     *
     * @param store the POStore
     * @return the counter name, or null if no short name can be derived
     *         from the store's file name
     */
    public static String getMultiStoreCounterName(POStore store) {
        String shortName = getShortName(store.getSFile().getFileName());
        return (shortName == null) ? null
                : MULTI_STORE_RECORD_COUNTER + "_" + store.getIndex() + "_" + shortName;
    }

    /**
     * Returns the counter name for the given input file name
     *
     * @param fname the input file name
     * @param index the index of the input
     * @return the counter name, or null if no short name can be derived
     */
    public static String getMultiInputsCounterName(String fname, int index) {
        String shortName = getShortName(fname);
        return (shortName == null) ? null
                : MULTI_INPUTS_RECORD_COUNTER + "_" + index + "_" + shortName;
    }

    /**
     * Derives a short, counter-friendly name from a URI: the path segment
     * after the last '/' (and before a ';' if present), truncated from the
     * left to {@link #COUNTER_NAME_LIMIT} characters.
     */
    private static String getShortName(String uri) {
        int scolon = uri.indexOf(SEMICOLON);
        int slash;
        if (scolon != -1) {
            // Only consider the part of the path before the semicolon.
            slash = uri.lastIndexOf(SEPARATOR, scolon);
        } else {
            slash = uri.lastIndexOf(SEPARATOR);
        }
        String shortName = null;
        if (scolon == -1) {
            shortName = uri.substring(slash + 1);
        }
        if (slash < scolon) {
            shortName = uri.substring(slash + 1, scolon);
        }
        if (shortName != null && shortName.length() > COUNTER_NAME_LIMIT) {
            // Keep the tail: it is the most distinguishing part of the name.
            shortName = shortName.substring(shortName.length() - COUNTER_NAME_LIMIT);
        }
        return shortName;
    }

    /**
     * Starts collecting statistics for the given MR plan
     *
     * @param pc the Pig context
     * @param client the Hadoop job client
     * @param jcc the job compiler
     * @param plan the MR plan
     */
    public static void startCollection(PigContext pc, JobClient client,
            JobControlCompiler jcc, MROperPlan plan) {
        SimplePigStats ps = (SimplePigStats) PigStats.start();
        ps.start(pc, client, jcc, plan);
        ScriptState.get().emitLaunchStartedNotification(plan.size());
    }

    /**
     * Stops collecting statistics for a MR plan
     *
     * @param display if true, log collected statistics in the Pig log
     * file at INFO level
     */
    public static void stopCollection(boolean display) {
        SimplePigStats ps = (SimplePigStats) PigStats.get();
        ps.stop();
        if (!ps.isSuccessful()) {
            LOG.error(ps.getNumberFailedJobs() + " map reduce job(s) failed!");
            String errMsg = ps.getErrorMessage();
            if (errMsg != null) {
                LOG.error("Error message: " + errMsg);
            }
        }
        ScriptState.get().emitLaunchCompletedNotification(
                ps.getNumberSuccessfulJobs());
        if (display) ps.display();
    }

    /**
     * Returns an empty PigStats object
     *
     * @return an empty PigStats object
     */
    public static PigStats getEmptyPigStats() {
        return PigStats.start();
    }

    /**
     * Returns the PigStats with the given return code
     *
     * @param code the return code
     * @return the PigStats with the given return code
     */
    public static PigStats getPigStats(int code) {
        PigStats ps = PigStats.get();
        ps.setReturnCode(code);
        return ps;
    }

    /**
     * Logs the statistics in the Pig log file at INFO level
     */
    public static void displayStatistics() {
        ((SimplePigStats) PigStats.get()).display();
    }

    /**
     * Updates the {@link JobGraph} of the {@link PigStats}. The initial
     * {@link JobGraph} is created without job ids using {@link MROperPlan},
     * before any job is submitted for execution. The {@link JobGraph} then
     * is updated with job ids after jobs are executed.
     *
     * @param jobMroMap the map that maps {@link Job}s to {@link MapReduceOper}s
     */
    public static void updateJobMroMap(Map<Job, MapReduceOper> jobMroMap) {
        SimplePigStats ps = (SimplePigStats) PigStats.get();
        for (Map.Entry<Job, MapReduceOper> entry : jobMroMap.entrySet()) {
            MapReduceOper mro = entry.getValue();
            ps.mapMROperToJob(mro, entry.getKey());
        }
    }

    /**
     * Updates the statistics after a patch of jobs is done
     *
     * @param jc the job control
     */
    public static void accumulateStats(JobControl jc) {
        SimplePigStats ps = (SimplePigStats) PigStats.get();
        ScriptState ss = ScriptState.get();
        for (Job job : jc.getSuccessfulJobs()) {
            JobStats js = addSuccessJobStats(ps, job);
            if (js != null) {
                ss.emitjobFinishedNotification(js);
            }
        }
        for (Job job : jc.getFailedJobs()) {
            JobStats js = addFailedJobStats(ps, job);
            if (js != null) {
                js.setErrorMsg(job.getMessage());
                ss.emitJobFailedNotification(js);
            }
        }
    }

    /** Records an error message on the current PigStats. */
    public static void setErrorMessage(String msg) {
        PigStats.get().setErrorMessage(msg);
    }

    /** Records an error code on the current PigStats. */
    public static void setErrorCode(int code) {
        PigStats.get().setErrorCode(code);
    }

    /** Associates a backend exception with the given job's statistics. */
    public static void setBackendException(Job job, Exception e) {
        ((SimplePigStats) PigStats.get()).setBackendException(job, e);
    }

    /**
     * Returns true if the given file name looks like one of Pig's
     * temporary files ("tmpNNN" / "tmp-NNN" suffix).
     */
    public static boolean isTempFile(String fileName) {
        Matcher result = TMP_FILE_PATTERN.matcher(fileName);
        return result.find();
    }

    /**
     * Adds stats for a failed job; returns null if the job was already seen
     * or the stats could not be created.
     */
    private static JobStats addFailedJobStats(SimplePigStats ps, Job job) {
        if (ps.isJobSeen(job)) return null;

        JobStats js = ps.addJobStats(job);
        if (js == null) {
            LOG.warn("unable to add failed job stats");
        } else {
            js.setSuccessful(false);
            js.addOutputStatistics();
            js.addInputStatistics();
        }
        return js;
    }

    /** Convenience overload of {@link #addNativeJobStats(PigStats, NativeMapReduceOper, boolean, Exception)}. */
    public static JobStats addNativeJobStats(PigStats ps, NativeMapReduceOper mr,
            boolean success) {
        return addNativeJobStats(ps, mr, success, null);
    }

    /** Installs an {@link EmbeddedPigStats} built from the given map as the current stats. */
    public static void setStatsMap(Map<String, List<PigStats>> statsMap) {
        EmbeddedPigStats stats = new EmbeddedPigStats(statsMap);
        PigStats.set(stats);
    }

    /**
     * Adds stats for a native MR job.
     *
     * @param ps the current PigStats (must not be an embedded instance)
     * @param mr the native MR operator
     * @param success whether the job succeeded
     * @param e optional backend exception to attach (may be null)
     * @return the created JobStats, or null if it could not be created
     * @throws IllegalArgumentException if ps is an embedded PigStats
     */
    public static JobStats addNativeJobStats(PigStats ps, NativeMapReduceOper mr,
            boolean success, Exception e) {
        if (ps.isEmbedded()) {
            throw new IllegalArgumentException();
        }
        JobStats js = ((SimplePigStats) ps).addJobStatsForNative(mr);
        if (js == null) {
            LOG.warn("unable to add native job stats");
        } else {
            js.setSuccessful(success);
            if (e != null)
                js.setBackendException(e);
        }
        return js;
    }

    /**
     * Adds stats for a successful job, including MR and counter statistics;
     * returns null if the job was already seen or stats could not be created.
     */
    private static JobStats addSuccessJobStats(SimplePigStats ps, Job job) {
        if (ps.isJobSeen(job)) return null;

        JobStats js = ps.addJobStats(job);
        if (js == null) {
            LOG.warn("unable to add job stats");
        } else {
            js.setSuccessful(true);

            js.addMapReduceStatistics(ps.getJobClient(), job.getJobConf());

            JobClient client = ps.getJobClient();
            RunningJob rjob = null;
            try {
                rjob = client.getJob(job.getAssignedJobID());
            } catch (IOException e) {
                LOG.warn("Failed to get running job", e);
            }
            if (rjob == null) {
                LOG.warn("Failed to get RunningJob for job "
                        + job.getAssignedJobID());
            } else {
                js.addCounters(rjob);
            }

            js.addOutputStatistics();
            js.addInputStatistics();
        }
        return js;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.http.server;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.XAttrCodec;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem.Operation;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.wsrs.BooleanParam;
import org.apache.hadoop.lib.wsrs.EnumParam;
import org.apache.hadoop.lib.wsrs.EnumSetParam;
import org.apache.hadoop.lib.wsrs.LongParam;
import org.apache.hadoop.lib.wsrs.Param;
import org.apache.hadoop.lib.wsrs.ParametersProvider;
import org.apache.hadoop.lib.wsrs.ShortParam;
import org.apache.hadoop.lib.wsrs.StringParam;
import org.apache.hadoop.util.StringUtils;

import javax.ws.rs.ext.Provider;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * HttpFS ParametersProvider.
 */
@Provider
@InterfaceAudience.Private
@SuppressWarnings("unchecked")
public class HttpFSParametersProvider extends ParametersProvider {

  // Maps each HttpFS operation to the parameter classes it accepts.
  private static final Map<Enum, Class<Param<?>>[]> PARAMS_DEF =
      new HashMap<Enum, Class<Param<?>>[]>();

  static {
    PARAMS_DEF.put(Operation.OPEN,
        new Class[]{OffsetParam.class, LenParam.class});
    PARAMS_DEF.put(Operation.GETFILESTATUS, new Class[]{});
    PARAMS_DEF.put(Operation.LISTSTATUS, new Class[]{FilterParam.class});
    PARAMS_DEF.put(Operation.GETHOMEDIRECTORY, new Class[]{});
    PARAMS_DEF.put(Operation.GETCONTENTSUMMARY, new Class[]{});
    PARAMS_DEF.put(Operation.GETFILECHECKSUM, new Class[]{});
    PARAMS_DEF.put(Operation.GETFILEBLOCKLOCATIONS, new Class[]{});
    PARAMS_DEF.put(Operation.GETACLSTATUS, new Class[]{});
    PARAMS_DEF.put(Operation.GETTRASHROOT, new Class[]{});
    PARAMS_DEF.put(Operation.INSTRUMENTATION, new Class[]{});
    PARAMS_DEF.put(Operation.APPEND, new Class[]{DataParam.class});
    PARAMS_DEF.put(Operation.CONCAT, new Class[]{SourcesParam.class});
    PARAMS_DEF.put(Operation.TRUNCATE, new Class[]{NewLengthParam.class});
    PARAMS_DEF.put(Operation.CREATE,
        new Class[]{PermissionParam.class, OverwriteParam.class,
            ReplicationParam.class, BlockSizeParam.class, DataParam.class,
            UnmaskedPermissionParam.class});
    PARAMS_DEF.put(Operation.MKDIRS, new Class[]{PermissionParam.class,
        UnmaskedPermissionParam.class});
    PARAMS_DEF.put(Operation.RENAME, new Class[]{DestinationParam.class});
    PARAMS_DEF.put(Operation.SETOWNER,
        new Class[]{OwnerParam.class, GroupParam.class});
    PARAMS_DEF.put(Operation.SETPERMISSION, new Class[]{PermissionParam.class});
    PARAMS_DEF.put(Operation.SETREPLICATION,
        new Class[]{ReplicationParam.class});
    PARAMS_DEF.put(Operation.SETTIMES,
        new Class[]{ModifiedTimeParam.class, AccessTimeParam.class});
    PARAMS_DEF.put(Operation.DELETE, new Class[]{RecursiveParam.class});
    PARAMS_DEF.put(Operation.SETACL, new Class[]{AclPermissionParam.class});
    PARAMS_DEF.put(Operation.REMOVEACL, new Class[]{});
    PARAMS_DEF.put(Operation.MODIFYACLENTRIES,
        new Class[]{AclPermissionParam.class});
    PARAMS_DEF.put(Operation.REMOVEACLENTRIES,
        new Class[]{AclPermissionParam.class});
    PARAMS_DEF.put(Operation.REMOVEDEFAULTACL, new Class[]{});
    PARAMS_DEF.put(Operation.SETXATTR,
        new Class[]{XAttrNameParam.class, XAttrValueParam.class,
            XAttrSetFlagParam.class});
    PARAMS_DEF.put(Operation.REMOVEXATTR, new Class[]{XAttrNameParam.class});
    PARAMS_DEF.put(Operation.GETXATTRS,
        new Class[]{XAttrNameParam.class, XAttrEncodingParam.class});
    PARAMS_DEF.put(Operation.LISTXATTRS, new Class[]{});
    PARAMS_DEF.put(Operation.LISTSTATUS_BATCH,
        new Class[]{StartAfterParam.class});
    PARAMS_DEF.put(Operation.GETALLSTORAGEPOLICY, new Class[] {});
    PARAMS_DEF.put(Operation.GETSTORAGEPOLICY, new Class[] {});
    PARAMS_DEF.put(Operation.SETSTORAGEPOLICY,
        new Class[] {PolicyNameParam.class});
    PARAMS_DEF.put(Operation.UNSETSTORAGEPOLICY, new Class[] {});
    PARAMS_DEF.put(Operation.ALLOWSNAPSHOT, new Class[] {});
    PARAMS_DEF.put(Operation.DISALLOWSNAPSHOT, new Class[] {});
    PARAMS_DEF.put(Operation.CREATESNAPSHOT,
        new Class[] {SnapshotNameParam.class});
    PARAMS_DEF.put(Operation.DELETESNAPSHOT,
        new Class[] {SnapshotNameParam.class});
    PARAMS_DEF.put(Operation.RENAMESNAPSHOT,
        new Class[] {OldSnapshotNameParam.class, SnapshotNameParam.class});
    PARAMS_DEF.put(Operation.GETSNAPSHOTDIFF,
        new Class[] {OldSnapshotNameParam.class, SnapshotNameParam.class});
    PARAMS_DEF.put(Operation.GETSNAPSHOTTABLEDIRECTORYLIST, new Class[] {});
  }

  public HttpFSParametersProvider() {
    super(HttpFSFileSystem.OP_PARAM, HttpFSFileSystem.Operation.class,
          PARAMS_DEF);
  }

  /**
   * Class for access-time parameter.
   */
  @InterfaceAudience.Private
  public static class AccessTimeParam extends LongParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.ACCESS_TIME_PARAM;

    /**
     * Constructor.
     */
    public AccessTimeParam() {
      super(NAME, -1L);
    }
  }

  /**
   * Class for block-size parameter.
   */
  @InterfaceAudience.Private
  public static class BlockSizeParam extends LongParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.BLOCKSIZE_PARAM;

    /**
     * Constructor.
     */
    public BlockSizeParam() {
      super(NAME, -1L);
    }
  }

  /**
   * Class for data parameter.
   */
  @InterfaceAudience.Private
  public static class DataParam extends BooleanParam {

    /**
     * Parameter name.
     */
    public static final String NAME = "data";

    /**
     * Constructor.
     */
    public DataParam() {
      super(NAME, false);
    }
  }

  /**
   * Class for operation parameter.
   */
  @InterfaceAudience.Private
  public static class OperationParam
      extends EnumParam<HttpFSFileSystem.Operation> {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.OP_PARAM;

    /**
     * Constructor.
     */
    public OperationParam(String operation) {
      super(NAME, HttpFSFileSystem.Operation.class,
            HttpFSFileSystem.Operation.valueOf(
                StringUtils.toUpperCase(operation)));
    }
  }

  /**
   * Class for delete's recursive parameter.
   */
  @InterfaceAudience.Private
  public static class RecursiveParam extends BooleanParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.RECURSIVE_PARAM;

    /**
     * Constructor.
     */
    public RecursiveParam() {
      super(NAME, false);
    }
  }

  /**
   * Class for filter parameter.
   */
  @InterfaceAudience.Private
  public static class FilterParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = "filter";

    /**
     * Constructor.
     */
    public FilterParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for group parameter.
   */
  @InterfaceAudience.Private
  public static class GroupParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.GROUP_PARAM;

    /**
     * Constructor.
     */
    public GroupParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for len parameter.
   */
  @InterfaceAudience.Private
  public static class LenParam extends LongParam {

    /**
     * Parameter name.
     */
    public static final String NAME = "length";

    /**
     * Constructor.
     */
    public LenParam() {
      super(NAME, -1L);
    }
  }

  /**
   * Class for modified-time parameter.
   */
  @InterfaceAudience.Private
  public static class ModifiedTimeParam extends LongParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.MODIFICATION_TIME_PARAM;

    /**
     * Constructor.
     */
    public ModifiedTimeParam() {
      super(NAME, -1L);
    }
  }

  /**
   * Class for offset parameter.
   */
  @InterfaceAudience.Private
  public static class OffsetParam extends LongParam {

    /**
     * Parameter name.
     */
    public static final String NAME = "offset";

    /**
     * Constructor.
     */
    public OffsetParam() {
      super(NAME, 0L);
    }
  }

  /**
   * Class for newlength parameter.
   */
  @InterfaceAudience.Private
  public static class NewLengthParam extends LongParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.NEW_LENGTH_PARAM;

    /**
     * Constructor.
     */
    public NewLengthParam() {
      super(NAME, 0L);
    }
  }

  /**
   * Class for overwrite parameter.
   */
  @InterfaceAudience.Private
  public static class OverwriteParam extends BooleanParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.OVERWRITE_PARAM;

    /**
     * Constructor.
     */
    public OverwriteParam() {
      super(NAME, true);
    }
  }

  /**
   * Class for owner parameter.
   */
  @InterfaceAudience.Private
  public static class OwnerParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.OWNER_PARAM;

    /**
     * Constructor.
     */
    public OwnerParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for permission parameter.
   */
  @InterfaceAudience.Private
  public static class PermissionParam extends ShortParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.PERMISSION_PARAM;

    /**
     * Constructor.
     */
    public PermissionParam() {
      // Radix 8: the permission is an octal string (e.g. "755").
      super(NAME, HttpFSFileSystem.DEFAULT_PERMISSION, 8);
    }
  }

  /**
   * Class for unmaskedpermission parameter.
   */
  @InterfaceAudience.Private
  public static class UnmaskedPermissionParam extends ShortParam {

    /**
     * Parameter name.
     */
    public static final String NAME =
        HttpFSFileSystem.UNMASKED_PERMISSION_PARAM;

    /**
     * Constructor.
     */
    public UnmaskedPermissionParam() {
      // Radix 8: the permission is an octal string; -1 means "not set".
      super(NAME, (short) -1, 8);
    }
  }

  /**
   * Class for AclPermission parameter.
   */
  @InterfaceAudience.Private
  public static class AclPermissionParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.ACLSPEC_PARAM;

    /**
     * Constructor.
     */
    public AclPermissionParam() {
      // Validation pattern comes from the server-side HDFS configuration.
      super(NAME, HttpFSFileSystem.ACLSPEC_DEFAULT,
          Pattern.compile(HttpFSServerWebApp.get()
              .get(FileSystemAccess.class)
              .getFileSystemConfiguration()
              .get(HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_KEY,
                  HdfsClientConfigKeys
                      .DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT)));
    }
  }

  /**
   * Class for replication parameter.
   */
  @InterfaceAudience.Private
  public static class ReplicationParam extends ShortParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.REPLICATION_PARAM;

    /**
     * Constructor.
     */
    public ReplicationParam() {
      super(NAME, (short) -1);
    }
  }

  /**
   * Class for concat sources parameter.
   */
  @InterfaceAudience.Private
  public static class SourcesParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.SOURCES_PARAM;

    /**
     * Constructor.
     */
    public SourcesParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for to-path parameter.
   */
  @InterfaceAudience.Private
  public static class DestinationParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.DESTINATION_PARAM;

    /**
     * Constructor.
     */
    public DestinationParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for xattr parameter.
   */
  @InterfaceAudience.Private
  public static class XAttrNameParam extends StringParam {
    // XAttr names must be prefixed with one of the recognized namespaces.
    public static final String XATTR_NAME_REGX =
        "^(user\\.|trusted\\.|system\\.|security\\.).+";
    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.XATTR_NAME_PARAM;
    private static final Pattern pattern = Pattern.compile(XATTR_NAME_REGX);

    /**
     * Constructor.
     */
    public XAttrNameParam() {
      super(NAME, null, pattern);
    }
  }

  /**
   * Class for xattr parameter.
   */
  @InterfaceAudience.Private
  public static class XAttrValueParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.XATTR_VALUE_PARAM;

    /**
     * Constructor.
     */
    public XAttrValueParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for xattr parameter.
   */
  @InterfaceAudience.Private
  public static class XAttrSetFlagParam extends EnumSetParam<XAttrSetFlag> {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.XATTR_SET_FLAG_PARAM;

    /**
     * Constructor.
     */
    public XAttrSetFlagParam() {
      super(NAME, XAttrSetFlag.class, null);
    }
  }

  /**
   * Class for xattr parameter.
   */
  @InterfaceAudience.Private
  public static class XAttrEncodingParam extends EnumParam<XAttrCodec> {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.XATTR_ENCODING_PARAM;

    /**
     * Constructor.
     */
    public XAttrEncodingParam() {
      super(NAME, XAttrCodec.class, null);
    }
  }

  /**
   * Class for startafter parameter.
   */
  @InterfaceAudience.Private
  public static class StartAfterParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.START_AFTER_PARAM;

    /**
     * Constructor.
     */
    public StartAfterParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for policyName parameter.
   */
  @InterfaceAudience.Private
  public static class PolicyNameParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.POLICY_NAME_PARAM;

    /**
     * Constructor.
     */
    public PolicyNameParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for SnapshotName parameter.
   */
  @InterfaceAudience.Private
  public static class SnapshotNameParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.SNAPSHOT_NAME_PARAM;

    /**
     * Constructor.
     */
    public SnapshotNameParam() {
      super(NAME, null);
    }
  }

  /**
   * Class for OldSnapshotName parameter.
   */
  @InterfaceAudience.Private
  public static class OldSnapshotNameParam extends StringParam {

    /**
     * Parameter name.
     */
    public static final String NAME = HttpFSFileSystem.OLD_SNAPSHOT_NAME_PARAM;

    /**
     * Constructor.
     */
    public OldSnapshotNameParam() {
      super(NAME, null);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.github.cxfplus.jaxbplus; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.List; import javax.xml.bind.JAXBElement; import org.apache.cxf.common.util.ASMHelper; import org.apache.cxf.databinding.WrapperHelper; final class WrapperHelperCompiler extends ASMHelper { final Class<?> wrapperType; final Method setMethods[]; final Method getMethods[]; final Method jaxbMethods[]; final Field fields[]; final Object objectFactory; final ClassWriter cw; private WrapperHelperCompiler(Class<?> wrapperType, Method setMethods[], Method getMethods[], Method jaxbMethods[], Field fields[], Object objectFactory) { this.wrapperType = wrapperType; this.setMethods = setMethods; this.getMethods = getMethods; this.jaxbMethods = jaxbMethods; this.fields = fields; this.objectFactory = objectFactory; cw = createClassWriter(); } static WrapperHelper compileWrapperHelper(Class<?> wrapperType, Method setMethods[], Method getMethods[], Method jaxbMethods[], Field fields[], Object objectFactory) { try { return new WrapperHelperCompiler(wrapperType, setMethods, getMethods, jaxbMethods, fields, objectFactory).compile(); } catch (Throwable t) { // Some error - probably a bad version of 
// NOTE(review): the tokens on the next line are the tail of a definition that begins
// before this chunk; left byte-identical.
ASM or similar } return null; }

/**
 * Builds (or reuses) a generated {@code WrapperHelper} implementation for {@code wrapperType}.
 * First probes for an already-loaded helper class whose stored signature matches
 * {@link #computeSignature()}; otherwise emits a new class with ASM, loads it, and
 * instantiates it. Returns {@code null} on any failure so the caller can fall back to
 * reflection.
 */
public WrapperHelper compile() {
    if (cw == null) {
        return null;
    }
    int count = 1;
    String newClassName = wrapperType.getName() + "_WrapperTypeHelper" + count;
    newClassName = newClassName.replaceAll("\\$", ".");
    newClassName = periodToSlashes(newClassName);
    // Probe successive "<wrapper>_WrapperTypeHelperN" names until one is unused or matches.
    Class<?> cls = super.findClass(newClassName.replace('/', '.'), wrapperType);
    while (cls != null) {
        try {
            WrapperHelper helper = WrapperHelper.class.cast(cls.newInstance());
            if (!helper.getSignature().equals(computeSignature())) {
                // Same name but different shape (e.g. wrapper class changed): try next suffix.
                count++;
                newClassName = wrapperType.getName() + "_WrapperTypeHelper" + count;
                newClassName = newClassName.replaceAll("\\$", ".");
                newClassName = periodToSlashes(newClassName);
                cls = super.findClass(newClassName.replace('/', '.'), wrapperType);
            } else {
                // Existing generated helper matches: reuse it.
                return helper;
            }
        } catch (Exception e) {
            return null;
        }
    }
    // Emit: public class <newClassName> implements WrapperHelper (Java 5 classfile).
    cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER,
             newClassName, null, "java/lang/Object",
             new String[] {periodToSlashes(WrapperHelper.class.getName())});

    addConstructor(newClassName, objectFactory == null ? null : objectFactory.getClass());
    boolean b = addSignature();
    if (b) {
        b = addCreateWrapperObject(newClassName,
                                   objectFactory == null ? null : objectFactory.getClass());
    }
    if (b) {
        b = addGetWrapperParts(newClassName, wrapperType);
    }
    try {
        if (b) {
            cw.visitEnd();
            byte bt[] = cw.toByteArray();
            Class<?> cl = loadClass(newClassName.replace('/', '.'), wrapperType, bt);
            Object o = cl.newInstance();
            return WrapperHelper.class.cast(o);
        }
    } catch (Throwable e) {
        // ignore, we'll just fall down to reflection based
    }
    return null;
}

/**
 * Computes a stable fingerprint of the wrapper shape: the setter count followed by each
 * getter's name and return type ("null," for a missing getter). Used to decide whether a
 * previously generated helper can be reused.
 */
private String computeSignature() {
    StringBuilder b = new StringBuilder();
    b.append(setMethods.length).append(':');
    for (int x = 0; x < setMethods.length; x++) {
        if (getMethods[x] == null) {
            b.append("null,");
        } else {
            b.append(getMethods[x].getName()).append('/');
            b.append(getMethods[x].getReturnType().getName()).append(',');
        }
    }
    return b.toString();
}

/** Emits {@code public String getSignature()} returning the signature as a constant. */
private boolean addSignature() {
    String sig = computeSignature();
    MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "getSignature",
                                      "()Ljava/lang/String;", null, null);
    mv.visitCode();
    mv.visitLdcInsn(sig);
    Label l0 = createLabel();
    mv.visitLabel(l0);
    mv.visitLineNumber(100, l0);
    mv.visitInsn(Opcodes.ARETURN);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
    return true;
}

/**
 * Emits the no-arg constructor; when an object factory is in use, also emits a
 * {@code factory} field and initializes it with {@code new <ObjectFactory>()}.
 */
private void addConstructor(String newClassName, Class<?> objectFactoryCls) {
    if (objectFactoryCls != null) {
        String ofName = "L" + periodToSlashes(objectFactoryCls.getName()) + ";";
        FieldVisitor fv = cw.visitField(0, "factory", ofName, null, null);
        fv.visitEnd();
    }
    MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null);
    mv.visitCode();
    Label l0 = createLabel();
    mv.visitLabel(l0);
    mv.visitLineNumber(102, l0);
    mv.visitVarInsn(Opcodes.ALOAD, 0);
    mv.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
    if (objectFactoryCls != null) {
        // this.factory = new <ObjectFactory>();
        mv.visitVarInsn(Opcodes.ALOAD, 0);
        mv.visitTypeInsn(Opcodes.NEW, periodToSlashes(objectFactoryCls.getName()));
        mv.visitInsn(Opcodes.DUP);
        mv.visitMethodInsn(Opcodes.INVOKESPECIAL, periodToSlashes(objectFactoryCls.getName()),
                           "<init>", "()V", false);
        mv.visitFieldInsn(Opcodes.PUTFIELD, periodToSlashes(newClassName),
                          "factory", "L" + periodToSlashes(objectFactoryCls.getName()) + ";");
    }
    mv.visitInsn(Opcodes.RETURN);
    Label l1 = createLabel();
    mv.visitLabel(l1);
    // NOTE(review): line number is attached to l0, not l1 — looks like a copy/paste slip,
    // but it only affects debug info; left as-is.
    mv.visitLineNumber(103, l0);
    mv.visitLocalVariable("this", "L" + newClassName + ";", null, l0, l1, 0);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}

/**
 * Emits {@code public Object createWrapperObject(List lst)}: instantiates the wrapper type
 * and, for each part, loads {@code lst.get(x)}, unboxes/casts/wraps it as needed
 * (primitive unboxing, JAXBElement via the object factory, arrays, plain casts) and calls
 * the corresponding setter. Returns {@code false} if a part has no usable accessor, telling
 * the caller to fall back to reflection.
 */
private boolean addCreateWrapperObject(String newClassName, Class<?> objectFactoryClass) {
    MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "createWrapperObject",
                                      "(Ljava/util/List;)Ljava/lang/Object;",
                                      "(Ljava/util/List<*>;)Ljava/lang/Object;",
                                      new String[] { "org/apache/cxf/interceptor/Fault" });
    mv.visitCode();
    Label lBegin = createLabel();
    mv.visitLabel(lBegin);
    mv.visitLineNumber(104, lBegin);
    // local 2 = new <wrapperType>()
    mv.visitTypeInsn(Opcodes.NEW, periodToSlashes(wrapperType.getName()));
    mv.visitInsn(Opcodes.DUP);
    mv.visitMethodInsn(Opcodes.INVOKESPECIAL, periodToSlashes(wrapperType.getName()),
                       "<init>", "()V", false);
    mv.visitVarInsn(Opcodes.ASTORE, 2);

    for (int x = 0; x < setMethods.length; x++) {
        if (getMethods[x] == null) {
            if (setMethods[x] == null && fields[x] == null) {
                // null placeholder
                continue;
            } else {
                return false;
            }
        }
        Class<?> tp = getMethods[x].getReturnType();
        mv.visitVarInsn(Opcodes.ALOAD, 2);

        if (List.class.isAssignableFrom(tp)) {
            doCollection(mv, x);
        } else {
            if (JAXBElement.class.isAssignableFrom(tp)) {
                // Push this.factory so the ObjectFactory create method can be invoked below.
                mv.visitVarInsn(Opcodes.ALOAD, 0);
                mv.visitFieldInsn(Opcodes.GETFIELD, periodToSlashes(newClassName),
                                  "factory",
                                  "L" + periodToSlashes(objectFactoryClass.getName()) + ";");
            }
            // lst.get(x)
            mv.visitVarInsn(Opcodes.ALOAD, 1);
            mv.visitIntInsn(Opcodes.SIPUSH, x);
            mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/List",
                               "get", "(I)Ljava/lang/Object;", true);
            if (tp.isPrimitive()) {
                // Unbox: cast to the wrapper class then call e.g. intValue().
                mv.visitTypeInsn(Opcodes.CHECKCAST, NONPRIMITIVE_MAP.get(tp));
                mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, NONPRIMITIVE_MAP.get(tp),
                                   tp.getName() + "Value",
                                   "()" + PRIMITIVE_MAP.get(tp), false);
            } else if (JAXBElement.class.isAssignableFrom(tp)) {
                mv.visitTypeInsn(Opcodes.CHECKCAST,
                                 periodToSlashes(jaxbMethods[x].getParameterTypes()[0].getName()));
                mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                                   periodToSlashes(objectFactoryClass.getName()),
                                   jaxbMethods[x].getName(),
                                   getMethodSignature(jaxbMethods[x]), false);
            } else if (tp.isArray()) {
                mv.visitTypeInsn(Opcodes.CHECKCAST, getClassCode(tp));
            } else {
                mv.visitTypeInsn(Opcodes.CHECKCAST, periodToSlashes(tp.getName()));
            }
            // wrapper.set<Part>(value)
            mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, periodToSlashes(wrapperType.getName()),
                               setMethods[x].getName(),
                               "(" + getClassCode(tp) + ")V", false);
        }
    }
    mv.visitVarInsn(Opcodes.ALOAD, 2);
    mv.visitInsn(Opcodes.ARETURN);

    Label lEnd = createLabel();
    mv.visitLabel(lEnd);
    mv.visitLocalVariable("this", "L" + newClassName + ";", null, lBegin, lEnd, 0);
    mv.visitLocalVariable("lst", "Ljava/util/List;", "Ljava/util/List<*>;", lBegin, lEnd, 1);
    mv.visitLocalVariable("ok", "L" + periodToSlashes(wrapperType.getName()) + ";",
                          null, lBegin, lEnd, 2);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
    return true;
}

/**
 * Emits the List-typed part handling for createWrapperObject. Generated logic:
 */
private void doCollection(MethodVisitor mv, int x) {
    // List aVal = obj.getA();
    // List newA = (List)lst.get(99);
    // if (aVal == null) {
    // obj.setA(newA);
    // } else if (newA != null) {
    // aVal.addAll(newA);
    // }
    Label l3 = createLabel();
    mv.visitLabel(l3);
    mv.visitLineNumber(114, l3);
    mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, periodToSlashes(wrapperType.getName()),
                       getMethods[x].getName(), getMethodSignature(getMethods[x]), false);
    mv.visitVarInsn(Opcodes.ASTORE, 3);
    mv.visitVarInsn(Opcodes.ALOAD, 1);
    mv.visitIntInsn(Opcodes.SIPUSH, x);
    mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/List",
                       "get", "(I)Ljava/lang/Object;", true);
    mv.visitTypeInsn(Opcodes.CHECKCAST, "java/util/List");
    mv.visitVarInsn(Opcodes.ASTORE, 4);
    mv.visitVarInsn(Opcodes.ALOAD, 3);
    Label nonNullLabel = createLabel();
    mv.visitJumpInsn(Opcodes.IFNONNULL, nonNullLabel);
    if (setMethods[x] == null) {
        // No setter available and getter returned null: generated code throws at runtime.
        mv.visitTypeInsn(Opcodes.NEW, "java/lang/RuntimeException");
        mv.visitInsn(Opcodes.DUP);
        mv.visitLdcInsn(getMethods[x].getName() + " returned null and there isn't a set method.");
        mv.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/RuntimeException",
                           "<init>", "(Ljava/lang/String;)V", false);
        mv.visitInsn(Opcodes.ATHROW);
    } else {
        mv.visitVarInsn(Opcodes.ALOAD, 2);
        mv.visitVarInsn(Opcodes.ALOAD, 4);
        mv.visitTypeInsn(Opcodes.CHECKCAST,
                         getMethods[x].getReturnType().getName().replace('.', '/'));
        mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, periodToSlashes(wrapperType.getName()),
                           setMethods[x].getName(), getMethodSignature(setMethods[x]), false);
    }
    Label jumpOverLabel = createLabel();
    mv.visitJumpInsn(Opcodes.GOTO, jumpOverLabel);
    mv.visitLabel(nonNullLabel);
    mv.visitLineNumber(106, nonNullLabel);
    mv.visitVarInsn(Opcodes.ALOAD, 4);
    mv.visitJumpInsn(Opcodes.IFNULL, jumpOverLabel);
    mv.visitVarInsn(Opcodes.ALOAD, 3);
    mv.visitVarInsn(Opcodes.ALOAD, 4);
    mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/List",
                       "addAll", "(Ljava/util/Collection;)Z", true);
    mv.visitInsn(Opcodes.POP);
    mv.visitLabel(jumpOverLabel);
    mv.visitLineNumber(107, jumpOverLabel);
}

/**
 * Emits {@code public List getWrapperParts(Object o)}: casts {@code o} to the wrapper
 * class and appends each part (boxed if primitive, unwrapped if a non-null JAXBElement)
 * to a new ArrayList. Returns {@code false} (fall back to reflection) when a part has a
 * field but no getter.
 */
private boolean addGetWrapperParts(String newClassName, Class<?> wrapperClass) {
    MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "getWrapperParts",
                                      "(Ljava/lang/Object;)Ljava/util/List;",
                                      "(Ljava/lang/Object;)Ljava/util/List<Ljava/lang/Object;>;",
                                      new String[] { "org/apache/cxf/interceptor/Fault" });
    mv.visitCode();
    Label lBegin = createLabel();
    mv.visitLabel(lBegin);
    mv.visitLineNumber(108, lBegin);
    // the ret List
    mv.visitTypeInsn(Opcodes.NEW, "java/util/ArrayList");
    mv.visitInsn(Opcodes.DUP);
    mv.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/util/ArrayList", "<init>", "()V", false);
    mv.visitVarInsn(Opcodes.ASTORE, 2);
    // cast the Object to the wrapperType type
    mv.visitVarInsn(Opcodes.ALOAD, 1);
    mv.visitTypeInsn(Opcodes.CHECKCAST, periodToSlashes(wrapperClass.getName()));
    mv.visitVarInsn(Opcodes.ASTORE, 3);

    for (int x = 0; x < getMethods.length; x++) {
        Method method = getMethods[x];
        if (method == null && fields[x] != null) {
            // fallback to reflection mode
            return false;
        }
        if (method == null) {
            // No accessor at all: add a null placeholder to keep part positions aligned.
            Label l3 = createLabel();
            mv.visitLabel(l3);
            mv.visitLineNumber(200 + x, l3);
            mv.visitVarInsn(Opcodes.ALOAD, 2);
            mv.visitInsn(Opcodes.ACONST_NULL);
            mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/List",
                               "add", "(Ljava/lang/Object;)Z", true);
            mv.visitInsn(Opcodes.POP);
        } else {
            Label l3 = createLabel();
            mv.visitLabel(l3);
            mv.visitLineNumber(250 + x, l3);
            mv.visitVarInsn(Opcodes.ALOAD, 2);
            mv.visitVarInsn(Opcodes.ALOAD, 3);
            mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, periodToSlashes(wrapperClass.getName()),
                               method.getName(), getMethodSignature(method), false);
            if (method.getReturnType().isPrimitive()) {
                // wrap into Object type
                createObjectWrapper(mv, method.getReturnType());
            }
            if (JAXBElement.class.isAssignableFrom(method.getReturnType())) {
                // Unwrap non-null JAXBElement values: value = el == null ? null : el.getValue()
                Label jumpOverLabel = createLabel();
                mv.visitInsn(Opcodes.DUP);
                mv.visitJumpInsn(Opcodes.IFNULL, jumpOverLabel);
                mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "javax/xml/bind/JAXBElement",
                                   "getValue", "()Ljava/lang/Object;", false);
                mv.visitLabel(jumpOverLabel);
            }
            mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/List",
                               "add", "(Ljava/lang/Object;)Z", true);
            mv.visitInsn(Opcodes.POP);
        }
    }
    // return the list
    Label l2 = createLabel();
    mv.visitLabel(l2);
    mv.visitLineNumber(108, l2);
    mv.visitVarInsn(Opcodes.ALOAD, 2);
    mv.visitInsn(Opcodes.ARETURN);
    Label lEnd = createLabel();
    mv.visitLabel(lEnd);
    mv.visitLocalVariable("this", "L" + newClassName + ";", null, lBegin, lEnd, 0);
    mv.visitLocalVariable("o", "Ljava/lang/Object;", null, lBegin, lEnd, 1);
    mv.visitLocalVariable("ret", "Ljava/util/List;", "Ljava/util/List<Ljava/lang/Object;>;",
                          lBegin, lEnd, 2);
    mv.visitLocalVariable("ok", "L" + periodToSlashes(wrapperClass.getName()) + ";",
                          null, lBegin, lEnd, 3);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
    return true;
}

/** Emits the boxing call for a primitive part, e.g. {@code Integer.valueOf(int)}. */
private static void createObjectWrapper(MethodVisitor mv, Class<?> cl) {
    mv.visitMethodInsn(Opcodes.INVOKESTATIC, NONPRIMITIVE_MAP.get(cl), "valueOf",
                       "(" + PRIMITIVE_MAP.get(cl) + ")L" + NONPRIMITIVE_MAP.get(cl) + ";", false);
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.smartdata.hdfs.metric.fetcher; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.inotify.Event; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.io.WritableUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.smartdata.conf.SmartConf; import org.smartdata.hdfs.CompatibilityHelperLoader; import org.smartdata.hdfs.HadoopUtil; import org.smartdata.metastore.DBType; import org.smartdata.metastore.MetaStore; import org.smartdata.metastore.MetaStoreException; import org.smartdata.model.BackUpInfo; import org.smartdata.model.FileDiff; import org.smartdata.model.FileDiffType; import org.smartdata.model.FileInfo; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; /** * This is a very preliminary and buggy applier, can further enhance by referring to * {@link org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader} */ public class InotifyEventApplier { private final MetaStore metaStore; private DFSClient client; 
private static final Logger LOG = LoggerFactory.getLogger(InotifyEventFetcher.class); private List<String> ignoreEventDirs; private List<String> fetchEventDirs; private NamespaceFetcher namespaceFetcher; public InotifyEventApplier(MetaStore metaStore, DFSClient client) { this.metaStore = metaStore; this.client = client; initialize(); } public InotifyEventApplier(MetaStore metaStore, DFSClient client, NamespaceFetcher namespaceFetcher) { this(metaStore, client); this.namespaceFetcher = namespaceFetcher; } private void initialize(){ SmartConf conf = new SmartConf(); ignoreEventDirs = conf.getIgnoreDir(); fetchEventDirs = conf.getCoverDir(); } public void apply(List<Event> events) throws IOException, MetaStoreException, InterruptedException { List<String> statements = new ArrayList<>(); for (Event event : events) { List<String> gen = getSqlStatement(event); if (gen != null && !gen.isEmpty()) { for (String s : gen) { if (s != null && s.length() > 0) { statements.add(s); } } } } this.metaStore.execute(statements); } //check if the dir is in ignoreList public void apply(Event[] events) throws IOException, MetaStoreException, InterruptedException { this.apply(Arrays.asList(events)); } private boolean shouldIgnore(String path) { String toCheck = path.endsWith("/") ? 
path : path + "/"; for (String s : ignoreEventDirs) { if (toCheck.startsWith(s)) { return true; } } if (fetchEventDirs.isEmpty()) { return false; } for (String s : fetchEventDirs) { if (toCheck.startsWith(s)) { return false; } } return true; } private List<String> getSqlStatement(Event event) throws IOException, MetaStoreException, InterruptedException { String path; String srcPath, dstPath; LOG.debug("Even Type = {}", event.getEventType().toString()); switch (event.getEventType()) { case CREATE: path = ((Event.CreateEvent) event).getPath(); if (shouldIgnore(path)) { return Arrays.asList(); } LOG.trace("event type:" + event.getEventType().name() + ", path:" + ((Event.CreateEvent) event).getPath()); return Arrays.asList(this.getCreateSql((Event.CreateEvent) event)); case CLOSE: path = ((Event.CloseEvent) event).getPath(); if (shouldIgnore(path)) { return Arrays.asList(); } LOG.trace("event type:" + event.getEventType().name() + ", path:" + ((Event.CloseEvent) event).getPath()); return Arrays.asList(this.getCloseSql((Event.CloseEvent) event)); case RENAME: srcPath = ((Event.RenameEvent) event).getSrcPath(); dstPath = ((Event.RenameEvent) event).getDstPath(); if (shouldIgnore(srcPath) && shouldIgnore(dstPath)) { return Arrays.asList(); } LOG.trace("event type:" + event.getEventType().name() + ", src path:" + ((Event.RenameEvent) event).getSrcPath() + ", dest path:" + ((Event.RenameEvent) event).getDstPath()); return this.getRenameSql((Event.RenameEvent)event); case METADATA: path = ((Event.MetadataUpdateEvent)event).getPath(); if (shouldIgnore(path)) { return Arrays.asList(); } LOG.trace("event type:" + event.getEventType().name() + ", path:" + ((Event.MetadataUpdateEvent)event).getPath()); return Arrays.asList(this.getMetaDataUpdateSql((Event.MetadataUpdateEvent)event)); case APPEND: path = ((Event.AppendEvent)event).getPath(); if (shouldIgnore(path)) { return Arrays.asList(); } LOG.trace("event type:" + event.getEventType().name() + ", path:" + 
((Event.AppendEvent)event).getPath()); return this.getAppendSql((Event.AppendEvent)event); case UNLINK: path = ((Event.UnlinkEvent)event).getPath(); if (shouldIgnore(path)) { return Arrays.asList(); } LOG.trace("event type:" + event.getEventType().name() + ", path:" + ((Event.UnlinkEvent)event).getPath()); return this.getUnlinkSql((Event.UnlinkEvent)event); } return Arrays.asList(); } //Todo: times and ec policy id, etc. private String getCreateSql(Event.CreateEvent createEvent) throws IOException, MetaStoreException { HdfsFileStatus fileStatus = client.getFileInfo(createEvent.getPath()); if (fileStatus == null) { LOG.debug("Can not get HdfsFileStatus for file " + createEvent.getPath()); return ""; } FileInfo fileInfo = HadoopUtil.convertFileStatus(fileStatus, createEvent.getPath()); if (inBackup(fileInfo.getPath())) { if (!fileInfo.isdir()) { // ignore dir FileDiff fileDiff = new FileDiff(FileDiffType.APPEND); fileDiff.setSrc(fileInfo.getPath()); fileDiff.getParameters().put("-offset", String.valueOf(0)); // Note that "-length 0" means create an empty file fileDiff.getParameters() .put("-length", String.valueOf(fileInfo.getLength())); // TODO add support in CopyFileAction or split into two file diffs //add modification_time and access_time to filediff fileDiff.getParameters().put("-mtime", "" + fileInfo.getModificationTime()); // fileDiff.getParameters().put("-atime", "" + fileInfo.getAccessTime()); //add owner to filediff fileDiff.getParameters().put("-owner", "" + fileInfo.getOwner()); fileDiff.getParameters().put("-group", "" + fileInfo.getGroup()); //add Permission to filediff fileDiff.getParameters().put("-permission", "" + fileInfo.getPermission()); //add replication count to file diff fileDiff.getParameters().put("-replication", "" + fileInfo.getBlockReplication()); metaStore.insertFileDiff(fileDiff); } } metaStore.deleteFileByPath(fileInfo.getPath()); metaStore.deleteFileState(fileInfo.getPath()); metaStore.insertFile(fileInfo); return ""; } private 
boolean inBackup(String src) throws MetaStoreException { if (metaStore.srcInbackup(src)) { return true; } return false; } //Todo: should update mtime? atime? private String getCloseSql(Event.CloseEvent closeEvent) throws IOException, MetaStoreException { FileDiff fileDiff = new FileDiff(FileDiffType.APPEND); fileDiff.setSrc(closeEvent.getPath()); long newLen = closeEvent.getFileSize(); long currLen = 0l; // TODO make sure offset is correct if (inBackup(closeEvent.getPath())) { FileInfo fileInfo = metaStore.getFile(closeEvent.getPath()); if (fileInfo == null) { // TODO add metadata currLen = 0; } else { currLen = fileInfo.getLength(); } if (currLen != newLen) { fileDiff.getParameters().put("-offset", String.valueOf(currLen)); fileDiff.getParameters() .put("-length", String.valueOf(newLen - currLen)); metaStore.insertFileDiff(fileDiff); } } return String.format( "UPDATE file SET length = %s, modification_time = %s WHERE path = '%s';", closeEvent.getFileSize(), closeEvent.getTimestamp(), closeEvent.getPath()); } //Todo: should update mtime? atime? 
// private String getTruncateSql(Event.TruncateEvent truncateEvent) { // return String.format( // "UPDATE file SET length = %s, modification_time = %s WHERE path = '%s';", // truncateEvent.getFileSize(), truncateEvent.getTimestamp(), truncateEvent.getPath()); // } private List<String> getRenameSql(Event.RenameEvent renameEvent) throws IOException, MetaStoreException, InterruptedException { String src = renameEvent.getSrcPath(); String dest = renameEvent.getDstPath(); List<String> ret = new ArrayList<>(); HdfsFileStatus status = client.getFileInfo(dest); FileInfo info = metaStore.getFile(src); // TODO: consider src or dest is ignored by SSM if (inBackup(src)) { // rename the file if the renamed file is still under the backup src dir // if not, insert a delete file diff if (inBackup(dest)) { FileDiff fileDiff = new FileDiff(FileDiffType.RENAME); fileDiff.setSrc(src); fileDiff.getParameters().put("-dest", dest); metaStore.insertFileDiff(fileDiff); } else { insertDeleteDiff(src, info.isdir()); } } else if (inBackup(dest)) { // tackle such case: rename file from outside into backup dir if (!info.isdir()) { FileDiff fileDiff = new FileDiff(FileDiffType.APPEND); fileDiff.setSrc(dest); fileDiff.getParameters().put("-offset", String.valueOf(0)); fileDiff.getParameters() .put("-length", String.valueOf(info.getLength())); metaStore.insertFileDiff(fileDiff); } else { List<FileInfo> fileInfos = metaStore.getFilesByPrefix(src.endsWith("/") ? 
src : src + "/"); for (FileInfo fileInfo : fileInfos) { // TODO: cover subdir with no file case if (fileInfo.isdir()) { continue; } FileDiff fileDiff = new FileDiff(FileDiffType.APPEND); fileDiff.setSrc(fileInfo.getPath().replaceFirst(src, dest)); fileDiff.getParameters().put("-offset", String.valueOf(0)); fileDiff.getParameters() .put("-length", String.valueOf(fileInfo.getLength())); metaStore.insertFileDiff(fileDiff); } } } if (status == null) { LOG.debug("Get rename dest status failed, {} -> {}", src, dest); } // The dest path which the src is renamed to should be checked in file table // to avoid duplicated record for one same path. FileInfo destInfo = metaStore.getFile(dest); if (destInfo != null) { metaStore.deleteFileByPath(dest); } // src is not in file table because it is not fetched or other reason if (info == null) { if (status != null) { //info = HadoopUtil.convertFileStatus(status, dest); //metaStore.insertFile(info); namespaceFetcher.startFetch(dest); while(!namespaceFetcher.fetchFinished()) { LOG.info("Fetching the files under " + dest); Thread.sleep(100); } namespaceFetcher.stop(); } } else { // if the dest is ignored, delete src info from file table // TODO: tackle with file_state and small_state if (shouldIgnore(dest)) { // fuzzy matching is used to delete content under the dir if (info.isdir()) { ret.add(String.format("DELETE FROM file WHERE path LIKE '%s/%%';", src)); } ret.add(String.format("DELETE FROM file WHERE path = '%s';", src)); return ret; } else { ret.add(String.format("UPDATE file SET path = replace(path, '%s', '%s') " + "WHERE path = '%s';", src, dest, src)); ret.add(String.format("UPDATE file_state SET path = replace(path, '%s', '%s') " + "WHERE path = '%s';", src, dest, src)); ret.add(String.format("UPDATE small_file SET path = replace(path, '%s', '%s') " + "WHERE path = '%s';", src, dest, src)); if (info.isdir()) { if (metaStore.getDbType() == DBType.MYSQL) { ret.add(String.format("UPDATE file SET path = CONCAT('%s', SUBSTR(path, 
%d)) " + "WHERE path LIKE '%s/%%';", dest, src.length() + 1, src)); ret.add(String.format("UPDATE file_state SET path = CONCAT('%s', SUBSTR(path, %d)) " + "WHERE path LIKE '%s/%%';", dest, src.length() + 1, src)); ret.add(String.format("UPDATE small_file SET path = CONCAT('%s', SUBSTR(path, %d)) " + "WHERE path LIKE '%s/%%';", dest, src.length() + 1, src)); } else if (metaStore.getDbType() == DBType.SQLITE) { ret.add(String.format("UPDATE file SET path = '%s' || SUBSTR(path, %d) " + "WHERE path LIKE '%s/%%';", dest, src.length() + 1, src)); ret.add(String.format("UPDATE file_state SET path = '%s' || SUBSTR(path, %d) " + "WHERE path LIKE '%s/%%';", dest, src.length() + 1, src)); ret.add(String.format("UPDATE small_file SET path = '%s' || SUBSTR(path, %d) " + "WHERE path LIKE '%s/%%';", dest, src.length() + 1, src)); } } } } return ret; } private String getMetaDataUpdateSql(Event.MetadataUpdateEvent metadataUpdateEvent) throws MetaStoreException { FileDiff fileDiff = null; if (inBackup(metadataUpdateEvent.getPath())) { fileDiff = new FileDiff(FileDiffType.METADATA); fileDiff.setSrc(metadataUpdateEvent.getPath()); } switch (metadataUpdateEvent.getMetadataType()) { case TIMES: if (metadataUpdateEvent.getMtime() > 0 && metadataUpdateEvent.getAtime() > 0) { if (fileDiff != null) { fileDiff.getParameters().put("-mtime", "" + metadataUpdateEvent.getMtime()); // fileDiff.getParameters().put("-access_time", "" + metadataUpdateEvent.getAtime()); metaStore.insertFileDiff(fileDiff); } return String.format( "UPDATE file SET modification_time = %s, access_time = %s WHERE path = '%s';", metadataUpdateEvent.getMtime(), metadataUpdateEvent.getAtime(), metadataUpdateEvent.getPath()); } else if (metadataUpdateEvent.getMtime() > 0) { if (fileDiff != null) { fileDiff.getParameters().put("-mtime", "" + metadataUpdateEvent.getMtime()); metaStore.insertFileDiff(fileDiff); } return String.format( "UPDATE file SET modification_time = %s WHERE path = '%s';", metadataUpdateEvent.getMtime(), 
metadataUpdateEvent.getPath()); } else if (metadataUpdateEvent.getAtime() > 0) { // if (fileDiff != null) { // fileDiff.getParameters().put("-access_time", "" + metadataUpdateEvent.getAtime()); // metaStore.insertFileDiff(fileDiff); // } return String.format( "UPDATE file SET access_time = %s WHERE path = '%s';", metadataUpdateEvent.getAtime(), metadataUpdateEvent.getPath()); } else { return ""; } case OWNER: if (fileDiff != null) { fileDiff.getParameters().put("-owner", "" + metadataUpdateEvent.getOwnerName()); metaStore.insertFileDiff(fileDiff); } return String.format( "UPDATE file SET owner = '%s', owner_group = '%s' WHERE path = '%s';", metadataUpdateEvent.getOwnerName(), metadataUpdateEvent.getGroupName(), metadataUpdateEvent.getPath()); case PERMS: if (fileDiff != null) { fileDiff.getParameters().put("-permission", "" + metadataUpdateEvent.getPerms().toShort()); metaStore.insertFileDiff(fileDiff); } return String.format( "UPDATE file SET permission = %s WHERE path = '%s';", metadataUpdateEvent.getPerms().toShort(), metadataUpdateEvent.getPath()); case REPLICATION: if (fileDiff != null) { fileDiff.getParameters().put("-replication", "" + metadataUpdateEvent.getReplication()); metaStore.insertFileDiff(fileDiff); } return String.format( "UPDATE file SET block_replication = %s WHERE path = '%s';", metadataUpdateEvent.getReplication(), metadataUpdateEvent.getPath()); case XATTRS: final String EC_POLICY = "hdfs.erasurecoding.policy"; //Todo if (LOG.isDebugEnabled()) { String message = "\n"; for (XAttr xAttr : metadataUpdateEvent.getxAttrs()) { message += xAttr.toString() + "\n"; } LOG.debug(message); } // The following code should be executed merely on HDFS3.x. for (XAttr xAttr : metadataUpdateEvent.getxAttrs()) { if (xAttr.getName().equals(EC_POLICY)) { try { String ecPolicyName = WritableUtils.readString( new DataInputStream(new ByteArrayInputStream(xAttr.getValue()))); byte ecPolicyId = CompatibilityHelperLoader.getHelper(). 
getErasureCodingPolicyByName(client, ecPolicyName); if (ecPolicyId == (byte) -1) { LOG.error("Unrecognized EC policy for updating!"); } return String.format("UPDATE file SET ec_policy_id = %s WHERE path = '%s'", ecPolicyId, metadataUpdateEvent.getPath()); } catch (IOException ex) { LOG.error("Error occurred for updating ecPolicy!", ex); } } } break; case ACLS: return ""; } return ""; } private List<String> getAppendSql(Event.AppendEvent appendEvent) { //Do nothing; return Arrays.asList(); } private List<String> getUnlinkSql(Event.UnlinkEvent unlinkEvent) throws MetaStoreException { // delete root, i.e., / String root = "/"; if (root.equals(unlinkEvent.getPath())) { LOG.warn("Deleting root directory!!!"); insertDeleteDiff(root, true); return Arrays.asList( String.format("DELETE FROM file WHERE path like '%s%%'", root), String.format("DELETE FROM file_state WHERE path like '%s%%'", root), String.format("DELETE FROM small_file WHERE path like '%s%%'", root)); } String path = unlinkEvent.getPath(); // file has no "/" appended in the metaStore FileInfo fileInfo = metaStore.getFile(path.endsWith("/") ? 
path.substring(0, path.length() - 1) : path); if (fileInfo == null) return Arrays.asList(); if (fileInfo.isdir()) { insertDeleteDiff(unlinkEvent.getPath(), true); // delete all files in this dir from file table return Arrays.asList( String.format("DELETE FROM file WHERE path LIKE '%s/%%';", unlinkEvent.getPath()), String.format("DELETE FROM file WHERE path = '%s';", unlinkEvent.getPath()), String.format("DELETE FROM file_state WHERE path LIKE '%s/%%';", unlinkEvent.getPath()), String.format("DELETE FROM file_state WHERE path = '%s';", unlinkEvent.getPath()), String.format("DELETE FROM small_file WHERE path LIKE '%s/%%';", unlinkEvent.getPath()), String.format("DELETE FROM small_file WHERE path = '%s';", unlinkEvent.getPath())); } else { insertDeleteDiff(unlinkEvent.getPath(), false); // delete file in file table return Arrays.asList( String.format("DELETE FROM file WHERE path = '%s';", unlinkEvent.getPath()), String.format("DELETE FROM file_state WHERE path = '%s';", unlinkEvent.getPath()), String.format("DELETE FROM small_file WHERE path = '%s';", unlinkEvent.getPath())); } } // TODO: just insert a fileDiff for this kind of path. // It seems that there is no need to see if path matches with one dir in FileInfo. private void insertDeleteDiff(String path, boolean isDir) throws MetaStoreException { if (isDir) { path = path.endsWith("/") ? path.substring(0, path.length() - 1) : path; List<FileInfo> fileInfos = metaStore.getFilesByPrefix(path); for (FileInfo fileInfo : fileInfos) { if (fileInfo.isdir()) { if (path.equals(fileInfo.getPath())) { insertDeleteDiff(fileInfo.getPath()); break; } } } } else { insertDeleteDiff(path); } } private void insertDeleteDiff(String path) throws MetaStoreException { // TODO: remove "/" appended in src or dest in backup_file table String pathWithSlash = path.endsWith("/") ? 
path : path + "/"; if (inBackup(pathWithSlash)) { List<BackUpInfo> backUpInfos = metaStore.getBackUpInfoBySrc(pathWithSlash); for (BackUpInfo backUpInfo : backUpInfos) { String destPath = pathWithSlash.replaceFirst(backUpInfo.getSrc(), backUpInfo.getDest()); try { // tackle root path case URI namenodeUri = new URI(destPath); String root = "hdfs://" + namenodeUri.getHost() + ":" + String.valueOf(namenodeUri.getPort()); if (destPath.equals(root) || destPath.equals(root + "/") || destPath.equals("/")) { for (String srcFilePath : getFilesUnderDir(pathWithSlash)) { FileDiff fileDiff = new FileDiff(FileDiffType.DELETE); fileDiff.setSrc(srcFilePath); String destFilePath = srcFilePath.replaceFirst(backUpInfo.getSrc(), backUpInfo.getDest()); fileDiff.getParameters().put("-dest", destFilePath); metaStore.insertFileDiff(fileDiff); } } else { FileDiff fileDiff = new FileDiff(FileDiffType.DELETE); // use the path getting from event with no slash appended fileDiff.setSrc(path); // put sync's dest path in parameter for delete use fileDiff.getParameters().put("-dest", destPath); metaStore.insertFileDiff(fileDiff); } } catch (URISyntaxException e) { LOG.error("Error occurs!", e); } } } } private List<String> getFilesUnderDir(String dir) throws MetaStoreException { dir = dir.endsWith("/") ? 
dir : dir + "/"; List<String> fileList = new ArrayList<>(); List<String> subdirList = new ArrayList<>(); // get fileInfo in asc order of path to guarantee that // the subdir is tackled prior to files or dirs under it List<FileInfo> fileInfos = metaStore.getFilesByPrefixInOrder(dir); for (FileInfo fileInfo : fileInfos) { // just delete subdir instead of deleting all files under it if (isUnderDir(fileInfo.getPath(), subdirList)) { continue; } fileList.add(fileInfo.getPath()); if (fileInfo.isdir()) { subdirList.add(fileInfo.getPath()); } } return fileList; } private boolean isUnderDir(String path, List<String> dirs) { if (dirs.isEmpty()) { return false; } for (String subdir : dirs) { if (path.startsWith(subdir)) { return true; } } return false; } }
/* * @(#) ListMap.java * * javautil Java Utility Library * Copyright (c) 2013, 2014, 2015, 2016, 2017 Peter Wall * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package net.pwall.util; import java.io.Serializable; import java.util.AbstractSet; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Set; /** * An implementation of {@link Map} that uses a {@link List} to hold the entries, retaining the * order of insertion. This class is useful when the order of insertion of entries is * significant, but it suffers from serious performance degradation when the numbers of entries * exceeds a handful. 
* * @author Peter Wall * @param <K> the key type * @param <V> the value type */ public class ListMap<K, V> implements Map<K, V>, Serializable { private static final long serialVersionUID = -5594713182082941289L; protected List<ListMap.Entry<K, V>> list; /** * Construct an empty {@code ListMap}. */ public ListMap() { list = new ArrayList<>(); } /** * Construct an empty {@code ListMap} with a specified initial capacity. * * @param capacity the initial capacity */ public ListMap(int capacity) { list = new ArrayList<>(capacity); } /** * Construct a {@code ListMap} with the contents of another {@link Map}. * * @param m the other {@link Map} */ public ListMap(Map<? extends K, ? extends V> m) { this(m.size()); putAll(m); } /** * Get a value from the {@code ListMap}. * * @param key the key of the value * @return the value, or {@code null} if not found * @see Map#get(Object) */ @Override public V get(Object key) { int index = findIndex(Objects.requireNonNull(key)); return index < 0 ? null : list.get(index).getValue(); } /** * Test whether the {@code ListMap} contains a specified key. * * @param key the key to test for * @return {@code true} if the key is found * @see Map#containsKey(Object) */ @Override public boolean containsKey(Object key) { return findIndex(Objects.requireNonNull(key)) >= 0; } /** * Store a value in the {@code ListMap} with the specified key. * * @param key the key * @param value the value * @return the previous value stored with that key, or {@code null} if no previous * value * @see Map#put(Object, Object) */ @Override public V put(K key, V value) { int index = findIndex(Objects.requireNonNull(key)); if (index >= 0) { Entry<K, V> entry = list.get(index); V oldValue = entry.getValue(); entry.setValue(value); return oldValue; } list.add(new Entry<>(key, value)); return null; } /** * Remove the specified key-value mapping from the {@code ListMap}. 
* * @param key the key * @return the value stored with that key, or {@code null} if key not used * @see Map#remove(Object) */ @Override public V remove(Object key) { int index = findIndex(Objects.requireNonNull(key)); if (index >= 0) return list.remove(index).getValue(); return null; } /** * Get the number of values in the {@code ListMap}. * * @return the number of values * @see Map#size() */ @Override public int size() { return list.size(); } /** * Test whether the {@code ListMap}is empty. * * @return {@code true} if the {@code ListMap} is empty * @see Map#isEmpty() */ @Override public boolean isEmpty() { return size() == 0; } /** * Test whether the {@code ListMap} contains the specified value. * * @param value the value * @return {@code true} if the {@code ListMap} contains the value * @see Map#containsValue(Object) */ @Override public boolean containsValue(Object value) { for (int i = 0, n = list.size(); i < n; i++) if (Objects.equals(list.get(i).getValue(), value)) return true; return false; } /** * Add all the members of another {@link Map} to this {@code ListMap}. * * @param m the other {@link Map} * @see Map#putAll(Map) */ @Override public void putAll(Map<? extends K, ? extends V> m) { for (Map.Entry<? extends K, ? extends V> e : m.entrySet()) put(e.getKey(), e.getValue()); } /** * Remove all members from this {@code ListMap}. * * @see Map#clear() */ @Override public void clear() { list.clear(); } /** * Get a {@link Set} representing the keys in use in the {@code ListMap}. * * @return the {@link Set} of keys * @see Map#keySet() */ @Override public Set<K> keySet() { return new KeySet(); } /** * Get a {@link Collection} of the values in the {@code ListMap}. * * @return the {@link Collection} of values * @see Map#values() */ @Override public Collection<V> values() { return new ValueCollection(); } /** * Get a {@link Set} of the key-value pairs in use in the {@code ListMap}. 
* * @return the {@link Set} of key-value pairs * @see Map#entrySet() */ @Override public Set<Map.Entry<K, V>> entrySet() { return new EntrySet(); } /** * Get the hash code for this {@code ListMap}. * * @return the hash code * @see Object#hashCode() */ @Override public int hashCode() { int result = 0; for (int i = 0, n = list.size(); i < n; i++) result ^= list.get(i).hashCode(); return result; } /** * Compare this {@code ListMap} with another object for equality. * * @param other the other object * @return {@code true} if the other object is a {@code ListMap} and is identical to * this object * @see Object#equals(Object) */ @Override public boolean equals(Object other) { if (other == this) return true; if (!(other instanceof ListMap<?, ?>)) return false; ListMap<?, ?> otherMapping = (ListMap<?, ?>)other; if (list.size() != otherMapping.list.size()) return false; for (Entry<K, V> entry : list) if (!Objects.equals(entry.getValue(), otherMapping.get(entry.getKey()))) return false; return true; } /** * Convert the map to {@code String} (usually for diagnostic purposes). * * @return a string in the form {key=value, ...} */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append('{'); int n = list.size(); if (n > 0) { int i = 0; while (true) { Entry<K, V> e = list.get(i++); if (e.value == this) sb.append(e.key).append('=').append("(this Map)"); else sb.append(e); if (i >= n) break; sb.append(',').append(' '); } } sb.append('}'); return sb.toString(); } /** * Get an {@link Entry} by index. * * @param index the index * @return the list entry */ public Entry<K, V> getEntry(int index) { return list.get(index); } /** * Find the index for the specified key. * * @param key the key * @return the index for this key, or -1 if not found */ protected int findIndex(Object key) { for (int i = 0, n = list.size(); i < n; i++) if (list.get(i).getKey().equals(key)) return i; return -1; } /** * Inner class to represent a key-value pair in the {@code ListMap}. 
* * @param <KK> the key type * @param <VV> the value type */ public static class Entry<KK, VV> implements Map.Entry<KK, VV>, Serializable { private static final long serialVersionUID = -7610378954393786210L; private final KK key; private VV value; /** * Construct an {@code Entry} with the given key and value. * * @param key the key * @param value the value */ public Entry(KK key, VV value) { this.key = key; this.value = value; } /** * {@inheritDoc} */ @Override public KK getKey() { return key; } /** * {@inheritDoc} */ @Override public VV getValue() { return value; } /** * {@inheritDoc} */ @Override public VV setValue(VV value) { VV oldValue = this.value; this.value = value; return oldValue; } /** * {@inheritDoc} */ @Override public boolean equals(Object other) { if (other == this) return true; if (!(other instanceof Map.Entry)) return false; Map.Entry<?, ?> otherEntry = (Map.Entry<?, ?>)other; return Objects.equals(key, otherEntry.getKey()) && Objects.equals(value, otherEntry.getValue()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Objects.hash(key, value); } /** * Convert the entry to {@code String} (usually for diagnostic purposes). * * @return a string in the form key=value */ @Override public String toString() { return key.toString() + '=' + value.toString(); } } /** * An {@link Iterator} over the {@link Set} of key-value pairs in the {@code ListMap}. */ private class EntryIterator extends BaseIterator<Map.Entry<K, V>> { /** * {@inheritDoc} */ @Override public Entry<K, V> next() { return nextEntry(); } } /** * An {@link Iterator} over the {@link Set} of keys in the {@code ListMap}. */ private class KeyIterator extends BaseIterator<K> { /** * {@inheritDoc} */ @Override public K next() { return nextEntry().getKey(); } } /** * An {@link Iterator} over the {@link Collection} of values in the {@code ListMap}. 
*/ private class ValueIterator extends BaseIterator<V> { /** * {@inheritDoc} */ @Override public V next() { return nextEntry().getValue(); } } /** * Abstract base class for various iterators. * * @param <T> the returned type */ private abstract class BaseIterator<T> implements Iterator<T> { private int index; /** * Construct a {@code BaseIterator} starting at index 0. */ public BaseIterator() { index = 0; } /** * Get the next {@code Entry} from the list. The different derived iterator types will * return the entry itself, or the key or value. * * @return the next {@code Entry} */ public Entry<K, V> nextEntry() { if (!hasNext()) throw new NoSuchElementException(); return list.get(index++); } /** * {@inheritDoc} */ @Override public boolean hasNext() { return index < list.size(); } } /** * A collection of the key-value pairs in the {@code ListMap}. * * @see #entrySet() */ private class EntrySet extends CollectionBase<Map.Entry<K, V>> { /** * {@inheritDoc} */ @Override public boolean contains(Object o) { return list.contains(o); } /** * {@inheritDoc} */ @Override public Iterator<Map.Entry<K, V>> iterator() { return new EntryIterator(); } /** * {@inheritDoc} */ @Override public boolean containsAll(Collection<?> c) { for (Object o : c) if (!list.contains(o)) return false; return true; } } /** * A collection of the keys in the {@code ListMap}. * * @see #keySet() */ private class KeySet extends CollectionBase<K> { /** * {@inheritDoc} */ @Override public boolean contains(Object o) { return containsKey(o); } /** * {@inheritDoc} */ @Override public Iterator<K> iterator() { return new KeyIterator(); } /** * {@inheritDoc} */ @Override public boolean containsAll(Collection<?> c) { for (Object o : c) if (!containsKey(o)) return false; return true; } } /** * A collection of the values in the {@code ListMap}. 
* * @see #values() */ private class ValueCollection extends CollectionBase<V> { /** * {@inheritDoc} */ @Override public boolean contains(Object o) { return containsValue(o); } /** * {@inheritDoc} */ @Override public Iterator<V> iterator() { return new ValueIterator(); } /** * {@inheritDoc} */ @Override public boolean containsAll(Collection<?> c) { for (Object o : c) if (!containsValue(o)) return false; return true; } } /** * Abstract base class for various returned collections. All modifying operations throw an * {@link UnsupportedOperationException}. * * @param <T> the returned type */ private abstract class CollectionBase<T> extends AbstractSet<T> { /** * Return the number of elements in the set. All returned collections are the same size * as the underlying collection. * * @return the number of elements in the collection */ @Override public int size() { return list.size(); } /** * Remove an object from the collection - not supported. * * @param o the object * @return (never returns normally) * @throws UnsupportedOperationException in all cases */ @Override public boolean remove(Object o) { throw new UnsupportedOperationException(); } /** * Add all elements from another collection - not supported. * * @param c the other collection * @return (never returns normally) * @throws UnsupportedOperationException in all cases */ @Override public boolean addAll(Collection<? extends T> c) { throw new UnsupportedOperationException(); } /** * Remove all elements not matching those in another collection - not supported. * * @param c the other collection * @return (never returns normally) * @throws UnsupportedOperationException in all cases */ @Override public boolean retainAll(Collection<?> c) { throw new UnsupportedOperationException(); } /** * Remove all elements matching those in another collection - not supported. 
* * @param c the other collection * @return (never returns normally) * @throws UnsupportedOperationException in all cases */ @Override public boolean removeAll(Collection<?> c) { throw new UnsupportedOperationException(); } /** * Clear the collection - not supported. * * @throws UnsupportedOperationException in all cases */ @Override public void clear() { throw new UnsupportedOperationException(); } } }
package org.usfirst.frc.team4453.robot.subsystems;

import org.usfirst.frc.team4453.library.Tilt;
import org.usfirst.frc.team4453.robot.RobotMap;
import org.usfirst.frc.team4453.robot.commands.TiltWithJoystick;

import edu.wpi.first.wpilibj.CANTalon;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.PIDSubsystem;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

/**
 * Shooter subsystem: two paired CIM motors drive the shooter wheels (master/slave
 * CANTalons) and two more drive a lead-screw tilt mechanism whose position is
 * closed-loop controlled by this PIDSubsystem (input = tilt encoder distance,
 * output = tilt motor power).  A pneumatic solenoid fires the ball.
 *
 * NOTE(review): the hardware objects are held in static fields although this is an
 * instance subsystem — presumably only one Shooter is ever constructed; confirm.
 */
public class Shooter extends PIDSubsystem {

    // Shooter wheel motors: slave follows master, master runs reversed (see ctor).
    private static CANTalon shooterMaster;
    private static CANTalon shooterSlave;
    // Tilt motors: slave follows master.
    private static CANTalon tiltMaster;
    private static CANTalon tiltSlave;
    // Quadrature encoder on the tilt lead screw; distance is in inches of screw travel.
    private static Encoder tiltEncoder;
    // Lower-travel limit switch for the tilt mechanism.
    // NOTE(review): code treats get()==true as "at limit" — confirm switch wiring polarity.
    private static DigitalInput minLimit;
    // Flags shared with commands via the getters/setters below.
    private static boolean isReady = false;
    private static boolean autoTilt = false;
    private Compressor compressor;
    private DoubleSolenoid shooterSolenoid;

    // PID gains for the tilt position loop.
    private static final double Kp = 2.5; //0.025;
    private static final double Ki = 0.0;
    private static final double Kd = 0.0;

    // Encoder/lead-screw geometry: 360 pulses/rev, 0.25 in/rev pitch,
    // so distance-per-pulse is pitch / pulses-per-rev (inches).
    private static final double ENCODER_PULSES_PER_REV = 360.0;
    private static final double TILT_LEAD_SCREW_PITCH = 0.25;
    private static final double TILT_ENCODER_DIST_PER_PULSE = TILT_LEAD_SCREW_PITCH / ENCODER_PULSES_PER_REV;

    // PID input range: 0 inches up to the screw travel corresponding to a 50-degree tilt.
    private static final double MIN_DIST = 0.0;
    private static final double MAX_DIST = Tilt.calcSide(50.0);

    // Initialize your subsystem here
    public Shooter() {
        super("Shooter", Kp, Ki, Kd);
        System.out.println("Shooter starting...");

        // Use these to get going:
        // setSetpoint() - Sets where the PID controller should move the system
        // to
        // enable() - Enables the PID controller.

        // Define shooter motors as Master and Slave where we control the
        // master motor and the slave follows.
        // The master motor runs in reverse.
        // Note: Running the slave in reverse did not work
        shooterMaster = new CANTalon(RobotMap.LEFT_SHOOTER_MOTOR);
        shooterSlave = new CANTalon(RobotMap.RIGHT_SHOOTER_MOTOR);
        shooterSlave.changeControlMode(CANTalon.TalonControlMode.Follower);
        shooterSlave.set(shooterMaster.getDeviceID());
        shooterMaster.reverseOutput(true);

        // Define tilt motors as Master and Slave where we control the
        // master motor and the slave follows.
        tiltMaster = new CANTalon(RobotMap.LEFT_TILT_MOTOR);
        tiltSlave = new CANTalon(RobotMap.RIGHT_TILT_MOTOR);
        tiltSlave.changeControlMode(CANTalon.TalonControlMode.Follower);
        tiltSlave.set(tiltMaster.getDeviceID());
        tiltMaster.reverseOutput(false);

        // Define the low limit position switch
        minLimit = new DigitalInput(RobotMap.SHOOTER_MIN_LIMIT);

        // Define the compressor and solenoid
        // Note: by defining any solenoid the compressor is automatically enabled
        compressor = new Compressor(RobotMap.COMPRESSOR);
        shooterSolenoid = new DoubleSolenoid(RobotMap.SHOOTER_SOLENOID1ST, RobotMap.SHOOTER_SOLENOID2ND);

        // Define the tilt encoder use to control the position of the shooter's tilt angle
        // Configure encoder parameters - taken from example; commented out values are arbitrary
        tiltEncoder = new Encoder(
                RobotMap.TILT_ENCODER_A,
                RobotMap.TILT_ENCODER_B,
                false,
                Encoder.EncodingType.k4X);
        // tiltEncoder.setMaxPeriod(0.1);
        // tiltEncoder.setMinRate(10);
        tiltEncoder.setDistancePerPulse(TILT_ENCODER_DIST_PER_PULSE);
        // tiltEncoder.setReverseDirection(true);
        // tiltEncoder.setPIDSourceType(PIDSourceType.kDisplacement);
        // tiltEncoder.setSamplesToAverage(7);
        tiltEncoder.reset();

        // Configure PID Controller, reset position, and enable
        getPIDController().setContinuous(false);
        getPIDController().setInputRange(MIN_DIST, MAX_DIST); // about 0.0 and 6.6 inches
        // getPIDController().setAbsoluteTolerance(5.0); // did not work
        getPIDController().setPercentTolerance(0.5); // 0.5 / 100.0 * (max - min) = 0.033 inches
        // getPIDController().setToleranceBuffer(3); // average over three cycles for onTarget
        // getPIDController().setOutputRange(-1.0, 1.0);
        getPIDController().enable();

        // Setup LiveWindow for Test Mode
        LiveWindow.addActuator("Tilt PID", "PID Subsystem", getPIDController());
        LiveWindow.addActuator("Shooter", "Master CIM", (CANTalon) shooterMaster);
        LiveWindow.addActuator("Shooter", "Slave CIM", (CANTalon) shooterSlave);
        LiveWindow.addActuator("Shooter", "Compressor", compressor);
        LiveWindow.addActuator("Shooter", "Solenoid", shooterSolenoid);
        LiveWindow.addActuator("Tilt", "Master CIM", (CANTalon) tiltMaster);
        LiveWindow.addActuator("Tilt", "Slave CIM", (CANTalon) tiltSlave);
        LiveWindow.addSensor("Tilt", "Tilt Encoder", tiltEncoder);
        LiveWindow.addSensor("Tilt", "Tilt Limit", minLimit);

        System.out.println("Shooter is running");
    }

    // Default command: manual tilt control from the joystick.
    public void initDefaultCommand() {
        // Set the default command for a subsystem here.
        //setDefaultCommand(new MySpecialCommand());
        setDefaultCommand(new TiltWithJoystick());
    }

    // PID process-variable callback: returns the current tilt distance (inches) and
    // publishes telemetry to the SmartDashboard each PID cycle.
    protected double returnPIDInput() {
        // Return your input value for the PID loop
        // e.g. a sensor, like a potentiometer:
        // yourPot.getAverageVoltage() / kYourMaxVoltage;
        double dist = tiltGetDist();
        // SmartDashboard.putNumber("Encoder PPR ", ENCODER_PULSES_PER_REV);
        // SmartDashboard.putNumber("Screw Pitch ", TILT_LEAD_SCREW_PITCH);
        // SmartDashboard.putNumber("Encoder DistPP X1000", TILT_ENCODER_DIST_PER_PULSE*1000);
        SmartDashboard.putNumber("Tilt Setpoint (angle) ", Tilt.calcAngle(getSetpoint()));
        SmartDashboard.putNumber("Tilt Setpoint (dist) ", getSetpoint());
        SmartDashboard.putNumber("Tilt Encoder (Raw)", tiltEncoder.get());
        SmartDashboard.putNumber("Tilt Encoder (Dist)", tiltGetDist());
        SmartDashboard.putNumber("Tilt Encoder (Angle)", tiltGetAngle());
        SmartDashboard.putNumber("Tilt Encoder (Rate)", tiltEncoder.getRate());
        SmartDashboard.putString("Tilt Encoder Dir", (tiltEncoder.getDirection() ? "Forward" : "Reverse"));
        SmartDashboard.putBoolean("Tilt Limit Switch", minLimit.get());
        return dist;
    }

    // PID output callback: drives the tilt master motor.  Output is negated because
    // positive PID output must move the mechanism in the negative motor direction.
    protected void usePIDOutput(double output) {
        // Use output to drive your system, like a motor
        // e.g. yourMotor.set(output);
        SmartDashboard.putNumber("Tilt PID Output", -output);
        tiltMaster.set(-output);
    }

    // Current tilt travel in inches (from the encoder).
    public double tiltGetDist() {
        return tiltEncoder.getDistance();
    }

    // Command the tilt to a travel distance in inches.
    public void tiltSetDist(double dist) {
        getPIDController().setSetpoint(dist);
    }

    // Current tilt angle in degrees, derived from the lead-screw travel.
    public double tiltGetAngle() {
        return Tilt.calcAngle(tiltEncoder.getDistance());
    }

    // Command the tilt to an angle in degrees (converted to screw travel).
    public void tiltSetAngle(double angle) {
        getPIDController().setSetpoint(Tilt.calcSide(angle));
    }

    public double tiltGetSetPoint() {
        return getPIDController().getSetpoint();
    }

    public void tiltSetSetPoint(double setPoint) {
        getPIDController().setSetpoint(setPoint);
    }

    public void tiltResetEncoder() {
        tiltEncoder.reset();
    }

    // True when the tilt mechanism is at its lower travel limit.
    public boolean tiltGetLowerLimit() {
        return minLimit.get();
    }

    // Shooter wheel control: full power out, half power in, stop.
    public void shooterFire() {
        shooterMaster.set(1.0);
    }

    public void shooterCollect() {
        shooterMaster.set(-0.5);
    }

    public void shooterStop() {
        shooterMaster.set(0.0);
    }

    // Open-loop tilt motor control (positive = lower, negative = raise).
    public void tiltLower() {
        tiltMaster.set(1.0);
    }

    public void tiltRaise() {
        tiltMaster.set(-1.0);
    }

    public void tiltStop() {
        tiltMaster.set(0.0);
    }

    // Home the tilt mechanism: drive down until the limit switch trips, zero the
    // encoder, then re-enable the PID loop at setpoint 0.
    // NOTE(review): this loop blocks the calling thread until the switch trips and
    // has no timeout — if the switch fails the robot code hangs here; confirm this
    // is only ever called where blocking is acceptable.
    public void tiltReset() {
        while (! tiltGetLowerLimit()) {
            tiltLower();
        }
        tiltStop();
        Timer.delay(1.0); // allow motor to stop
        tiltResetEncoder();
        getPIDController().enable(); // enable tilt PID controller
        tiltSetDist(0.0);
    }

    // Solenoid control for the firing piston: extend, retract, or vent.
    public void solenoidFire() {
        shooterSolenoid.set(DoubleSolenoid.Value.kForward);
    }

    public void solenoidReset() {
        shooterSolenoid.set(DoubleSolenoid.Value.kReverse);
    }

    public void solenoidClear() {
        shooterSolenoid.set(DoubleSolenoid.Value.kOff);
    }

    // Ready/auto-tilt flags used by commands to coordinate firing.
    public boolean getIsReady() {
        return isReady;
    }

    public void setIsReady(boolean b) {
        isReady = b;
    }

    public boolean getAutoTilt() {
        return autoTilt;
    }

    public void setAutoTilt(boolean b) {
        autoTilt = b;
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.unscramble;

import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.configurable.VcsContentAnnotationConfigurable;
import com.intellij.ui.GuiUtils;
import com.intellij.ui.ListCellRendererWrapper;
import com.intellij.ui.TextFieldWithHistory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import static com.intellij.util.containers.ContainerUtil.ar;

/**
 * The "Analyze Stacktrace" dialog: accepts pasted (possibly obfuscated) stack-trace
 * text, optionally runs it through a registered unscrambler extension, normalizes
 * the text, and opens the result in a console tab (with thread-dump/deadlock
 * detection).  State (log file history, chosen unscrambler) is persisted via
 * {@link PropertiesComponent} at both project and application level.
 *
 * @author cdr
 */
public class UnscrambleDialog extends DialogWrapper {
  // Keys used to persist dialog state between invocations.
  @NonNls private static final String PROPERTY_LOG_FILE_HISTORY_URLS = "UNSCRAMBLE_LOG_FILE_URL";
  @NonNls private static final String PROPERTY_LOG_FILE_LAST_URL = "UNSCRAMBLE_LOG_FILE_LAST_URL";
  @NonNls private static final String PROPERTY_UNSCRAMBLER_NAME_USED = "UNSCRAMBLER_NAME_USED";
  private static final Condition<ThreadState> DEADLOCK_CONDITION = state -> state.isDeadlocked();
  // Words that begin thread-dump header fields; used by normalizeText to decide
  // where a space (rather than a newline) should join wrapped fragments.
  private static final String[] IMPORTANT_THREAD_DUMP_WORDS = ar("tid", "nid", "wait", "parking", "prio", "os_prio", "java");

  private final Project myProject;
  // Form-bound components (created by the GUI designer / createUIComponents).
  private JPanel myEditorPanel;
  private JPanel myLogFileChooserPanel;
  private JComboBox myUnscrambleChooser;
  private JPanel myPanel;
  private TextFieldWithHistory myLogFile;
  private JCheckBox myUseUnscrambler;
  private JPanel myUnscramblePanel;
  private JCheckBox myOnTheFly;
  private JPanel myBottomPanel;
  private JPanel mySettingsPanel;
  protected AnalyzeStacktraceUtil.StacktraceEditorPanel myStacktraceEditorPanel;
  private VcsContentAnnotationConfigurable myConfigurable;

  public UnscrambleDialog(@NotNull Project project) {
    super(false);
    myProject = project;

    populateRegisteredUnscramblerList();
    // Enabling/disabling of dependent controls tracks the chooser and checkbox state.
    myUnscrambleChooser.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        UnscrambleSupport unscrambleSupport = getSelectedUnscrambler();
        GuiUtils.enableChildren(myLogFileChooserPanel, unscrambleSupport != null);
        updateUnscramblerSettings();
      }
    });
    myUseUnscrambler.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        useUnscramblerChanged();
      }
    });
    // "On the fly" analysis toggle is backed directly by a registry value.
    myOnTheFly.setSelected(Registry.get("analyze.exceptions.on.the.fly").asBoolean());
    myOnTheFly.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        Registry.get("analyze.exceptions.on.the.fly").setValue(myOnTheFly.isSelected());
      }
    });
    createLogFileChooser();
    createEditor();
    reset();

    setTitle(IdeBundle.message("unscramble.dialog.title"));
    init();
  }

  // Enable/disable the unscrambler sub-panel when the checkbox is toggled.
  private void useUnscramblerChanged() {
    boolean selected = myUseUnscrambler.isSelected();
    GuiUtils.enableChildren(myUnscramblePanel, selected, myUseUnscrambler);
    if (selected) {
      updateUnscramblerSettings();
    }
  }

  // Swap in the settings component of the currently selected unscrambler (if any).
  private void updateUnscramblerSettings() {
    UnscrambleSupport unscrambleSupport = (UnscrambleSupport)myUnscrambleChooser.getSelectedItem();
    JComponent settingsComponent = unscrambleSupport == null ? null : unscrambleSupport.createSettingsComponent();
    mySettingsPanel.removeAll();
    if (settingsComponent != null) {
      mySettingsPanel.add(settingsComponent, BorderLayout.CENTER);
    }
    myUnscramblePanel.validate();
  }

  // Restore persisted state (log-file history, last URL, saved unscrambler) and
  // pre-fill the editor from the clipboard.
  private void reset() {
    final List<String> savedUrls = getSavedLogFileUrls();
    myLogFile.setHistorySize(10);
    myLogFile.setHistory(savedUrls);

    String lastUrl = getPropertyValue(PROPERTY_LOG_FILE_LAST_URL);
    if (lastUrl == null && !savedUrls.isEmpty()) {
      lastUrl = savedUrls.get(savedUrls.size() - 1);
    }
    if (lastUrl != null) {
      myLogFile.setText(lastUrl);
      myLogFile.setSelectedItem(lastUrl);
    }
    final UnscrambleSupport selectedUnscrambler = getSavedUnscrambler();

    // Re-select the saved unscrambler by presentable name (extensions may have
    // been reloaded, so match by name rather than identity).
    final int count = myUnscrambleChooser.getItemCount();
    int index = 0;
    if (selectedUnscrambler != null) {
      for (int i = 0; i < count; i++) {
        final UnscrambleSupport unscrambleSupport = (UnscrambleSupport)myUnscrambleChooser.getItemAt(i);
        if (unscrambleSupport != null && Comparing.strEqual(unscrambleSupport.getPresentableName(), selectedUnscrambler.getPresentableName())) {
          index = i;
          break;
        }
      }
    }
    if (count > 0) {
      myUseUnscrambler.setEnabled(true);
      myUnscrambleChooser.setSelectedIndex(index);
      myUseUnscrambler.setSelected(selectedUnscrambler != null);
    }
    else {
      myUseUnscrambler.setEnabled(false);
    }

    useUnscramblerChanged();
    updateUnscramblerSettings();
    myStacktraceEditorPanel.pasteTextFromClipboard();
  }

  // Called by the GUI designer: builds the bottom panel, adding the VCS
  // annotation configurable only when the project has an active VCS.
  private void createUIComponents() {
    myBottomPanel = new JPanel(new BorderLayout());
    if (ProjectLevelVcsManager.getInstance(myProject).hasActiveVcss()) {
      myConfigurable = new VcsContentAnnotationConfigurable(myProject);
      myBottomPanel.add(myConfigurable.createComponent(), BorderLayout.CENTER);
      myConfigurable.reset();
    }
  }

  // Find the unscrambler extension whose presentable name matches the saved one.
  @Nullable
  private UnscrambleSupport getSavedUnscrambler() {
    final String savedUnscramblerName = getPropertyValue(PROPERTY_UNSCRAMBLER_NAME_USED);
    UnscrambleSupport selectedUnscrambler = null;
    for (UnscrambleSupport unscrambleSupport : UnscrambleSupport.EP_NAME.getExtensions()) {
      if (Comparing.strEqual(unscrambleSupport.getPresentableName(), savedUnscramblerName)) {
        selectedUnscrambler = unscrambleSupport;
      }
    }
    return selectedUnscrambler;
  }

  // The log-file URL history, stored as a single ":::"-joined property value.
  @NotNull
  public static List<String> getSavedLogFileUrls() {
    final List<String> res = new ArrayList<>();
    final String savedUrl = PropertiesComponent.getInstance().getValue(PROPERTY_LOG_FILE_HISTORY_URLS);
    final String[] strings = savedUrl == null ? ArrayUtil.EMPTY_STRING_ARRAY : savedUrl.split(":::");
    for (int i = 0; i != strings.length; ++i) {
      res.add(strings[i]);
    }
    return res;
  }

  // The selected unscrambler, or null when the "use unscrambler" box is unchecked.
  @Nullable
  private UnscrambleSupport getSelectedUnscrambler() {
    if (!myUseUnscrambler.isSelected()) return null;
    return (UnscrambleSupport)myUnscrambleChooser.getSelectedItem();
  }

  private void createEditor() {
    myStacktraceEditorPanel = AnalyzeStacktraceUtil.createEditorPanel(myProject, myDisposable);
    myEditorPanel.setLayout(new BorderLayout());
    myEditorPanel.add(myStacktraceEditorPanel, BorderLayout.CENTER);
  }

  @Override
  @NotNull
  protected Action[] createActions() {
    return new Action[]{createNormalizeTextAction(), getOKAction(), getCancelAction(), getHelpAction()};
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    JRootPane pane = getRootPane();
    return pane != null ? pane.getDefaultButton() : super.getPreferredFocusedComponent();
  }

  // Build the log-file text field with a browse button that opens a file chooser.
  private void createLogFileChooser() {
    myLogFile = new TextFieldWithHistory();
    JPanel panel = GuiUtils.constructFieldWithBrowseButton(myLogFile, new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor();
        FileChooser.chooseFiles(descriptor, myProject, null, files -> myLogFile.setText(FileUtil.toSystemDependentName(files.get(files.size() - 1).getPath())));
      }
    });
    myLogFileChooserPanel.setLayout(new BorderLayout());
    myLogFileChooserPanel.add(panel, BorderLayout.CENTER);
  }

  // Fill the combo box with all registered unscrambler extensions; a null item
  // renders as "no unscrambler".
  private void populateRegisteredUnscramblerList() {
    for (UnscrambleSupport unscrambleSupport : UnscrambleSupport.EP_NAME.getExtensions()) {
      //noinspection unchecked
      myUnscrambleChooser.addItem(unscrambleSupport);
    }
    //noinspection unchecked
    myUnscrambleChooser.setRenderer(new ListCellRendererWrapper<UnscrambleSupport>() {
      @Override
      public void customize(JList list, UnscrambleSupport unscrambleSupport, int index, boolean selected, boolean hasFocus) {
        setText(unscrambleSupport == null ? IdeBundle.message("unscramble.no.unscrambler.item") : unscrambleSupport.getPresentableName());
      }
    });
  }

  @Override
  protected JComponent createCenterPanel() {
    return myPanel;
  }

  // Persist dialog state only when closed via OK.
  @Override
  public void dispose() {
    if (isOK()) {
      final List<String> list = myLogFile.getHistory();
      PropertiesComponent.getInstance().setValue(PROPERTY_LOG_FILE_HISTORY_URLS, list.isEmpty() ? null : StringUtil.join(list, ":::"), null);
      UnscrambleSupport selectedUnscrambler = getSelectedUnscrambler();
      saveProperty(PROPERTY_UNSCRAMBLER_NAME_USED, selectedUnscrambler == null ? null : selectedUnscrambler.getPresentableName());
      saveProperty(PROPERTY_LOG_FILE_LAST_URL, StringUtil.nullize(myLogFile.getText()));
    }
    super.dispose();
  }

  // IDEA-125302 The Analyze Stacktrace menu option remembers only one log file across multiple projects
  // Writes to both the project-level and application-level stores; reads prefer
  // the project-level value (see getPropertyValue).
  private void saveProperty(@NotNull String name, @Nullable String value) {
    PropertiesComponent.getInstance(myProject).setValue(name, value);
    PropertiesComponent.getInstance().setValue(name, value);
  }

  @Nullable
  private String getPropertyValue(@NotNull String name) {
    String projectValue = PropertiesComponent.getInstance(myProject).getValue(name);
    if (projectValue != null) {
      return projectValue;
    }
    return PropertiesComponent.getInstance().getValue(name);
  }

  public void setText(String trace) {
    myStacktraceEditorPanel.setText(trace);
  }

  public Action createNormalizeTextAction() {
    return new NormalizeTextAction();
  }

  // Dialog button that re-formats the pasted text in place via normalizeText.
  private final class NormalizeTextAction extends AbstractAction {
    public NormalizeTextAction() {
      putValue(NAME, IdeBundle.message("unscramble.normalize.button"));
      putValue(DEFAULT_ACTION, Boolean.FALSE);
    }

    @Override
    public void actionPerformed(ActionEvent e) {
      String text = myStacktraceEditorPanel.getText();
      myStacktraceEditorPanel.setText(normalizeText(text));
    }
  }

  // Re-assemble a stack trace whose line structure was lost (e.g. pasted from a
  // log with literal "\n" escapes or wrapped lines): splits before "at " frames,
  // re-joins wrapped fragments, and passes heap/JNI sections through verbatim.
  public static String normalizeText(@NonNls String text) {
    StringBuilder builder = new StringBuilder(text.length());

    // Break a frame marker ("at ...") onto its own line, whether it was separated
    // by whitespace or by literal escape sequences.
    text = text.replaceAll("(\\S[ \\t\\x0B\\f\\r]+)(at\\s+)", "$1\n$2");
    text = text.replaceAll("(\\\\n|\\\\r|\\\\t)+(at\\s+)", "\n$2");
    String[] lines = text.split("\n");

    boolean first = true;
    boolean inAuxInfo = false;
    for (final String line : lines) {
      //noinspection HardCodedStringLiteral
      if (!inAuxInfo && (line.startsWith("JNI global references") || line.trim().equals("Heap"))) {
        builder.append("\n");
        inAuxInfo = true;
      }
      if (inAuxInfo) {
        // Auxiliary dump sections (heap stats, JNI refs) are copied unchanged.
        builder.append(trimSuffix(line)).append("\n");
        continue;
      }
      if (line.startsWith("at breakpoint")) { // possible thread status mixed with "at ..."
        builder.append(" ").append(trimSuffix(line));
        continue;
      }
      if (!first && (mustHaveNewLineBefore(line) || StringUtil.endsWith(builder, ")"))) {
        if (!StringUtil.endsWith(builder, "\n")) builder.append("\n");
        if (line.startsWith("\"")) builder.append("\n"); // Additional line break for thread names
      }
      first = false;
      int i = builder.lastIndexOf("\n");
      CharSequence lastLine = i == -1 ? builder : builder.subSequence(i + 1, builder.length());
      if (!line.matches("\\s+.*") && lastLine.length() > 0) {
        // Wrapped continuation: join with a space if the previous line ends with a
        // bare "at" or this line starts with a thread-dump header word.
        if (lastLine.toString().matches("\\s*at") //separate 'at' from filename
            || ContainerUtil.or(IMPORTANT_THREAD_DUMP_WORDS, word -> line.startsWith(word))) {
          builder.append(" ");
        }
      }
      builder.append(trimSuffix(line));
    }
    return builder.toString();
  }

  // Strip trailing whitespace/control characters (anything <= ' ') from a line.
  private static String trimSuffix(final String line) {
    int len = line.length();

    while ((0 < len) && (line.charAt(len-1) <= ' ')) {
      len--;
    }
    return (len < line.length()) ? line.substring(0, len) : line;
  }

  // Lines that always start a new output line in a normalized trace.
  private static boolean mustHaveNewLineBefore(String line) {
    final int nonWs = CharArrayUtil.shiftForward(line, 0, " \t");
    if (nonWs < line.length()) {
      line = line.substring(nonWs);
    }

    if (line.startsWith("at")) return true;        // Start of the new stack frame entry
    if (line.startsWith("Caused")) return true;    // Caused by message
    if (line.startsWith("- locked")) return true;  // "Locked a monitor" logging
    if (line.startsWith("- waiting")) return true; // "Waiting for monitor" logging
    if (line.startsWith("- parking to wait")) return true;
    if (line.startsWith("java.lang.Thread.State")) return true;
    if (line.startsWith("\"")) return true;        // Start of the new thread (thread name)
    return false;
  }

  // Apply any pending VCS-annotation settings, then unscramble with alternative
  // resolve enabled (so dumb mode doesn't block symbol resolution).
  @Override
  protected void doOKAction() {
    if (myConfigurable != null && myConfigurable.isModified()) {
      myConfigurable.apply();
    }
    DumbService.getInstance(myProject).withAlternativeResolveEnabled(() -> {
      if (performUnscramble()) {
        myLogFile.addCurrentTextToHistory();
        close(OK_EXIT_CODE);
      }
    });
  }

  @Override
  public void doHelpAction() {
    HelpManager.getInstance().invokeHelp("find.analyzeStackTrace");
  }

  private boolean performUnscramble() {
    UnscrambleSupport selectedUnscrambler = getSelectedUnscrambler();
    JComponent settings = mySettingsPanel.getComponentCount() == 0 ? null : (JComponent)mySettingsPanel.getComponent(0);
    return showUnscrambledText(selectedUnscrambler, myLogFile.getText(), settings, myProject, myStacktraceEditorPanel.getText()) != null;
  }

  // Run the text through the unscrambler (when given), parse it as a thread dump,
  // and open a console tab.  Returns null if unscrambling failed.
  @Nullable
  static <T extends JComponent> RunContentDescriptor showUnscrambledText(@Nullable UnscrambleSupport<T> unscrambleSupport,
                                                                         String logName,
                                                                         @Nullable T settings,
                                                                         Project project,
                                                                         String textToUnscramble) {
    String unscrambledTrace = unscrambleSupport == null ? textToUnscramble : unscrambleSupport.unscramble(project,textToUnscramble, logName, settings);
    if (unscrambledTrace == null) return null;
    List<ThreadState> threadStates = ThreadDumpParser.parse(unscrambledTrace);
    return addConsole(project, threadStates, unscrambledTrace);
  }

  // Choose the tab title and icon by content: thread dump, named exception, or
  // deadlock (deadlock check overrides the others), then open the console.
  private static RunContentDescriptor addConsole(final Project project, final List<ThreadState> threadDump, String unscrambledTrace) {
    Icon icon = null;
    String message = IdeBundle.message("unscramble.unscrambled.stacktrace.tab");
    if (!threadDump.isEmpty()) {
      message = IdeBundle.message("unscramble.unscrambled.threaddump.tab");
      icon = AllIcons.Debugger.ThreadStates.Threaddump;
    }
    else {
      String name = getExceptionName(unscrambledTrace);
      if (name != null) {
        message = name;
        icon = AllIcons.Debugger.ThreadStates.Exception;
      }
    }
    if (ContainerUtil.find(threadDump, DEADLOCK_CONDITION) != null) {
      message = IdeBundle.message("unscramble.unscrambled.deadlock.tab");
      icon = AllIcons.Debugger.KillProcess;
    }
    return AnalyzeStacktraceUtil.addConsole(project, threadDump.size() > 1 ? new ThreadDumpConsoleFactory(project, threadDump) : null, message, unscrambledTrace, icon);
  }

  @Override
  protected String getDimensionServiceKey() {
    return "#com.intellij.unscramble.UnscrambleDialog";
  }

  // Look for an exception class name in the first three lines of the trace.
  @Nullable
  private static String getExceptionName(String unscrambledTrace) {
    @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
    BufferedReader reader = new BufferedReader(new StringReader(unscrambledTrace));
    for (int i = 0; i < 3; i++) {
      try {
        String line = reader.readLine();
        if (line == null) return null;
        line = line.trim();
        String name = getExceptionAbbreviation(line);
        if (name != null) return name;
      }
      catch (IOException e) {
        return null;
      }
    }
    return null;
  }

  // If the line is a dotted class name, return its abbreviation (uppercase
  // initials, e.g. "NPE") or, when too short, the simple class name.
  // NOTE(review): substring(lastDelimiter) includes the delimiter itself, so the
  // fallback is e.g. ".Foo" rather than "Foo" — looks like an off-by-one
  // (lastDelimiter + 1 intended); confirm before changing, output is user-visible.
  @Nullable
  private static String getExceptionAbbreviation(String line) {
    int lastDelimiter = 0;
    for (int j = 0; j < line.length(); j++) {
      char c = line.charAt(j);
      if (c == '.' || c == '$') {
        lastDelimiter = j;
        continue;
      }
      if (!StringUtil.isJavaIdentifierPart(c)) {
        return null;
      }
    }
    String clazz = line.substring(lastDelimiter);
    String abbreviate = abbreviate(clazz);
    return abbreviate.length() > 1 ? abbreviate : clazz;
  }

  // Keep only the uppercase characters of a name ("NullPointerException" -> "NPE").
  private static String abbreviate(String s) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < s.length(); i++) {
      char c = s.charAt(i);
      if (Character.isUpperCase(c)) {
        builder.append(c);
      }
    }
    return builder.toString();
  }
}
/*
 * Copyright 2016 Martin Winandy
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package org.tinylog.pattern;

import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

import org.junit.Rule;
import org.junit.Test;
import org.tinylog.Level;
import org.tinylog.core.LogEntry;
import org.tinylog.rules.SystemStreamCollector;
import org.tinylog.util.LogEntryBuilder;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link FormatPatternParser}.
 */
public final class FormatPatternParserTest {

	// Platform-dependent line separator; used wherever rendered output spans lines
	private static final String NEW_LINE = System.lineSeparator();

	/**
	 * Redirects and collects system output streams.
	 */
	@Rule
	public final SystemStreamCollector systemStream = new SystemStreamCollector(false);

	/**
	 * Verifies that a plain text will be kept and output correctly.
	 */
	@Test
	public void plainText() {
		assertThat(render("Hello World!", LogEntryBuilder.empty().create())).isEqualTo("Hello World!");
	}

	/**
	 * Verifies that {@code {date}} can be parsed and the returned token will output the date of issue.
	 */
	@Test
	public void dateWithDefaultPattern() {
		LocalDate date = LocalDate.of(1985, 6, 3);
		assertThat(render("date", LogEntryBuilder.empty().date(date).create())).contains("1985", "06", "03");
	}

	/**
	 * Verifies that {@code {date}} can be parsed with a defined pattern and the returned token will output the date of
	 * issue as defined in that pattern.
	 */
	@Test
	public void dateWithDefinedPattern() {
		LocalDate date = LocalDate.of(1985, 6, 3);
		assertThat(render("date: yyyy-MM-dd", LogEntryBuilder.empty().date(date).create())).isEqualTo("1985-06-03");
	}

	/**
	 * Verifies that a default pattern will be used, if the custom pattern for {@code {date}} is invalid.
	 */
	@Test
	public void dateWithInvalidPattern() {
		LocalDate date = LocalDate.of(1985, 6, 3);
		// Falls back to the default pattern and reports the broken one on the error stream
		assertThat(render("date: inval'd", LogEntryBuilder.empty().date(date).create())).contains("1985", "06", "03");
		assertThat(systemStream.consumeErrorOutput()).containsOnlyOnce("ERROR").containsOnlyOnce("inval'd");
	}

	/**
	 * Verifies that {@code {timestamp}} can be parsed and the returned token will output the timestamp of issue, in seconds.
	 */
	@Test
	public void timestampWithDefaultPattern() {
		ZonedDateTime date = LocalDate.of(1985, 6, 3).atStartOfDay(ZoneOffset.UTC);
		// 486604800 is 1985-06-03T00:00:00Z expressed in Unix epoch seconds
		assertThat(render("timestamp", LogEntryBuilder.empty().date(date).create())).isEqualTo("486604800");
	}

	/**
	 * Verifies that {@code {timestamp}} can be parsed with a specified milliseconds pattern and the returned token will output the date of
	 * issue as a timestamp in milliseconds.
	 */
	@Test
	public void timestampWithMillisecondsPattern() {
		ZonedDateTime date = LocalDate.of(1985, 6, 3).atStartOfDay(ZoneOffset.UTC);
		assertThat(render("timestamp: milliseconds", LogEntryBuilder.empty().date(date).create())).isEqualTo("486604800000");
	}

	/**
	 * Verifies that the default seconds pattern will be used, if the custom pattern for {@code {timestamp}} is invalid.
	 */
	@Test
	public void timestampWithUnknownPattern() {
		ZonedDateTime date = LocalDate.of(1985, 6, 3).atStartOfDay(ZoneOffset.UTC);
		assertThat(render("timestamp: inval'd", LogEntryBuilder.empty().date(date).create())).isEqualTo("486604800");
	}

	/**
	 * Verifies that {@code {pid}} can be parsed and the returned token will output the process ID.
	 */
	@Test
	public void processId() {
		assertThat(render("pid", LogEntryBuilder.empty().create())).isEqualTo(Long.toString(ProcessHandle.current().pid()));
	}

	/**
	 * Verifies that {@code {thread}} can be parsed and the returned token will output the thread name.
	 */
	@Test
	public void threadName() {
		Thread thread = new Thread("My Thread");
		assertThat(render("thread", LogEntryBuilder.empty().thread(thread).create())).isEqualTo("My Thread");
	}

	/**
	 * Verifies that {@code {thread-id}} can be parsed and the returned token will output the thread ID.
	 */
	@Test
	public void threadId() {
		Thread thread = Thread.currentThread();
		assertThat(render("thread-id", LogEntryBuilder.empty().thread(thread).create())).isEqualTo(Long.toString(thread.getId()));
	}

	/**
	 * Verifies that {@code {context}} can be parsed and the returned token will output the defined thread context
	 * value.
	 */
	@Test
	public void context() {
		assertThat(render("context: pi", LogEntryBuilder.empty().create())).isEmpty();
		assertThat(render("context: pi", LogEntryBuilder.empty().context("pi", "3.14").create())).isEqualTo("3.14");
	}

	/**
	 * Verifies that {@code {context}} without a defined key will produce an error.
	 */
	@Test
	public void contextMissingKey() {
		assertThat(render("context", LogEntryBuilder.empty().create())).isEmpty();
		assertThat(systemStream.consumeErrorOutput()).containsOnlyOnce("ERROR").containsOnlyOnce("context");
	}

	/**
	 * Verifies that {@code {context}} can be parsed with a default value for non-existent mappings and the returned
	 * token will output the defined thread context value.
	 */
	@Test
	public void contextDefault() {
		assertThat(render("context: pi, -", LogEntryBuilder.empty().create())).isEqualTo("-");
		assertThat(render("context: pi, -", LogEntryBuilder.empty().context("pi", "3.14").create())).isEqualTo("3.14");
	}

	/**
	 * Verifies that {@code {context}} with a default value for non-existent mappings, but without a defined key will
	 * produce an error.
	 */
	@Test
	public void contextDefaultMissingKey() {
		assertThat(render("context: ,-", LogEntryBuilder.empty().create())).isEmpty();
		assertThat(systemStream.consumeErrorOutput()).containsOnlyOnce("ERROR").containsOnlyOnce("context");
	}

	/**
	 * Verifies that {@code {class}} can be parsed and the returned token will output the fully-qualified class name.
	 */
	@Test
	public void fullClassName() {
		assertThat(render("class", LogEntryBuilder.empty().className("my.package.MyClass").create())).isEqualTo("my.package.MyClass");
	}

	/**
	 * Verifies that {@code {class-name}} can be parsed and the returned token will output the class name without
	 * package.
	 */
	@Test
	public void simpleClassName() {
		assertThat(render("class-name", LogEntryBuilder.empty().className("my.package.MyClass").create())).isEqualTo("MyClass");
	}

	/**
	 * Verifies that {@code {package}} can be parsed and the returned token will output the package name.
	 */
	@Test
	public void packageName() {
		assertThat(render("package", LogEntryBuilder.empty().className("my.package.MyClass").create())).isEqualTo("my.package");
	}

	/**
	 * Verifies that {@code {method}} can be parsed and the returned token will output the method name.
	 */
	@Test
	public void methodName() {
		assertThat(render("method", LogEntryBuilder.empty().methodName("foo").create())).isEqualTo("foo");
	}

	/**
	 * Verifies that {@code {file}} can be parsed and the returned token will output the file name.
	 */
	@Test
	public void fileName() {
		assertThat(render("file", LogEntryBuilder.empty().fileName("MyFile.java").create())).isEqualTo("MyFile.java");
	}

	/**
	 * Verifies that {@code {line}} can be parsed and the returned token will output the source line number.
	 */
	@Test
	public void lineNumber() {
		assertThat(render("line", LogEntryBuilder.empty().lineNumber(42).create())).isEqualTo("42");
	}

	/**
	 * Verifies that {@code {tag}} can be parsed and the returned token will output the logger tag if existing.
	 */
	@Test
	public void tag() {
		assertThat(render("tag", LogEntryBuilder.empty().tag("SYSTEM").create())).isEqualTo("SYSTEM");
		assertThat(render("tag", LogEntryBuilder.empty().create())).isEmpty();
	}

	/**
	 * Verifies that {@code {tag}} can be parsed with a default value for non-existent tags and the returned token will
	 * output the logger tag if existing.
	 */
	@Test
	public void tagDefault() {
		assertThat(render("tag", LogEntryBuilder.empty().tag("SYSTEM").create())).isEqualTo("SYSTEM");
		assertThat(render("tag: -", LogEntryBuilder.empty().create())).isEqualTo("-");
	}

	/**
	 * Verifies that {@code {level}} can be parsed and the returned token will output the severity level.
	 */
	@Test
	public void level() {
		assertThat(render("level", LogEntryBuilder.empty().level(Level.DEBUG).create())).isEqualTo("DEBUG");
	}

	/**
	 * Verifies that {@code {message}} can be parsed and the returned token will output the text message as well as the
	 * exception, if no throwable filters are defined.
	 */
	@Test
	public void unfilteredMessage() {
		Exception exception = new NullPointerException();
		// One line for the message + exception class, one per stack trace element
		assertThat(render("message", LogEntryBuilder.empty().message("Hello World!").exception(exception).create()))
			.startsWith("Hello World!")
			.contains(NullPointerException.class.getName())
			.contains("at org.tinylog")
			.hasLineCount(exception.getStackTrace().length + 1);
	}

	/**
	 * Verifies that {@code {message}} can be parsed and the returned token will output a filtered exception,
	 * if a throwable filter is defined.
	 */
	@Test
	public void filteredMessage() {
		Exception exception = new NullPointerException();
		assertThat(render("message", LogEntryBuilder.empty().exception(exception).create(), "strip: org.tinylog"))
			.startsWith(NullPointerException.class.getName())
			.contains("at ")
			.doesNotContain("at org.tinylog");
	}

	/**
	 * Verifies that {@code {message-only}} can be parsed and the returned token will output the text message, but not
	 * the exception.
	 */
	@Test
	public void messageOnly() {
		Exception exception = new NullPointerException();
		assertThat(render("message-only", LogEntryBuilder.empty().message("Hello World!").exception(exception).create()))
			.isEqualTo("Hello World!");
	}

	/**
	 * Verifies that {@code {exception}} can be parsed and the returned token will output the exception,
	 * if no throwable filters are defined.
	 */
	@Test
	public void unfilteredException() {
		Exception exception = new NullPointerException();
		assertThat(render("exception", LogEntryBuilder.empty().exception(exception).create()))
			.contains(NullPointerException.class.getName())
			.hasLineCount(exception.getStackTrace().length + 1);
	}

	/**
	 * Verifies that {@code {exception}} can be parsed and the returned token will output a filtered exception,
	 * if a throwable filter is defined.
	 */
	@Test
	public void filteredException() {
		Exception exception = new NullPointerException();
		assertThat(render("exception", LogEntryBuilder.empty().exception(exception).create(), "strip: org.tinylog"))
			.startsWith(NullPointerException.class.getName())
			.contains("at ")
			.doesNotContain("at org.tinylog");
	}

	/**
	 * Verifies that {@code {opening-curly-bracket}} can be parsed and outputs a single opening curly bracket '{'.
	 */
	@Test
	public void openingCurlyBracket() {
		assertThat(render("opening-curly-bracket", LogEntryBuilder.empty().create())).isEqualTo("{");
	}

	/**
	 * Verifies that {@code {closing-curly-bracket}} can be parsed and outputs a single closing curly bracket '}'.
	 */
	@Test
	public void closingCurlyBracket() {
		assertThat(render("closing-curly-bracket", LogEntryBuilder.empty().create())).isEqualTo("}");
	}

	/**
	 * Verifies that {@code {pipe}} can be parsed and outputs a single vertical bar '|'.
	 */
	@Test
	public void pipe() {
		assertThat(render("pipe", LogEntryBuilder.empty().create())).isEqualTo("|");
	}

	/**
	 * Verifies that {@code {any | min-size=X}} can be parsed and the returned token will apply minimum size.
	 */
	@Test
	public void minimumSize() {
		// "INFO" (4 chars) padded with trailing spaces to the minimum size of 6
		assertThat(render("{level | min-size=6}", LogEntryBuilder.empty().level(Level.INFO).create())).isEqualTo("INFO  ");
	}

	/**
	 * Verifies that {@code {{any}:|min-size=X}} can be parsed and the returned token will apply minimum size.
	 */
	@Test
	public void nestedMinimumSize() {
		assertThat(render("{{level}:|min-size=6}", LogEntryBuilder.empty().level(Level.INFO).create())).isEqualTo("INFO: ");
	}

	/**
	 * Verifies that invalid minimum size values will produce an error.
	 */
	@Test
	public void invalidMinimumSize() {
		assertThat(render("{level | min-size=-1}", LogEntryBuilder.empty().level(Level.INFO).create())).isEqualTo("INFO");
		assertThat(systemStream.consumeErrorOutput())
			.containsOnlyOnce("ERROR")
			.containsOnlyOnce("min-size")
			.containsOnlyOnce("-1");
	}

	/**
	 * Verifies that {@code {any | indent=X}} can be parsed and the returned token will apply indentation.
	 */
	@Test
	public void indentation() {
		// Continuation lines are indented by the configured two spaces
		assertThat(render("{message | indent=2}", LogEntryBuilder.empty().message("12" + NEW_LINE + "3").create()))
			.isEqualTo("12" + NEW_LINE + "  3");
	}

	/**
	 * Verifies that invalid indentation values will produce an error.
	 */
	@Test
	public void invalidIndentation() {
		assertThat(render("{level | indent=ABC}", LogEntryBuilder.empty().level(Level.INFO).create())).isEqualTo("INFO");
		assertThat(systemStream.consumeErrorOutput())
			.containsOnlyOnce("ERROR")
			.containsOnlyOnce("indent")
			.containsOnlyOnce("ABC");
	}

	/**
	 * Verifies that a combination of multiple placeholders can be parsed and the returned token will output the
	 * expected values.
	 */
	@Test
	public void combined() {
		assertThat(render("<{file}/{message}>", LogEntryBuilder.empty().fileName("MyFile.java").message("Hello World!").create()))
			.isEqualTo("<MyFile.java/Hello World!>");
	}

	/**
	 * Verifies that a nested placeholder can be parsed and the returned token will output the expected value.
	 */
	@Test
	public void nested() {
		assertThat(render("{{message}}", LogEntryBuilder.empty().message("Hello World!").create())).isEqualTo("Hello World!");
	}

	/**
	 * Verifies that a missing opening curly bracket will produce an error.
	 */
	@Test
	public void missingOpeningCurlyBracket() {
		assertThat(render("message}", LogEntryBuilder.empty().message("Hello World!").create())).isEqualTo("message}");
		assertThat(systemStream.consumeErrorOutput())
			.containsOnlyOnce("ERROR")
			.containsIgnoringCase("opening curly bracket")
			.containsOnlyOnce("message}");
	}

	/**
	 * Verifies that a missing closing curly bracket will produce an error.
	 */
	@Test
	public void missingClosingCurlyBracket() {
		assertThat(render("{message", LogEntryBuilder.empty().message("Hello World!").create())).isEqualTo("{message");
		assertThat(systemStream.consumeErrorOutput())
			.containsOnlyOnce("ERROR")
			.containsIgnoringCase("closing curly bracket")
			.containsOnlyOnce("{message");
	}

	/**
	 * Verifies that missing values for a style option will produce an error.
	 */
	@Test
	public void missingStyleOptionValue() {
		assertThat(render("{level | min-size}", LogEntryBuilder.empty().level(Level.INFO).create())).isEqualTo("INFO");
		assertThat(systemStream.consumeErrorOutput())
			.containsOnlyOnce("ERROR")
			.containsOnlyOnce("min-size");
	}

	/**
	 * Verifies that unknown style options will produce an error.
	 */
	@Test
	public void unknownStyleOption() {
		assertThat(render("{level | test=42}", LogEntryBuilder.empty().level(Level.INFO).create())).isEqualTo("INFO");
		assertThat(systemStream.consumeErrorOutput())
			.containsOnlyOnce("ERROR")
			.containsOnlyOnce("test");
	}

	/**
	 * Parses a pattern and renders the returned token afterwards.
	 *
	 * @param pattern
	 *            Pattern to parse
	 * @param entry
	 *            Log entry for rendering the produced token
	 * @return Render result of produced token
	 */
	private String render(final String pattern, final LogEntry entry) {
		return render(pattern, entry, null);
	}

	/**
	 * Parses a pattern and renders the returned token afterwards.
	 *
	 * @param pattern
	 *            Pattern to parse
	 * @param entry
	 *            Log entry for rendering the produced token
	 * @param filters
	 *            Throwable filters to apply for exceptions and other throwables
	 * @return Render result of produced token
	 */
	private String render(final String pattern, final LogEntry entry, final String filters) {
		Token token = new FormatPatternParser(filters).parse(pattern);
		if (token == null) {
			return null;
		} else {
			StringBuilder builder = new StringBuilder();
			token.render(entry, builder);
			return builder.toString();
		}
	}

}
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * * @author Wei Zhang, Language Technology Institute, School of Computer Science, Carnegie-Mellon University. * email: wei.zhang@cs.cmu.edu * */ package edu.cmu.geoparser.ui.geolocator.GUI; import java.awt.Color; import java.awt.EventQueue; import javax.swing.JFrame; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JMenu; import javax.swing.JScrollPane; import javax.swing.JTextArea; import javax.swing.JLabel; import javax.swing.JTextField; import javax.swing.JButton; import java.awt.event.ActionListener; import java.awt.event.ActionEvent; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; import javax.swing.BorderFactory; import javax.swing.border.Border; import edu.cmu.geoparser.Disambiguation.ContextDisamb; import edu.cmu.geoparser.io.GetReader; import edu.cmu.geoparser.io.GetWriter; import edu.cmu.geoparser.model.Tweet; import edu.cmu.geoparser.nlp.languagedetector.LangDetector; import edu.cmu.geoparser.nlp.ner.FeatureExtractor.FeatureGenerator; 
import edu.cmu.geoparser.parser.english.EnglishParser; import edu.cmu.geoparser.resource.trie.IndexSupportedTrie; public class Desktop { private JFrame frame; private JTextField txtCusers; private JTextField txtCusersoutput; private BufferedWriter bw;// for writing to the file private BufferedReader br;// for reading to the file private StringBuilder sb;// for showing the text in the output box private String gazpath, resroot, enNER, langd; private IndexSupportedTrie topotrie; private FeatureGenerator enfgen; private EnglishParser enparser; private ContextDisamb c; /** * Launch the application. */ public static void main(String[] args) { EventQueue.invokeLater(new Runnable() { public void run() { try { Desktop window = new Desktop(); window.frame.setVisible(true); } catch (Exception e) { e.printStackTrace(); } } }); } /** * Create the application. */ public Desktop() { initialize(); } /** * Initialize the contents of the frame. */ private void initialize() { /** * Initialize tagging resourses, get ready for tagging. 
*/ final IndexSupportedTrie topotrie = new IndexSupportedTrie("GeoNames/cities1000.txt", "GazIndex/",true, false); final EnglishParser enparser = new EnglishParser("res/", topotrie, false); final ContextDisamb c = new ContextDisamb(); /** * Initialize the main window */ Border blackline = BorderFactory.createLineBorder(Color.black); frame = new JFrame(); frame.setBounds(100, 100, 588, 486); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setTitle("GeoLocator"); JMenuBar menuBar = new JMenuBar(); frame.setJMenuBar(menuBar); JMenu mnFile = new JMenu("File"); menuBar.add(mnFile); JMenuItem mntmNew = new JMenuItem("New"); mnFile.add(mntmNew); JMenuItem mntmOpen = new JMenuItem("Open"); mnFile.add(mntmOpen); JMenuItem mntmExit = new JMenuItem("Exit"); mntmExit.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { System.exit(0); } }); mnFile.add(mntmExit); JMenu mnHelp = new JMenu("Help"); menuBar.add(mnHelp); JMenuItem mntmAbout = new JMenuItem("About"); mnHelp.add(mntmAbout); mntmAbout.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { About window = new About(); window.frame.setVisible(true); } }); frame.getContentPane().setLayout(null); /** * Message Box text area. */ final JTextArea txtrMessageBox = new JTextArea(); txtrMessageBox .setText("Welcome to Geo-parser! 
It's developed by the Language Technology Institute in CMU."); txtrMessageBox.setEditable(false); txtrMessageBox.setLineWrap(true); txtrMessageBox.setBounds(10, 11, 288, 178); frame.getContentPane().add(txtrMessageBox); /** * Output box text area */ final JTextArea txtrOutputBox = new JTextArea(); txtrOutputBox.setText("Run the algorithm, and part of the results will be shown here."); txtrOutputBox.setBounds(10, 217, 288, 178); txtrOutputBox.setBorder(blackline); txtrOutputBox.setLineWrap(true); JScrollPane oscrollPane = new JScrollPane(txtrOutputBox); oscrollPane.setVisible(true); frame.getContentPane().add(txtrOutputBox); /** * input path, read in inbox content as file name */ txtCusers = new JTextField(); txtCusers.setText("Input file"); txtCusers.setToolTipText("Input path"); txtCusers.setBounds(338, 11, 180, 20); frame.getContentPane().add(txtCusers); txtCusers.setColumns(10); /** * output path, read in output box content as file name */ txtCusersoutput = new JTextField(); txtCusersoutput.setText("output file"); txtCusersoutput.setToolTipText("Output path"); txtCusersoutput.setBounds(338, 59, 180, 20); frame.getContentPane().add(txtCusersoutput); txtCusersoutput.setColumns(10); /** * Input file button clicked */ JButton btnNewButton = new JButton("Input File"); btnNewButton.setToolTipText("Select file of toponyms to tag."); btnNewButton.setBounds(528, 10, 33, 23); frame.getContentPane().add(btnNewButton); btnNewButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent arg0) { // to do: fill out the input text area with selected file path. 
} }); /** * output file button clicked, output file is ready */ JButton btnNewButton_1 = new JButton("Ouput File"); btnNewButton_1.setToolTipText("Select file to store results."); btnNewButton_1.setBounds(528, 58, 33, 23); frame.getContentPane().add(btnNewButton_1); btnNewButton_1.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent arg0) { // to do: fill out the output text area with selected file path. } }); JButton btnRun = new JButton("Run"); btnRun.setToolTipText("Run geolocator algorithm on above chosen files."); btnRun.setBounds(338, 218, 89, 23); frame.getContentPane().add(btnRun); btnRun.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent arg0) { try { br = GetReader.getUTF8FileReader(txtCusers.getText().trim()); bw = GetWriter.getFileWriter(txtCusersoutput.getText().trim()); } catch (FileNotFoundException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (UnsupportedEncodingException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } sb = new StringBuilder(); Tweet t; String line = null; try { txtrMessageBox.setText("Tagging"); while ((line = br.readLine()) != null) { sb.append(line).append("\t"); t = new Tweet(); t.setText(line); List<String> topo = enparser.parse(t); System.out.println(topo.toString()); HashSet<String> reducedmatch = new HashSet<String>(); for (String s : topo) reducedmatch.add(s.substring(3, s.length() - 3)); HashMap<String, String[]> result = c.returnBestTopo(topotrie, reducedmatch); Iterator<Entry<String, String[]>> i = result.entrySet().iterator(); while (i.hasNext()) { Entry<String, String[]> a = i.next(); sb.append("[ ").append(a.getKey()) // .append(",").append(a.getValue()[0]).append(",") // .append(a.getValue()[1]).append(",").append(a.getValue()[2]) .append(" ] "); } sb.append("\n"); } txtrOutputBox.setText(sb.toString()); bw.write(sb.toString()); 
bw.close(); br.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); JLabel lblNoteThereShould = new JLabel("Note: The text should not exceed 100MB."); lblNoteThereShould.setBounds(20, 192, 278, 14); frame.getContentPane().add(lblNoteThereShould); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.airlift.concurrent.BoundedExecutor; import com.facebook.airlift.configuration.AbstractConfigurationAwareModule; import com.facebook.airlift.http.server.TheServlet; import com.facebook.airlift.stats.GcMonitor; import com.facebook.airlift.stats.JmxGcMonitor; import com.facebook.airlift.stats.PauseMeter; import com.facebook.drift.client.address.AddressSelector; import com.facebook.drift.transport.netty.client.DriftNettyClientModule; import com.facebook.drift.transport.netty.server.DriftNettyServerModule; import com.facebook.presto.GroupByHashPageIndexerFactory; import com.facebook.presto.PagesIndexPageSorter; import com.facebook.presto.SystemSessionProperties; import com.facebook.presto.block.BlockEncodingManager; import com.facebook.presto.block.BlockJsonSerde; import com.facebook.presto.client.NodeVersion; import com.facebook.presto.client.ServerInfo; import com.facebook.presto.common.block.Block; import com.facebook.presto.common.block.BlockEncoding; import com.facebook.presto.common.block.BlockEncodingSerde; import com.facebook.presto.common.type.Type; import com.facebook.presto.common.type.TypeManager; import com.facebook.presto.connector.ConnectorManager; import com.facebook.presto.connector.system.SystemConnectorModule; import com.facebook.presto.cost.FilterStatsCalculator; import com.facebook.presto.cost.ScalarStatsCalculator; import 
com.facebook.presto.cost.StatsNormalizer; import com.facebook.presto.event.SplitMonitor; import com.facebook.presto.execution.ExecutionFailureInfo; import com.facebook.presto.execution.ExplainAnalyzeContext; import com.facebook.presto.execution.LocationFactory; import com.facebook.presto.execution.MemoryRevokingScheduler; import com.facebook.presto.execution.NodeTaskMap; import com.facebook.presto.execution.QueryManagerConfig; import com.facebook.presto.execution.SqlTaskManager; import com.facebook.presto.execution.StageInfo; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.execution.TaskManagementExecutor; import com.facebook.presto.execution.TaskManager; import com.facebook.presto.execution.TaskManagerConfig; import com.facebook.presto.execution.TaskStatus; import com.facebook.presto.execution.executor.MultilevelSplitQueue; import com.facebook.presto.execution.executor.TaskExecutor; import com.facebook.presto.execution.scheduler.FlatNetworkTopology; import com.facebook.presto.execution.scheduler.LegacyNetworkTopology; import com.facebook.presto.execution.scheduler.NetworkTopology; import com.facebook.presto.execution.scheduler.NodeScheduler; import com.facebook.presto.execution.scheduler.NodeSchedulerConfig; import com.facebook.presto.execution.scheduler.NodeSchedulerExporter; import com.facebook.presto.execution.scheduler.nodeSelection.NodeSelectionStats; import com.facebook.presto.index.IndexManager; import com.facebook.presto.memory.LocalMemoryManager; import com.facebook.presto.memory.LocalMemoryManagerExporter; import com.facebook.presto.memory.MemoryInfo; import com.facebook.presto.memory.MemoryManagerConfig; import com.facebook.presto.memory.MemoryPoolAssignmentsRequest; import com.facebook.presto.memory.MemoryResource; import com.facebook.presto.memory.NodeMemoryConfig; import com.facebook.presto.memory.ReservedSystemMemoryConfig; import com.facebook.presto.metadata.AnalyzePropertyManager; import 
com.facebook.presto.metadata.CatalogManager; import com.facebook.presto.metadata.ColumnPropertyManager; import com.facebook.presto.metadata.DiscoveryNodeManager; import com.facebook.presto.metadata.ForNodeManager; import com.facebook.presto.metadata.FunctionManager; import com.facebook.presto.metadata.HandleJsonModule; import com.facebook.presto.metadata.InternalNodeManager; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.metadata.SchemaPropertyManager; import com.facebook.presto.metadata.SessionPropertyManager; import com.facebook.presto.metadata.StaticCatalogStore; import com.facebook.presto.metadata.StaticCatalogStoreConfig; import com.facebook.presto.metadata.StaticFunctionNamespaceStore; import com.facebook.presto.metadata.StaticFunctionNamespaceStoreConfig; import com.facebook.presto.metadata.TablePropertyManager; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.operator.ExchangeClientConfig; import com.facebook.presto.operator.ExchangeClientFactory; import com.facebook.presto.operator.ExchangeClientSupplier; import com.facebook.presto.operator.ForExchange; import com.facebook.presto.operator.LookupJoinOperators; import com.facebook.presto.operator.OperatorStats; import com.facebook.presto.operator.PagesIndex; import com.facebook.presto.operator.TableCommitContext; import com.facebook.presto.operator.index.IndexJoinLookupStats; import com.facebook.presto.server.remotetask.HttpLocationFactory; import com.facebook.presto.server.thrift.FixedAddressSelector; import com.facebook.presto.server.thrift.ThriftServerInfoClient; import com.facebook.presto.server.thrift.ThriftServerInfoService; import com.facebook.presto.server.thrift.ThriftTaskClient; import com.facebook.presto.server.thrift.ThriftTaskService; import com.facebook.presto.spi.ConnectorSplit; import com.facebook.presto.spi.PageIndexerFactory; import com.facebook.presto.spi.PageSorter; import 
com.facebook.presto.spi.relation.DeterminismEvaluator; import com.facebook.presto.spi.relation.DomainTranslator; import com.facebook.presto.spi.relation.PredicateCompiler; import com.facebook.presto.spi.relation.VariableReferenceExpression; import com.facebook.presto.spiller.FileSingleStreamSpillerFactory; import com.facebook.presto.spiller.GenericPartitioningSpillerFactory; import com.facebook.presto.spiller.GenericSpillerFactory; import com.facebook.presto.spiller.LocalSpillManager; import com.facebook.presto.spiller.NodeSpillConfig; import com.facebook.presto.spiller.PartitioningSpillerFactory; import com.facebook.presto.spiller.SingleStreamSpillerFactory; import com.facebook.presto.spiller.SpillerFactory; import com.facebook.presto.spiller.SpillerStats; import com.facebook.presto.split.PageSinkManager; import com.facebook.presto.split.PageSinkProvider; import com.facebook.presto.split.PageSourceManager; import com.facebook.presto.split.PageSourceProvider; import com.facebook.presto.split.SplitManager; import com.facebook.presto.sql.Serialization.ExpressionDeserializer; import com.facebook.presto.sql.Serialization.ExpressionSerializer; import com.facebook.presto.sql.Serialization.FunctionCallDeserializer; import com.facebook.presto.sql.Serialization.VariableReferenceExpressionDeserializer; import com.facebook.presto.sql.Serialization.VariableReferenceExpressionSerializer; import com.facebook.presto.sql.SqlEnvironmentConfig; import com.facebook.presto.sql.analyzer.FeaturesConfig; import com.facebook.presto.sql.gen.ExpressionCompiler; import com.facebook.presto.sql.gen.JoinCompiler; import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler; import com.facebook.presto.sql.gen.OrderingCompiler; import com.facebook.presto.sql.gen.PageFunctionCompiler; import com.facebook.presto.sql.gen.RowExpressionPredicateCompiler; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.parser.SqlParserOptions; import 
com.facebook.presto.sql.planner.CompilerConfig; import com.facebook.presto.sql.planner.ConnectorPlanOptimizerManager; import com.facebook.presto.sql.planner.LocalExecutionPlanner; import com.facebook.presto.sql.planner.NodePartitioningManager; import com.facebook.presto.sql.planner.PartitioningProviderManager; import com.facebook.presto.sql.planner.PlanFragment; import com.facebook.presto.sql.planner.sanity.PlanChecker; import com.facebook.presto.sql.relational.RowExpressionDeterminismEvaluator; import com.facebook.presto.sql.relational.RowExpressionDomainTranslator; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.statusservice.NodeStatusService; import com.facebook.presto.transaction.TransactionManagerConfig; import com.facebook.presto.type.TypeDeserializer; import com.facebook.presto.type.TypeRegistry; import com.facebook.presto.util.FinalizerService; import com.facebook.presto.util.GcStatusMonitor; import com.facebook.presto.version.EmbedVersion; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Key; import com.google.inject.Provides; import com.google.inject.Scopes; import com.twitter.presto.maintenance.MaintenanceCoordinatorModule; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import io.airlift.units.Duration; import javax.annotation.PreDestroy; import javax.inject.Inject; import javax.inject.Singleton; import javax.servlet.Servlet; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import static com.facebook.airlift.concurrent.ConcurrentScheduledExecutor.createConcurrentScheduledExecutor; import static com.facebook.airlift.concurrent.Threads.daemonThreadsNamed; import static com.facebook.airlift.configuration.ConditionalModule.installModuleIf; import static com.facebook.airlift.configuration.ConfigBinder.configBinder; import static 
com.facebook.airlift.discovery.client.DiscoveryBinder.discoveryBinder;
import static com.facebook.airlift.http.client.HttpClientBinder.httpClientBinder;
import static com.facebook.airlift.jaxrs.JaxrsBinder.jaxrsBinder;
import static com.facebook.airlift.json.JsonBinder.jsonBinder;
import static com.facebook.airlift.json.JsonCodecBinder.jsonCodecBinder;
import static com.facebook.drift.client.guice.DriftClientBinder.driftClientBinder;
import static com.facebook.drift.server.guice.DriftServerBinder.driftServerBinder;
import static com.facebook.presto.execution.scheduler.NodeSchedulerConfig.NetworkTopologyType.FLAT;
import static com.facebook.presto.execution.scheduler.NodeSchedulerConfig.NetworkTopologyType.LEGACY;
import static com.facebook.presto.server.smile.SmileCodecBinder.smileCodecBinder;
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.inject.multibindings.MapBinder.newMapBinder;
import static com.google.inject.multibindings.Multibinder.newSetBinder;
import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.weakref.jmx.guice.ExportBinder.newExporter;

/**
 * Main Guice module for a Presto server process. Installs either the
 * coordinator or the worker module based on {@link ServerConfig#isCoordinator()},
 * then wires the services shared by both roles: parsing, scheduling, task
 * execution, exchange, memory management, metadata, codecs, spilling,
 * Thrift RPC, and lifecycle cleanup.
 *
 * <p>NOTE(review): binding order and the grouped section comments below are
 * preserved exactly; several bindings carry TODOs about moving them to
 * {@code CoordinatorModule}.
 */
public class ServerMainModule
        extends AbstractConfigurationAwareModule
{
    // Options captured at construction; a defensive copy is stored, but note
    // that setup() mutates this copy via useEnhancedErrorHandler() before
    // binding it as an instance.
    private final SqlParserOptions sqlParserOptions;

    /**
     * @param sqlParserOptions parser options to bind for the whole server; copied defensively
     */
    public ServerMainModule(SqlParserOptions sqlParserOptions)
    {
        requireNonNull(sqlParserOptions, "sqlParserOptions is null");
        this.sqlParserOptions = SqlParserOptions.copyOf(sqlParserOptions);
    }

    /**
     * Configures all server-wide bindings. Called once by Guice/Airlift
     * bootstrap with the module's binder.
     *
     * @param binder the Guice binder to register bindings on
     */
    @Override
    protected void setup(Binder binder)
    {
        ServerConfig serverConfig = buildConfigObject(ServerConfig.class);

        // Role selection: coordinator (optionally also maintenance coordinator)
        // or worker. Exactly one of the two role modules is installed.
        if (serverConfig.isCoordinator()) {
            install(new CoordinatorModule());
            if (serverConfig.isMaintenanceCoordinator()) {
                install(new MaintenanceCoordinatorModule());
            }
        }
        else {
            install(new WorkerModule());
        }
        install(new InternalCommunicationModule());

        configBinder(binder).bindConfig(FeaturesConfig.class);

        binder.bind(PlanChecker.class).in(Scopes.SINGLETON);

        binder.bind(SqlParser.class).in(Scopes.SINGLETON);
        binder.bind(SqlParserOptions.class).toInstance(sqlParserOptions);
        // Mutates the bound instance based on server config (see field note above).
        sqlParserOptions.useEnhancedErrorHandler(serverConfig.isEnhancedErrorReporting());

        jaxrsBinder(binder).bind(ThrowableMapper.class);

        configBinder(binder).bindConfig(QueryManagerConfig.class);
        configBinder(binder).bindConfig(SqlEnvironmentConfig.class);

        jsonCodecBinder(binder).bindJsonCodec(ViewDefinition.class);

        // Present only on coordinators; declared optional so workers can inject empty.
        newOptionalBinder(binder, ExplainAnalyzeContext.class);

        // GC Monitor
        binder.bind(GcMonitor.class).to(JmxGcMonitor.class).in(Scopes.SINGLETON);

        // session properties
        binder.bind(SessionPropertyManager.class).in(Scopes.SINGLETON);
        binder.bind(SystemSessionProperties.class).in(Scopes.SINGLETON);
        binder.bind(SessionPropertyDefaults.class).in(Scopes.SINGLETON);

        // schema properties
        binder.bind(SchemaPropertyManager.class).in(Scopes.SINGLETON);

        // table properties
        binder.bind(TablePropertyManager.class).in(Scopes.SINGLETON);

        // column properties
        binder.bind(ColumnPropertyManager.class).in(Scopes.SINGLETON);

        // analyze properties
        binder.bind(AnalyzePropertyManager.class).in(Scopes.SINGLETON);

        // node manager: discovery-based cluster membership plus HTTP/Thrift
        // clients used to poll other nodes.
        discoveryBinder(binder).bindSelector("presto");
        binder.bind(DiscoveryNodeManager.class).in(Scopes.SINGLETON);
        binder.bind(InternalNodeManager.class).to(DiscoveryNodeManager.class).in(Scopes.SINGLETON);
        newExporter(binder).export(DiscoveryNodeManager.class).withGeneratedName();
        httpClientBinder(binder).bindHttpClient("node-manager", ForNodeManager.class)
                .withTracing()
                .withConfigDefaults(config -> {
                    config.setRequestTimeout(new Duration(10, SECONDS));
                });
        driftClientBinder(binder).bindDriftClient(ThriftServerInfoClient.class, ForNodeManager.class)
                .withAddressSelector(((addressSelectorBinder, annotation, prefix) ->
                        addressSelectorBinder.bind(AddressSelector.class).annotatedWith(annotation).to(FixedAddressSelector.class)));

        // node scheduler
        // TODO: remove from NodePartitioningManager and move to CoordinatorModule
        configBinder(binder).bindConfig(NodeSchedulerConfig.class);
        binder.bind(NodeScheduler.class).in(Scopes.SINGLETON);
        binder.bind(NodeSelectionStats.class).in(Scopes.SINGLETON);
        newExporter(binder).export(NodeSelectionStats.class).withGeneratedName();
        binder.bind(NodeSchedulerExporter.class).in(Scopes.SINGLETON);
        binder.bind(NodeTaskMap.class).in(Scopes.SINGLETON);
        newExporter(binder).export(NodeScheduler.class).withGeneratedName();

        // network topology: conditional binding driven by the configured
        // topology name (LEGACY or FLAT); exactly one implementation is bound.
        // TODO: move to CoordinatorModule when NodeScheduler is moved
        install(installModuleIf(
                NodeSchedulerConfig.class,
                config -> LEGACY.equalsIgnoreCase(config.getNetworkTopology()),
                moduleBinder -> moduleBinder.bind(NetworkTopology.class).to(LegacyNetworkTopology.class).in(Scopes.SINGLETON)));
        install(installModuleIf(
                NodeSchedulerConfig.class,
                config -> FLAT.equalsIgnoreCase(config.getNetworkTopology()),
                moduleBinder -> moduleBinder.bind(NetworkTopology.class).to(FlatNetworkTopology.class).in(Scopes.SINGLETON)));

        // task execution
        jaxrsBinder(binder).bind(TaskResource.class);
        newExporter(binder).export(TaskResource.class).withGeneratedName();
        jaxrsBinder(binder).bind(TaskExecutorResource.class);
        newExporter(binder).export(TaskExecutorResource.class).withGeneratedName();
        binder.bind(TaskManagementExecutor.class).in(Scopes.SINGLETON);
        binder.bind(SqlTaskManager.class).in(Scopes.SINGLETON);
        binder.bind(TaskManager.class).to(Key.get(SqlTaskManager.class));

        // memory revoking scheduler
        binder.bind(MemoryRevokingScheduler.class).in(Scopes.SINGLETON);

        // Add monitoring for JVM pauses
        binder.bind(PauseMeter.class).in(Scopes.SINGLETON);
        newExporter(binder).export(PauseMeter.class).withGeneratedName();

        binder.bind(GcStatusMonitor.class).in(Scopes.SINGLETON);

        configBinder(binder).bindConfig(MemoryManagerConfig.class);
        configBinder(binder).bindConfig(NodeMemoryConfig.class);
        configBinder(binder).bindConfig(ReservedSystemMemoryConfig.class);
        binder.bind(LocalMemoryManager.class).in(Scopes.SINGLETON);
        binder.bind(LocalMemoryManagerExporter.class).in(Scopes.SINGLETON);
        binder.bind(EmbedVersion.class).in(Scopes.SINGLETON);
        newExporter(binder).export(TaskManager.class).withGeneratedName();
        binder.bind(TaskExecutor.class).in(Scopes.SINGLETON);
        newExporter(binder).export(TaskExecutor.class).withGeneratedName();
        binder.bind(MultilevelSplitQueue.class).in(Scopes.SINGLETON);
        newExporter(binder).export(MultilevelSplitQueue.class).withGeneratedName();
        binder.bind(LocalExecutionPlanner.class).in(Scopes.SINGLETON);
        configBinder(binder).bindConfig(CompilerConfig.class);
        binder.bind(ExpressionCompiler.class).in(Scopes.SINGLETON);
        newExporter(binder).export(ExpressionCompiler.class).withGeneratedName();
        binder.bind(PageFunctionCompiler.class).in(Scopes.SINGLETON);
        newExporter(binder).export(PageFunctionCompiler.class).withGeneratedName();
        configBinder(binder).bindConfig(TaskManagerConfig.class);
        binder.bind(IndexJoinLookupStats.class).in(Scopes.SINGLETON);
        newExporter(binder).export(IndexJoinLookupStats.class).withGeneratedName();
        binder.bind(AsyncHttpExecutionMBean.class).in(Scopes.SINGLETON);
        newExporter(binder).export(AsyncHttpExecutionMBean.class).withGeneratedName();
        binder.bind(JoinFilterFunctionCompiler.class).in(Scopes.SINGLETON);
        newExporter(binder).export(JoinFilterFunctionCompiler.class).withGeneratedName();
        binder.bind(JoinCompiler.class).in(Scopes.SINGLETON);
        newExporter(binder).export(JoinCompiler.class).withGeneratedName();
        binder.bind(OrderingCompiler.class).in(Scopes.SINGLETON);
        newExporter(binder).export(OrderingCompiler.class).withGeneratedName();
        binder.bind(PagesIndex.Factory.class).to(PagesIndex.DefaultFactory.class);
        binder.bind(LookupJoinOperators.class).in(Scopes.SINGLETON);

        // JSON/SMILE codecs for task- and stage-level wire types.
        jsonCodecBinder(binder).bindJsonCodec(TaskStatus.class);
        jsonCodecBinder(binder).bindJsonCodec(StageInfo.class);
        jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class);
        jsonCodecBinder(binder).bindJsonCodec(OperatorStats.class);
        jsonCodecBinder(binder).bindJsonCodec(ExecutionFailureInfo.class);
        jsonCodecBinder(binder).bindJsonCodec(TableCommitContext.class);
        smileCodecBinder(binder).bindSmileCodec(TaskStatus.class);
        smileCodecBinder(binder).bindSmileCodec(TaskInfo.class);
        jaxrsBinder(binder).bind(PagesResponseWriter.class);

        // exchange client
        binder.bind(ExchangeClientSupplier.class).to(ExchangeClientFactory.class).in(Scopes.SINGLETON);
        httpClientBinder(binder).bindHttpClient("exchange", ForExchange.class)
                .withTracing()
                .withFilter(GenerateTraceTokenRequestFilter.class)
                .withConfigDefaults(config -> {
                    config.setRequestTimeout(new Duration(10, SECONDS));
                    config.setMaxConnectionsPerServer(250);
                    config.setMaxContentLength(new DataSize(32, MEGABYTE));
                });
        binder.install(new DriftNettyClientModule());
        driftClientBinder(binder).bindDriftClient(ThriftTaskClient.class, ForExchange.class)
                .withAddressSelector(((addressSelectorBinder, annotation, prefix) ->
                        addressSelectorBinder.bind(AddressSelector.class).annotatedWith(annotation).to(FixedAddressSelector.class)));
        configBinder(binder).bindConfig(ExchangeClientConfig.class);
        binder.bind(ExchangeExecutionMBean.class).in(Scopes.SINGLETON);
        newExporter(binder).export(ExchangeExecutionMBean.class).withGeneratedName();

        // execution
        binder.bind(LocationFactory.class).to(HttpLocationFactory.class).in(Scopes.SINGLETON);

        // memory manager
        jaxrsBinder(binder).bind(MemoryResource.class);
        jsonCodecBinder(binder).bindJsonCodec(MemoryInfo.class);
        jsonCodecBinder(binder).bindJsonCodec(MemoryPoolAssignmentsRequest.class);
        smileCodecBinder(binder).bindSmileCodec(MemoryInfo.class);
        smileCodecBinder(binder).bindSmileCodec(MemoryPoolAssignmentsRequest.class);

        // transaction manager
        configBinder(binder).bindConfig(TransactionManagerConfig.class);

        // data stream provider
        binder.bind(PageSourceManager.class).in(Scopes.SINGLETON);
        binder.bind(PageSourceProvider.class).to(PageSourceManager.class).in(Scopes.SINGLETON);

        // page sink provider
        binder.bind(PageSinkManager.class).in(Scopes.SINGLETON);
        binder.bind(PageSinkProvider.class).to(PageSinkManager.class).in(Scopes.SINGLETON);

        // metadata
        binder.bind(StaticCatalogStore.class).in(Scopes.SINGLETON);
        configBinder(binder).bindConfig(StaticCatalogStoreConfig.class);
        binder.bind(StaticFunctionNamespaceStore.class).in(Scopes.SINGLETON);
        configBinder(binder).bindConfig(StaticFunctionNamespaceStoreConfig.class);
        binder.bind(FunctionManager.class).in(Scopes.SINGLETON);
        binder.bind(MetadataManager.class).in(Scopes.SINGLETON);
        binder.bind(Metadata.class).to(MetadataManager.class).in(Scopes.SINGLETON);

        // row expression utils
        binder.bind(DomainTranslator.class).to(RowExpressionDomainTranslator.class).in(Scopes.SINGLETON);
        binder.bind(PredicateCompiler.class).to(RowExpressionPredicateCompiler.class).in(Scopes.SINGLETON);
        binder.bind(DeterminismEvaluator.class).to(RowExpressionDeterminismEvaluator.class).in(Scopes.SINGLETON);

        // type
        binder.bind(TypeRegistry.class).in(Scopes.SINGLETON);
        binder.bind(TypeManager.class).to(TypeRegistry.class).in(Scopes.SINGLETON);
        jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
        newSetBinder(binder, Type.class);

        // plan
        jsonBinder(binder).addKeySerializerBinding(VariableReferenceExpression.class).to(VariableReferenceExpressionSerializer.class);
        jsonBinder(binder).addKeyDeserializerBinding(VariableReferenceExpression.class).to(VariableReferenceExpressionDeserializer.class);

        // split manager
        binder.bind(SplitManager.class).in(Scopes.SINGLETON);

        // partitioning provider manager
        binder.bind(PartitioningProviderManager.class).in(Scopes.SINGLETON);

        // node partitioning manager
        binder.bind(NodePartitioningManager.class).in(Scopes.SINGLETON);

        // connector plan optimizer manager
        binder.bind(ConnectorPlanOptimizerManager.class).in(Scopes.SINGLETON);

        // index manager
        binder.bind(IndexManager.class).in(Scopes.SINGLETON);

        // handle resolver
        binder.install(new HandleJsonModule());

        // connector
        binder.bind(ScalarStatsCalculator.class).in(Scopes.SINGLETON);
        binder.bind(StatsNormalizer.class).in(Scopes.SINGLETON);
        binder.bind(FilterStatsCalculator.class).in(Scopes.SINGLETON);
        binder.bind(ConnectorManager.class).in(Scopes.SINGLETON);

        // system connector
        binder.install(new SystemConnectorModule());

        // splits
        jsonCodecBinder(binder).bindJsonCodec(TaskUpdateRequest.class);
        jsonCodecBinder(binder).bindJsonCodec(ConnectorSplit.class);
        jsonCodecBinder(binder).bindJsonCodec(PlanFragment.class);
        smileCodecBinder(binder).bindSmileCodec(TaskUpdateRequest.class);
        smileCodecBinder(binder).bindSmileCodec(ConnectorSplit.class);
        smileCodecBinder(binder).bindSmileCodec(PlanFragment.class);
        jsonBinder(binder).addSerializerBinding(Slice.class).to(SliceSerializer.class);
        jsonBinder(binder).addDeserializerBinding(Slice.class).to(SliceDeserializer.class);
        jsonBinder(binder).addSerializerBinding(Expression.class).to(ExpressionSerializer.class);
        jsonBinder(binder).addDeserializerBinding(Expression.class).to(ExpressionDeserializer.class);
        jsonBinder(binder).addDeserializerBinding(FunctionCall.class).to(FunctionCallDeserializer.class);

        // split monitor
        binder.bind(SplitMonitor.class).in(Scopes.SINGLETON);

        // Determine the NodeVersion
        NodeVersion nodeVersion = new NodeVersion(serverConfig.getPrestoVersion());
        binder.bind(NodeVersion.class).toInstance(nodeVersion);

        // presto announcement: advertised to the discovery service so other
        // nodes can find this one and know its role/version.
        discoveryBinder(binder).bindHttpAnnouncement("presto")
                .addProperty("node_version", nodeVersion.toString())
                .addProperty("coordinator", String.valueOf(serverConfig.isCoordinator()))
                .addProperty("connectorIds", nullToEmpty(serverConfig.getDataSources()));

        // server info resource
        jaxrsBinder(binder).bind(ServerInfoResource.class);
        jsonCodecBinder(binder).bindJsonCodec(ServerInfo.class);

        // node status resource
        jaxrsBinder(binder).bind(StatusResource.class);
        jsonCodecBinder(binder).bindJsonCodec(NodeStatus.class);

        // plugin manager
        binder.bind(PluginManager.class).in(Scopes.SINGLETON);
        configBinder(binder).bindConfig(PluginManagerConfig.class);

        binder.bind(CatalogManager.class).in(Scopes.SINGLETON);

        // block encodings
        binder.bind(BlockEncodingManager.class).in(Scopes.SINGLETON);
        binder.bind(BlockEncodingSerde.class).to(BlockEncodingManager.class).in(Scopes.SINGLETON);
        newSetBinder(binder, BlockEncoding.class);
        jsonBinder(binder).addSerializerBinding(Block.class).to(BlockJsonSerde.Serializer.class);
        jsonBinder(binder).addDeserializerBinding(Block.class).to(BlockJsonSerde.Deserializer.class);

        // thread visualizer
        jaxrsBinder(binder).bind(ThreadResource.class);

        // PageSorter
        binder.bind(PageSorter.class).to(PagesIndexPageSorter.class).in(Scopes.SINGLETON);

        // PageIndexer
        binder.bind(PageIndexerFactory.class).to(GroupByHashPageIndexerFactory.class).in(Scopes.SINGLETON);

        // Finalizer
        binder.bind(FinalizerService.class).in(Scopes.SINGLETON);

        // Spiller
        binder.bind(SpillerFactory.class).to(GenericSpillerFactory.class).in(Scopes.SINGLETON);
        binder.bind(SingleStreamSpillerFactory.class).to(FileSingleStreamSpillerFactory.class).in(Scopes.SINGLETON);
        binder.bind(PartitioningSpillerFactory.class).to(GenericPartitioningSpillerFactory.class).in(Scopes.SINGLETON);
        binder.bind(SpillerStats.class).in(Scopes.SINGLETON);
        newExporter(binder).export(SpillerFactory.class).withGeneratedName();
        binder.bind(LocalSpillManager.class).in(Scopes.SINGLETON);
        configBinder(binder).bindConfig(NodeSpillConfig.class);

        // Thrift RPC
        binder.install(new DriftNettyServerModule());
        driftServerBinder(binder).bindService(ThriftTaskService.class);
        driftServerBinder(binder).bindService(ThriftServerInfoService.class);

        // Async page transport: servlet registered under the task async path.
        newMapBinder(binder, String.class, Servlet.class, TheServlet.class)
                .addBinding("/v1/task/async/*")
                .to(AsyncPageTransportServlet.class)
                .in(Scopes.SINGLETON);

        // cleanup
        binder.bind(ExecutorCleanup.class).in(Scopes.SINGLETON);

        //Optional Status Detector
        newOptionalBinder(binder, NodeStatusService.class);
    }

    /**
     * Scheduled executor used by the exchange client for fetching pages from
     * other nodes; thread count comes from {@link ExchangeClientConfig}.
     */
    @Provides
    @Singleton
    @ForExchange
    public static ScheduledExecutorService createExchangeExecutor(ExchangeClientConfig config)
    {
        return newScheduledThreadPool(config.getClientThreads(), daemonThreadsNamed("exchange-client-%s"));
    }

    /**
     * Unbounded core executor backing async HTTP responses; bounded by the
     * {@code BoundedExecutor} provider below.
     */
    @Provides
    @Singleton
    @ForAsyncRpc
    public static ExecutorService createAsyncHttpResponseCoreExecutor()
    {
        return newCachedThreadPool(daemonThreadsNamed("async-http-response-%s"));
    }

    /**
     * Caps concurrent async HTTP response work at the configured thread count,
     * running on the shared core executor above.
     */
    @Provides
    @Singleton
    @ForAsyncRpc
    public static BoundedExecutor createAsyncHttpResponseExecutor(@ForAsyncRpc ExecutorService coreExecutor, TaskManagerConfig config)
    {
        return new BoundedExecutor(coreExecutor, config.getHttpResponseThreads());
    }

    /**
     * Scheduled executor used to time out async HTTP requests; concurrency and
     * thread count come from {@link TaskManagerConfig}.
     */
    @Provides
    @Singleton
    @ForAsyncRpc
    public static ScheduledExecutorService createAsyncHttpTimeoutExecutor(TaskManagerConfig config)
    {
        return createConcurrentScheduledExecutor("async-http-timeout", config.getHttpTimeoutConcurrency(), config.getHttpTimeoutThreads());
    }

    /**
     * Lifecycle hook that shuts down the injected executors when the injector
     * is destroyed (via {@link PreDestroy}).
     */
    public static class ExecutorCleanup
    {
        private final List<ExecutorService> executors;

        @Inject
        public ExecutorCleanup(
                @ForExchange ScheduledExecutorService exchangeExecutor,
                @ForAsyncRpc ExecutorService httpResponseExecutor,
                @ForAsyncRpc ScheduledExecutorService httpTimeoutExecutor)
        {
            executors = ImmutableList.of(
                    exchangeExecutor,
                    httpResponseExecutor,
                    httpTimeoutExecutor);
        }

        /** Immediately shuts down all managed executors (shutdownNow, not graceful). */
        @PreDestroy
        public void shutdown()
        {
            executors.forEach(ExecutorService::shutdownNow);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.checks; import static org.easymock.EasyMock.anyString; import static org.easymock.EasyMock.expect; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import org.apache.ambari.annotations.UpgradeCheckInfo; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.orm.entities.RepositoryVersionEntity; import org.apache.ambari.server.state.CheckHelper; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.repository.ClusterVersionSummary; import org.apache.ambari.server.state.repository.VersionDefinitionXml; import org.apache.ambari.spi.ClusterInformation; import org.apache.ambari.spi.RepositoryType; import org.apache.ambari.spi.RepositoryVersion; import org.apache.ambari.spi.upgrade.UpgradeCheckDescription; import org.apache.ambari.spi.upgrade.UpgradeCheckGroup; import org.apache.ambari.spi.upgrade.UpgradeCheckRequest; import org.apache.ambari.spi.upgrade.UpgradeCheckResult; import 
org.apache.ambari.spi.upgrade.UpgradeCheckType;
import org.apache.ambari.spi.upgrade.UpgradeType;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Provider;

import junit.framework.Assert;

/**
 * Unit tests for ClusterCheck: entity-list formatting and the applicability
 * logic that decides whether an upgrade check runs for a given cluster and
 * repository (STANDARD vs. PATCH service coverage).
 *
 * <p>NOTE(review): mocks are mixed — EasyMock for cluster/VDF objects
 * (record/replay via {@code replayAll()}) and Mockito for the DAO inside
 * {@code MockCheckHelper}; keep the setup order intact when editing.
 */
public class ClusterCheckTest extends EasyMockSupport {
  // EasyMock-injected cluster registry shared by the inner check impls below.
  @Mock
  private Clusters clusters;

  /**
   * Used to mock out what services will be provided to us by the VDF/cluster.
   */
  @Mock
  private ClusterVersionSummary m_clusterVersionSummary;

  /**
   * Mocked version definition XML from which the cluster summary is obtained.
   */
  @Mock
  private VersionDefinitionXml m_vdfXml;

  // Shared description returned by every test check implementation.
  private static final UpgradeCheckDescription m_description = new UpgradeCheckDescription(
      "Test Check", UpgradeCheckType.CLUSTER, "Test Check", "Test Failure Reason");

  // Helper under test; exposes m_clusters / m_repositoryVersionDAO for stubbing.
  private MockCheckHelper m_mockCheckHelper = new MockCheckHelper();

  /** Injects the @Mock fields above before each test. */
  @Before
  public void setup() throws Exception {
    injectMocks(this);
  }

  /**
   * Verifies formatEntityList(): empty output for null/empty input, and
   * "a, b and c" style joining for 1..3 entries, regardless of check type.
   */
  @Test
  public void testFormatEntityList() {
    ClusterCheck check = new TestCheckImpl(UpgradeCheckType.HOST);

    Assert.assertEquals("", check.formatEntityList(null));

    final LinkedHashSet<String> failedOn = new LinkedHashSet<>();
    Assert.assertEquals("", check.formatEntityList(failedOn));

    failedOn.add("host1");
    Assert.assertEquals("host1", check.formatEntityList(failedOn));

    failedOn.add("host2");
    Assert.assertEquals("host1 and host2", check.formatEntityList(failedOn));

    failedOn.add("host3");
    Assert.assertEquals("host1, host2 and host3", check.formatEntityList(failedOn));

    // Formatting does not depend on the check type (or on having one at all).
    check = new TestCheckImpl(UpgradeCheckType.CLUSTER);
    Assert.assertEquals("host1, host2 and host3", check.formatEntityList(failedOn));

    check = new TestCheckImpl(UpgradeCheckType.SERVICE);
    Assert.assertEquals("host1, host2 and host3", check.formatEntityList(failedOn));

    check = new TestCheckImpl(null);
    Assert.assertEquals("host1, host2 and host3", check.formatEntityList(failedOn));
  }

  /**
   * A check is applicable when at least one of its applicable services is
   * available in the repository's cluster summary, and not applicable when
   * none are.
   *
   * <p>NOTE(review): unlike testIsApplicableForPatch(), this test never sets
   * {@code m_mockCheckHelper.m_clusters} — presumably MockCheckHelper does not
   * need it on this path; confirm if the helper changes.
   */
  @Test
  public void testIsApplicable() throws Exception{
    final String clusterName = "c1";
    final Cluster cluster = createMock(Cluster.class);
    // Installed services on the mocked cluster (values unused, keys matter).
    Map<String, Service> services = new HashMap<String, Service>(){{
      put("SERVICE1", null);
      put("SERVICE2", null);
      put("SERVICE3", null);
    }};

    Set<String> oneServiceList = Sets.newHashSet("SERVICE1");
    Set<String> atLeastOneServiceList = Sets.newHashSet("SERVICE1", "MISSING_SERVICE");
    Set<String> allServicesList = Sets.newHashSet("SERVICE1", "SERVICE2");
    Set<String> missingServiceList = Sets.newHashSet("MISSING_SERVICE");

    expect(clusters.getCluster(anyString())).andReturn(cluster).atLeastOnce();
    expect(cluster.getServices()).andReturn(services).atLeastOnce();

    RepositoryVersion repositoryVersion = createNiceMock(RepositoryVersion.class);
    expect(repositoryVersion.getId()).andReturn(1L).anyTimes();
    expect(repositoryVersion.getRepositoryType()).andReturn(RepositoryType.STANDARD).anyTimes();

    RepositoryVersionEntity repositoryVersionEntity = createNiceMock(RepositoryVersionEntity.class);
    expect(repositoryVersionEntity.getType()).andReturn(RepositoryType.STANDARD).anyTimes();
    expect(repositoryVersionEntity.getRepositoryXml()).andReturn(m_vdfXml).atLeastOnce();
    expect(m_vdfXml.getClusterSummary(EasyMock.anyObject(Cluster.class),
        EasyMock.anyObject(AmbariMetaInfo.class))).andReturn(m_clusterVersionSummary).atLeastOnce();

    // The summary reports SERVICE1 and SERVICE2 as available for this repo.
    expect(m_clusterVersionSummary.getAvailableServiceNames()).andReturn(
        allServicesList).atLeastOnce();

    final AmbariMetaInfo ami = createNiceMock(AmbariMetaInfo.class);
    m_mockCheckHelper.setMetaInfoProvider(new Provider<AmbariMetaInfo>() {
      @Override
      public AmbariMetaInfo get() {
        return ami;
      }
    });

    // Mockito stub: DAO lookup by any PK resolves to the mocked entity.
    Mockito.when(m_mockCheckHelper.m_repositoryVersionDAO.findByPK(Mockito.anyLong())).thenReturn(
        repositoryVersionEntity);

    replayAll();

    TestCheckImpl check = new TestCheckImpl(UpgradeCheckType.SERVICE);
    check.checkHelperProvider = new Provider<CheckHelper>() {
      @Override
      public CheckHelper get() {
        return m_mockCheckHelper;
      }
    };

    ClusterInformation clusterInformation = new ClusterInformation(clusterName, false, null, null, null);
    UpgradeCheckRequest request = new UpgradeCheckRequest(clusterInformation, UpgradeType.ROLLING,
        repositoryVersion, null, null);

    // case, where we need at least one service to be present
    check.setApplicableServices(oneServiceList);
    Assert.assertTrue(m_mockCheckHelper.getApplicableChecks(request, Lists.newArrayList(check)).size() == 1);

    check.setApplicableServices(atLeastOneServiceList);
    Assert.assertTrue(m_mockCheckHelper.getApplicableChecks(request, Lists.newArrayList(check)).size() == 1);

    check.setApplicableServices(missingServiceList);
    Assert.assertTrue(m_mockCheckHelper.getApplicableChecks(request, Lists.newArrayList(check)).size() == 0);
  }

  /**
   * Tests that even though the services are installed, the check doesn't match
   * since it's for a service not in the PATCH.
   *
   * @throws Exception
   */
  @Test
  public void testIsApplicableForPatch() throws Exception {
    final String clusterName = "c1";
    final Cluster cluster = createMock(Cluster.class);
    // Installed services on the mocked cluster (values unused, keys matter).
    Map<String, Service> services = new HashMap<String, Service>() {
      {
        put("SERVICE1", null);
        put("SERVICE2", null);
        put("SERVICE3", null);
      }
    };

    Set<String> oneServiceList = Sets.newHashSet("SERVICE1");

    expect(clusters.getCluster(anyString())).andReturn(cluster).atLeastOnce();
    expect(cluster.getServices()).andReturn(services).atLeastOnce();

    RepositoryVersion repositoryVersion = createNiceMock(RepositoryVersion.class);
    expect(repositoryVersion.getId()).andReturn(1L).anyTimes();
    expect(repositoryVersion.getRepositoryType()).andReturn(RepositoryType.STANDARD).anyTimes();

    RepositoryVersionEntity repositoryVersionEntity = createNiceMock(RepositoryVersionEntity.class);
    expect(repositoryVersionEntity.getType()).andReturn(RepositoryType.STANDARD).anyTimes();
    expect(repositoryVersionEntity.getRepositoryXml()).andReturn(m_vdfXml).atLeastOnce();
    expect(m_vdfXml.getClusterSummary(EasyMock.anyObject(Cluster.class),
        EasyMock.anyObject(AmbariMetaInfo.class))).andReturn(
            m_clusterVersionSummary).atLeastOnce();

    // the cluster summary will only return 1 service for the upgrade, even
    // though this cluster has 2 services installed
    expect(m_clusterVersionSummary.getAvailableServiceNames()).andReturn(
        oneServiceList).atLeastOnce();

    m_mockCheckHelper.m_clusters = clusters;
    // Mockito stub: DAO lookup by any PK resolves to the mocked entity.
    Mockito.when(m_mockCheckHelper.m_repositoryVersionDAO.findByPK(Mockito.anyLong())).thenReturn(
        repositoryVersionEntity);

    final AmbariMetaInfo ami = createNiceMock(AmbariMetaInfo.class);
    m_mockCheckHelper.setMetaInfoProvider(new Provider<AmbariMetaInfo>() {
      @Override
      public AmbariMetaInfo get() {
        return ami;
      }
    });

    replayAll();

    TestCheckImpl check = new TestCheckImpl(UpgradeCheckType.SERVICE);
    check.checkHelperProvider = new Provider<CheckHelper>() {
      @Override
      public CheckHelper get() {
        return m_mockCheckHelper;
      }
    };

    ClusterInformation clusterInformation = new ClusterInformation(clusterName, false, null, null, null);
    UpgradeCheckRequest request = new UpgradeCheckRequest(clusterInformation, UpgradeType.ROLLING,
        repositoryVersion, null, null);

    // since the check is for SERVICE2, it should not match even though its
    // installed since the repository is only for SERVICE1
    check.setApplicableServices(Sets.newHashSet("SERVICE2"));
    Assert.assertTrue(m_mockCheckHelper.getApplicableChecks(request, Lists.newArrayList(check)).size() == 0);

    // ok, so now change the check to match against SERVICE1
    check.setApplicableServices(Sets.newHashSet("SERVICE1"));
    Assert.assertTrue(m_mockCheckHelper.getApplicableChecks(request, Lists.newArrayList(check)).size() == 1);
  }

  /**
   * Test check applicable to all upgrade types, with a configurable set of
   * applicable services.
   */
  @UpgradeCheckInfo(
      group = UpgradeCheckGroup.DEFAULT,
      order = 1.0f,
      required = { UpgradeType.ROLLING, UpgradeType.NON_ROLLING, UpgradeType.HOST_ORDERED })
  private class TestCheckImpl extends ClusterCheck {
    // Stored check type; never read within this class — appears unused, kept
    // for parity with the other impls. TODO(review): confirm and remove.
    private UpgradeCheckType m_type;
    private Set<String> m_applicableServices = Sets.newHashSet();

    TestCheckImpl(UpgradeCheckType type) {
      super(null);
      m_type = type;

      // Route cluster lookups to the test's mocked Clusters instance.
      clustersProvider = new Provider<Clusters>() {
        @Override
        public Clusters get() {
          return clusters;
        }
      };
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpgradeCheckDescription getCheckDescription() {
      return m_description;
    }

    /** No-op check; always succeeds with an empty result. */
    @Override
    public UpgradeCheckResult perform(UpgradeCheckRequest request) throws AmbariException {
      return new UpgradeCheckResult(this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Set<String> getApplicableServices() {
      return m_applicableServices;
    }

    void setApplicableServices(Set<String> applicableServices) {
      m_applicableServices = applicableServices;
    }
  }

  /** Test check declared as required only for ROLLING upgrades. */
  @UpgradeCheckInfo(group = UpgradeCheckGroup.DEFAULT, order = 1.0f, required = { UpgradeType.ROLLING })
  private class RollingTestCheckImpl extends ClusterCheck {
    // See note on TestCheckImpl.m_type — stored but not read here.
    private UpgradeCheckType m_type;

    RollingTestCheckImpl(UpgradeCheckType type) {
      super(null);
      m_type = type;

      // Route cluster lookups to the test's mocked Clusters instance.
      clustersProvider = new Provider<Clusters>() {
        @Override
        public Clusters get() {
          return clusters;
        }
      };
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpgradeCheckDescription getCheckDescription() {
      return m_description;
    }

    /** No-op check; always succeeds with an empty result. */
    @Override
    public UpgradeCheckResult perform(UpgradeCheckRequest request) throws AmbariException {
      return new UpgradeCheckResult(this);
    }
  }

  /** Test check with no required upgrade types (i.e. not mandatory). */
  @UpgradeCheckInfo(group = UpgradeCheckGroup.DEFAULT, order = 1.0f)
  private class NotRequiredCheckTest extends ClusterCheck {
    // See note on TestCheckImpl.m_type — stored but not read here.
    private UpgradeCheckType m_type;

    NotRequiredCheckTest(UpgradeCheckType type) {
      super(null);
      m_type = type;

      // Route cluster lookups to the test's mocked Clusters instance.
      clustersProvider = new Provider<Clusters>() {
        @Override
        public Clusters get() {
          return clusters;
        }
      };
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpgradeCheckDescription getCheckDescription() {
      return m_description;
    }

    /** No-op check; always succeeds with an empty result. */
    @Override
    public UpgradeCheckResult perform(UpgradeCheckRequest request) throws AmbariException {
      return new UpgradeCheckResult(this);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence.wal.serializer; import java.io.DataInput; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.pagemem.FullPageId; import org.apache.ignite.internal.pagemem.wal.record.CacheState; import org.apache.ignite.internal.pagemem.wal.record.CheckpointRecord; import org.apache.ignite.internal.pagemem.wal.record.DataEntry; import org.apache.ignite.internal.pagemem.wal.record.DataRecord; import org.apache.ignite.internal.pagemem.wal.record.LazyDataEntry; import org.apache.ignite.internal.pagemem.wal.record.MemoryRecoveryRecord; import org.apache.ignite.internal.pagemem.wal.record.MetastoreDataRecord; import org.apache.ignite.internal.pagemem.wal.record.PageSnapshot; import org.apache.ignite.internal.pagemem.wal.record.TxRecord; import org.apache.ignite.internal.pagemem.wal.record.WALRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.DataPageInsertFragmentRecord; import 
org.apache.ignite.internal.pagemem.wal.record.delta.DataPageInsertRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.DataPageRemoveRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.DataPageSetFreeListPageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.DataPageUpdateRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.FixCountRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.FixLeftmostChildRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.FixRemoveId; import org.apache.ignite.internal.pagemem.wal.record.delta.InitNewPageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.InnerReplaceRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.InsertRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MergeRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageAddRootRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageCutRootRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageInitRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageInitRootInlineRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageInitRootRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageUpdateLastAllocatedIndex; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageUpdateLastSuccessfulFullSnapshotId; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageUpdateLastSuccessfulSnapshotId; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageUpdateNextSnapshotId; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageUpdatePartitionDataRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.NewRootInitRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PageListMetaResetCountRecord; import 
org.apache.ignite.internal.pagemem.wal.record.delta.PagesListAddPageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PagesListInitNewPageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PagesListRemovePageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PagesListSetNextRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PagesListSetPreviousRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PartitionDestroyRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PartitionMetaStateRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.RecycleRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.RemoveRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.ReplaceRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.RotatedIdPartRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.SplitExistingPageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.SplitForwardPageRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.TrackingPageDeltaRecord; import org.apache.ignite.internal.processors.cache.CacheObject; import org.apache.ignite.internal.processors.cache.CacheObjectContext; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheOperation; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionState; import org.apache.ignite.internal.processors.cache.persistence.tree.io.BPlusIO; import org.apache.ignite.internal.processors.cache.persistence.tree.io.BPlusInnerIO; import org.apache.ignite.internal.processors.cache.persistence.tree.io.CacheVersionIO; import org.apache.ignite.internal.processors.cache.persistence.wal.ByteBufferBackedDataInput; import 
org.apache.ignite.internal.processors.cache.persistence.wal.FileWALPointer; import org.apache.ignite.internal.processors.cache.persistence.wal.record.HeaderRecord; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor; import org.apache.ignite.internal.util.typedef.internal.U; /** * Record data V1 serializer. */ public class RecordDataV1Serializer implements RecordDataSerializer { /** Length of HEADER record data. */ static final int HEADER_RECORD_DATA_SIZE = /*Magic*/8 + /*Version*/4; /** Cache shared context */ private final GridCacheSharedContext cctx; /** Size of page used for PageMemory regions */ private final int pageSize; /** Cache object processor to reading {@link DataEntry DataEntries} */ private final IgniteCacheObjectProcessor co; /** Serializer of {@link TxRecord} records. */ private TxRecordSerializer txRecordSerializer; /** * @param cctx Cache shared context. */ public RecordDataV1Serializer(GridCacheSharedContext cctx) { this.cctx = cctx; this.txRecordSerializer = new TxRecordSerializer(); this.co = cctx.kernalContext().cacheObjects(); this.pageSize = cctx.database().pageSize(); } /** {@inheritDoc} */ @Override public int size(WALRecord record) throws IgniteCheckedException { switch (record.type()) { case PAGE_RECORD: assert record instanceof PageSnapshot; PageSnapshot pageRec = (PageSnapshot)record; return pageRec.pageData().length + 12; case CHECKPOINT_RECORD: CheckpointRecord cpRec = (CheckpointRecord)record; assert cpRec.checkpointMark() == null || cpRec.checkpointMark() instanceof FileWALPointer : "Invalid WAL record: " + cpRec; int cacheStatesSize = cacheStatesSize(cpRec.cacheGroupStates()); FileWALPointer walPtr = (FileWALPointer)cpRec.checkpointMark(); return 18 + cacheStatesSize + (walPtr == null ? 
0 : 16); case META_PAGE_INIT: return /*cache ID*/4 + /*page ID*/8 + /*ioType*/2 + /*ioVer*/2 + /*tree root*/8 + /*reuse root*/8; case PARTITION_META_PAGE_UPDATE_COUNTERS: return /*cache ID*/4 + /*page ID*/8 + /*upd cntr*/8 + /*rmv id*/8 + /*part size*/4 + /*counters page id*/8 + /*state*/ 1 + /*allocatedIdxCandidate*/ 4; case MEMORY_RECOVERY: return 8; case PARTITION_DESTROY: return /*cacheId*/4 + /*partId*/4; case DATA_RECORD: DataRecord dataRec = (DataRecord)record; return 4 + dataSize(dataRec); case METASTORE_DATA_RECORD: MetastoreDataRecord metastoreDataRec = (MetastoreDataRecord)record; return 4 + metastoreDataRec.key().getBytes().length + 4 + (metastoreDataRec.value() != null ? metastoreDataRec.value().length : 0); case HEADER_RECORD: return HEADER_RECORD_DATA_SIZE; case DATA_PAGE_INSERT_RECORD: DataPageInsertRecord diRec = (DataPageInsertRecord)record; return 4 + 8 + 2 + diRec.payload().length; case DATA_PAGE_UPDATE_RECORD: DataPageUpdateRecord uRec = (DataPageUpdateRecord)record; return 4 + 8 + 2 + 4 + uRec.payload().length; case DATA_PAGE_INSERT_FRAGMENT_RECORD: final DataPageInsertFragmentRecord difRec = (DataPageInsertFragmentRecord)record; return 4 + 8 + 8 + 4 + difRec.payloadSize(); case DATA_PAGE_REMOVE_RECORD: return 4 + 8 + 1; case DATA_PAGE_SET_FREE_LIST_PAGE: return 4 + 8 + 8; case INIT_NEW_PAGE_RECORD: return 4 + 8 + 2 + 2 + 8; case BTREE_META_PAGE_INIT_ROOT: return 4 + 8 + 8; case BTREE_META_PAGE_INIT_ROOT2: return 4 + 8 + 8 + 2; case BTREE_META_PAGE_ADD_ROOT: return 4 + 8 + 8; case BTREE_META_PAGE_CUT_ROOT: return 4 + 8; case BTREE_INIT_NEW_ROOT: NewRootInitRecord<?> riRec = (NewRootInitRecord<?>)record; return 4 + 8 + 8 + 2 + 2 + 8 + 8 + riRec.io().getItemSize(); case BTREE_PAGE_RECYCLE: return 4 + 8 + 8; case BTREE_PAGE_INSERT: InsertRecord<?> inRec = (InsertRecord<?>)record; return 4 + 8 + 2 + 2 + 2 + 8 + inRec.io().getItemSize(); case BTREE_FIX_LEFTMOST_CHILD: return 4 + 8 + 8; case BTREE_FIX_COUNT: return 4 + 8 + 2; case 
BTREE_PAGE_REPLACE: ReplaceRecord<?> rRec = (ReplaceRecord<?>)record; return 4 + 8 + 2 + 2 + 2 + rRec.io().getItemSize(); case BTREE_PAGE_REMOVE: return 4 + 8 + 2 + 2; case BTREE_PAGE_INNER_REPLACE: return 4 + 8 + 2 + 8 + 2 + 8; case BTREE_FORWARD_PAGE_SPLIT: return 4 + 8 + 8 + 2 + 2 + 8 + 2 + 2; case BTREE_EXISTING_PAGE_SPLIT: return 4 + 8 + 2 + 8; case BTREE_PAGE_MERGE: return 4 + 8 + 8 + 2 + 8 + 1; case BTREE_FIX_REMOVE_ID: return 4 + 8 + 8; case PAGES_LIST_SET_NEXT: return 4 + 8 + 8; case PAGES_LIST_SET_PREVIOUS: return 4 + 8 + 8; case PAGES_LIST_INIT_NEW_PAGE: return 4 + 8 + 4 + 4 + 8 + 8 + 8; case PAGES_LIST_ADD_PAGE: return 4 + 8 + 8; case PAGES_LIST_REMOVE_PAGE: return 4 + 8 + 8; case TRACKING_PAGE_DELTA: return 4 + 8 + 8 + 8 + 8; case META_PAGE_UPDATE_LAST_SUCCESSFUL_SNAPSHOT_ID: return 4 + 8 + 8 + 8; case META_PAGE_UPDATE_LAST_SUCCESSFUL_FULL_SNAPSHOT_ID: return 4 + 8 + 8; case META_PAGE_UPDATE_NEXT_SNAPSHOT_ID: return 4 + 8 + 8; case META_PAGE_UPDATE_LAST_ALLOCATED_INDEX: return 4 + 8 + 4; case PART_META_UPDATE_STATE: return /*cacheId*/ 4 + /*partId*/ 4 + /*State*/1 + /*Update Counter*/ 8; case PAGE_LIST_META_RESET_COUNT_RECORD: return /*cacheId*/ 4 + /*pageId*/ 8; case ROTATED_ID_PART_RECORD: return 4 + 8 + 1; case SWITCH_SEGMENT_RECORD: return 0; case TX_RECORD: return txRecordSerializer.size((TxRecord)record); default: throw new UnsupportedOperationException("Type: " + record.type()); } } /** {@inheritDoc} */ @Override public WALRecord readRecord(WALRecord.RecordType type, ByteBufferBackedDataInput in) throws IOException, IgniteCheckedException { WALRecord res; switch (type) { case PAGE_RECORD: byte[] arr = new byte[pageSize]; int cacheId = in.readInt(); long pageId = in.readLong(); in.readFully(arr); res = new PageSnapshot(new FullPageId(pageId, cacheId), arr); break; case CHECKPOINT_RECORD: long msb = in.readLong(); long lsb = in.readLong(); boolean hasPtr = in.readByte() != 0; int idx = hasPtr ? in.readInt() : 0; int off = hasPtr ? 
in.readInt() : 0; int len = hasPtr ? in.readInt() : 0; Map<Integer, CacheState> states = readPartitionStates(in); boolean end = in.readByte() != 0; FileWALPointer walPtr = hasPtr ? new FileWALPointer(idx, off, len) : null; CheckpointRecord cpRec = new CheckpointRecord(new UUID(msb, lsb), walPtr, end); cpRec.cacheGroupStates(states); res = cpRec; break; case META_PAGE_INIT: cacheId = in.readInt(); pageId = in.readLong(); int ioType = in.readUnsignedShort(); int ioVer = in.readUnsignedShort(); long treeRoot = in.readLong(); long reuseListRoot = in.readLong(); res = new MetaPageInitRecord(cacheId, pageId, ioType, ioVer, treeRoot, reuseListRoot); break; case PARTITION_META_PAGE_UPDATE_COUNTERS: cacheId = in.readInt(); pageId = in.readLong(); long updCntr = in.readLong(); long rmvId = in.readLong(); int partSize = in.readInt(); long countersPageId = in.readLong(); byte state = in.readByte(); int allocatedIdxCandidate = in.readInt(); res = new MetaPageUpdatePartitionDataRecord(cacheId, pageId, updCntr, rmvId, partSize, countersPageId, state, allocatedIdxCandidate); break; case MEMORY_RECOVERY: long ts = in.readLong(); res = new MemoryRecoveryRecord(ts); break; case PARTITION_DESTROY: cacheId = in.readInt(); int partId = in.readInt(); res = new PartitionDestroyRecord(cacheId, partId); break; case DATA_RECORD: int entryCnt = in.readInt(); List<DataEntry> entries = new ArrayList<>(entryCnt); for (int i = 0; i < entryCnt; i++) entries.add(readDataEntry(in)); res = new DataRecord(entries, 0L); break; case METASTORE_DATA_RECORD: int strLen = in.readInt(); byte[] strBytes = new byte[strLen]; in.readFully(strBytes); String key = new String(strBytes); int valLen = in.readInt(); assert valLen >= 0; byte[] val; if (valLen > 0) { val = new byte[valLen]; in.readFully(val); } else val = null; return new MetastoreDataRecord(key, val); case HEADER_RECORD: long magic = in.readLong(); if (magic != HeaderRecord.REGULAR_MAGIC && magic != HeaderRecord.COMPACTED_MAGIC) throw new 
EOFException("Magic is corrupted [actual=" + U.hexLong(magic) + ']'); int ver = in.readInt(); res = new HeaderRecord(ver); break; case DATA_PAGE_INSERT_RECORD: { cacheId = in.readInt(); pageId = in.readLong(); int size = in.readUnsignedShort(); in.ensure(size); byte[] payload = new byte[size]; in.readFully(payload); res = new DataPageInsertRecord(cacheId, pageId, payload); break; } case DATA_PAGE_UPDATE_RECORD: { cacheId = in.readInt(); pageId = in.readLong(); int itemId = in.readInt(); int size = in.readUnsignedShort(); in.ensure(size); byte[] payload = new byte[size]; in.readFully(payload); res = new DataPageUpdateRecord(cacheId, pageId, itemId, payload); break; } case DATA_PAGE_INSERT_FRAGMENT_RECORD: { cacheId = in.readInt(); pageId = in.readLong(); final long lastLink = in.readLong(); final int payloadSize = in.readInt(); final byte[] payload = new byte[payloadSize]; in.readFully(payload); res = new DataPageInsertFragmentRecord(cacheId, pageId, payload, lastLink); break; } case DATA_PAGE_REMOVE_RECORD: cacheId = in.readInt(); pageId = in.readLong(); int itemId = in.readUnsignedByte(); res = new DataPageRemoveRecord(cacheId, pageId, itemId); break; case DATA_PAGE_SET_FREE_LIST_PAGE: cacheId = in.readInt(); pageId = in.readLong(); long freeListPage = in.readLong(); res = new DataPageSetFreeListPageRecord(cacheId, pageId, freeListPage); break; case INIT_NEW_PAGE_RECORD: cacheId = in.readInt(); pageId = in.readLong(); ioType = in.readUnsignedShort(); ioVer = in.readUnsignedShort(); long virtualPageId = in.readLong(); res = new InitNewPageRecord(cacheId, pageId, ioType, ioVer, virtualPageId); break; case BTREE_META_PAGE_INIT_ROOT: cacheId = in.readInt(); pageId = in.readLong(); long rootId = in.readLong(); res = new MetaPageInitRootRecord(cacheId, pageId, rootId); break; case BTREE_META_PAGE_INIT_ROOT2: cacheId = in.readInt(); pageId = in.readLong(); long rootId2 = in.readLong(); int inlineSize = in.readShort(); res = new MetaPageInitRootInlineRecord(cacheId, 
pageId, rootId2, inlineSize); break; case BTREE_META_PAGE_ADD_ROOT: cacheId = in.readInt(); pageId = in.readLong(); rootId = in.readLong(); res = new MetaPageAddRootRecord(cacheId, pageId, rootId); break; case BTREE_META_PAGE_CUT_ROOT: cacheId = in.readInt(); pageId = in.readLong(); res = new MetaPageCutRootRecord(cacheId, pageId); break; case BTREE_INIT_NEW_ROOT: cacheId = in.readInt(); pageId = in.readLong(); rootId = in.readLong(); ioType = in.readUnsignedShort(); ioVer = in.readUnsignedShort(); long leftId = in.readLong(); long rightId = in.readLong(); BPlusIO<?> io = BPlusIO.getBPlusIO(ioType, ioVer); byte[] rowBytes = new byte[io.getItemSize()]; in.readFully(rowBytes); res = new NewRootInitRecord<>(cacheId, pageId, rootId, (BPlusInnerIO<?>)io, leftId, rowBytes, rightId); break; case BTREE_PAGE_RECYCLE: cacheId = in.readInt(); pageId = in.readLong(); long newPageId = in.readLong(); res = new RecycleRecord(cacheId, pageId, newPageId); break; case BTREE_PAGE_INSERT: cacheId = in.readInt(); pageId = in.readLong(); ioType = in.readUnsignedShort(); ioVer = in.readUnsignedShort(); int itemIdx = in.readUnsignedShort(); rightId = in.readLong(); io = BPlusIO.getBPlusIO(ioType, ioVer); rowBytes = new byte[io.getItemSize()]; in.readFully(rowBytes); res = new InsertRecord<>(cacheId, pageId, io, itemIdx, rowBytes, rightId); break; case BTREE_FIX_LEFTMOST_CHILD: cacheId = in.readInt(); pageId = in.readLong(); rightId = in.readLong(); res = new FixLeftmostChildRecord(cacheId, pageId, rightId); break; case BTREE_FIX_COUNT: cacheId = in.readInt(); pageId = in.readLong(); int cnt = in.readUnsignedShort(); res = new FixCountRecord(cacheId, pageId, cnt); break; case BTREE_PAGE_REPLACE: cacheId = in.readInt(); pageId = in.readLong(); ioType = in.readUnsignedShort(); ioVer = in.readUnsignedShort(); itemIdx = in.readUnsignedShort(); io = BPlusIO.getBPlusIO(ioType, ioVer); rowBytes = new byte[io.getItemSize()]; in.readFully(rowBytes); res = new ReplaceRecord<>(cacheId, pageId, io, 
rowBytes, itemIdx); break; case BTREE_PAGE_REMOVE: cacheId = in.readInt(); pageId = in.readLong(); itemIdx = in.readUnsignedShort(); cnt = in.readUnsignedShort(); res = new RemoveRecord(cacheId, pageId, itemIdx, cnt); break; case BTREE_PAGE_INNER_REPLACE: cacheId = in.readInt(); pageId = in.readLong(); int dstIdx = in.readUnsignedShort(); long srcPageId = in.readLong(); int srcIdx = in.readUnsignedShort(); rmvId = in.readLong(); res = new InnerReplaceRecord<>(cacheId, pageId, dstIdx, srcPageId, srcIdx, rmvId); break; case BTREE_FORWARD_PAGE_SPLIT: cacheId = in.readInt(); pageId = in.readLong(); long fwdId = in.readLong(); ioType = in.readUnsignedShort(); ioVer = in.readUnsignedShort(); srcPageId = in.readLong(); int mid = in.readUnsignedShort(); cnt = in.readUnsignedShort(); res = new SplitForwardPageRecord(cacheId, pageId, fwdId, ioType, ioVer, srcPageId, mid, cnt); break; case BTREE_EXISTING_PAGE_SPLIT: cacheId = in.readInt(); pageId = in.readLong(); mid = in.readUnsignedShort(); fwdId = in.readLong(); res = new SplitExistingPageRecord(cacheId, pageId, mid, fwdId); break; case BTREE_PAGE_MERGE: cacheId = in.readInt(); pageId = in.readLong(); long prntId = in.readLong(); int prntIdx = in.readUnsignedShort(); rightId = in.readLong(); boolean emptyBranch = in.readBoolean(); res = new MergeRecord<>(cacheId, pageId, prntId, prntIdx, rightId, emptyBranch); break; case BTREE_FIX_REMOVE_ID: cacheId = in.readInt(); pageId = in.readLong(); rmvId = in.readLong(); res = new FixRemoveId(cacheId, pageId, rmvId); break; case PAGES_LIST_SET_NEXT: cacheId = in.readInt(); pageId = in.readLong(); long nextPageId = in.readLong(); res = new PagesListSetNextRecord(cacheId, pageId, nextPageId); break; case PAGES_LIST_SET_PREVIOUS: cacheId = in.readInt(); pageId = in.readLong(); long prevPageId = in.readLong(); res = new PagesListSetPreviousRecord(cacheId, pageId, prevPageId); break; case PAGES_LIST_INIT_NEW_PAGE: cacheId = in.readInt(); pageId = in.readLong(); ioType = in.readInt(); 
ioVer = in.readInt(); newPageId = in.readLong(); prevPageId = in.readLong(); long addDataPageId = in.readLong(); res = new PagesListInitNewPageRecord(cacheId, pageId, ioType, ioVer, newPageId, prevPageId, addDataPageId); break; case PAGES_LIST_ADD_PAGE: cacheId = in.readInt(); pageId = in.readLong(); long dataPageId = in.readLong(); res = new PagesListAddPageRecord(cacheId, pageId, dataPageId); break; case PAGES_LIST_REMOVE_PAGE: cacheId = in.readInt(); pageId = in.readLong(); long rmvdPageId = in.readLong(); res = new PagesListRemovePageRecord(cacheId, pageId, rmvdPageId); break; case TRACKING_PAGE_DELTA: cacheId = in.readInt(); pageId = in.readLong(); long pageIdToMark = in.readLong(); long nextSnapshotId0 = in.readLong(); long lastSuccessfulSnapshotId0 = in.readLong(); res = new TrackingPageDeltaRecord(cacheId, pageId, pageIdToMark, nextSnapshotId0, lastSuccessfulSnapshotId0); break; case META_PAGE_UPDATE_NEXT_SNAPSHOT_ID: cacheId = in.readInt(); pageId = in.readLong(); long nextSnapshotId = in.readLong(); res = new MetaPageUpdateNextSnapshotId(cacheId, pageId, nextSnapshotId); break; case META_PAGE_UPDATE_LAST_SUCCESSFUL_FULL_SNAPSHOT_ID: cacheId = in.readInt(); pageId = in.readLong(); long lastSuccessfulFullSnapshotId = in.readLong(); res = new MetaPageUpdateLastSuccessfulFullSnapshotId(cacheId, pageId, lastSuccessfulFullSnapshotId); break; case META_PAGE_UPDATE_LAST_SUCCESSFUL_SNAPSHOT_ID: cacheId = in.readInt(); pageId = in.readLong(); long lastSuccessfulSnapshotId = in.readLong(); long lastSuccessfulSnapshotTag = in.readLong(); res = new MetaPageUpdateLastSuccessfulSnapshotId(cacheId, pageId, lastSuccessfulSnapshotId, lastSuccessfulSnapshotTag); break; case META_PAGE_UPDATE_LAST_ALLOCATED_INDEX: cacheId = in.readInt(); pageId = in.readLong(); int lastAllocatedIdx = in.readInt(); res = new MetaPageUpdateLastAllocatedIndex(cacheId, pageId, lastAllocatedIdx); break; case PART_META_UPDATE_STATE: cacheId = in.readInt(); partId = in.readInt(); state = 
in.readByte(); long updateCntr = in.readLong(); GridDhtPartitionState partState = GridDhtPartitionState.fromOrdinal(state); res = new PartitionMetaStateRecord(cacheId, partId, partState, updateCntr); break; case PAGE_LIST_META_RESET_COUNT_RECORD: cacheId = in.readInt(); pageId = in.readLong(); res = new PageListMetaResetCountRecord(cacheId, pageId); break; case ROTATED_ID_PART_RECORD: cacheId = in.readInt(); pageId = in.readLong(); byte rotatedIdPart = in.readByte(); res = new RotatedIdPartRecord(cacheId, pageId, rotatedIdPart); break; case SWITCH_SEGMENT_RECORD: throw new EOFException("END OF SEGMENT"); case TX_RECORD: res = txRecordSerializer.read(in); break; default: throw new UnsupportedOperationException("Type: " + type); } return res; } /** {@inheritDoc} */ @Override public void writeRecord(WALRecord rec, ByteBuffer buf) throws IgniteCheckedException { switch (rec.type()) { case PAGE_RECORD: PageSnapshot snap = (PageSnapshot)rec; buf.putInt(snap.fullPageId().groupId()); buf.putLong(snap.fullPageId().pageId()); buf.put(snap.pageData()); break; case MEMORY_RECOVERY: MemoryRecoveryRecord memoryRecoveryRecord = (MemoryRecoveryRecord)rec; buf.putLong(memoryRecoveryRecord.time()); break; case PARTITION_DESTROY: PartitionDestroyRecord partDestroy = (PartitionDestroyRecord)rec; buf.putInt(partDestroy.groupId()); buf.putInt(partDestroy.partitionId()); break; case META_PAGE_INIT: MetaPageInitRecord updRootsRec = (MetaPageInitRecord)rec; buf.putInt(updRootsRec.groupId()); buf.putLong(updRootsRec.pageId()); buf.putShort((short)updRootsRec.ioType()); buf.putShort((short)updRootsRec.ioVersion()); buf.putLong(updRootsRec.treeRoot()); buf.putLong(updRootsRec.reuseListRoot()); break; case PARTITION_META_PAGE_UPDATE_COUNTERS: MetaPageUpdatePartitionDataRecord partDataRec = (MetaPageUpdatePartitionDataRecord)rec; buf.putInt(partDataRec.groupId()); buf.putLong(partDataRec.pageId()); buf.putLong(partDataRec.updateCounter()); buf.putLong(partDataRec.globalRemoveId()); 
buf.putInt(partDataRec.partitionSize()); buf.putLong(partDataRec.countersPageId()); buf.put(partDataRec.state()); buf.putInt(partDataRec.allocatedIndexCandidate()); break; case CHECKPOINT_RECORD: CheckpointRecord cpRec = (CheckpointRecord)rec; assert cpRec.checkpointMark() == null || cpRec.checkpointMark() instanceof FileWALPointer : "Invalid WAL record: " + cpRec; FileWALPointer walPtr = (FileWALPointer)cpRec.checkpointMark(); UUID cpId = cpRec.checkpointId(); buf.putLong(cpId.getMostSignificantBits()); buf.putLong(cpId.getLeastSignificantBits()); buf.put(walPtr == null ? (byte)0 : 1); if (walPtr != null) { buf.putLong(walPtr.index()); buf.putInt(walPtr.fileOffset()); buf.putInt(walPtr.length()); } putCacheStates(buf, cpRec.cacheGroupStates()); buf.put(cpRec.end() ? (byte)1 : 0); break; case DATA_RECORD: DataRecord dataRec = (DataRecord)rec; buf.putInt(dataRec.writeEntries().size()); for (DataEntry dataEntry : dataRec.writeEntries()) putDataEntry(buf, dataEntry); break; case METASTORE_DATA_RECORD: MetastoreDataRecord metastoreDataRecord = (MetastoreDataRecord)rec; byte[] strBytes = metastoreDataRecord.key().getBytes(); buf.putInt(strBytes.length); buf.put(strBytes); if (metastoreDataRecord.value() != null) { buf.putInt(metastoreDataRecord.value().length); buf.put(metastoreDataRecord.value()); } else buf.putInt(0); break; case HEADER_RECORD: buf.putLong(HeaderRecord.REGULAR_MAGIC); buf.putInt(((HeaderRecord)rec).version()); break; case DATA_PAGE_INSERT_RECORD: DataPageInsertRecord diRec = (DataPageInsertRecord)rec; buf.putInt(diRec.groupId()); buf.putLong(diRec.pageId()); buf.putShort((short)diRec.payload().length); buf.put(diRec.payload()); break; case DATA_PAGE_UPDATE_RECORD: DataPageUpdateRecord uRec = (DataPageUpdateRecord)rec; buf.putInt(uRec.groupId()); buf.putLong(uRec.pageId()); buf.putInt(uRec.itemId()); buf.putShort((short)uRec.payload().length); buf.put(uRec.payload()); break; case DATA_PAGE_INSERT_FRAGMENT_RECORD: final DataPageInsertFragmentRecord 
difRec = (DataPageInsertFragmentRecord)rec; buf.putInt(difRec.groupId()); buf.putLong(difRec.pageId()); buf.putLong(difRec.lastLink()); buf.putInt(difRec.payloadSize()); buf.put(difRec.payload()); break; case DATA_PAGE_REMOVE_RECORD: DataPageRemoveRecord drRec = (DataPageRemoveRecord)rec; buf.putInt(drRec.groupId()); buf.putLong(drRec.pageId()); buf.put((byte)drRec.itemId()); break; case DATA_PAGE_SET_FREE_LIST_PAGE: DataPageSetFreeListPageRecord freeListRec = (DataPageSetFreeListPageRecord)rec; buf.putInt(freeListRec.groupId()); buf.putLong(freeListRec.pageId()); buf.putLong(freeListRec.freeListPage()); break; case INIT_NEW_PAGE_RECORD: InitNewPageRecord inpRec = (InitNewPageRecord)rec; buf.putInt(inpRec.groupId()); buf.putLong(inpRec.pageId()); buf.putShort((short)inpRec.ioType()); buf.putShort((short)inpRec.ioVersion()); buf.putLong(inpRec.newPageId()); break; case BTREE_META_PAGE_INIT_ROOT: MetaPageInitRootRecord imRec = (MetaPageInitRootRecord)rec; buf.putInt(imRec.groupId()); buf.putLong(imRec.pageId()); buf.putLong(imRec.rootId()); break; case BTREE_META_PAGE_INIT_ROOT2: MetaPageInitRootInlineRecord imRec2 = (MetaPageInitRootInlineRecord)rec; buf.putInt(imRec2.groupId()); buf.putLong(imRec2.pageId()); buf.putLong(imRec2.rootId()); buf.putShort((short)imRec2.inlineSize()); break; case BTREE_META_PAGE_ADD_ROOT: MetaPageAddRootRecord arRec = (MetaPageAddRootRecord)rec; buf.putInt(arRec.groupId()); buf.putLong(arRec.pageId()); buf.putLong(arRec.rootId()); break; case BTREE_META_PAGE_CUT_ROOT: MetaPageCutRootRecord crRec = (MetaPageCutRootRecord)rec; buf.putInt(crRec.groupId()); buf.putLong(crRec.pageId()); break; case BTREE_INIT_NEW_ROOT: NewRootInitRecord<?> riRec = (NewRootInitRecord<?>)rec; buf.putInt(riRec.groupId()); buf.putLong(riRec.pageId()); buf.putLong(riRec.rootId()); buf.putShort((short)riRec.io().getType()); buf.putShort((short)riRec.io().getVersion()); buf.putLong(riRec.leftId()); buf.putLong(riRec.rightId()); putRow(buf, riRec.rowBytes()); break; 
case BTREE_PAGE_RECYCLE: RecycleRecord recRec = (RecycleRecord)rec; buf.putInt(recRec.groupId()); buf.putLong(recRec.pageId()); buf.putLong(recRec.newPageId()); break; case BTREE_PAGE_INSERT: InsertRecord<?> inRec = (InsertRecord<?>)rec; buf.putInt(inRec.groupId()); buf.putLong(inRec.pageId()); buf.putShort((short)inRec.io().getType()); buf.putShort((short)inRec.io().getVersion()); buf.putShort((short)inRec.index()); buf.putLong(inRec.rightId()); putRow(buf, inRec.rowBytes()); break; case BTREE_FIX_LEFTMOST_CHILD: FixLeftmostChildRecord flRec = (FixLeftmostChildRecord)rec; buf.putInt(flRec.groupId()); buf.putLong(flRec.pageId()); buf.putLong(flRec.rightId()); break; case BTREE_FIX_COUNT: FixCountRecord fcRec = (FixCountRecord)rec; buf.putInt(fcRec.groupId()); buf.putLong(fcRec.pageId()); buf.putShort((short)fcRec.count()); break; case BTREE_PAGE_REPLACE: ReplaceRecord<?> rRec = (ReplaceRecord<?>)rec; buf.putInt(rRec.groupId()); buf.putLong(rRec.pageId()); buf.putShort((short)rRec.io().getType()); buf.putShort((short)rRec.io().getVersion()); buf.putShort((short)rRec.index()); putRow(buf, rRec.rowBytes()); break; case BTREE_PAGE_REMOVE: RemoveRecord rmRec = (RemoveRecord)rec; buf.putInt(rmRec.groupId()); buf.putLong(rmRec.pageId()); buf.putShort((short)rmRec.index()); buf.putShort((short)rmRec.count()); break; case BTREE_PAGE_INNER_REPLACE: InnerReplaceRecord<?> irRec = (InnerReplaceRecord<?>)rec; buf.putInt(irRec.groupId()); buf.putLong(irRec.pageId()); buf.putShort((short)irRec.destinationIndex()); buf.putLong(irRec.sourcePageId()); buf.putShort((short)irRec.sourceIndex()); buf.putLong(irRec.removeId()); break; case BTREE_FORWARD_PAGE_SPLIT: SplitForwardPageRecord sfRec = (SplitForwardPageRecord)rec; buf.putInt(sfRec.groupId()); buf.putLong(sfRec.pageId()); buf.putLong(sfRec.forwardId()); buf.putShort((short)sfRec.ioType()); buf.putShort((short)sfRec.ioVersion()); buf.putLong(sfRec.sourcePageId()); buf.putShort((short)sfRec.middleIndex()); 
buf.putShort((short)sfRec.count()); break; case BTREE_EXISTING_PAGE_SPLIT: SplitExistingPageRecord seRec = (SplitExistingPageRecord)rec; buf.putInt(seRec.groupId()); buf.putLong(seRec.pageId()); buf.putShort((short)seRec.middleIndex()); buf.putLong(seRec.forwardId()); break; case BTREE_PAGE_MERGE: MergeRecord<?> mRec = (MergeRecord<?>)rec; buf.putInt(mRec.groupId()); buf.putLong(mRec.pageId()); buf.putLong(mRec.parentId()); buf.putShort((short)mRec.parentIndex()); buf.putLong(mRec.rightId()); buf.put((byte)(mRec.isEmptyBranch() ? 1 : 0)); break; case PAGES_LIST_SET_NEXT: PagesListSetNextRecord plNextRec = (PagesListSetNextRecord)rec; buf.putInt(plNextRec.groupId()); buf.putLong(plNextRec.pageId()); buf.putLong(plNextRec.nextPageId()); break; case PAGES_LIST_SET_PREVIOUS: PagesListSetPreviousRecord plPrevRec = (PagesListSetPreviousRecord)rec; buf.putInt(plPrevRec.groupId()); buf.putLong(plPrevRec.pageId()); buf.putLong(plPrevRec.previousPageId()); break; case PAGES_LIST_INIT_NEW_PAGE: PagesListInitNewPageRecord plNewRec = (PagesListInitNewPageRecord)rec; buf.putInt(plNewRec.groupId()); buf.putLong(plNewRec.pageId()); buf.putInt(plNewRec.ioType()); buf.putInt(plNewRec.ioVersion()); buf.putLong(plNewRec.newPageId()); buf.putLong(plNewRec.previousPageId()); buf.putLong(plNewRec.dataPageId()); break; case PAGES_LIST_ADD_PAGE: PagesListAddPageRecord plAddRec = (PagesListAddPageRecord)rec; buf.putInt(plAddRec.groupId()); buf.putLong(plAddRec.pageId()); buf.putLong(plAddRec.dataPageId()); break; case PAGES_LIST_REMOVE_PAGE: PagesListRemovePageRecord plRmvRec = (PagesListRemovePageRecord)rec; buf.putInt(plRmvRec.groupId()); buf.putLong(plRmvRec.pageId()); buf.putLong(plRmvRec.removedPageId()); break; case BTREE_FIX_REMOVE_ID: FixRemoveId frRec = (FixRemoveId)rec; buf.putInt(frRec.groupId()); buf.putLong(frRec.pageId()); buf.putLong(frRec.removeId()); break; case TRACKING_PAGE_DELTA: TrackingPageDeltaRecord tpDelta = (TrackingPageDeltaRecord)rec; 
buf.putInt(tpDelta.groupId()); buf.putLong(tpDelta.pageId()); buf.putLong(tpDelta.pageIdToMark()); buf.putLong(tpDelta.nextSnapshotId()); buf.putLong(tpDelta.lastSuccessfulSnapshotId()); break; case META_PAGE_UPDATE_NEXT_SNAPSHOT_ID: MetaPageUpdateNextSnapshotId mpUpdateNextSnapshotId = (MetaPageUpdateNextSnapshotId)rec; buf.putInt(mpUpdateNextSnapshotId.groupId()); buf.putLong(mpUpdateNextSnapshotId.pageId()); buf.putLong(mpUpdateNextSnapshotId.nextSnapshotId()); break; case META_PAGE_UPDATE_LAST_SUCCESSFUL_FULL_SNAPSHOT_ID: MetaPageUpdateLastSuccessfulFullSnapshotId mpUpdateLastSuccFullSnapshotId = (MetaPageUpdateLastSuccessfulFullSnapshotId)rec; buf.putInt(mpUpdateLastSuccFullSnapshotId.groupId()); buf.putLong(mpUpdateLastSuccFullSnapshotId.pageId()); buf.putLong(mpUpdateLastSuccFullSnapshotId.lastSuccessfulFullSnapshotId()); break; case META_PAGE_UPDATE_LAST_SUCCESSFUL_SNAPSHOT_ID: MetaPageUpdateLastSuccessfulSnapshotId mpUpdateLastSuccSnapshotId = (MetaPageUpdateLastSuccessfulSnapshotId)rec; buf.putInt(mpUpdateLastSuccSnapshotId.groupId()); buf.putLong(mpUpdateLastSuccSnapshotId.pageId()); buf.putLong(mpUpdateLastSuccSnapshotId.lastSuccessfulSnapshotId()); buf.putLong(mpUpdateLastSuccSnapshotId.lastSuccessfulSnapshotTag()); break; case META_PAGE_UPDATE_LAST_ALLOCATED_INDEX: MetaPageUpdateLastAllocatedIndex mpUpdateLastAllocatedIdx = (MetaPageUpdateLastAllocatedIndex) rec; buf.putInt(mpUpdateLastAllocatedIdx.groupId()); buf.putLong(mpUpdateLastAllocatedIdx.pageId()); buf.putInt(mpUpdateLastAllocatedIdx.lastAllocatedIndex()); break; case PART_META_UPDATE_STATE: PartitionMetaStateRecord partMetaStateRecord = (PartitionMetaStateRecord) rec; buf.putInt(partMetaStateRecord.groupId()); buf.putInt(partMetaStateRecord.partitionId()); buf.put(partMetaStateRecord.state()); buf.putLong(partMetaStateRecord.updateCounter()); break; case PAGE_LIST_META_RESET_COUNT_RECORD: PageListMetaResetCountRecord pageListMetaResetCntRecord = (PageListMetaResetCountRecord) rec; 
buf.putInt(pageListMetaResetCntRecord.groupId()); buf.putLong(pageListMetaResetCntRecord.pageId()); break; case ROTATED_ID_PART_RECORD: RotatedIdPartRecord rotatedIdPartRecord = (RotatedIdPartRecord) rec; buf.putInt(rotatedIdPartRecord.groupId()); buf.putLong(rotatedIdPartRecord.pageId()); buf.put(rotatedIdPartRecord.rotatedIdPart()); break; case TX_RECORD: txRecordSerializer.write((TxRecord)rec, buf); break; case SWITCH_SEGMENT_RECORD: break; default: throw new UnsupportedOperationException("Type: " + rec.type()); } } /** * Return shared cache context. * * @return Shared cache context. */ public GridCacheSharedContext cctx() { return cctx; } /** * @param buf Buffer to write to. * @param entry Data entry. */ static void putDataEntry(ByteBuffer buf, DataEntry entry) throws IgniteCheckedException { buf.putInt(entry.cacheId()); if (!entry.key().putValue(buf)) throw new AssertionError(); if (entry.value() == null) buf.putInt(-1); else if (!entry.value().putValue(buf)) throw new AssertionError(); buf.put((byte)entry.op().ordinal()); putVersion(buf, entry.nearXidVersion(), true); putVersion(buf, entry.writeVersion(), false); buf.putInt(entry.partitionId()); buf.putLong(entry.partitionCounter()); buf.putLong(entry.expireTime()); } /** * @param states Cache states. */ private static void putCacheStates(ByteBuffer buf, Map<Integer, CacheState> states) { buf.putShort((short)states.size()); for (Map.Entry<Integer, CacheState> entry : states.entrySet()) { buf.putInt(entry.getKey()); CacheState state = entry.getValue(); // Need 2 bytes for the number of partitions. buf.putShort((short)state.size()); for (int i = 0; i < state.size(); i++) { buf.putShort((short)state.partitionByIndex(i)); buf.putLong(state.partitionSizeByIndex(i)); buf.putLong(state.partitionCounterByIndex(i)); } } } /** * @param buf Buffer. * @param ver Version to write. * @param allowNull Is {@code null}version allowed. 
*/ private static void putVersion(ByteBuffer buf, GridCacheVersion ver, boolean allowNull) { CacheVersionIO.write(buf, ver, allowNull); } /** * @param buf Buffer. * @param rowBytes Row bytes. */ @SuppressWarnings("unchecked") private static void putRow(ByteBuffer buf, byte[] rowBytes) { assert rowBytes.length > 0; buf.put(rowBytes); } /** * @param in Input to read from. * @return Read entry. */ DataEntry readDataEntry(ByteBufferBackedDataInput in) throws IOException, IgniteCheckedException { int cacheId = in.readInt(); int keySize = in.readInt(); byte keyType = in.readByte(); byte[] keyBytes = new byte[keySize]; in.readFully(keyBytes); int valSize = in.readInt(); byte valType = 0; byte[] valBytes = null; if (valSize >= 0) { valType = in.readByte(); valBytes = new byte[valSize]; in.readFully(valBytes); } byte ord = in.readByte(); GridCacheOperation op = GridCacheOperation.fromOrdinal(ord & 0xFF); GridCacheVersion nearXidVer = readVersion(in, true); GridCacheVersion writeVer = readVersion(in, false); int partId = in.readInt(); long partCntr = in.readLong(); long expireTime = in.readLong(); GridCacheContext cacheCtx = cctx.cacheContext(cacheId); if (cacheCtx != null) { CacheObjectContext coCtx = cacheCtx.cacheObjectContext(); KeyCacheObject key = co.toKeyCacheObject(coCtx, keyType, keyBytes); CacheObject val = valBytes != null ? co.toCacheObject(coCtx, valType, valBytes) : null; return new DataEntry( cacheId, key, val, op, nearXidVer, writeVer, expireTime, partId, partCntr ); } else return new LazyDataEntry( cctx, cacheId, keyType, keyBytes, valType, valBytes, op, nearXidVer, writeVer, expireTime, partId, partCntr); } /** * @param buf Buffer to read from. * @return Read map. 
*/ private Map<Integer, CacheState> readPartitionStates(DataInput buf) throws IOException { int caches = buf.readShort() & 0xFFFF; if (caches == 0) return Collections.emptyMap(); Map<Integer, CacheState> states = new HashMap<>(caches, 1.0f); for (int i = 0; i < caches; i++) { int cacheId = buf.readInt(); int parts = buf.readShort() & 0xFFFF; CacheState state = new CacheState(parts); for (int p = 0; p < parts; p++) { int partId = buf.readShort() & 0xFFFF; long size = buf.readLong(); long partCntr = buf.readLong(); state.addPartitionState(partId, size, partCntr); } states.put(cacheId, state); } return states; } /** * Changes the buffer position by the number of read bytes. * * @param in Data input to read from. * @param allowNull Is {@code null}version allowed. * @return Read cache version. */ private GridCacheVersion readVersion(ByteBufferBackedDataInput in, boolean allowNull) throws IOException { // To be able to read serialization protocol version. in.ensure(1); try { int size = CacheVersionIO.readSize(in.buffer(), allowNull); in.ensure(size); return CacheVersionIO.read(in.buffer(), allowNull); } catch (IgniteCheckedException e) { throw new IOException(e); } } /** * @param dataRec Data record to serialize. * @return Full data record size. * @throws IgniteCheckedException If failed to obtain the length of one of the entries. */ private int dataSize(DataRecord dataRec) throws IgniteCheckedException { int sz = 0; for (DataEntry entry : dataRec.writeEntries()) sz += entrySize(entry); return sz; } /** * @param entry Entry to get size for. * @return Entry size. * @throws IgniteCheckedException If failed to get key or value bytes length. */ private int entrySize(DataEntry entry) throws IgniteCheckedException { GridCacheContext cctx = this.cctx.cacheContext(entry.cacheId()); CacheObjectContext coCtx = cctx.cacheObjectContext(); return /*cache ID*/4 + /*key*/entry.key().valueBytesLength(coCtx) + /*value*/(entry.value() == null ? 
4 : entry.value().valueBytesLength(coCtx)) + /*op*/1 + /*near xid ver*/CacheVersionIO.size(entry.nearXidVersion(), true) + /*write ver*/CacheVersionIO.size(entry.writeVersion(), false) + /*part ID*/4 + /*expire Time*/8 + /*part cnt*/8; } /** * @param states Partition states. * @return Size required to write partition states. */ private int cacheStatesSize(Map<Integer, CacheState> states) { // Need 4 bytes for the number of caches. int size = 2; for (Map.Entry<Integer, CacheState> entry : states.entrySet()) { // Cache ID. size += 4; // Need 2 bytes for the number of partitions. size += 2; CacheState state = entry.getValue(); // 2 bytes partition ID, size and counter per partition. size += 18 * state.size(); } return size; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.stram.plan.logical.module; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Random; import org.junit.Assert; import org.junit.Test; import org.apache.hadoop.conf.Configuration; import com.datatorrent.api.Attribute; import com.datatorrent.api.Context; import com.datatorrent.api.DAG; import com.datatorrent.api.DefaultInputPort; import com.datatorrent.api.DefaultOutputPort; import com.datatorrent.api.InputOperator; import com.datatorrent.api.Module; import com.datatorrent.api.Partitioner; import com.datatorrent.api.StreamingApplication; import com.datatorrent.api.annotation.InputPortFieldAnnotation; import com.datatorrent.api.annotation.OutputPortFieldAnnotation; import com.datatorrent.common.util.BaseOperator; import com.datatorrent.stram.engine.OperatorContext; import com.datatorrent.stram.plan.logical.LogicalPlan; import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration; public class TestModuleExpansion { static class DummyInputOperator extends BaseOperator implements InputOperator { private int inputOperatorProp = 0; Random r = new Random(); public transient 
DefaultOutputPort<Integer> out = new DefaultOutputPort<Integer>(); @Override public void emitTuples() { out.emit(r.nextInt()); } public int getInputOperatorProp() { return inputOperatorProp; } public void setInputOperatorProp(int inputOperatorProp) { this.inputOperatorProp = inputOperatorProp; } } static class DummyOperator extends BaseOperator { private int operatorProp = 0; @OutputPortFieldAnnotation(optional = true) public final transient DefaultOutputPort<Integer> out1 = new DefaultOutputPort<>(); @OutputPortFieldAnnotation(optional = true) public final transient DefaultOutputPort<Integer> out2 = new DefaultOutputPort<>(); @InputPortFieldAnnotation(optional = true) public final transient DefaultInputPort<Integer> in = new DefaultInputPort<Integer>() { @Override public void process(Integer tuple) { out1.emit(tuple); out2.emit(tuple); } }; public int getOperatorProp() { return operatorProp; } public void setOperatorProp(int operatorProp) { this.operatorProp = operatorProp; } } static class TestPartitioner implements Partitioner<DummyOperator>, Serializable { @Override public Collection<Partition<DummyOperator>> definePartitions(Collection<Partition<DummyOperator>> partitions, PartitioningContext context) { ArrayList<Partition<DummyOperator>> lst = new ArrayList(); lst.add(partitions.iterator().next()); return lst; } @Override public void partitioned(Map<Integer, Partition<DummyOperator>> partitions) { } } static class Level1Module implements Module { private int level1ModuleProp = 0; @InputPortFieldAnnotation(optional = true) public final transient ProxyInputPort<Integer> mIn = new ProxyInputPort<>(); @OutputPortFieldAnnotation(optional = true) public final transient ProxyOutputPort<Integer> mOut = new ProxyOutputPort<>(); private int memory = 512; private int portMemory = 2; @Override public void populateDAG(DAG dag, Configuration conf) { DummyOperator o1 = dag.addOperator("O1", new DummyOperator()); o1.setOperatorProp(level1ModuleProp); /** set various 
attribute on the operator for testing */ Attribute.AttributeMap attr = dag.getMeta(o1).getAttributes(); attr.put(OperatorContext.MEMORY_MB, memory); attr.put(OperatorContext.APPLICATION_WINDOW_COUNT, 2); attr.put(OperatorContext.LOCALITY_HOST, "host1"); attr.put(OperatorContext.PARTITIONER, new TestPartitioner()); attr.put(OperatorContext.CHECKPOINT_WINDOW_COUNT, 120); attr.put(OperatorContext.STATELESS, true); attr.put(OperatorContext.SPIN_MILLIS, 20); dag.setInputPortAttribute(o1.in, Context.PortContext.BUFFER_MEMORY_MB, portMemory); mIn.set(o1.in); mOut.set(o1.out1); } public int getLevel1ModuleProp() { return level1ModuleProp; } public void setLevel1ModuleProp(int level1ModuleProp) { this.level1ModuleProp = level1ModuleProp; } public int getMemory() { return memory; } public void setMemory(int memory) { this.memory = memory; } public int getPortMemory() { return portMemory; } public void setPortMemory(int portMemory) { this.portMemory = portMemory; } } static class Level2ModuleA implements Module { private int level2ModuleAProp1 = 0; private int level2ModuleAProp2 = 0; private int level2ModuleAProp3 = 0; @InputPortFieldAnnotation(optional = true) public final transient ProxyInputPort<Integer> mIn = new ProxyInputPort<>(); @OutputPortFieldAnnotation(optional = true) public final transient ProxyOutputPort<Integer> mOut1 = new ProxyOutputPort<>(); @OutputPortFieldAnnotation(optional = true) public final transient ProxyOutputPort<Integer> mOut2 = new ProxyOutputPort<>(); @Override public void populateDAG(DAG dag, Configuration conf) { Level1Module m1 = dag.addModule("M1", new Level1Module()); m1.setMemory(1024); m1.setPortMemory(1); m1.setLevel1ModuleProp(level2ModuleAProp1); Level1Module m2 = dag.addModule("M2", new Level1Module()); m2.setMemory(2048); m2.setPortMemory(2); m2.setLevel1ModuleProp(level2ModuleAProp2); DummyOperator o1 = dag.addOperator("O1", new DummyOperator()); o1.setOperatorProp(level2ModuleAProp3); dag.addStream("M1_M2&O1", m1.mOut, m2.mIn, 
o1.in).setLocality(DAG.Locality.CONTAINER_LOCAL); mIn.set(m1.mIn); mOut1.set(m2.mOut); mOut2.set(o1.out1); } public int getLevel2ModuleAProp1() { return level2ModuleAProp1; } public void setLevel2ModuleAProp1(int level2ModuleAProp1) { this.level2ModuleAProp1 = level2ModuleAProp1; } public int getLevel2ModuleAProp2() { return level2ModuleAProp2; } public void setLevel2ModuleAProp2(int level2ModuleAProp2) { this.level2ModuleAProp2 = level2ModuleAProp2; } public int getLevel2ModuleAProp3() { return level2ModuleAProp3; } public void setLevel2ModuleAProp3(int level2ModuleAProp3) { this.level2ModuleAProp3 = level2ModuleAProp3; } } static class Level2ModuleB implements Module { private int level2ModuleBProp1 = 0; private int level2ModuleBProp2 = 0; private int level2ModuleBProp3 = 0; @InputPortFieldAnnotation(optional = true) public final transient ProxyInputPort<Integer> mIn = new ProxyInputPort<>(); @OutputPortFieldAnnotation(optional = true) public final transient ProxyOutputPort<Integer> mOut1 = new ProxyOutputPort<>(); @OutputPortFieldAnnotation(optional = true) public final transient ProxyOutputPort<Integer> mOut2 = new ProxyOutputPort<>(); @Override public void populateDAG(DAG dag, Configuration conf) { DummyOperator o1 = dag.addOperator("O1", new DummyOperator()); o1.setOperatorProp(level2ModuleBProp1); Level1Module m1 = dag.addModule("M1", new Level1Module()); m1.setMemory(4096); m1.setPortMemory(3); m1.setLevel1ModuleProp(level2ModuleBProp2); DummyOperator o2 = dag.addOperator("O2", new DummyOperator()); o2.setOperatorProp(level2ModuleBProp3); dag.addStream("O1_M1", o1.out1, m1.mIn).setLocality(DAG.Locality.THREAD_LOCAL); dag.addStream("O1_O2", o1.out2, o2.in).setLocality(DAG.Locality.RACK_LOCAL); mIn.set(o1.in); mOut1.set(m1.mOut); mOut2.set(o2.out1); } public int getLevel2ModuleBProp1() { return level2ModuleBProp1; } public void setLevel2ModuleBProp1(int level2ModuleBProp1) { this.level2ModuleBProp1 = level2ModuleBProp1; } public int getLevel2ModuleBProp2() { 
return level2ModuleBProp2; } public void setLevel2ModuleBProp2(int level2ModuleBProp2) { this.level2ModuleBProp2 = level2ModuleBProp2; } public int getLevel2ModuleBProp3() { return level2ModuleBProp3; } public void setLevel2ModuleBProp3(int level2ModuleBProp3) { this.level2ModuleBProp3 = level2ModuleBProp3; } } static class Level3Module implements Module { public final transient ProxyInputPort<Integer> mIn = new ProxyInputPort<>(); public final transient ProxyOutputPort<Integer> mOut1 = new ProxyOutputPort<>(); public final transient ProxyOutputPort<Integer> mOut2 = new ProxyOutputPort<>(); @Override public void populateDAG(DAG dag, Configuration conf) { DummyOperator op = dag.addOperator("O1", new DummyOperator()); Level2ModuleB m1 = dag.addModule("M1", new Level2ModuleB()); Level1Module m2 = dag.addModule("M2", new Level1Module()); dag.addStream("s1", op.out1, m1.mIn); dag.addStream("s2", op.out2, m2.mIn); mIn.set(op.in); mOut1.set(m1.mOut1); mOut2.set(m2.mOut); } } static class NestedModuleApp implements StreamingApplication { @Override public void populateDAG(DAG dag, Configuration conf) { DummyInputOperator o1 = dag.addOperator("O1", new DummyInputOperator()); o1.setInputOperatorProp(1); DummyOperator o2 = dag.addOperator("O2", new DummyOperator()); o2.setOperatorProp(2); Level2ModuleA ma = dag.addModule("Ma", new Level2ModuleA()); ma.setLevel2ModuleAProp1(11); ma.setLevel2ModuleAProp2(12); ma.setLevel2ModuleAProp3(13); Level2ModuleB mb = dag.addModule("Mb", new Level2ModuleB()); mb.setLevel2ModuleBProp1(21); mb.setLevel2ModuleBProp2(22); mb.setLevel2ModuleBProp3(23); Level2ModuleA mc = dag.addModule("Mc", new Level2ModuleA()); mc.setLevel2ModuleAProp1(31); mc.setLevel2ModuleAProp2(32); mc.setLevel2ModuleAProp3(33); Level2ModuleB md = dag.addModule("Md", new Level2ModuleB()); md.setLevel2ModuleBProp1(41); md.setLevel2ModuleBProp2(42); md.setLevel2ModuleBProp3(43); Level3Module me = dag.addModule("Me", new Level3Module()); dag.addStream("O1_O2", o1.out, o2.in, 
me.mIn); dag.addStream("O2_Ma", o2.out1, ma.mIn); dag.addStream("Ma_Mb", ma.mOut1, mb.mIn); dag.addStream("Ma_Md", ma.mOut2, md.mIn); dag.addStream("Mb_Mc", mb.mOut2, mc.mIn); } } @Test public void testModuleExtreme() { StreamingApplication app = new NestedModuleApp(); Configuration conf = new Configuration(false); LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf); LogicalPlan dag = new LogicalPlan(); lpc.prepareDAG(dag, app, "ModuleApp"); dag.validate(); validateTopLevelOperators(dag); validateTopLevelStreams(dag); validatePublicMethods(dag); } private void validateTopLevelStreams(LogicalPlan dag) { List<String> streamNames = new ArrayList<>(); for (LogicalPlan.StreamMeta streamMeta : dag.getAllStreams()) { streamNames.add(streamMeta.getName()); } Assert.assertTrue(streamNames.contains(componentName("Mb", "O1_M1"))); Assert.assertTrue(streamNames.contains("O2_Ma")); Assert.assertTrue(streamNames.contains("Mb_Mc")); Assert.assertTrue(streamNames.contains(componentName("Mb", "O1_O2"))); Assert.assertTrue(streamNames.contains(componentName("Ma", "M1_M2&O1"))); Assert.assertTrue(streamNames.contains(componentName("Md", "O1_M1"))); Assert.assertTrue(streamNames.contains(componentName("Ma_Md"))); Assert.assertTrue(streamNames.contains(componentName("Mc", "M1_M2&O1"))); Assert.assertTrue(streamNames.contains(componentName("Md", "O1_O2"))); Assert.assertTrue(streamNames.contains("Ma_Mb")); Assert.assertTrue(streamNames.contains("O1_O2")); validateSeperateStream(dag, componentName("Mb", "O1_M1"), componentName("Mb", "O1"), componentName("Mb", "M1", "O1")); validateSeperateStream(dag, "O2_Ma", "O2", componentName("Ma", "M1", "O1")); validateSeperateStream(dag, "Mb_Mc", componentName("Mb", "O2"), componentName("Mc", "M1", "O1")); validateSeperateStream(dag, componentName("Mb", "O1_O2"), componentName("Mb", "O1"), componentName("Mb", "O2")); validateSeperateStream(dag, componentName("Ma", "M1_M2&O1"), componentName("Ma", "M1", "O1"), componentName("Ma", "O1"), 
componentName("Ma", "M2", "O1")); validateSeperateStream(dag, componentName("Md", "O1_M1"), componentName("Md", "O1"), componentName("Md", "M1", "O1")); validateSeperateStream(dag, "Ma_Md", componentName("Ma", "O1"), componentName("Md", "O1")); validateSeperateStream(dag, componentName("Mc", "M1_M2&O1"), componentName("Mc", "M1", "O1"), componentName("Mc", "O1"), componentName("Mc", "M2", "O1")); validateSeperateStream(dag, componentName("Md", "O1_O2"), componentName("Md", "O1"), componentName("Md", "O2")); validateSeperateStream(dag, "Ma_Mb", componentName("Ma", "M2", "O1"), componentName("Mb", "O1")); validateSeperateStream(dag, "O1_O2", "O1", "O2", componentName("Me", "O1")); /* Verify that stream locality is set correctly in top level dag */ validateStreamLocality(dag, componentName("Mc", "M1_M2&O1"), DAG.Locality.CONTAINER_LOCAL); validateStreamLocality(dag, componentName("Mb", "O1_M1"), DAG.Locality.THREAD_LOCAL); validateStreamLocality(dag, componentName("Mb", "O1_O2"), DAG.Locality.RACK_LOCAL); validateStreamLocality(dag, componentName("Mc", "M1_M2&O1"), DAG.Locality.CONTAINER_LOCAL); validateStreamLocality(dag, componentName("Md", "O1_M1"), DAG.Locality.THREAD_LOCAL); validateStreamLocality(dag, componentName("Me", "s1"), null); } private void validateSeperateStream(LogicalPlan dag, String streamName, String inputOperatorName, String... 
outputOperatorNames) { LogicalPlan.StreamMeta streamMeta = dag.getStream(streamName); String sourceName = streamMeta.getSource().getOperatorMeta().getName(); List<String> sinksName = new ArrayList<>(); for (LogicalPlan.InputPortMeta inputPortMeta : streamMeta.getSinks()) { sinksName.add(inputPortMeta.getOperatorWrapper().getName()); } Assert.assertTrue(inputOperatorName.equals(sourceName)); Assert.assertEquals(outputOperatorNames.length, sinksName.size()); for (String outputOperatorName : outputOperatorNames) { Assert.assertTrue(sinksName.contains(outputOperatorName)); } } private void validateTopLevelOperators(LogicalPlan dag) { List<String> operatorNames = new ArrayList<>(); for (LogicalPlan.OperatorMeta operatorMeta : dag.getAllOperators()) { operatorNames.add(operatorMeta.getName()); } Assert.assertTrue(operatorNames.contains("O1")); Assert.assertTrue(operatorNames.contains("O2")); Assert.assertTrue(operatorNames.contains(componentName("Ma", "M1", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Ma", "M2", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Ma", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Mb", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Mb", "M1", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Mb", "O2"))); Assert.assertTrue(operatorNames.contains(componentName("Mc", "M1", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Mc", "M2", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Mc", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Md", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Md", "M1", "O1"))); Assert.assertTrue(operatorNames.contains(componentName("Md", "O2"))); validateOperatorPropertyValue(dag, "O1", 1); validateOperatorPropertyValue(dag, "O2", 2); validateOperatorPropertyValue(dag, componentName("Ma", "M1", "O1"), 11); validateOperatorPropertyValue(dag, componentName("Ma", "M2", 
"O1"), 12); validateOperatorPropertyValue(dag, componentName("Ma", "O1"), 13); validateOperatorPropertyValue(dag, componentName("Mb", "O1"), 21); validateOperatorPropertyValue(dag, componentName("Mb", "M1", "O1"), 22); validateOperatorPropertyValue(dag, componentName("Mb", "O2"), 23); validateOperatorPropertyValue(dag, componentName("Mc", "M1", "O1"), 31); validateOperatorPropertyValue(dag, componentName("Mc", "M2", "O1"), 32); validateOperatorPropertyValue(dag, componentName("Mc", "O1"), 33); validateOperatorPropertyValue(dag, componentName("Md", "O1"), 41); validateOperatorPropertyValue(dag, componentName("Md", "M1", "O1"), 42); validateOperatorPropertyValue(dag, componentName("Md", "O2"), 43); validateOperatorParent(dag, "O1", null); validateOperatorParent(dag, "O2", null); validateOperatorParent(dag, componentName("Ma", "M1", "O1"), componentName("Ma", "M1")); validateOperatorParent(dag, componentName("Ma", "M2", "O1"), componentName("Ma", "M2")); validateOperatorParent(dag, componentName("Ma", "O1"), "Ma"); validateOperatorParent(dag, componentName("Mb", "O1"), "Mb"); validateOperatorParent(dag, componentName("Mb", "M1", "O1"), componentName("Mb", "M1")); validateOperatorParent(dag, componentName("Mb", "O2"), "Mb"); validateOperatorParent(dag, componentName("Mc", "M1", "O1"), componentName("Mc", "M1")); validateOperatorParent(dag, componentName("Mc", "M2", "O1"), componentName("Mc", "M2")); validateOperatorParent(dag, componentName("Mc", "O1"), "Mc"); validateOperatorParent(dag, componentName("Md", "O1"), "Md"); validateOperatorParent(dag, componentName("Md", "M1", "O1"), componentName("Md", "M1")); validateOperatorParent(dag, componentName("Md", "O2"), "Md"); validateOperatorAttribute(dag, componentName("Ma", "M1", "O1"), 1024); validateOperatorAttribute(dag, componentName("Ma", "M2", "O1"), 2048); validateOperatorAttribute(dag, componentName("Mb", "M1", "O1"), 4096); validateOperatorAttribute(dag, componentName("Mc", "M1", "O1"), 1024); 
validateOperatorAttribute(dag, componentName("Mc", "M2", "O1"), 2048); validatePortAttribute(dag, componentName("Ma", "M1", "O1"), 1); validatePortAttribute(dag, componentName("Ma", "M2", "O1"), 2); validatePortAttribute(dag, componentName("Mb", "M1", "O1"), 3); validatePortAttribute(dag, componentName("Mc", "M1", "O1"), 1); validatePortAttribute(dag, componentName("Mc", "M2", "O1"), 2); } private void validateOperatorParent(LogicalPlan dag, String operatorName, String parentModuleName) { LogicalPlan.OperatorMeta operatorMeta = dag.getOperatorMeta(operatorName); if (parentModuleName == null) { Assert.assertNull(operatorMeta.getModuleName()); } else { Assert.assertTrue(parentModuleName.equals(operatorMeta.getModuleName())); } } private void validateOperatorPropertyValue(LogicalPlan dag, String operatorName, int expectedValue) { LogicalPlan.OperatorMeta oMeta = dag.getOperatorMeta(operatorName); if (operatorName.equals("O1")) { DummyInputOperator operator = (DummyInputOperator)oMeta.getOperator(); Assert.assertEquals(expectedValue, operator.getInputOperatorProp()); } else { DummyOperator operator = (DummyOperator)oMeta.getOperator(); Assert.assertEquals(expectedValue, operator.getOperatorProp()); } } private void validatePublicMethods(LogicalPlan dag) { // Logical dag contains 4 modules added on top level. List<String> moduleNames = new ArrayList<>(); for (LogicalPlan.ModuleMeta moduleMeta : dag.getAllModules()) { moduleNames.add(moduleMeta.getName()); } Assert.assertTrue(moduleNames.contains("Ma")); Assert.assertTrue(moduleNames.contains("Mb")); Assert.assertTrue(moduleNames.contains("Mc")); Assert.assertTrue(moduleNames.contains("Md")); Assert.assertTrue(moduleNames.contains("Me")); Assert.assertEquals("Number of modules are 5", 5, dag.getAllModules().size()); // correct module meta is returned by getMeta call. 
LogicalPlan.ModuleMeta m = dag.getModuleMeta("Ma"); Assert.assertEquals("Name of module is Ma", m.getName(), "Ma"); } private static String componentName(String... names) { if (names.length == 0) { return ""; } StringBuilder sb = new StringBuilder(names[0]); for (int i = 1; i < names.length; i++) { sb.append(LogicalPlan.MODULE_NAMESPACE_SEPARATOR); sb.append(names[i]); } return sb.toString(); } /** * Generate a conflict, Add a top level operator with name "m1_O1", * and add a module "m1" which will populate operator "O1", causing name conflict with * top level operator. */ @Test(expected = java.lang.IllegalArgumentException.class) public void conflictingNamesWithExpandedModule() { Configuration conf = new Configuration(false); LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf); LogicalPlan dag = new LogicalPlan(); DummyInputOperator in = dag.addOperator(componentName("m1", "O1"), new DummyInputOperator()); Level2ModuleA module = dag.addModule("m1", new Level2ModuleA()); dag.addStream("s1", in.out, module.mIn); lpc.prepareDAG(dag, null, "ModuleApp"); dag.validate(); } /** * Module and Operator with same name is not allowed in a DAG, to prevent properties * conflict. */ @Test(expected = java.lang.IllegalArgumentException.class) public void conflictingNamesWithOperator1() { Configuration conf = new Configuration(false); LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf); LogicalPlan dag = new LogicalPlan(); DummyInputOperator in = dag.addOperator("M1", new DummyInputOperator()); Level2ModuleA module = dag.addModule("M1", new Level2ModuleA()); dag.addStream("s1", in.out, module.mIn); lpc.prepareDAG(dag, null, "ModuleApp"); dag.validate(); } /** * Module and Operator with same name is not allowed in a DAG, to prevent properties * conflict. 
*/ @Test(expected = java.lang.IllegalArgumentException.class) public void conflictingNamesWithOperator2() { Configuration conf = new Configuration(false); LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf); LogicalPlan dag = new LogicalPlan(); Level2ModuleA module = dag.addModule("M1", new Level2ModuleA()); DummyInputOperator in = dag.addOperator("M1", new DummyInputOperator()); dag.addStream("s1", in.out, module.mIn); lpc.prepareDAG(dag, null, "ModuleApp"); dag.validate(); } /** * Verify attributes populated on DummyOperator from Level1 module */ private void validateOperatorAttribute(LogicalPlan dag, String name, int memory) { LogicalPlan.OperatorMeta oMeta = dag.getOperatorMeta(name); Attribute.AttributeMap attrs = oMeta.getAttributes(); Assert.assertEquals((int)attrs.get(OperatorContext.MEMORY_MB), memory); Assert.assertEquals("Application window id is 2 ", (int)attrs.get(OperatorContext.APPLICATION_WINDOW_COUNT), 2); Assert.assertEquals("Locality host is host1", attrs.get(OperatorContext.LOCALITY_HOST), "host1"); Assert.assertEquals(attrs.get(OperatorContext.PARTITIONER).getClass(), TestPartitioner.class); Assert.assertEquals("Checkpoint window count ", (int)attrs.get(OperatorContext.CHECKPOINT_WINDOW_COUNT), 120); Assert.assertEquals("Operator is stateless ", attrs.get(OperatorContext.STATELESS), true); Assert.assertEquals("SPIN MILLIS is set to 20 ", (int)attrs.get(OperatorContext.SPIN_MILLIS), 20); } /** * Validate attribute set on the port of DummyOperator in Level1Module */ private void validatePortAttribute(LogicalPlan dag, String name, int memory) { LogicalPlan.InputPortMeta imeta = dag.getOperatorMeta(name).getInputStreams().keySet().iterator().next(); Assert.assertEquals(memory, (int)imeta.getAttributes().get(Context.PortContext.BUFFER_MEMORY_MB)); } /** * validate if stream attributes are copied or not */ private void validateStreamLocality(LogicalPlan dag, String name, DAG.Locality locality) { LogicalPlan.StreamMeta meta = 
dag.getStream(name); Assert.assertTrue("Metadata for stream is available ", meta != null); Assert.assertEquals("Locality is " + locality, meta.getLocality(), locality); } }
package psidev.psi.mi.jami.xml.io.writer.elements.impl.extended.compact.xml25;

import junit.framework.Assert;
import org.junit.Ignore;
import org.junit.Test;
import psidev.psi.mi.jami.binary.BinaryInteraction;
import psidev.psi.mi.jami.exception.IllegalRangeException;
import psidev.psi.mi.jami.model.Complex;
import psidev.psi.mi.jami.model.Feature;
import psidev.psi.mi.jami.model.NamedInteraction;
import psidev.psi.mi.jami.model.Participant;
import psidev.psi.mi.jami.model.impl.*;
import psidev.psi.mi.jami.utils.CvTermUtils;
import psidev.psi.mi.jami.utils.InteractorUtils;
import psidev.psi.mi.jami.utils.RangeUtils;
import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache;
import psidev.psi.mi.jami.xml.cache.InMemoryIdentityObjectCache;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.AbstractXmlWriterTest;
import psidev.psi.mi.jami.xml.model.extension.ExtendedPsiXmlInteraction;
import psidev.psi.mi.jami.xml.model.extension.binary.xml25.XmlBinaryInteraction;

import javax.xml.stream.XMLStreamException;
import java.io.IOException;

/**
 * Unit tester for XmlBasicBinaryInteractionWriter.
 *
 * Each test builds a binary interaction in memory, serialises it with
 * XmlBasicBinaryInteractionWriter (compact XML 2.5, extended elements) and
 * compares the output against a hand-written expected XML snippet held in
 * the string constants below. The expected strings are compared verbatim,
 * so their content (including whitespace) must not be altered.
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>25/11/13</pre>
 */
public class XmlBasicBinaryInteractionWriterTest extends AbstractXmlWriterTest {

    // Expected output: minimal interaction with one participant (interactorRef).
    private String interaction = "<interaction id=\"1\">\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output when the participant is a complex written as a sub-interaction
    // (interactionRef instead of interactorRef).
    private String interaction_complex = "<interaction id=\"1\">\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactionRef>4</interactionRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output with a names/shortLabel element.
    private String interaction_shortName = "<interaction id=\"1\">\n" +
            " <names>\n" +
            " <shortLabel>interaction test</shortLabel>\n" +
            " </names>\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output with a names/fullName element.
    private String interaction_fullName = "<interaction id=\"1\">\n" +
            " <names>\n" +
            " <fullName>interaction test</fullName>\n" +
            " </names>\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output with alias elements (one typed, one untyped).
    private String interaction_aliases = "<interaction id=\"1\">\n" +
            " <names>\n" +
            " <alias type=\"synonym\">interaction synonym</alias>\n" +
            " <alias>test</alias>\n" +
            " </names>\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output when an identifier is present: it becomes the primaryRef
    // (refType identity) and other xrefs become secondaryRefs.
    private String interaction_identifier = "<interaction id=\"1\">\n" +
            " <xref>\n" +
            " <primaryRef db=\"intact\" id=\"EBI-xxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " <secondaryRef db=\"test\" id=\"xxxx1\"/>\n" +
            " </xref>\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output with plain (non-identity) xrefs only: first xref is promoted
    // to primaryRef without a refType.
    private String interaction_xref = "<interaction id=\"1\">\n" +
            " <xref>\n" +
            " <primaryRef db=\"test2\" id=\"xxxx2\"/>\n" +
            " <secondaryRef db=\"test\" id=\"xxxx1\"/>\n" +
            " </xref>\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output for two participants whose linked features produce an
    // inferredInteractionList (participantFeatureRef 5 and 8).
    private String interaction_inferred = "<interaction id=\"1\">\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " <featureList>\n" +
            " <feature id=\"5\">\n" +
            " <featureRangeList>\n" +
            " <featureRange>\n" +
            " <startStatus>\n" +
            " <names>\n" +
            " <shortLabel>certain</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </startStatus>\n" +
            " <begin position=\"1\"/>\n" +
            " <endStatus>\n" +
            " <names>\n" +
            " <shortLabel>certain</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </endStatus>\n" +
            " <end position=\"4\"/>\n" +
            " </featureRange>\n" +
            " </featureRangeList>\n" +
            " </feature>\n" +
            " </featureList>\n" +
            " </participant>\n" +
            " <participant id=\"6\">\n" +
            " <interactorRef>7</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " <featureList>\n" +
            " <feature id=\"8\">\n" +
            " <featureRangeList>\n" +
            " <featureRange>\n" +
            " <startStatus>\n" +
            " <names>\n" +
            " <shortLabel>certain</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </startStatus>\n" +
            " <begin position=\"1\"/>\n" +
            " <endStatus>\n" +
            " <names>\n" +
            " <shortLabel>certain</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </endStatus>\n" +
            " <end position=\"4\"/>\n" +
            " </featureRange>\n" +
            " </featureRangeList>\n" +
            " </feature>\n" +
            " </featureList>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            " <inferredInteractionList>\n" +
            " <inferredInteraction>\n" +
            " <participant>\n" +
            " <participantFeatureRef>5</participantFeatureRef>\n" +
            " </participant>\n" +
            " <participant>\n" +
            " <participantFeatureRef>8</participantFeatureRef>\n" +
            " </participant>\n" +
            " </inferredInteraction>\n" +
            " </inferredInteractionList>\n" +
            "</interaction>";
    // Expected output with an interactionType CV term (association, MI:0914).
    private String interaction_type = "<interaction id=\"1\">\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            " <interactionType>\n" +
            " <names>\n" +
            " <shortLabel>association</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0914\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </interactionType>\n" +
            "</interaction>";
    // Expected output with annotations and the complex expansion method written
    // as attributes.
    private String interaction_attributes = "<interaction id=\"1\">\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            " <attributeList>\n" +
            " <attribute name=\"test2\"/>\n" +
            " <attribute name=\"test3\"/>\n" +
            " <attribute name=\"spoke expansion\" nameAc=\"MI:1060\"/>\n" +
            " </attributeList>\n" +
            "</interaction>";
    // Expected output when another interaction was registered in the id cache
    // first: all generated ids are shifted by one.
    private String interaction_registered = "<interaction id=\"2\">\n" +
            " <experimentList>\n" +
            " <experimentRef>3</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"4\">\n" +
            " <interactorRef>5</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            "</interaction>";
    // Expected output for an intra-molecular interaction.
    private String interaction_intra = "<interaction id=\"1\">\n" +
            " <experimentList>\n" +
            " <experimentRef>2</experimentRef>\n" +
            " </experimentList>\n" +
            " <participantList>\n" +
            " <participant id=\"3\">\n" +
            " <interactorRef>4</interactorRef>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            " <intraMolecular>true</intraMolecular>\n" +
            "</interaction>";

    // Shared id cache; cleared at the start of every test so generated ids are
    // deterministic.
    private PsiXmlObjectCache elementCache = new InMemoryIdentityObjectCache();

    /** Minimal interaction with a single unknown-interactor participant. */
    @Test
    public void test_write_interaction() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction, output.toString());
    }

    /** A complex participant is written as an interactionRef by default. */
    @Test
    public void test_write_participant_complex() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Complex complex = new DefaultComplex("test complex");
        complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));
        Participant participant = new DefaultParticipant(complex);
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_complex, output.toString());
    }

    /** With setComplexAsInteractor(true) the complex is written as a plain interactorRef. */
    @Test
    public void test_write_participant_complex_as_interactor() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Complex complex = new DefaultComplex("test complex");
        complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));
        Participant participant = new DefaultParticipant(complex);
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.setComplexAsInteractor(true);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction, output.toString());
    }

    /** A complex without participants falls back to the interactorRef form. */
    @Test
    public void test_write_participant_complex_no_participants() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Complex complex = new DefaultComplex("test complex");
        Participant participant = new DefaultParticipant(complex);
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction, output.toString());
    }

    /** The interaction short name is written as names/shortLabel. */
    @Test
    public void test_write_interaction_shortName() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction("interaction test");
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_shortName, output.toString());
    }

    /** The interaction full name is written as names/fullName. */
    @Test
    public void test_write_interaction_fullName() throws XMLStreamException, IOException, IllegalRangeException {
        NamedInteraction interaction = new XmlBinaryInteraction();
        interaction.setFullName("interaction test");
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write((BinaryInteraction)interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_fullName, output.toString());
    }

    /** Aliases (typed and untyped) are written inside the names element. */
    @Test
    public void test_write_interaction_alias() throws XMLStreamException, IOException, IllegalRangeException {
        NamedInteraction interaction = new XmlBinaryInteraction();
        interaction.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "interaction synonym"));
        interaction.getAliases().add(new DefaultAlias("test"));
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write((BinaryInteraction)interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_aliases, output.toString());
    }

    /** An identifier becomes the identity primaryRef; other xrefs are secondaryRefs. */
    @Test
    public void test_write_interaction_identifier() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        interaction.getIdentifiers().add(new DefaultXref(new DefaultCvTerm("intact"), "EBI-xxx"));
        interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_identifier, output.toString());
    }

    /** Without an identifier the first plain xref is promoted to primaryRef. */
    @Test
    public void test_write_interaction_xref() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test2"), "xxxx2"));
        interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_xref, output.toString());
    }

    /** Linked features on the two participants produce an inferredInteractionList. */
    @Test
    @Ignore
    public void test_write_interaction_inferred() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        Participant participant2 = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        // linked features f1 and f2 (one on each participant) form one inferred interaction
        Feature f1 = new DefaultFeature();
        f1.getRanges().add(RangeUtils.createRangeFromString("1-4"));
        Feature f2 = new DefaultFeature();
        f2.getRanges().add(RangeUtils.createRangeFromString("1-4"));
        f1.getLinkedFeatures().add(f2);
        f2.getLinkedFeatures().add(f1);
        participant.addFeature(f1);
        participant2.addFeature(f2);
        interaction.addParticipant(participant);
        interaction.addParticipant(participant2);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_inferred, output.toString());
    }

    /** The interaction type CV term is written as an interactionType element. */
    @Test
    public void test_write_interaction_type() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        interaction.setInteractionType(CvTermUtils.createMICvTerm("association", "MI:0914"));
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_type, output.toString());
    }

    /** Annotations and the complex expansion method are written as attributes. */
    @Test
    public void test_write_interaction_attributes() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        interaction.setComplexExpansion(CvTermUtils.createMICvTerm("spoke expansion", "MI:1060"));
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test2")));
        interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test3")));
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_attributes, output.toString());
    }

    /** Pre-registering another interaction in the id cache shifts the generated ids. */
    @Test
    public void test_write_interaction_registered() throws XMLStreamException, IOException, IllegalRangeException {
        BinaryInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        elementCache.clear();
        elementCache.extractIdForInteraction(new DefaultInteraction());
        elementCache.extractIdForInteraction(interaction);

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write(interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_registered, output.toString());
    }

    /** The intraMolecular flag is written when set on the extended interaction. */
    @Test
    public void test_write_interaction_intraMolecular() throws XMLStreamException, IOException, IllegalRangeException {
        ExtendedPsiXmlInteraction interaction = new XmlBinaryInteraction();
        Participant participant = new DefaultParticipant(InteractorUtils.createUnknownBasicInteractor());
        interaction.addParticipant(participant);
        interaction.setIntraMolecular(true);
        elementCache.clear();

        XmlBasicBinaryInteractionWriter writer = new XmlBasicBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.write((BinaryInteraction)interaction);
        streamWriter.flush();

        Assert.assertEquals(this.interaction_intra, output.toString());
    }
}
/*

   Derby - Class org.apache.derbyTesting.functionTests.tests.tools.SysinfoLocaleTest

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derbyTesting.functionTests.tests.tools;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Locale;
import java.util.Properties;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derbyTesting.junit.BaseTestCase;
import org.apache.derbyTesting.junit.Derby;
import org.apache.derbyTesting.junit.SecurityManagerSetup;
import org.apache.derbyTesting.junit.SystemPropertyTestSetup;

/**
 * This test verifies that <code>sysinfo</code> correctly localizes its
 * messages according to the default locale and <code>derby.ui.locale</code>.
 */
public class SysinfoLocaleTest extends BaseTestCase {

    /** The encoding sysinfo should use for its output. */
    private static final String ENCODING = "UTF-8";

    /** Default locale to run this test under. */
    private final Locale defaultLocale;

    /**
     * Tells whether or not this test expects sysinfo's output to be localized
     * to German.
     */
    private final boolean localizedToGerman;

    /** Name of the test. */
    private final String name;

    /** The default locale before this test started (restored in tearDown). */
    private Locale savedLocale;

    /**
     * Create a test.
     *
     * @param defaultLocale the default locale for this test
     * @param german true if output is expected to be localized to German
     * @param info extra information to append to the test name (for debugging)
     */
    private SysinfoLocaleTest(Locale defaultLocale, boolean german,
                              String info) {
        super("testSysinfoLocale");
        this.defaultLocale = defaultLocale;
        this.localizedToGerman = german;
        this.name = super.getName() + ":" + info;
    }

    /**
     * Returns the name of the test, which includes the default locale and
     * derby.ui.locale to aid debugging.
     *
     * @return name of the test
     */
    public String getName() {
        return name;
    }

    /**
     * Set up the test environment: remember the JVM default locale and
     * install this test's locale as the default.
     */
    protected void setUp() {
        savedLocale = Locale.getDefault();
        Locale.setDefault(defaultLocale);
    }

    /**
     * Tear down the test environment: restore the JVM default locale saved
     * in setUp().
     */
    protected void tearDown() throws Exception {
        Locale.setDefault(savedLocale);
        savedLocale = null;
        super.tearDown();
    }

    /**
     * Create a suite of tests.
     *
     * @return a test suite with different combinations of
     * <code>derby.ui.locale</code> and default locale
     */
    public static Test suite() {
        if (!Derby.hasTools()) {
            return new TestSuite("empty: no tools support");
        }

        TestSuite suite = new TestSuite("SysinfoLocaleTest");

        // Create test combinations. Messages should be localized to the
        // locale specified by derby.ui.locale, if it's set. Otherwise, the
        // JVM's default locale should be used.
        suite.addTest(createTest(Locale.ITALY, null, false));
        suite.addTest(createTest(Locale.ITALY, "it_IT", false));
        suite.addTest(createTest(Locale.ITALY, "de_DE", true));
        suite.addTest(createTest(Locale.GERMANY, null, true));
        suite.addTest(createTest(Locale.GERMANY, "it_IT", false));
        suite.addTest(createTest(Locale.GERMANY, "de_DE", true));

        // This test creates a class loader. We don't want to grant that
        // permission to derbyTesting.jar since that means none of the tests
        // will notice if one of the product jars misses a privileged block
        // around the creation of a class loader.
        return SecurityManagerSetup.noSecurityManager(suite);
    }

    /**
     * Create a single test case.
     *
     * @param loc default locale for the test case
     * @param ui <code>derby.ui.locale</code> for the test case
     * @param german whether output is expected to be German
     */
    private static Test createTest(Locale loc, String ui, boolean german) {
        Properties prop = new Properties();
        if (ui != null) {
            prop.setProperty("derby.ui.locale", ui);
        }
        // always set the encoding so that we can reliably read the output
        prop.setProperty("derby.ui.codeset", ENCODING);

        String info = "defaultLocale=" + loc + ",uiLocale=" + ui;
        Test test = new SysinfoLocaleTest(loc, german, info);
        return new SystemPropertyTestSetup(test, prop);
    }

    /**
     * Run a sysinfo class that is loaded in a separate class loader. A
     * separate class loader is required in order to force sysinfo to re-read
     * <code>derby.ui.locale</code> (happens when the class is loaded).
     */
    private static void runSysinfo() throws Exception {
        final String className = "org.apache.derby.tools.sysinfo";
        URL sysinfoURL = SecurityManagerSetup.getURL(className);
        URL emmaURL = getEmmaJar();
        URL[] urls = null;
        if(emmaURL != null) {
            urls = new URL[] { sysinfoURL, emmaURL };
        } else {
            urls = new URL[] { sysinfoURL };
        }
        // parent loader is null so the class is really reloaded from scratch
        URLClassLoader loader = new URLClassLoader(urls, null);

        Class copy = Class.forName(className, true, loader);
        Method main = copy.getMethod("main", new Class[] { String[].class });
        main.invoke(null, new Object[] { new String[0] });
    }

    /**
     * Run sysinfo and return its output as a string.
     *
     * @return output from sysinfo
     */
    private static String getSysinfoOutput() throws Exception {
        final PrintStream savedSystemOut = System.out;
        final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try {
            // capture everything sysinfo prints; restored in finally
            System.setOut(new PrintStream(bytes, true, ENCODING));
            runSysinfo();
        } finally {
            System.setOut(savedSystemOut);
        }

        return bytes.toString(ENCODING);
    }

    /**
     * Some German strings that are expected to be in sysinfo's output when
     * localized to German.
     */
    private static final String[] GERMAN_STRINGS = {
        "BS-Name",
        "Java-Benutzername",
        "Derby-Informationen",
        "Informationen zum Gebietsschema",
    };

    /**
     * Some Italian strings that are expected to be in sysinfo's output when
     * localized to Italian.
     */
    private static final String[] ITALIAN_STRINGS = {
        "Nome SO",
        "Home utente Java",
        "Informazioni su Derby",
        "Informazioni sulle impostazioni nazionali",
    };

    /**
     * Checks that all the expected substrings are part of the output from
     * sysinfo. Fails if one or more of the substrings are not found.
     *
     * @param expectedSubstrings substrings in the expected locale
     * @param output the output from sysinfo
     */
    private void assertContains(String[] expectedSubstrings, String output) {
        for (int i = 0; i < expectedSubstrings.length; i++) {
            String s = expectedSubstrings[i];
            if (output.indexOf(s) == -1) {
                fail("Substring '" + s + "' not found in output: " + output);
            }
        }
    }

    /**
     * Test method which checks that the output from sysinfo is correctly
     * localized.
     */
    public void testSysinfoLocale() throws Exception {
        String output = getSysinfoOutput();
        String[] expectedSubstrings =
            localizedToGerman ? GERMAN_STRINGS : ITALIAN_STRINGS;
        assertContains(expectedSubstrings, output);
    }
}
/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.camera.core.impl.utils;

import static androidx.camera.core.impl.utils.ExifAttribute.ASCII;
import static androidx.camera.core.impl.utils.ExifData.Builder.sExifTagMapsForWriting;
import static androidx.camera.core.impl.utils.ExifData.EXIF_POINTER_TAGS;
import static androidx.camera.core.impl.utils.ExifData.EXIF_TAGS;
import static androidx.camera.core.impl.utils.ExifData.IFD_TYPE_EXIF;
import static androidx.camera.core.impl.utils.ExifData.IFD_TYPE_GPS;
import static androidx.camera.core.impl.utils.ExifData.IFD_TYPE_INTEROPERABILITY;
import static androidx.camera.core.impl.utils.ExifData.IFD_TYPE_PRIMARY;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.camera.core.Logger;
import androidx.core.util.Preconditions;

import java.io.BufferedOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Locale;
import java.util.Map;

/**
 * This class provides a way to replace the Exif header of a JPEG image.
 * <p>
 * Below is an example of writing EXIF data into a file
 *
 * <pre>
 * public static void writeExif(byte[] jpeg, ExifData exif, String path) {
 *     OutputStream os = null;
 *     try {
 *         os = new FileOutputStream(path);
 *         // Set the exif header on the output stream
 *         ExifOutputStream eos = new ExifOutputStream(os, exif);
 *         // Write the original jpeg out, the header will be added into the file.
 *         eos.write(jpeg);
 *     } catch (FileNotFoundException e) {
 *         e.printStackTrace();
 *     } catch (IOException e) {
 *         e.printStackTrace();
 *     } finally {
 *         if (os != null) {
 *             try {
 *                 os.close();
 *             } catch (IOException e) {
 *                 e.printStackTrace();
 *             }
 *         }
 *     }
 * }
 * </pre>
 */
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
public final class ExifOutputStream extends FilterOutputStream {
    private static final String TAG = "ExifOutputStream";
    private static final boolean DEBUG = false;
    private static final int STREAMBUFFER_SIZE = 0x00010000; // 64Kb

    // States of the JPEG-parsing state machine driven by write(byte[], int, int).
    private static final int STATE_SOI = 0;
    private static final int STATE_FRAME_HEADER = 1;
    private static final int STATE_JPEG_DATA = 2;

    // Identifier for EXIF APP1 segment in JPEG
    private static final byte[] IDENTIFIER_EXIF_APP1 = "Exif\0\0".getBytes(ASCII);

    // Types of Exif byte alignments (see JEITA CP-3451C Section 4.5.2)
    private static final short BYTE_ALIGN_II = 0x4949;  // II: Intel order
    private static final short BYTE_ALIGN_MM = 0x4d4d;  // MM: Motorola order

    // TIFF Header Fixed Constant (see JEITA CP-3451C Section 4.5.2)
    private static final byte START_CODE = 0x2a; // 42
    private static final int IFD_OFFSET = 8;

    // Exif attributes that will replace the header of the incoming JPEG.
    private final ExifData mExifData;
    // Scratch array so that write(int) can delegate to write(byte[]).
    private final byte[] mSingleByteArray = new byte[1];
    // Holds partially received marker/length bytes across write() calls.
    private final ByteBuffer mBuffer = ByteBuffer.allocate(4);
    private int mState = STATE_SOI;
    // Remaining payload bytes of the original APP1 segment to drop.
    private int mByteToSkip;
    // Remaining payload bytes of the current non-APP1 segment to pass through.
    private int mByteToCopy;

    /**
     * Creates an ExifOutputStream that wraps the given {@link OutputStream} and overwrites exif
     * with the provided {@link ExifData}.
     *
     * @param ou OutputStream which will be sent the final output.
     * @param exifData Exif data which will overwrite any exif data sent to this stream.
     */
    public ExifOutputStream(@NonNull OutputStream ou, @NonNull ExifData exifData) {
        super(new BufferedOutputStream(ou, STREAMBUFFER_SIZE));
        mExifData = exifData;
    }

    // Accumulates up to requestByteCount bytes into mBuffer; returns how many
    // bytes were actually consumed from the caller's buffer.
    private int requestByteToBuffer(int requestByteCount, byte[] buffer,
            int offset, int length) {
        int byteNeeded = requestByteCount - mBuffer.position();
        int byteToRead = Math.min(length, byteNeeded);
        mBuffer.put(buffer, offset, byteToRead);
        return byteToRead;
    }

    /**
     * Writes the image out. The input data should be a valid JPEG format. After
     * writing, its Exif header will be replaced by the given header.
     */
    @Override
    public void write(@NonNull byte[] buffer, int offset, int length) throws IOException {
        // Keep feeding the state machine until we reach the plain JPEG data,
        // at which point the remainder is copied straight through.
        while ((mByteToSkip > 0 || mByteToCopy > 0 || mState != STATE_JPEG_DATA)
                && length > 0) {
            if (mByteToSkip > 0) {
                // Drop bytes belonging to the original APP1 (Exif) segment.
                int byteToProcess = Math.min(length, mByteToSkip);
                length -= byteToProcess;
                mByteToSkip -= byteToProcess;
                offset += byteToProcess;
            }
            if (mByteToCopy > 0) {
                // Pass through bytes of a non-Exif segment unchanged.
                int byteToProcess = Math.min(length, mByteToCopy);
                out.write(buffer, offset, byteToProcess);
                length -= byteToProcess;
                mByteToCopy -= byteToProcess;
                offset += byteToProcess;
            }
            if (length == 0) {
                return;
            }
            switch (mState) {
                case STATE_SOI:
                    // Need the 2-byte SOI marker before anything else.
                    int byteRead = requestByteToBuffer(2, buffer, offset, length);
                    offset += byteRead;
                    length -= byteRead;
                    if (mBuffer.position() < 2) {
                        return;
                    }
                    mBuffer.rewind();
                    if (mBuffer.getShort() != JpegHeader.SOI) {
                        throw new IOException("Not a valid jpeg image, cannot write exif");
                    }
                    out.write(mBuffer.array(), 0, 2);
                    mState = STATE_FRAME_HEADER;
                    mBuffer.rewind();
                    // Immediately emit our own APP1 segment right after SOI.
                    ByteOrderedDataOutputStream dataOutputStream =
                            new ByteOrderedDataOutputStream(out, ByteOrder.BIG_ENDIAN);
                    dataOutputStream.writeShort(JpegHeader.APP1);
                    writeExifSegment(dataOutputStream);
                    break;
                case STATE_FRAME_HEADER:
                    // We ignore the APP1 segment and copy all other segments
                    // until SOF tag.
                    byteRead = requestByteToBuffer(4, buffer, offset, length);
                    offset += byteRead;
                    length -= byteRead;
                    // Check if this image data doesn't contain SOF.
                    if (mBuffer.position() == 2) {
                        short tag = mBuffer.getShort();
                        if (tag == JpegHeader.EOI) {
                            out.write(mBuffer.array(), 0, 2);
                            mBuffer.rewind();
                        }
                    }
                    if (mBuffer.position() < 4) {
                        return;
                    }
                    mBuffer.rewind();
                    short marker = mBuffer.getShort();
                    if (marker == JpegHeader.APP1) {
                        // Skip the original Exif payload (length includes the
                        // 2 length bytes themselves, hence the -2).
                        mByteToSkip = (mBuffer.getShort() & 0x0000ffff) - 2;
                        mState = STATE_JPEG_DATA;
                    } else if (!JpegHeader.isSofMarker(marker)) {
                        out.write(mBuffer.array(), 0, 4);
                        mByteToCopy = (mBuffer.getShort() & 0x0000ffff) - 2;
                    } else {
                        out.write(mBuffer.array(), 0, 4);
                        mState = STATE_JPEG_DATA;
                    }
                    mBuffer.rewind();
            }
        }
        if (length > 0) {
            // Plain JPEG data after the headers: copy through unchanged.
            out.write(buffer, offset, length);
        }
    }

    /**
     * Writes the one bytes out. The input data should be a valid JPEG format.
     * After writing, its Exif header will be replaced by the given header.
     */
    @Override
    public void write(int oneByte) throws IOException {
        mSingleByteArray[0] = (byte) (0xff & oneByte);
        write(mSingleByteArray);
    }

    /**
     * Equivalent to calling write(buffer, 0, buffer.length).
     */
    @Override
    public void write(@NonNull byte[] buffer) throws IOException {
        write(buffer, 0, buffer.length);
    }

    // Writes an Exif segment into the given output stream.
    private void writeExifSegment(@NonNull ByteOrderedDataOutputStream dataOutputStream)
            throws IOException {
        // The following variables are for calculating each IFD tag group size in bytes.
        int[] ifdOffsets = new int[EXIF_TAGS.length];
        int[] ifdDataSizes = new int[EXIF_TAGS.length];

        // Remove IFD pointer tags (we'll re-add it later.)
        for (ExifTag tag : EXIF_POINTER_TAGS) {
            for (int ifdIndex = 0; ifdIndex < EXIF_TAGS.length; ++ifdIndex) {
                mExifData.getAttributes(ifdIndex).remove(tag.name);
            }
        }

        // Add IFD pointer tags. The next offset of primary image TIFF IFD will have thumbnail IFD
        // offset when there is one or more tags in the thumbnail IFD.
if (!mExifData.getAttributes(IFD_TYPE_EXIF).isEmpty()) { mExifData.getAttributes(IFD_TYPE_PRIMARY).put(EXIF_POINTER_TAGS[1].name, ExifAttribute.createULong(0, mExifData.getByteOrder())); } if (!mExifData.getAttributes(IFD_TYPE_GPS).isEmpty()) { mExifData.getAttributes(IFD_TYPE_PRIMARY).put(EXIF_POINTER_TAGS[2].name, ExifAttribute.createULong(0, mExifData.getByteOrder())); } if (!mExifData.getAttributes(IFD_TYPE_INTEROPERABILITY).isEmpty()) { mExifData.getAttributes(IFD_TYPE_EXIF).put(EXIF_POINTER_TAGS[3].name, ExifAttribute.createULong(0, mExifData.getByteOrder())); } // Calculate IFD group data area sizes. IFD group data area is assigned to save the entry // value which has a bigger size than 4 bytes. for (int i = 0; i < EXIF_TAGS.length; ++i) { int sum = 0; for (Map.Entry<String, ExifAttribute> entry : mExifData.getAttributes(i).entrySet()) { final ExifAttribute exifAttribute = entry.getValue(); final int size = exifAttribute.size(); if (size > 4) { sum += size; } } ifdDataSizes[i] += sum; } // Calculate IFD offsets. // 8 bytes are for TIFF headers: 2 bytes (byte order) + 2 bytes (identifier) + 4 bytes // (offset of IFDs) int position = 8; for (int ifdType = 0; ifdType < EXIF_TAGS.length; ++ifdType) { if (!mExifData.getAttributes(ifdType).isEmpty()) { ifdOffsets[ifdType] = position; position += 2 + mExifData.getAttributes(ifdType).size() * 12 + 4 + ifdDataSizes[ifdType]; } } int totalSize = position; // Add 8 bytes for APP1 size and identifier data totalSize += 8; if (DEBUG) { for (int i = 0; i < EXIF_TAGS.length; ++i) { Logger.d(TAG, String.format(Locale.US, "index: %d, offsets: %d, tag count: %d, " + "data sizes: %d, total size: %d", i, ifdOffsets[i], mExifData.getAttributes(i).size(), ifdDataSizes[i], totalSize)); } } // Update IFD pointer tags with the calculated offsets. 
if (!mExifData.getAttributes(IFD_TYPE_EXIF).isEmpty()) { mExifData.getAttributes(IFD_TYPE_PRIMARY).put(EXIF_POINTER_TAGS[1].name, ExifAttribute.createULong(ifdOffsets[IFD_TYPE_EXIF], mExifData.getByteOrder())); } if (!mExifData.getAttributes(IFD_TYPE_GPS).isEmpty()) { mExifData.getAttributes(IFD_TYPE_PRIMARY).put(EXIF_POINTER_TAGS[2].name, ExifAttribute.createULong(ifdOffsets[IFD_TYPE_GPS], mExifData.getByteOrder())); } if (!mExifData.getAttributes(IFD_TYPE_INTEROPERABILITY).isEmpty()) { mExifData.getAttributes(IFD_TYPE_EXIF).put(EXIF_POINTER_TAGS[3].name, ExifAttribute.createULong( ifdOffsets[IFD_TYPE_INTEROPERABILITY], mExifData.getByteOrder())); } // Write JPEG specific data (APP1 size, APP1 identifier) dataOutputStream.writeUnsignedShort(totalSize); dataOutputStream.write(IDENTIFIER_EXIF_APP1); // Write TIFF Headers. See JEITA CP-3451C Section 4.5.2. Table 1. dataOutputStream.writeShort(mExifData.getByteOrder() == ByteOrder.BIG_ENDIAN ? BYTE_ALIGN_MM : BYTE_ALIGN_II); dataOutputStream.setByteOrder(mExifData.getByteOrder()); dataOutputStream.writeUnsignedShort(START_CODE); dataOutputStream.writeUnsignedInt(IFD_OFFSET); // Write IFD groups. See JEITA CP-3451C Section 4.5.8. Figure 9. for (int ifdType = 0; ifdType < EXIF_TAGS.length; ++ifdType) { if (!mExifData.getAttributes(ifdType).isEmpty()) { // See JEITA CP-3451C Section 4.6.2: IFD structure. // Write entry count dataOutputStream.writeUnsignedShort(mExifData.getAttributes(ifdType).size()); // Write entry info int dataOffset = ifdOffsets[ifdType] + 2 + mExifData.getAttributes(ifdType).size() * 12 + 4; for (Map.Entry<String, ExifAttribute> entry : mExifData.getAttributes( ifdType).entrySet()) { // Convert tag name to tag number. final ExifTag tag = sExifTagMapsForWriting.get(ifdType).get(entry.getKey()); final int tagNumber = Preconditions.checkNotNull(tag, "Tag not supported: " + entry.getKey() + ". 
Tag needs to be " + "ported from ExifInterface to ExifData.").number; final ExifAttribute attribute = entry.getValue(); final int size = attribute.size(); dataOutputStream.writeUnsignedShort(tagNumber); dataOutputStream.writeUnsignedShort(attribute.format); dataOutputStream.writeInt(attribute.numberOfComponents); if (size > 4) { dataOutputStream.writeUnsignedInt(dataOffset); dataOffset += size; } else { dataOutputStream.write(attribute.bytes); // Fill zero up to 4 bytes if (size < 4) { for (int i = size; i < 4; ++i) { dataOutputStream.writeByte(0); } } } } // Write the next offset. Since we aren't handling thumbnails, this is just 0. dataOutputStream.writeUnsignedInt(0); // Write values of data field exceeding 4 bytes after the next offset. for (Map.Entry<String, ExifAttribute> entry : mExifData.getAttributes( ifdType).entrySet()) { ExifAttribute attribute = entry.getValue(); if (attribute.bytes.length > 4) { dataOutputStream.write(attribute.bytes, 0, attribute.bytes.length); } } } } // Reset the byte order to big endian in order to write remaining parts of the JPEG file. dataOutputStream.setByteOrder(ByteOrder.BIG_ENDIAN); } static final class JpegHeader { public static final short SOI = (short) 0xFFD8; public static final short APP1 = (short) 0xFFE1; public static final short EOI = (short) 0xFFD9; /** * SOF (start of frame). All value between SOF0 and SOF15 is SOF marker except for DHT, * JPG, and DAC marker. */ public static final short SOF0 = (short) 0xFFC0; public static final short SOF15 = (short) 0xFFCF; public static final short DHT = (short) 0xFFC4; public static final short JPG = (short) 0xFFC8; public static final short DAC = (short) 0xFFCC; public static boolean isSofMarker(short marker) { return marker >= SOF0 && marker <= SOF15 && marker != DHT && marker != JPG && marker != DAC; } private JpegHeader() {} } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.todo;

import com.intellij.find.FindModel;
import com.intellij.find.impl.FindInProjectUtil;
import com.intellij.icons.AllIcons;
import com.intellij.ide.*;
import com.intellij.ide.actions.ContextHelpAction;
import com.intellij.ide.actions.NextOccurenceToolbarAction;
import com.intellij.ide.actions.PreviousOccurenceToolbarAction;
import com.intellij.ide.todo.nodes.TodoFileNode;
import com.intellij.ide.todo.nodes.TodoItemNode;
import com.intellij.ide.todo.nodes.TodoTreeHelper;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.impl.VisibilityWatcher;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.ui.*;
import com.intellij.ui.content.Content;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.impl.UsagePreviewPanel;
import com.intellij.util.*;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.InputEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Base panel of the TODO tool window: shows TODO items in a {@link Tree} (built by a
 * subclass-provided {@link TodoTreeBuilder}) next to an optional {@link UsagePreviewPanel}
 * displaying the source of the selected item. Provides toolbars, occurrence navigation
 * (previous/next TODO), grouping/flattening toggles, and data-context support for
 * navigation actions (F4, etc.).
 */
abstract class TodoPanel extends SimpleToolWindowPanel implements OccurenceNavigator, DataProvider, Disposable {
  protected static final Logger LOG = Logger.getInstance(TodoPanel.class);

  // Nulled out in dispose(); do not cache outside this class.
  protected Project myProject;
  private final TodoPanelSettings mySettings;
  private final boolean myCurrentFileMode;
  private final Content myContent;

  private final Tree myTree;
  private final MyTreeExpander myTreeExpander;
  private final MyOccurenceNavigator myOccurenceNavigator;
  protected final TodoTreeBuilder myTodoTreeBuilder;
  private MyVisibilityWatcher myVisibilityWatcher;
  private UsagePreviewPanel myUsagePreviewPanel;

  /**
   * @param currentFileMode if <code>true</code> then view doesn't have "Group By Packages" and "Flatten Packages"
   *                        actions.
   */
  TodoPanel(Project project,
            TodoPanelSettings settings,
            boolean currentFileMode,
            Content content) {
    super(false, true);
    myProject = project;
    mySettings = settings;
    myCurrentFileMode = currentFileMode;
    myContent = content;

    DefaultTreeModel model = new DefaultTreeModel(new DefaultMutableTreeNode());
    myTree = new Tree(model);
    myTreeExpander = new MyTreeExpander();
    myOccurenceNavigator = new MyOccurenceNavigator();
    // UI must be built before the tree builder is created: createTreeBuilder is handed myTree.
    initUI();
    myTodoTreeBuilder = createTreeBuilder(myTree, model, myProject);
    Disposer.register(myProject, myTodoTreeBuilder);
    updateTodoFilter();
    // Push persisted view settings into the freshly created builder.
    myTodoTreeBuilder.setShowPackages(mySettings.arePackagesShown);
    myTodoTreeBuilder.setShowModules(mySettings.areModulesShown);
    myTodoTreeBuilder.setFlattenPackages(mySettings.areFlattenPackages);

    myVisibilityWatcher = new MyVisibilityWatcher();
    myVisibilityWatcher.install(this);
  }

  /** Subclasses supply the concrete tree builder (all-files, current-file, etc.). */
  protected abstract TodoTreeBuilder createTreeBuilder(JTree tree, DefaultTreeModel treeModel, Project project);

  // Builds the tree, preview panel, popup menu, key bindings, and both toolbars.
  private void initUI() {
    UIUtil.setLineStyleAngled(myTree);
    myTree.setShowsRootHandles(true);
    myTree.setRootVisible(false);
    myTree.setCellRenderer(new TodoCompositeRenderer());
    EditSourceOnDoubleClickHandler.install(myTree);
    new TreeSpeedSearch(myTree);

    DefaultActionGroup group = new DefaultActionGroup();
    group.add(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE));
    group.addSeparator();
    group.add(ActionManager.getInstance().getAction(IdeActions.GROUP_VERSION_CONTROLS));
    PopupHandler.installPopupHandler(myTree, group, ActionPlaces.TODO_VIEW_POPUP, ActionManager.getInstance());

    // Enter on a TodoItemNode opens the corresponding source location.
    myTree.addKeyListener(
      new KeyAdapter() {
        @Override
        public void keyPressed(KeyEvent e) {
          if (!e.isConsumed() && KeyEvent.VK_ENTER == e.getKeyCode()) {
            TreePath path = myTree.getSelectionPath();
            if (path == null) {
              return;
            }
            final Object userObject = ((DefaultMutableTreeNode)path.getLastPathComponent()).getUserObject();
            if (!((userObject instanceof NodeDescriptor ? (NodeDescriptor)userObject : null) instanceof TodoItemNode)) {
              return;
            }
            OpenSourceUtil.openSourcesFrom(DataManager.getInstance().getDataContext(TodoPanel.this), false);
          }
        }
      }
    );

    myUsagePreviewPanel = new UsagePreviewPanel(myProject, FindInProjectUtil.setupViewPresentation(false, new FindModel()));
    Disposer.register(this, myUsagePreviewPanel);
    myUsagePreviewPanel.setVisible(mySettings.showPreview);

    setContent(createCenterComponent());

    myTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
      @Override
      public void valueChanged(final TreeSelectionEvent e) {
        // Defer so the selection change is fully applied before the preview reads it.
        SwingUtilities.invokeLater(() -> {
          if (myUsagePreviewPanel.isVisible()) {
            updatePreviewPanel();
          }
        });
      }
    });

    // Create tool bars and register custom shortcuts
    JPanel toolBarPanel = new JPanel(new GridLayout());

    DefaultActionGroup leftGroup = new DefaultActionGroup();
    leftGroup.add(new PreviousOccurenceToolbarAction(myOccurenceNavigator));
    leftGroup.add(new NextOccurenceToolbarAction(myOccurenceNavigator));
    leftGroup.add(new ContextHelpAction("find.todoList"));
    toolBarPanel.add(
      ActionManager.getInstance().createActionToolbar(ActionPlaces.TODO_VIEW_TOOLBAR, leftGroup, false).getComponent());

    DefaultActionGroup rightGroup = new DefaultActionGroup();
    AnAction expandAllAction = CommonActionsManager.getInstance().createExpandAllAction(myTreeExpander, this);
    rightGroup.add(expandAllAction);

    AnAction collapseAllAction = CommonActionsManager.getInstance().createCollapseAllAction(myTreeExpander, this);
    rightGroup.add(collapseAllAction);

    if (!myCurrentFileMode) {
      // Grouping toggles only make sense across multiple files; Cmd (macOS) / Ctrl otherwise.
      MyShowModulesAction showModulesAction = new MyShowModulesAction();
      showModulesAction.registerCustomShortcutSet(
        new CustomShortcutSet(
          KeyStroke.getKeyStroke(KeyEvent.VK_M, SystemInfo.isMac ? InputEvent.META_MASK : InputEvent.CTRL_MASK)),
        myTree);
      rightGroup.add(showModulesAction);

      MyShowPackagesAction showPackagesAction = new MyShowPackagesAction();
      showPackagesAction.registerCustomShortcutSet(
        new CustomShortcutSet(
          KeyStroke.getKeyStroke(KeyEvent.VK_P, SystemInfo.isMac ? InputEvent.META_MASK : InputEvent.CTRL_MASK)),
        myTree);
      rightGroup.add(showPackagesAction);

      MyFlattenPackagesAction flattenPackagesAction = new MyFlattenPackagesAction();
      flattenPackagesAction.registerCustomShortcutSet(
        new CustomShortcutSet(
          KeyStroke.getKeyStroke(KeyEvent.VK_F, SystemInfo.isMac ? InputEvent.META_MASK : InputEvent.CTRL_MASK)),
        myTree);
      rightGroup.add(flattenPackagesAction);
    }

    MyAutoScrollToSourceHandler autoScrollToSourceHandler = new MyAutoScrollToSourceHandler();
    autoScrollToSourceHandler.install(myTree);
    rightGroup.add(autoScrollToSourceHandler.createToggleAction());

    SetTodoFilterAction setTodoFilterAction = new SetTodoFilterAction(myProject, mySettings, todoFilter -> setTodoFilter(todoFilter));
    rightGroup.add(setTodoFilterAction);
    rightGroup.add(new MyPreviewAction());
    toolBarPanel.add(
      ActionManager.getInstance().createActionToolbar(ActionPlaces.TODO_VIEW_TOOLBAR, rightGroup, false).getComponent());

    setToolbar(toolBarPanel);
  }

  // Tree on the left, usage preview on the right, separated by a one-pixel splitter.
  protected JComponent createCenterComponent() {
    Splitter splitter = new OnePixelSplitter(false);
    splitter.setSecondComponent(myUsagePreviewPanel);
    splitter.setFirstComponent(ScrollPaneFactory.createScrollPane(myTree));
    return splitter;
  }

  // Re-reads the current tree selection and shows the first matching TODO item in the preview.
  private void updatePreviewPanel() {
    if (myProject.isDisposed()) return;
    List<UsageInfo> infos = new ArrayList<>();
    final TreePath path = myTree.getSelectionPath();
    if (path != null) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (userObject instanceof NodeDescriptor) {
        Object element = ((NodeDescriptor)userObject).getElement();
        TodoItemNode pointer = myTodoTreeBuilder.getFirstPointerForElement(element);
        if (pointer != null) {
          final SmartTodoItemPointer value = pointer.getValue();
          final Document document = value.getDocument();
          final PsiFile psiFile = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
          final RangeMarker rangeMarker = value.getRangeMarker();
          if (psiFile != null) {
            infos.add(new UsageInfo(psiFile, rangeMarker.getStartOffset(), rangeMarker.getEndOffset()));
          }
        }
      }
    }
    myUsagePreviewPanel.updateLayout(infos.isEmpty() ? null : infos);
  }

  @Override
  public void dispose() {
    if (myVisibilityWatcher != null) {
      myVisibilityWatcher.deinstall(this);
      myVisibilityWatcher = null;
    }
    // Drop the project reference to avoid leaking it past the panel's lifetime.
    myProject = null;
  }

  void rebuildCache() {
    myTodoTreeBuilder.rebuildCache();
  }

  /**
   * Immediately updates tree.
   */
  void updateTree() {
    myTodoTreeBuilder.updateTree(false);
  }

  /**
   * Updates current filter. If previously set filter was removed then empty filter is set.
   *
   * @see TodoTreeBuilder#setTodoFilter
   */
  void updateTodoFilter() {
    TodoFilter filter = TodoConfiguration.getInstance().getTodoFilter(mySettings.todoFilterName);
    setTodoFilter(filter);
  }

  /**
   * Sets specified <code>TodoFilter</code>. The method also updates window's title.
   *
   * @see TodoTreeBuilder#setTodoFilter
   */
  private void setTodoFilter(TodoFilter filter) {
    // Clear name of current filter if it was removed from configuration.
    String filterName = filter != null ? filter.getName() : null;
    mySettings.todoFilterName = filterName;
    // Update filter
    myTodoTreeBuilder.setTodoFilter(filter);
    // Update content's title
    myContent.setDescription(filterName);
  }

  /**
   * @return list of all selected virtual files.
   */
  @Nullable
  protected PsiFile getSelectedFile() {
    TreePath path = myTree.getSelectionPath();
    if (path == null) {
      return null;
    }
    DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
    LOG.assertTrue(node != null);
    if(node.getUserObject() == null){
      return null;
    }
    return TodoTreeBuilder.getFileForNode(node);
  }

  protected void setDisplayName(String tabName) {
    myContent.setDisplayName(tabName);
  }

  // Resolves the selected PSI element, falling back to the selected file.
  @Nullable
  private PsiElement getSelectedElement() {
    if (myTree == null) return null;
    TreePath path = myTree.getSelectionPath();
    if (path == null) {
      return null;
    }
    DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
    Object userObject = node.getUserObject();
    final PsiElement selectedElement = TodoTreeHelper.getInstance(myProject).getSelectedElement(userObject);
    if (selectedElement != null) return selectedElement;
    return getSelectedFile();
  }

  // DataProvider: supplies navigation/PSI/file context for platform actions (F4, help, ...).
  @Override
  public Object getData(String dataId) {
    if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return null;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof NodeDescriptor)) {
        return null;
      }
      Object element = ((NodeDescriptor)userObject).getElement();
      if (!(element instanceof TodoFileNode || element instanceof TodoItemNode)) { // allow user to use F4 only on files an TODOs
        return null;
      }
      TodoItemNode pointer = myTodoTreeBuilder.getFirstPointerForElement(element);
      if (pointer != null) {
        return new OpenFileDescriptor(myProject, pointer.getValue().getTodoItem().getFile().getVirtualFile(),
                                      pointer.getValue().getRangeMarker().getStartOffset()
        );
      }
      else {
        return null;
      }
    }
    else if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) {
      final PsiFile file = getSelectedFile();
      return file != null ? file.getVirtualFile() : null;
    }
    else if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
      return getSelectedElement();
    }
    else if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) {
      PsiFile file = getSelectedFile();
      if (file != null) {
        return new VirtualFile[]{file.getVirtualFile()};
      }
      else {
        return VirtualFile.EMPTY_ARRAY;
      }
    }
    else if (PlatformDataKeys.HELP_ID.is(dataId)) {
      //noinspection HardCodedStringLiteral
      return "find.todoList";
    }
    return super.getData(dataId);
  }

  @Override
  @Nullable
  public OccurenceInfo goPreviousOccurence() {
    return myOccurenceNavigator.goPreviousOccurence();
  }

  @Override
  public String getNextOccurenceActionName() {
    return myOccurenceNavigator.getNextOccurenceActionName();
  }

  @Override
  @Nullable
  public OccurenceInfo goNextOccurence() {
    return myOccurenceNavigator.goNextOccurence();
  }

  @Override
  public boolean hasNextOccurence() {
    return myOccurenceNavigator.hasNextOccurence();
  }

  @Override
  public String getPreviousOccurenceActionName() {
    return myOccurenceNavigator.getPreviousOccurenceActionName();
  }

  @Override
  public boolean hasPreviousOccurence() {
    return myOccurenceNavigator.hasPreviousOccurence();
  }

  // Debounced full rebuild: collects files under a read action, then rebuilds on the EDT.
  protected void rebuildWithAlarm(final Alarm alarm) {
    alarm.cancelAllRequests();
    alarm.addRequest(() -> {
      final Set<VirtualFile> files = new HashSet<>();
      ApplicationManager.getApplication().runReadAction(() -> {
        try {
          // Best-effort during indexing: an IndexNotReadyException just skips this pass.
          myTodoTreeBuilder.collectFiles(virtualFile -> {
            files.add(virtualFile);
            return true;
          });
        }
        catch (IndexNotReadyException ignore) {}
      });
      final Runnable runnable = () -> {
        myTodoTreeBuilder.rebuildCache(files);
        updateTree();
      };
      ApplicationManager.getApplication().invokeLater(runnable);
    }, 300);
  }

  /** Delegates expand/collapse-all toolbar actions to the tree builder. */
  private final class MyTreeExpander implements TreeExpander {
    @Override
    public boolean canCollapse() {
      return true;
    }

    @Override
    public boolean canExpand() {
      return true;
    }

    @Override
    public void collapseAll() {
      myTodoTreeBuilder.collapseAll();
    }

    @Override
    public void expandAll() {
      myTodoTreeBuilder.expandAll(null);
    }
  }

  /**
   * Provides support for "auto scroll to source" functionality
   */
  private final class MyAutoScrollToSourceHandler extends AutoScrollToSourceHandler {
    MyAutoScrollToSourceHandler() {
    }

    @Override
    protected boolean isAutoScrollMode() {
      return mySettings.isAutoScrollToSource;
    }

    @Override
    protected void setAutoScrollMode(boolean state) {
      mySettings.isAutoScrollToSource = state;
    }
  }

  /**
   * Provides support for "Ctrl+Alt+Up/Down" navigation.
   */
  private final class MyOccurenceNavigator implements OccurenceNavigator {
    @Override
    public boolean hasNextOccurence() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return false;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (userObject == null) {
        return false;
      }
      if (userObject instanceof NodeDescriptor && ((NodeDescriptor)userObject).getElement() instanceof TodoItemNode) {
        // A TODO item has a next occurrence unless it is the last visible row.
        return myTree.getRowCount() != myTree.getRowForPath(path) + 1;
      }
      else {
        return node.getChildCount() > 0;
      }
    }

    @Override
    public boolean hasPreviousOccurence() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return false;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      return userObject instanceof NodeDescriptor && !isFirst(node);
    }

    // True when the node is the first child along the entire path to the root.
    private boolean isFirst(final TreeNode node) {
      final TreeNode parent = node.getParent();
      return parent == null || parent.getIndex(node) == 0 && isFirst(parent);
    }

    @Override
    @Nullable
    public OccurenceInfo goNextOccurence() {
      return goToPointer(getNextPointer());
    }

    @Override
    @Nullable
    public OccurenceInfo goPreviousOccurence() {
      return goToPointer(getPreviousPointer());
    }

    @Override
    public String getNextOccurenceActionName() {
      return IdeBundle.message("action.next.todo");
    }

    @Override
    public String getPreviousOccurenceActionName() {
      return IdeBundle.message("action.previous.todo");
    }

    // Selects the pointer in the tree and returns a descriptor for opening its source.
    @Nullable
    private OccurenceInfo goToPointer(TodoItemNode pointer) {
      if (pointer == null) return null;
      myTodoTreeBuilder.select(pointer);
      return new OccurenceInfo(
        new OpenFileDescriptor(myProject, pointer.getValue().getTodoItem().getFile().getVirtualFile(),
                               pointer.getValue().getRangeMarker().getStartOffset()),
        -1,
        -1
      );
    }

    // Next TODO pointer after the selection; for a non-item node, its first contained item.
    @Nullable
    private TodoItemNode getNextPointer() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return null;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof NodeDescriptor)) {
        return null;
      }
      Object element = ((NodeDescriptor)userObject).getElement();
      TodoItemNode pointer;
      if (element instanceof TodoItemNode) {
        pointer = myTodoTreeBuilder.getNextPointer((TodoItemNode)element);
      }
      else {
        pointer = myTodoTreeBuilder.getFirstPointerForElement(element);
      }
      return pointer;
    }

    // Previous TODO pointer; for a non-item node, the last item of the previous sibling.
    @Nullable
    private TodoItemNode getPreviousPointer() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return null;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof NodeDescriptor)) {
        return null;
      }
      Object element = ((NodeDescriptor)userObject).getElement();
      TodoItemNode pointer;
      if (element instanceof TodoItemNode) {
        pointer = myTodoTreeBuilder.getPreviousPointer((TodoItemNode)element);
      }
      else {
        Object sibling = myTodoTreeBuilder.getPreviousSibling(element);
        if (sibling == null) {
          return null;
        }
        pointer = myTodoTreeBuilder.getLastPointerForElement(sibling);
      }
      return pointer;
    }
  }

  /** Toolbar toggle: group TODO items by package; persisted in the panel settings. */
  private final class MyShowPackagesAction extends ToggleAction {
    MyShowPackagesAction() {
      super(IdeBundle.message("action.group.by.packages"), null, PlatformIcons.GROUP_BY_PACKAGES);
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return mySettings.arePackagesShown;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean state) {
      mySettings.arePackagesShown = state;
      myTodoTreeBuilder.setShowPackages(state);
    }
  }

  /** Toolbar toggle: group TODO items by module; persisted in the panel settings. */
  private final class MyShowModulesAction extends ToggleAction {
    MyShowModulesAction() {
      super(IdeBundle.message("action.group.by.modules"), null, AllIcons.ObjectBrowser.ShowModules);
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return mySettings.areModulesShown;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean state) {
      mySettings.areModulesShown = state;
      myTodoTreeBuilder.setShowModules(state);
    }
  }

  /** Toolbar toggle: flatten the package tree; only enabled while packages are shown. */
  private final class MyFlattenPackagesAction extends ToggleAction {
    MyFlattenPackagesAction() {
      super(IdeBundle.message("action.flatten.packages"), null, PlatformIcons.FLATTEN_PACKAGES_ICON);
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      super.update(e);
      e.getPresentation().setEnabled(mySettings.arePackagesShown);
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return mySettings.areFlattenPackages;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean state) {
      mySettings.areFlattenPackages = state;
      myTodoTreeBuilder.setFlattenPackages(state);
    }
  }

  /** Pauses tree updates while the panel is hidden; resumes them when it becomes visible. */
  private final class MyVisibilityWatcher extends VisibilityWatcher {
    @Override
    public void visibilityChanged() {
      if (myProject.isOpen()) {
        PsiDocumentManager.getInstance(myProject).performWhenAllCommitted(
          () -> myTodoTreeBuilder.setUpdatable(isShowing()));
      }
    }
  }

  /** Toolbar toggle: show/hide the source preview pane for the selected TODO item. */
  private final class MyPreviewAction extends ToggleAction {
    MyPreviewAction() {
      super("Preview Source", null, AllIcons.Actions.PreviewDetails);
    }

    // NOTE(review): this override only delegates to super and could be removed.
    @Override
    public void update(AnActionEvent e) {
      super.update(e);
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return mySettings.showPreview;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean state) {
      mySettings.showPreview = state;
      myUsagePreviewPanel.setVisible(state);
      if (state) {
        updatePreviewPanel();
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.grpc;

import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.ssl.SSLContextService;
import org.apache.nifi.ssl.StandardSSLContextService;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Assert;
import org.junit.Test;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import io.grpc.stub.StreamObserver;
import io.netty.handler.ssl.ClientAuth;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Tests for the {@link InvokeGRPC} processor. Each test spins up an in-process
 * {@link TestGRPCServer} hosting a {@link DummyFlowFileService}, whose reply is dictated
 * by the id placed on the incoming flowfile (see the ERROR/SUCCESS/RETRY constants), then
 * asserts how the processor routes the request and response flowfiles.
 */
public class TestInvokeGRPC {
    // ids placed on flowfiles and used to dictate response codes in the DummyFlowFileService below
    private static final long ERROR = 500;
    private static final long SUCCESS = 501;
    private static final long RETRY = 502;

    @Test
    public void testSuccess() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            runner.enqueue(new MockFlowFile(SUCCESS));
            runner.run();

            assertTransferCounts(runner, 1, 1, 0, 0, 0);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.SUCCESS, "success", port);

            final MockFlowFile successFile = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_SUCCESS_REQ));
            assertExchangeAttributes(successFile, FlowFileReply.ResponseCode.SUCCESS, "success", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testSuccessWithFlowFileContent() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            // no id on the flowfile, so the dummy service echoes the request content back as the body
            runner.enqueue("content");
            runner.run();

            assertTransferCounts(runner, 1, 1, 0, 0, 0);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.SUCCESS, "content", port);

            final MockFlowFile successFile = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_SUCCESS_REQ));
            assertExchangeAttributes(successFile, FlowFileReply.ResponseCode.SUCCESS, "content", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testSuccessAlwaysOutputResponse() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            runner.setProperty(InvokeGRPC.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
            runner.enqueue(new MockFlowFile(SUCCESS));
            runner.run();

            assertTransferCounts(runner, 1, 1, 0, 0, 0);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.SUCCESS, "success", port);

            final MockFlowFile successFile = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_SUCCESS_REQ));
            assertExchangeAttributes(successFile, FlowFileReply.ResponseCode.SUCCESS, "success", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testExceedMaxMessageSize() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            // set max message size to 1B to force error
            runner.setProperty(InvokeGRPC.PROP_MAX_MESSAGE_SIZE, "1B");
            runner.enqueue(new MockFlowFile(SUCCESS));
            runner.run();

            assertTransferCounts(runner, 0, 0, 0, 0, 1);

            final MockFlowFile failure = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_FAILURE));
            failure.assertAttributeEquals(InvokeGRPC.SERVICE_HOST, TestGRPCServer.HOST);
            failure.assertAttributeEquals(InvokeGRPC.SERVICE_PORT, String.valueOf(port));
            // an exception should be thrown indicating that the max message size was exceeded.
            failure.assertAttributeEquals(InvokeGRPC.EXCEPTION_CLASS, "io.grpc.StatusRuntimeException");
        } finally {
            server.stop();
        }
    }

    @Test
    public void testRetry() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            runner.enqueue(new MockFlowFile(RETRY));
            runner.run();

            assertTransferCounts(runner, 0, 0, 1, 0, 0);
            // retried flowfiles are penalized before being routed back
            runner.assertPenalizeCount(1);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RETRY));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.RETRY, "retry", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testRetryAlwaysOutputResponse() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            runner.setProperty(InvokeGRPC.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
            runner.enqueue(new MockFlowFile(RETRY));
            runner.run();

            // a response flowfile is emitted even though the request is routed to retry
            assertTransferCounts(runner, 1, 0, 1, 0, 0);
            runner.assertPenalizeCount(1);

            final MockFlowFile retry = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RETRY));
            assertExchangeAttributes(retry, FlowFileReply.ResponseCode.RETRY, "retry", port);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.RETRY, "retry", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testNoRetryOnError() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            runner.enqueue(new MockFlowFile(ERROR));
            runner.run();

            assertTransferCounts(runner, 0, 0, 0, 1, 0);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_NO_RETRY));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.ERROR, "error", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testNoRetryOnErrorAlwaysOutputResponseAndPenalize() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            runner.setProperty(InvokeGRPC.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
            runner.setProperty(InvokeGRPC.PROP_PENALIZE_NO_RETRY, "true");
            runner.enqueue(new MockFlowFile(ERROR));
            runner.run();

            assertTransferCounts(runner, 1, 0, 0, 1, 0);
            // penalization of no-retry flowfiles was explicitly enabled above
            runner.assertPenalizeCount(1);

            final MockFlowFile noRetry = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_NO_RETRY));
            assertExchangeAttributes(noRetry, FlowFileReply.ResponseCode.ERROR, "error", port);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.ERROR, "error", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testNoInput() throws Exception {
        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class);
        try {
            final int port = TestGRPCServer.randomPort();
            server.start(port);

            final TestRunner runner = createRunnerFor(port);
            // nothing enqueued: the processor should do no work and route nothing
            runner.run();

            assertTransferCounts(runner, 0, 0, 0, 0, 0);
            runner.assertPenalizeCount(0);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testServerConnectionFail() throws Exception {
        final int port = TestGRPCServer.randomPort();
        // should be no gRPC server running @ that port, so processor will fail
        final TestRunner runner = createRunnerFor(port);
        runner.enqueue(new MockFlowFile(SUCCESS));
        runner.run();

        assertTransferCounts(runner, 0, 0, 0, 0, 1);

        final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_FAILURE));
        response.assertAttributeEquals(InvokeGRPC.SERVICE_HOST, TestGRPCServer.HOST);
        response.assertAttributeEquals(InvokeGRPC.SERVICE_PORT, String.valueOf(port));
        response.assertAttributeEquals(InvokeGRPC.EXCEPTION_CLASS, "io.grpc.StatusRuntimeException");
    }

    @Test
    public void testSecureTwoWaySsl() throws Exception {
        // server gets both a keystore and a truststore, so it can require client certs
        final Map<String, String> sslProperties = getKeystoreProperties();
        sslProperties.putAll(getTruststoreProperties());

        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class, sslProperties);
        try {
            final int port = TestGRPCServer.randomPort();

            final TestRunner runner = createRunnerFor(port);
            runner.setProperty(InvokeGRPC.PROP_USE_SECURE, "true");
            useSSLContextService(runner, sslProperties);
            server.start(port);

            runner.enqueue(new MockFlowFile(SUCCESS));
            runner.run();

            assertTransferCounts(runner, 1, 1, 0, 0, 0);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.SUCCESS, "success", port);

            final MockFlowFile successFile = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_SUCCESS_REQ));
            assertExchangeAttributes(successFile, FlowFileReply.ResponseCode.SUCCESS, "success", port);
        } finally {
            server.stop();
        }
    }

    @Test
    public void testSecureOneWaySsl() throws Exception {
        // server presents a cert but does not require one from the client
        final Map<String, String> sslProperties = getKeystoreProperties();
        sslProperties.put(TestGRPCServer.NEED_CLIENT_AUTH, ClientAuth.NONE.name());

        final TestGRPCServer<DummyFlowFileService> server = new TestGRPCServer<>(DummyFlowFileService.class, sslProperties);
        try {
            final int port = TestGRPCServer.randomPort();

            final TestRunner runner = createRunnerFor(port);
            runner.setProperty(InvokeGRPC.PROP_USE_SECURE, "true");
            // client only needs a truststore to validate the server cert
            useSSLContextService(runner, getTruststoreProperties());
            server.start(port);

            runner.enqueue(new MockFlowFile(SUCCESS));
            runner.run();

            assertTransferCounts(runner, 1, 1, 0, 0, 0);

            final MockFlowFile response = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_RESPONSE));
            assertExchangeAttributes(response, FlowFileReply.ResponseCode.SUCCESS, "success", port);

            final MockFlowFile successFile = onlyElement(runner.getFlowFilesForRelationship(InvokeGRPC.REL_SUCCESS_REQ));
            assertExchangeAttributes(successFile, FlowFileReply.ResponseCode.SUCCESS, "success", port);
        } finally {
            server.stop();
        }
    }

    /**
     * Builds a TestRunner for {@link InvokeGRPC} pointed at {@link TestGRPCServer#HOST}
     * and the given port.
     */
    private static TestRunner createRunnerFor(final int port) {
        final TestRunner runner = TestRunners.newTestRunner(InvokeGRPC.class);
        runner.setProperty(InvokeGRPC.PROP_SERVICE_HOST, TestGRPCServer.HOST);
        runner.setProperty(InvokeGRPC.PROP_SERVICE_PORT, String.valueOf(port));
        return runner;
    }

    /**
     * Asserts the flowfile counts transferred to each of the processor's five relationships.
     */
    private static void assertTransferCounts(final TestRunner runner, final int response, final int successReq,
                                             final int retry, final int noRetry, final int failure) {
        runner.assertTransferCount(InvokeGRPC.REL_RESPONSE, response);
        runner.assertTransferCount(InvokeGRPC.REL_SUCCESS_REQ, successReq);
        runner.assertTransferCount(InvokeGRPC.REL_RETRY, retry);
        runner.assertTransferCount(InvokeGRPC.REL_NO_RETRY, noRetry);
        runner.assertTransferCount(InvokeGRPC.REL_FAILURE, failure);
    }

    /**
     * Asserts the given list contains exactly one flowfile and returns it.
     */
    private static MockFlowFile onlyElement(final List<MockFlowFile> flowFiles) {
        assertThat(flowFiles.size(), equalTo(1));
        return flowFiles.get(0);
    }

    /**
     * Asserts the standard response-code/body/host/port attributes InvokeGRPC places
     * on flowfiles after a completed exchange.
     */
    private static void assertExchangeAttributes(final MockFlowFile flowFile, final FlowFileReply.ResponseCode expectedCode,
                                                 final String expectedBody, final int port) {
        flowFile.assertAttributeEquals(InvokeGRPC.RESPONSE_CODE, String.valueOf(expectedCode));
        flowFile.assertAttributeEquals(InvokeGRPC.RESPONSE_BODY, expectedBody);
        flowFile.assertAttributeEquals(InvokeGRPC.SERVICE_HOST, TestGRPCServer.HOST);
        flowFile.assertAttributeEquals(InvokeGRPC.SERVICE_PORT, String.valueOf(port));
    }

    /**
     * Returns SSLContextService properties pointing at the test truststore.
     */
    private static Map<String, String> getTruststoreProperties() {
        final Map<String, String> props = new HashMap<>();
        props.put(StandardSSLContextService.TRUSTSTORE.getName(), "src/test/resources/localhost-ts.jks");
        props.put(StandardSSLContextService.TRUSTSTORE_PASSWORD.getName(), "localtest");
        props.put(StandardSSLContextService.TRUSTSTORE_TYPE.getName(), "JKS");
        return props;
    }

    /**
     * Returns SSLContextService properties pointing at the test keystore.
     */
    private static Map<String, String> getKeystoreProperties() {
        final Map<String, String> properties = new HashMap<>();
        properties.put(StandardSSLContextService.KEYSTORE.getName(), "src/test/resources/localhost-ks.jks");
        properties.put(StandardSSLContextService.KEYSTORE_PASSWORD.getName(), "localtest");
        properties.put(StandardSSLContextService.KEYSTORE_TYPE.getName(), "JKS");
        return properties;
    }

    /**
     * Registers and enables a StandardSSLContextService (configured with the given
     * properties) on the runner and points InvokeGRPC at it.
     */
    private void useSSLContextService(final TestRunner controller, final Map<String, String> sslProperties) {
        final SSLContextService service = new StandardSSLContextService();
        try {
            controller.addControllerService("ssl-service", service, sslProperties);
            controller.enableControllerService(service);
        } catch (InitializationException ex) {
            ex.printStackTrace();
            Assert.fail("Could not create SSL Context Service");
        }

        controller.setProperty(InvokeGRPC.PROP_SSL_CONTEXT_SERVICE, "ssl-service");
    }

    /**
     * Dummy gRPC service whose responses are dictated by the IDs on the messages it receives
     */
    private static class DummyFlowFileService extends FlowFileServiceGrpc.FlowFileServiceImplBase {
        public DummyFlowFileService() {
        }

        @Override
        public void send(FlowFileRequest request, StreamObserver<FlowFileReply> responseObserver) {
            final FlowFileReply.Builder replyBuilder = FlowFileReply.newBuilder();
            // use the id to dictate response codes
            final long id = request.getId();
            if (id == ERROR) {
                replyBuilder.setResponseCode(FlowFileReply.ResponseCode.ERROR)
                        .setBody("error");
            } else if (id == SUCCESS) {
                replyBuilder.setResponseCode(FlowFileReply.ResponseCode.SUCCESS)
                        .setBody("success");
            } else if (id == RETRY) {
                replyBuilder.setResponseCode(FlowFileReply.ResponseCode.RETRY)
                        .setBody("retry");
                // else, assume the request is to include the flowfile content in the response
            } else {
                replyBuilder.setResponseCode(FlowFileReply.ResponseCode.SUCCESS)
                        .setBody(request.getContent().toStringUtf8());
            }
            responseObserver.onNext(replyBuilder.build());
            responseObserver.onCompleted();
        }
    }
}
/*
 * Copyright (c) 2015 Nova Ordis LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.novaordis.gld.api.jms.embedded;

import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.TextMessage;
import java.util.Enumeration;

/**
 * An in-memory {@link TextMessage} test double. Only the text payload and the JMS message ID
 * are actually stored; every other {@code Message} operation throws
 * {@code RuntimeException("NOT YET IMPLEMENTED")} so that unexpected use in a test fails loudly.
 */
public class EmbeddedTextMessage implements TextMessage {

    // Constants -------------------------------------------------------------------------------------------------------

    // Static ----------------------------------------------------------------------------------------------------------

    // Attributes ------------------------------------------------------------------------------------------------------

    // the message payload; may be null
    private String text;

    // the JMS message ID; null until explicitly assigned via setJMSMessageID()
    private String id;

    // Constructors ----------------------------------------------------------------------------------------------------

    /**
     * Creates a message with a null payload.
     */
    public EmbeddedTextMessage() {
        this(null);
    }

    /**
     * Creates a message with the given payload. The message ID starts out null.
     */
    public EmbeddedTextMessage(String text) {
        this.text = text;
        this.id = null;
    }

    // TextMessage implementation --------------------------------------------------------------------------------------

    @Override
    public void setText(String s) throws JMSException {
        this.text = s;
    }

    @Override
    public String getText() throws JMSException {
        return text;
    }

    /**
     * The ID is null when the message is created and set to a non null value upon sending.
     */
    @Override
    public String getJMSMessageID() throws JMSException {
        return id;
    }

    @Override
    public void setJMSMessageID(String s) throws JMSException {
        this.id = s;
    }

    // Everything below is intentionally unimplemented: each method fails fast with an unchecked
    // exception rather than silently returning a default, so tests that rely on unsupported
    // Message features are detected immediately.

    @Override
    public long getJMSTimestamp() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSTimestamp(long l) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public byte[] getJMSCorrelationIDAsBytes() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSCorrelationIDAsBytes(byte[] bytes) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSCorrelationID(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public String getJMSCorrelationID() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public Destination getJMSReplyTo() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSReplyTo(Destination destination) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public Destination getJMSDestination() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSDestination(Destination destination) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public int getJMSDeliveryMode() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSDeliveryMode(int i) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public boolean getJMSRedelivered() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSRedelivered(boolean b) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public String getJMSType() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSType(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public long getJMSExpiration() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSExpiration(long l) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public int getJMSPriority() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setJMSPriority(int i) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void clearProperties() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public boolean propertyExists(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public boolean getBooleanProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public byte getByteProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public short getShortProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public int getIntProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public long getLongProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public float getFloatProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public double getDoubleProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public String getStringProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public Object getObjectProperty(String s) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public Enumeration getPropertyNames() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setBooleanProperty(String s, boolean b) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setByteProperty(String s, byte b) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setShortProperty(String s, short i) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setIntProperty(String s, int i) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setLongProperty(String s, long l) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setFloatProperty(String s, float v) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setDoubleProperty(String s, double v) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setStringProperty(String s, String s1) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void setObjectProperty(String s, Object o) throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void acknowledge() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    @Override
    public void clearBody() throws JMSException {
        throw new RuntimeException("NOT YET IMPLEMENTED");
    }

    // Public ----------------------------------------------------------------------------------------------------------

    // Package protected -----------------------------------------------------------------------------------------------

    // Protected -------------------------------------------------------------------------------------------------------

    // Private ---------------------------------------------------------------------------------------------------------

    // Inner classes ---------------------------------------------------------------------------------------------------

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager; import java.io.IOException; import java.io.InputStream; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.security.SaslRpcServer; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.PolicyProvider; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import 
org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NMToken; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.PreemptionContainer; import org.apache.hadoop.yarn.api.records.PreemptionContract; import org.apache.hadoop.yarn.api.records.PreemptionMessage; import org.apache.hadoop.yarn.api.records.PreemptionResourceRequest; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.StrictPreemptionContract; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException; import org.apache.hadoop.yarn.exceptions.ApplicationMasterNotRegisteredException; import org.apache.hadoop.yarn.exceptions.InvalidApplicationMasterRequestException; import org.apache.hadoop.yarn.exceptions.InvalidContainerReleaseException; import org.apache.hadoop.yarn.exceptions.InvalidResourceBlacklistRequestException; import org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import 
org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger.AuditConstants;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.AMLivelinessMonitor;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRegistrationEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptStatusupdateEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider;
import org.apache.hadoop.yarn.server.security.MasterKeyData;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.Resources;

import com.google.common.annotations.VisibleForTesting;

/**
 * ResourceManager-side implementation of {@link ApplicationMasterProtocol}:
 * the RPC endpoint through which ApplicationMasters register, heartbeat
 * (allocate) and unregister.
 * <p>
 * Per-attempt state is kept in {@link #responseMap}: each registered attempt
 * maps to an {@link AllocateResponseLock} that (a) serializes the AM's
 * register/allocate/finish calls and (b) remembers the last
 * {@code AllocateResponse} so duplicate heartbeats can be answered
 * idempotently. A response id of -1 means "attempt known but AM not yet
 * registered"; registration resets it to 0.
 */
@SuppressWarnings("unchecked")
@Private
public class ApplicationMasterService extends AbstractService implements
    ApplicationMasterProtocol {
  private static final Log LOG =
      LogFactory.getLog(ApplicationMasterService.class);

  /** Monitor that expires AM attempts which stop heartbeating. */
  private final AMLivelinessMonitor amLivelinessMonitor;
  private YarnScheduler rScheduler;
  private InetSocketAddress masterServiceAddress;
  private Server server;
  private final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);
  /** Per-attempt lock plus last heartbeat response; see class javadoc. */
  private final ConcurrentMap<ApplicationAttemptId, AllocateResponseLock>
      responseMap =
          new ConcurrentHashMap<ApplicationAttemptId, AllocateResponseLock>();
  private final RMContext rmContext;

  public ApplicationMasterService(RMContext rmContext, YarnScheduler scheduler) {
    super(ApplicationMasterService.class.getName());
    this.amLivelinessMonitor = rmContext.getAMLivelinessMonitor();
    this.rScheduler = scheduler;
    this.rmContext = rmContext;
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    // Resolve the scheduler bind address; may be refined by the bind host
    // after the server actually starts (see serviceStart()).
    masterServiceAddress =
        conf.getSocketAddr(YarnConfiguration.RM_BIND_HOST,
            YarnConfiguration.RM_SCHEDULER_ADDRESS,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT);
  }

  @Override
  protected void serviceStart() throws Exception {
    Configuration conf = getConfig();
    YarnRPC rpc = YarnRPC.create(conf);

    Configuration serverConf = conf;
    // If the auth is not-simple, enforce it to be token-based: AMs always
    // authenticate to this service with an AMRMToken, never with Kerberos.
    serverConf = new Configuration(conf);
    serverConf.set(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        SaslRpcServer.AuthMethod.TOKEN.toString());
    this.server =
        rpc.getServer(ApplicationMasterProtocol.class, this,
            masterServiceAddress, serverConf,
            this.rmContext.getAMRMTokenSecretManager(),
            serverConf.getInt(
                YarnConfiguration.RM_SCHEDULER_CLIENT_THREAD_COUNT,
                YarnConfiguration.DEFAULT_RM_SCHEDULER_CLIENT_THREAD_COUNT));

    // Enable service authorization?
    if (conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
      InputStream inputStream =
          this.rmContext.getConfigurationProvider()
              .getConfigurationInputStream(conf,
                  YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
      if (inputStream != null) {
        conf.addResource(inputStream);
      }
      refreshServiceAcls(conf, RMPolicyProvider.getInstance());
    }

    this.server.start();
    // Re-derive the advertised address from the actually bound listener
    // (handles wildcard bind hosts and ephemeral ports).
    this.masterServiceAddress =
        conf.updateConnectAddr(YarnConfiguration.RM_BIND_HOST,
            YarnConfiguration.RM_SCHEDULER_ADDRESS,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS,
            server.getListenerAddress());
    super.serviceStart();
  }

  @Private
  public InetSocketAddress getBindAddress() {
    return this.masterServiceAddress;
  }

  // Obtain the needed AMRMTokenIdentifier from the remote-UGI. RPC layer
  // currently sets only the required id, but iterate through anyways just to
  // be sure. Returns null when no AMRMTokenIdentifier is present.
  private AMRMTokenIdentifier selectAMRMTokenIdentifier(
      UserGroupInformation remoteUgi) throws IOException {
    AMRMTokenIdentifier result = null;
    Set<TokenIdentifier> tokenIds = remoteUgi.getTokenIdentifiers();
    for (TokenIdentifier tokenId : tokenIds) {
      if (tokenId instanceof AMRMTokenIdentifier) {
        result = (AMRMTokenIdentifier) tokenId;
        break;
      }
    }
    return result;
  }

  /**
   * Verify that the calling UGI carries a valid AMRMToken.
   *
   * @return the caller's {@link AMRMTokenIdentifier} (never null)
   * @throws YarnException if the user cannot be resolved or no AMRMToken is
   *           found for it
   */
  private AMRMTokenIdentifier authorizeRequest() throws YarnException {
    UserGroupInformation remoteUgi;
    try {
      remoteUgi = UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
      String msg =
          "Cannot obtain the user-name for authorizing ApplicationMaster. "
              + "Got exception: " + StringUtils.stringifyException(e);
      LOG.warn(msg);
      throw RPCUtil.getRemoteException(msg);
    }

    boolean tokenFound = false;
    String message = "";
    AMRMTokenIdentifier appTokenIdentifier = null;
    try {
      appTokenIdentifier = selectAMRMTokenIdentifier(remoteUgi);
      if (appTokenIdentifier == null) {
        tokenFound = false;
        message = "No AMRMToken found for user " + remoteUgi.getUserName();
      } else {
        tokenFound = true;
      }
    } catch (IOException e) {
      tokenFound = false;
      message =
          "Got exception while looking for AMRMToken for user "
              + remoteUgi.getUserName();
    }

    if (!tokenFound) {
      LOG.warn(message);
      throw RPCUtil.getRemoteException(message);
    }

    return appTokenIdentifier;
  }

  @Override
  public RegisterApplicationMasterResponse registerApplicationMaster(
      RegisterApplicationMasterRequest request) throws YarnException,
      IOException {

    AMRMTokenIdentifier amrmTokenIdentifier = authorizeRequest();
    ApplicationAttemptId applicationAttemptId =
        amrmTokenIdentifier.getApplicationAttemptId();
    ApplicationId appID = applicationAttemptId.getApplicationId();
    AllocateResponseLock lock = responseMap.get(applicationAttemptId);
    if (lock == null) {
      // Fix: the app may already have been removed from RMApps in this race;
      // don't NPE while trying to audit-log the failure.
      RMApp failedApp = this.rmContext.getRMApps().get(appID);
      String failedUser = (failedApp == null) ? "UNKNOWN" : failedApp.getUser();
      RMAuditLogger.logFailure(failedUser, AuditConstants.REGISTER_AM,
          "Application doesn't exist in cache " + applicationAttemptId,
          "ApplicationMasterService", "Error in registering application master",
          appID, applicationAttemptId);
      throwApplicationDoesNotExistInCacheException(applicationAttemptId);
    }

    // Allow only one thread in AM to do registerApp at a time.
    synchronized (lock) {
      AllocateResponse lastResponse = lock.getAllocateResponse();
      if (hasApplicationMasterRegistered(applicationAttemptId)) {
        String message = "Application Master is already registered : " + appID;
        LOG.warn(message);
        RMAuditLogger.logFailure(
            this.rmContext.getRMApps().get(appID).getUser(),
            AuditConstants.REGISTER_AM, "", "ApplicationMasterService", message,
            appID, applicationAttemptId);
        throw new InvalidApplicationMasterRequestException(message);
      }

      this.amLivelinessMonitor.receivedPing(applicationAttemptId);
      RMApp app = this.rmContext.getRMApps().get(appID);

      // Setting the response id to 0 to identify if the
      // application master is register for the respective attemptid
      lastResponse.setResponseId(0);
      lock.setAllocateResponse(lastResponse);
      LOG.info("AM registration " + applicationAttemptId);
      this.rmContext
          .getDispatcher()
          .getEventHandler()
          .handle(
              new RMAppAttemptRegistrationEvent(applicationAttemptId, request
                  .getHost(), request.getRpcPort(), request.getTrackingUrl()));
      RMAuditLogger.logSuccess(app.getUser(), AuditConstants.REGISTER_AM,
          "ApplicationMasterService", appID, applicationAttemptId);

      // Pick up min/max resource from scheduler...
      RegisterApplicationMasterResponse response =
          recordFactory.newRecordInstance(
              RegisterApplicationMasterResponse.class);
      response.setMaximumResourceCapability(rScheduler
          .getMaximumResourceCapability(app.getQueue()));
      response.setApplicationACLs(app.getRMAppAttempt(applicationAttemptId)
          .getSubmissionContext().getAMContainerSpec().getApplicationACLs());
      response.setQueue(app.getQueue());
      if (UserGroupInformation.isSecurityEnabled()) {
        LOG.info("Setting client token master key");
        response.setClientToAMTokenMasterKey(java.nio.ByteBuffer.wrap(rmContext
            .getClientToAMTokenSecretManager()
            .getMasterKey(applicationAttemptId).getEncoded()));
      }

      // For work-preserving AM restart, retrieve previous attempts' containers
      // and corresponding NM tokens.
      if (app.getApplicationSubmissionContext()
          .getKeepContainersAcrossApplicationAttempts()) {
        List<Container> transferredContainers =
            ((AbstractYarnScheduler) rScheduler)
                .getTransferredContainers(applicationAttemptId);
        if (!transferredContainers.isEmpty()) {
          response.setContainersFromPreviousAttempts(transferredContainers);
          List<NMToken> nmTokens = new ArrayList<NMToken>();
          for (Container container : transferredContainers) {
            try {
              NMToken token =
                  rmContext.getNMTokenSecretManager().createAndGetNMToken(
                      app.getUser(), applicationAttemptId, container);
              if (null != token) {
                nmTokens.add(token);
              }
            } catch (IllegalArgumentException e) {
              // if it's a DNS issue, throw UnknownHostException directly and
              // that will be automatically retried by RMProxy in RPC layer.
              if (e.getCause() instanceof UnknownHostException) {
                throw (UnknownHostException) e.getCause();
              }
            }
          }
          response.setNMTokensFromPreviousAttempts(nmTokens);
          LOG.info("Application " + appID + " retrieved "
              + transferredContainers.size() + " containers from previous"
              + " attempts and " + nmTokens.size() + " NM tokens.");
        }
      }

      response.setSchedulerResourceTypes(rScheduler
          .getSchedulingResourceTypes());

      return response;
    }
  }

  @Override
  public FinishApplicationMasterResponse finishApplicationMaster(
      FinishApplicationMasterRequest request) throws YarnException,
      IOException {

    ApplicationAttemptId applicationAttemptId =
        authorizeRequest().getApplicationAttemptId();
    ApplicationId appId = applicationAttemptId.getApplicationId();

    RMApp rmApp =
        rmContext.getRMApps().get(applicationAttemptId.getApplicationId());
    // Fix: rmApp can be null when the app has already been removed from the
    // RM; fail with the protocol exception instead of an NPE below.
    if (rmApp == null) {
      throwApplicationDoesNotExistInCacheException(applicationAttemptId);
    }
    // Remove collector address when app get finished.
    rmApp.removeCollectorAddr();
    // checking whether the app exits in RMStateStore at first not to throw
    // ApplicationDoesNotExistInCacheException before and after
    // RM work-preserving restart.
    if (rmApp.isAppFinalStateStored()) {
      LOG.info(rmApp.getApplicationId() + " unregistered successfully. ");
      return FinishApplicationMasterResponse.newInstance(true);
    }

    AllocateResponseLock lock = responseMap.get(applicationAttemptId);
    if (lock == null) {
      throwApplicationDoesNotExistInCacheException(applicationAttemptId);
    }

    // Allow only one thread in AM to do finishApp at a time.
    synchronized (lock) {
      if (!hasApplicationMasterRegistered(applicationAttemptId)) {
        String message =
            "Application Master is trying to unregister before registering for: "
                + appId;
        LOG.error(message);
        RMAuditLogger.logFailure(
            this.rmContext.getRMApps().get(appId).getUser(),
            AuditConstants.UNREGISTER_AM, "", "ApplicationMasterService",
            message, appId, applicationAttemptId);
        throw new ApplicationMasterNotRegisteredException(message);
      }

      this.amLivelinessMonitor.receivedPing(applicationAttemptId);

      rmContext.getDispatcher().getEventHandler().handle(
          new RMAppAttemptUnregistrationEvent(applicationAttemptId, request
              .getTrackingUrl(), request.getFinalApplicationStatus(), request
              .getDiagnostics()));

      // For UnmanagedAMs, return true so they don't retry
      return FinishApplicationMasterResponse.newInstance(
          rmApp.getApplicationSubmissionContext().getUnmanagedAM());
    }
  }

  /** Always throws: the attempt is not (or no longer) in {@link #responseMap}. */
  private void throwApplicationDoesNotExistInCacheException(
      ApplicationAttemptId appAttemptId)
      throws InvalidApplicationMasterRequestException {
    String message = "Application doesn't exist in cache " + appAttemptId;
    LOG.error(message);
    throw new InvalidApplicationMasterRequestException(message);
  }

  /**
   * @param appAttemptId
   * @return true if application is registered for the respective attemptid
   */
  public boolean hasApplicationMasterRegistered(
      ApplicationAttemptId appAttemptId) {
    boolean hasApplicationMasterRegistered = false;
    AllocateResponseLock lastResponse = responseMap.get(appAttemptId);
    if (lastResponse != null) {
      synchronized (lastResponse) {
        // Response id -1 is the sentinel set by registerAppAttempt(); >= 0
        // means registerApplicationMaster() has completed for this attempt.
        if (lastResponse.getAllocateResponse() != null
            && lastResponse.getAllocateResponse().getResponseId() >= 0) {
          hasApplicationMasterRegistered = true;
        }
      }
    }
    return hasApplicationMasterRegistered;
  }

  protected final static List<Container> EMPTY_CONTAINER_LIST =
      new ArrayList<Container>();
  protected static final Allocation EMPTY_ALLOCATION = new Allocation(
      EMPTY_CONTAINER_LIST, Resources.createResource(0), null, null, null);

  @Override
  public AllocateResponse allocate(AllocateRequest request)
      throws YarnException, IOException {

    AMRMTokenIdentifier amrmTokenIdentifier = authorizeRequest();
    ApplicationAttemptId appAttemptId =
        amrmTokenIdentifier.getApplicationAttemptId();
    ApplicationId applicationId = appAttemptId.getApplicationId();

    this.amLivelinessMonitor.receivedPing(appAttemptId);

    /* check if its in cache */
    AllocateResponseLock lock = responseMap.get(appAttemptId);
    if (lock == null) {
      String message =
          "Application attempt " + appAttemptId
              + " doesn't exist in ApplicationMasterService cache.";
      LOG.error(message);
      throw new ApplicationAttemptNotFoundException(message);
    }
    synchronized (lock) {
      AllocateResponse lastResponse = lock.getAllocateResponse();
      if (!hasApplicationMasterRegistered(appAttemptId)) {
        String message =
            "AM is not registered for known application attempt: "
                + appAttemptId
                + " or RM had restarted after AM registered . AM should re-register.";
        LOG.info(message);
        RMAuditLogger.logFailure(
            this.rmContext.getRMApps().get(appAttemptId.getApplicationId())
                .getUser(), AuditConstants.AM_ALLOCATE, "",
            "ApplicationMasterService", message, applicationId, appAttemptId);
        throw new ApplicationMasterNotRegisteredException(message);
      }

      if ((request.getResponseId() + 1) == lastResponse.getResponseId()) {
        /* old heartbeat */
        return lastResponse;
      } else if (request.getResponseId() + 1 < lastResponse.getResponseId()) {
        String message =
            "Invalid responseId in AllocateRequest from application attempt: "
                + appAttemptId + ", expect responseId to be "
                + (lastResponse.getResponseId() + 1);
        throw new InvalidApplicationMasterRequestException(message);
      }

      // filter illegal progress values: clamp into [0, 1], treat NaN as 0
      float filteredProgress = request.getProgress();
      if (Float.isNaN(filteredProgress)
          || filteredProgress == Float.NEGATIVE_INFINITY
          || filteredProgress < 0) {
        request.setProgress(0);
      } else if (filteredProgress > 1
          || filteredProgress == Float.POSITIVE_INFINITY) {
        request.setProgress(1);
      }

      // Send the status update to the appAttempt.
      this.rmContext.getDispatcher().getEventHandler().handle(
          new RMAppAttemptStatusupdateEvent(appAttemptId, request
              .getProgress()));

      List<ResourceRequest> ask = request.getAskList();
      List<ContainerId> release = request.getReleaseList();

      ResourceBlacklistRequest blacklistRequest =
          request.getResourceBlacklistRequest();
      // Fix: use the typed Collections.emptyList() instead of the raw
      // Collections.EMPTY_LIST field (unchecked assignment to List<String>).
      List<String> blacklistAdditions =
          (blacklistRequest != null) ? blacklistRequest.getBlacklistAdditions()
              : Collections.<String>emptyList();
      List<String> blacklistRemovals =
          (blacklistRequest != null) ? blacklistRequest.getBlacklistRemovals()
              : Collections.<String>emptyList();

      RMApp app = this.rmContext.getRMApps().get(applicationId);

      // set label expression for Resource Requests if resourceName=ANY
      ApplicationSubmissionContext asc = app.getApplicationSubmissionContext();
      for (ResourceRequest req : ask) {
        if (null == req.getNodeLabelExpression()
            && ResourceRequest.ANY.equals(req.getResourceName())) {
          req.setNodeLabelExpression(asc.getNodeLabelExpression());
        }
      }

      // sanity check
      try {
        RMServerUtils.normalizeAndValidateRequests(ask,
            rScheduler.getMaximumResourceCapability(), app.getQueue(),
            rScheduler, rmContext);
      } catch (InvalidResourceRequestException e) {
        LOG.warn("Invalid resource ask by application " + appAttemptId, e);
        throw e;
      }

      try {
        RMServerUtils.validateBlacklistRequest(blacklistRequest);
      } catch (InvalidResourceBlacklistRequestException e) {
        LOG.warn("Invalid blacklist request by application " + appAttemptId, e);
        throw e;
      }

      // In the case of work-preserving AM restart, it's possible for the
      // AM to release containers from the earlier attempt.
      if (!app.getApplicationSubmissionContext()
          .getKeepContainersAcrossApplicationAttempts()) {
        try {
          RMServerUtils.validateContainerReleaseRequest(release, appAttemptId);
        } catch (InvalidContainerReleaseException e) {
          LOG.warn("Invalid container release by application " + appAttemptId,
              e);
          throw e;
        }
      }

      // Send new requests to appAttempt.
      Allocation allocation;
      RMAppAttemptState state =
          app.getRMAppAttempt(appAttemptId).getAppAttemptState();
      if (state.equals(RMAppAttemptState.FINAL_SAVING)
          || state.equals(RMAppAttemptState.FINISHING)
          || app.isAppFinalStateStored()) {
        LOG.warn(appAttemptId + " is in " + state
            + " state, ignore container allocate request.");
        allocation = EMPTY_ALLOCATION;
      } else {
        allocation =
            this.rScheduler.allocate(appAttemptId, ask, release,
                blacklistAdditions, blacklistRemovals);
      }

      if (!blacklistAdditions.isEmpty() || !blacklistRemovals.isEmpty()) {
        LOG.info("blacklist are updated in Scheduler."
            + "blacklistAdditions: " + blacklistAdditions + ", "
            + "blacklistRemovals: " + blacklistRemovals);
      }
      RMAppAttempt appAttempt = app.getRMAppAttempt(appAttemptId);
      AllocateResponse allocateResponse =
          recordFactory.newRecordInstance(AllocateResponse.class);
      if (!allocation.getContainers().isEmpty()) {
        allocateResponse.setNMTokens(allocation.getNMTokens());
      }

      // update the response with the deltas of node status changes
      List<RMNode> updatedNodes = new ArrayList<RMNode>();
      if (app.pullRMNodeUpdates(updatedNodes) > 0) {
        List<NodeReport> updatedNodeReports = new ArrayList<NodeReport>();
        for (RMNode rmNode : updatedNodes) {
          SchedulerNodeReport schedulerNodeReport =
              rScheduler.getNodeReport(rmNode.getNodeID());
          Resource used = BuilderUtils.newResource(0, 0);
          int numContainers = 0;
          if (schedulerNodeReport != null) {
            used = schedulerNodeReport.getUsedResource();
            numContainers = schedulerNodeReport.getNumContainers();
          }
          NodeId nodeId = rmNode.getNodeID();
          NodeReport report =
              BuilderUtils.newNodeReport(nodeId, rmNode.getState(),
                  rmNode.getHttpAddress(), rmNode.getRackName(), used,
                  rmNode.getTotalCapability(), numContainers,
                  rmNode.getHealthReport(), rmNode.getLastHealthReportTime(),
                  rmNode.getNodeLabels());

          updatedNodeReports.add(report);
        }
        allocateResponse.setUpdatedNodes(updatedNodeReports);
      }

      allocateResponse.setAllocatedContainers(allocation.getContainers());
      allocateResponse.setCompletedContainersStatuses(appAttempt
          .pullJustFinishedContainers());
      allocateResponse.setResponseId(lastResponse.getResponseId() + 1);
      allocateResponse.setAvailableResources(allocation.getResourceLimit());

      allocateResponse.setNumClusterNodes(this.rScheduler.getNumClusterNodes());

      // add collector address for this application
      allocateResponse.setCollectorAddr(
          this.rmContext.getRMApps().get(applicationId).getCollectorAddr());

      // add preemption to the allocateResponse message (if any)
      allocateResponse
          .setPreemptionMessage(generatePreemptionMessage(allocation));

      // update AMRMToken if the token has been rolled over
      MasterKeyData nextMasterKey =
          this.rmContext.getAMRMTokenSecretManager().getNextMasterKeyData();
      if (nextMasterKey != null
          && nextMasterKey.getMasterKey().getKeyId() != amrmTokenIdentifier
              .getKeyId()) {
        RMAppAttemptImpl appAttemptImpl = (RMAppAttemptImpl) appAttempt;
        Token<AMRMTokenIdentifier> amrmToken = appAttempt.getAMRMToken();
        if (nextMasterKey.getMasterKey().getKeyId() != appAttemptImpl
            .getAMRMTokenKeyId()) {
          LOG.info("The AMRMToken has been rolled-over. Send new AMRMToken back"
              + " to application: " + applicationId);
          amrmToken =
              rmContext.getAMRMTokenSecretManager()
                  .createAndGetAMRMToken(appAttemptId);
          appAttemptImpl.setAMRMToken(amrmToken);
        }
        allocateResponse.setAMRMToken(org.apache.hadoop.yarn.api.records.Token
            .newInstance(amrmToken.getIdentifier(), amrmToken.getKind()
                .toString(), amrmToken.getPassword(), amrmToken.getService()
                .toString()));
      }

      /*
       * As we are updating the response inside the lock object so we don't
       * need to worry about unregister call occurring in between (which
       * removes the lock object).
       */
      lock.setAllocateResponse(allocateResponse);
      return allocateResponse;
    }
  }

  /**
   * Build a {@link PreemptionMessage} from the scheduler's allocation, or
   * return null when the allocation carries no preemption information.
   */
  private PreemptionMessage generatePreemptionMessage(Allocation allocation) {
    PreemptionMessage pMsg = null;
    // assemble strict preemption request
    if (allocation.getStrictContainerPreemptions() != null) {
      pMsg = recordFactory.newRecordInstance(PreemptionMessage.class);
      StrictPreemptionContract pStrict =
          recordFactory.newRecordInstance(StrictPreemptionContract.class);
      Set<PreemptionContainer> pCont = new HashSet<PreemptionContainer>();
      for (ContainerId cId : allocation.getStrictContainerPreemptions()) {
        PreemptionContainer pc =
            recordFactory.newRecordInstance(PreemptionContainer.class);
        pc.setId(cId);
        pCont.add(pc);
      }
      pStrict.setContainers(pCont);
      pMsg.setStrictContract(pStrict);
    }

    // assemble negotiable preemption request
    if (allocation.getResourcePreemptions() != null
        && allocation.getResourcePreemptions().size() > 0
        && allocation.getContainerPreemptions() != null
        && allocation.getContainerPreemptions().size() > 0) {
      if (pMsg == null) {
        pMsg = recordFactory.newRecordInstance(PreemptionMessage.class);
      }
      PreemptionContract contract =
          recordFactory.newRecordInstance(PreemptionContract.class);
      Set<PreemptionContainer> pCont = new HashSet<PreemptionContainer>();
      for (ContainerId cId : allocation.getContainerPreemptions()) {
        PreemptionContainer pc =
            recordFactory.newRecordInstance(PreemptionContainer.class);
        pc.setId(cId);
        pCont.add(pc);
      }
      List<PreemptionResourceRequest> pRes =
          new ArrayList<PreemptionResourceRequest>();
      for (ResourceRequest crr : allocation.getResourcePreemptions()) {
        PreemptionResourceRequest prr =
            recordFactory.newRecordInstance(PreemptionResourceRequest.class);
        prr.setResourceRequest(crr);
        pRes.add(prr);
      }
      contract.setContainers(pCont);
      contract.setResourceRequest(pRes);
      pMsg.setContract(contract);
    }

    return pMsg;
  }

  public void registerAppAttempt(ApplicationAttemptId attemptId) {
    AllocateResponse response =
        recordFactory.newRecordInstance(AllocateResponse.class);
    // set response id to -1 before application master for the following
    // attemptID get registered
    response.setResponseId(-1);
    LOG.info("Registering app attempt : " + attemptId);
    responseMap.put(attemptId, new AllocateResponseLock(response));
    rmContext.getNMTokenSecretManager().registerApplicationAttempt(attemptId);
  }

  public void unregisterAttempt(ApplicationAttemptId attemptId) {
    LOG.info("Unregistering app attempt : " + attemptId);
    responseMap.remove(attemptId);
    rmContext.getNMTokenSecretManager().unregisterApplicationAttempt(attemptId);
  }

  public void refreshServiceAcls(Configuration configuration,
      PolicyProvider policyProvider) {
    this.server.refreshServiceAclWithLoadedConfiguration(configuration,
        policyProvider);
  }

  @Override
  protected void serviceStop() throws Exception {
    if (this.server != null) {
      this.server.stop();
    }
    super.serviceStop();
  }

  /**
   * Holder pairing an attempt's last {@link AllocateResponse} with the
   * monitor object used to serialize that attempt's AM calls.
   */
  public static class AllocateResponseLock {
    private AllocateResponse response;

    public AllocateResponseLock(AllocateResponse response) {
      this.response = response;
    }

    public synchronized AllocateResponse getAllocateResponse() {
      return response;
    }

    public synchronized void setAllocateResponse(AllocateResponse response) {
      this.response = response;
    }
  }

  @VisibleForTesting
  public Server getServer() {
    return this.server;
  }
}
/* * Copyright (c) OSGi Alliance (2000, 2012). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.gravia.runtime; import static org.jboss.gravia.runtime.spi.RuntimeLogger.LOGGER; import java.lang.reflect.Array; import java.util.Collections; import java.util.SortedMap; import java.util.TreeMap; import org.jboss.gravia.Constants; import org.osgi.framework.InvalidSyntaxException; /** * The {@code ServiceTracker} class simplifies using services from the * Runtime's service registry. * <p> * A {@code ServiceTracker} object is constructed with search criteria and a * {@code ServiceTrackerCustomizer} object. A {@code ServiceTracker} can use a * {@code ServiceTrackerCustomizer} to customize the service objects to be * tracked. The {@code ServiceTracker} can then be opened to begin tracking all * services in the Framework's service registry that match the specified search * criteria. The {@code ServiceTracker} correctly handles all of the details of * listening to {@code ServiceEvent}s and getting and ungetting services. * <p> * The {@code getServiceReferences} method can be called to get references to * the services being tracked. The {@code getService} and {@code getServices} * methods can be called to get the service objects for the tracked service. * <p> * The {@code ServiceTracker} class is thread-safe. It does not call a * {@code ServiceTrackerCustomizer} while holding any locks. 
* {@code ServiceTrackerCustomizer} implementations must also be thread-safe. * * @param <S> The type of the service being tracked. * @param <T> The type of the tracked object. * @ThreadSafe */ public class ServiceTracker<S, T> implements ServiceTrackerCustomizer<S, T> { /* set this to true to compile in debug messages */ static final boolean DEBUG = false; /** * The Module Context used by this {@code ServiceTracker}. */ protected final ModuleContext context; /** * The Filter used by this {@code ServiceTracker} which specifies the search * criteria for the services to track. */ protected final Filter filter; /** * The {@code ServiceTrackerCustomizer} for this tracker. */ final ServiceTrackerCustomizer<S, T> customizer; /** * Filter string for use when adding the ServiceListener. If this field is * set, then certain optimizations can be taken since we don't have a user * supplied filter. */ final String listenerFilter; /** * Class name to be tracked. If this field is set, then we are tracking by * class name. */ private final String trackClass; /** * Reference to be tracked. If this field is set, then we are tracking a * single ServiceReference. */ private final ServiceReference<S> trackReference; /** * Tracked services: {@code ServiceReference} -> customized Object and * {@code ServiceListener} object */ private volatile Tracked tracked; /** * Accessor method for the current Tracked object. This method is only * intended to be used by the unsynchronized methods which do not modify the * tracked field. * * @return The current Tracked object. */ private Tracked tracked() { return tracked; } /** * Cached ServiceReference for getServiceReference. * * This field is volatile since it is accessed by multiple threads. */ private volatile ServiceReference<S> cachedReference; /** * Cached service object for getService. * * This field is volatile since it is accessed by multiple threads. 
*/ private volatile T cachedService; /** * Create a {@code ServiceTracker} on the specified {@code ServiceReference} * . * * <p> * The service referenced by the specified {@code ServiceReference} will be * tracked by this {@code ServiceTracker}. * * @param context The {@code ModuleContext} against which the tracking is * done. * @param reference The {@code ServiceReference} for the service to be * tracked. * @param customizer The customizer object to call when services are added, * modified, or removed in this {@code ServiceTracker}. If customizer * is {@code null}, then this {@code ServiceTracker} will be used as * the {@code ServiceTrackerCustomizer} and this * {@code ServiceTracker} will call the * {@code ServiceTrackerCustomizer} methods on itself. */ public ServiceTracker(final ModuleContext context, final ServiceReference<S> reference, final ServiceTrackerCustomizer<S, T> customizer) { this.context = context; this.trackReference = reference; this.trackClass = null; this.customizer = (customizer == null) ? this : customizer; this.listenerFilter = "(" + Constants.SERVICE_ID + "=" + reference.getProperty(Constants.SERVICE_ID).toString() + ")"; this.filter = context.createFilter(listenerFilter); } /** * Create a {@code ServiceTracker} on the specified class name. * * <p> * Services registered under the specified class name will be tracked by * this {@code ServiceTracker}. * * @param context The {@code ModuleContext} against which the tracking is * done. * @param clazz The class name of the services to be tracked. * @param customizer The customizer object to call when services are added, * modified, or removed in this {@code ServiceTracker}. If customizer * is {@code null}, then this {@code ServiceTracker} will be used as * the {@code ServiceTrackerCustomizer} and this * {@code ServiceTracker} will call the * {@code ServiceTrackerCustomizer} methods on itself. 
*/ public ServiceTracker(final ModuleContext context, final String clazz, final ServiceTrackerCustomizer<S, T> customizer) { this.context = context; this.trackReference = null; this.trackClass = clazz; this.customizer = (customizer == null) ? this : customizer; // we call clazz.toString to verify clazz is non-null! this.listenerFilter = "(" + Constants.OBJECTCLASS + "=" + clazz.toString() + ")"; this.filter = context.createFilter(listenerFilter); } /** * Create a {@code ServiceTracker} on the specified {@code Filter} object. * * <p> * Services which match the specified {@code Filter} object will be tracked * by this {@code ServiceTracker}. * * @param context The {@code ModuleContext} against which the tracking is * done. * @param filter The {@code Filter} to select the services to be tracked. * @param customizer The customizer object to call when services are added, * modified, or removed in this {@code ServiceTracker}. If customizer * is null, then this {@code ServiceTracker} will be used as the * {@code ServiceTrackerCustomizer} and this {@code ServiceTracker} * will call the {@code ServiceTrackerCustomizer} methods on itself. */ public ServiceTracker(final ModuleContext context, final Filter filter, final ServiceTrackerCustomizer<S, T> customizer) { this.context = context; this.trackReference = null; this.trackClass = null; this.listenerFilter = filter.toString(); this.filter = filter; this.customizer = (customizer == null) ? this : customizer; if ((context == null) || (filter == null)) { // we throw a NPE here to be consistent with the other constructors throw new NullPointerException(); } } /** * Create a {@code ServiceTracker} on the specified class. * * <p> * Services registered under the name of the specified class will be tracked * by this {@code ServiceTracker}. * * @param context The {@code ModuleContext} against which the tracking is * done. * @param clazz The class of the services to be tracked. 
* @param customizer The customizer object to call when services are added, * modified, or removed in this {@code ServiceTracker}. If customizer * is {@code null}, then this {@code ServiceTracker} will be used as * the {@code ServiceTrackerCustomizer} and this * {@code ServiceTracker} will call the * {@code ServiceTrackerCustomizer} methods on itself. */ public ServiceTracker(final ModuleContext context, final Class<S> clazz, final ServiceTrackerCustomizer<S, T> customizer) { this(context, clazz.getName(), customizer); } /** * Open this {@code ServiceTracker} and begin tracking services. * * <p> * This implementation calls {@code open(false)}. * * @throws java.lang.IllegalStateException If the {@code ModuleContext} with * which this {@code ServiceTracker} was created is no longer valid. * @see #open(boolean) */ public void open() { open(false); } /** * Open this {@code ServiceTracker} and begin tracking services. * * <p> * Services which match the search criteria specified when this * {@code ServiceTracker} was created are now tracked by this * {@code ServiceTracker}. * * @param trackAllServices If {@code true}, then this {@code ServiceTracker} * will track all matching services regardless of class loader * accessibility. If {@code false}, then this {@code ServiceTracker} * will only track matching services which are class loader * accessible to the bundle whose {@code ModuleContext} is used by * this {@code ServiceTracker}. * @throws java.lang.IllegalStateException If the {@code ModuleContext} with * which this {@code ServiceTracker} was created is no longer valid. */ @SuppressWarnings("unchecked") public void open(boolean trackAllServices) { final Tracked t; synchronized (this) { if (tracked != null) { return; } if (DEBUG) { LOGGER.debug("ServiceTracker.open: " + filter); } t = trackAllServices ? 
new AllTracked() : new Tracked(); synchronized (t) { context.addServiceListener(t, listenerFilter); ServiceReference<S>[] references = null; if (trackClass != null) { references = getInitialReferences(trackAllServices, trackClass, null); } else { if (trackReference != null) { if (trackReference.getModule() != null) { ServiceReference<S>[] single = new ServiceReference[] { trackReference }; references = single; } } else { /* user supplied filter */ references = getInitialReferences(trackAllServices, null, listenerFilter); } } /* set tracked with the initial references */ t.setInitial(references); } tracked = t; } /* Call tracked outside of synchronized region */ t.trackInitial(); /* process the initial references */ } /** * Returns the list of initial {@code ServiceReference}s that will be * tracked by this {@code ServiceTracker}. * * @param trackAllServices If {@code true}, use * {@code getAllServiceReferences}. * @param className The class name with which the service was registered, or * {@code null} for all services. * @param filterString The filter criteria or {@code null} for all services. * @return The list of initial {@code ServiceReference}s. * @throws InvalidSyntaxException If the specified filterString has an * invalid syntax. */ @SuppressWarnings("unchecked") private ServiceReference<S>[] getInitialReferences(boolean trackAllServices, String className, String filterString) { ServiceReference<S>[] result; if (trackAllServices) { result = (ServiceReference<S>[]) context.getAllServiceReferences(className, filterString); } else { result = (ServiceReference<S>[]) context.getServiceReferences(className, filterString); } return result; } /** * Close this {@code ServiceTracker}. * * <p> * This method should be called when this {@code ServiceTracker} should end * the tracking of services. * * <p> * This implementation calls {@link #getServiceReferences()} to get the list * of tracked services to remove. 
	 */
	public void close() {
		final Tracked outgoing;
		final ServiceReference<S>[] references;
		synchronized (this) {
			outgoing = tracked;
			// Idempotent: a second close() (or close() before open()) is a no-op.
			if (outgoing == null) {
				return;
			}
			if (DEBUG) {
				LOGGER.debug("ServiceTracker.close: " + filter);
			}
			// Mark the Tracked closed so late-arriving ServiceEvents are ignored
			// (serviceChanged checks the closed flag before dispatching).
			outgoing.close();
			// Snapshot the tracked references before detaching, so we can
			// untrack each of them below.
			references = getServiceReferences();
			tracked = null;
			try {
				context.removeServiceListener(outgoing);
			} catch (IllegalStateException e) {
				/* In case the context was stopped. */
			}
		}
		modified(); /* clear the cache */
		synchronized (outgoing) {
			outgoing.notifyAll(); /* wake up any waiters */
		}
		// Untrack outside the tracker lock: this invokes customizer callbacks
		// (removedService), which must not run while synchronized on this.
		if (references != null) {
			for (int i = 0; i < references.length; i++) {
				outgoing.untrack(references[i], null);
			}
		}
		if (DEBUG) {
			if ((cachedReference == null) && (cachedService == null)) {
				LOGGER.debug("ServiceTracker.close[cached cleared]: " + filter);
			}
		}
	}

	/**
	 * Default implementation of the
	 * {@code ServiceTrackerCustomizer.addingService} method.
	 *
	 * <p>
	 * This method is only called when this {@code ServiceTracker} has been
	 * constructed with a {@code null ServiceTrackerCustomizer} argument.
	 *
	 * <p>
	 * This implementation returns the result of calling {@code getService} on
	 * the {@code ModuleContext} with which this {@code ServiceTracker} was
	 * created passing the specified {@code ServiceReference}.
	 * <p>
	 * This method can be overridden in a subclass to customize the service
	 * object to be tracked for the service being added. In that case, take care
	 * not to rely on the default implementation of
	 * {@link #removedService(ServiceReference, Object) removedService} to unget
	 * the service.
	 *
	 * @param reference The reference to the service being added to this
	 *        {@code ServiceTracker}.
	 * @return The service object to be tracked for the service added to this
	 *         {@code ServiceTracker}.
* @see ServiceTrackerCustomizer#addingService(ServiceReference) */ @SuppressWarnings("unchecked") public T addingService(ServiceReference<S> reference) { T result = (T) context.getService(reference); return result; } /** * Default implementation of the * {@code ServiceTrackerCustomizer.modifiedService} method. * * <p> * This method is only called when this {@code ServiceTracker} has been * constructed with a {@code null ServiceTrackerCustomizer} argument. * * <p> * This implementation does nothing. * * @param reference The reference to modified service. * @param service The service object for the modified service. * @see ServiceTrackerCustomizer#modifiedService(ServiceReference, Object) */ public void modifiedService(ServiceReference<S> reference, T service) { /* do nothing */ } /** * Default implementation of the * {@code ServiceTrackerCustomizer.removedService} method. * * <p> * This method is only called when this {@code ServiceTracker} has been * constructed with a {@code null ServiceTrackerCustomizer} argument. * * <p> * This implementation calls {@code ungetService}, on the * {@code ModuleContext} with which this {@code ServiceTracker} was created, * passing the specified {@code ServiceReference}. * <p> * This method can be overridden in a subclass. If the default * implementation of {@link #addingService(ServiceReference) addingService} * method was used, this method must unget the service. * * @param reference The reference to removed service. * @param service The service object for the removed service. * @see ServiceTrackerCustomizer#removedService(ServiceReference, Object) */ public void removedService(ServiceReference<S> reference, T service) { context.ungetService(reference); } /** * Wait for at least one service to be tracked by this * {@code ServiceTracker}. This method will also return when this * {@code ServiceTracker} is closed. 
	 *
	 * <p>
	 * It is strongly recommended that {@code waitForService} is not used during
	 * the calling of the {@code ModuleActivator} methods.
	 * {@code ModuleActivator} methods are expected to complete in a short
	 * period of time.
	 *
	 * <p>
	 * This implementation calls {@link #getService()} to determine if a service
	 * is being tracked.
	 *
	 * @param timeout The time interval in milliseconds to wait. If zero, the
	 *        method will wait indefinitely.
	 * @return Returns the result of {@link #getService()}.
	 * @throws InterruptedException If another thread has interrupted the
	 *         current thread.
	 * @throws IllegalArgumentException If the value of timeout is negative.
	 */
	public T waitForService(long timeout) throws InterruptedException {
		if (timeout < 0) {
			throw new IllegalArgumentException("timeout value is negative");
		}
		T object = getService();
		if (object != null) {
			return object;
		}
		// endTime == 0 encodes "no deadline" (wait indefinitely).
		final long endTime = (timeout == 0) ? 0 : (System.currentTimeMillis() + timeout);
		do {
			final Tracked t = tracked();
			if (t == null) { /* if ServiceTracker is not open */
				return null;
			}
			synchronized (t) {
				if (t.size() == 0) {
					// Spurious wakeups are tolerated: the enclosing do/while
					// re-checks getService() and recomputes the remaining time.
					t.wait(timeout);
				}
			}
			object = getService();
			if (endTime > 0) { // if we have a timeout
				// Recompute remaining time so repeated wakeups cannot extend
				// the total wait beyond the caller-supplied timeout.
				timeout = endTime - System.currentTimeMillis();
				if (timeout <= 0) { // that has expired
					break;
				}
			}
		} while (object == null);
		return object;
	}

	/**
	 * Return an array of {@code ServiceReference}s for all services being
	 * tracked by this {@code ServiceTracker}.
	 *
	 * @return Array of {@code ServiceReference}s or {@code null} if no services
	 *         are being tracked.
*/ @SuppressWarnings("unchecked") public ServiceReference<S>[] getServiceReferences() { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return null; } synchronized (t) { int length = t.size(); if (length == 0) { return null; } ServiceReference<S>[] result = new ServiceReference[length]; return t.copyKeys(result); } } /** * Returns a {@code ServiceReference} for one of the services being tracked * by this {@code ServiceTracker}. * * <p> * If multiple services are being tracked, the service with the highest * ranking (as specified in its {@code service.ranking} property) is * returned. If there is a tie in ranking, the service with the lowest * service ID (as specified in its {@code service.id} property); that is, * the service that was registered first is returned. This is the same * algorithm used by {@code ModuleContext.getServiceReference}. * * <p> * This implementation calls {@link #getServiceReferences()} to get the list * of references for the tracked services. * * @return A {@code ServiceReference} or {@code null} if no services are * being tracked. */ public ServiceReference<S> getServiceReference() { ServiceReference<S> reference = cachedReference; if (reference != null) { if (DEBUG) { LOGGER.debug("ServiceTracker.getServiceReference[cached]: " + filter); } return reference; } if (DEBUG) { LOGGER.debug("ServiceTracker.getServiceReference: " + filter); } ServiceReference<S>[] references = getServiceReferences(); int length = (references == null) ? 0 : references.length; if (length == 0) { /* if no service is being tracked */ return null; } int index = 0; if (length > 1) { /* if more than one service, select highest ranking */ int rankings[] = new int[length]; int count = 0; int maxRanking = Integer.MIN_VALUE; for (int i = 0; i < length; i++) { Object property = references[i].getProperty(Constants.SERVICE_RANKING); int ranking = (property instanceof Integer) ? 
((Integer) property).intValue() : 0; rankings[i] = ranking; if (ranking > maxRanking) { index = i; maxRanking = ranking; count = 1; } else { if (ranking == maxRanking) { count++; } } } if (count > 1) { /* if still more than one service, select lowest id */ long minId = Long.MAX_VALUE; for (int i = 0; i < length; i++) { if (rankings[i] == maxRanking) { long id = ((Long) (references[i].getProperty(Constants.SERVICE_ID))).longValue(); if (id < minId) { index = i; minId = id; } } } } } return cachedReference = references[index]; } /** * Returns the service object for the specified {@code ServiceReference} if * the specified referenced service is being tracked by this * {@code ServiceTracker}. * * @param reference The reference to the desired service. * @return A service object or {@code null} if the service referenced by the * specified {@code ServiceReference} is not being tracked. */ public T getService(ServiceReference<S> reference) { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return null; } synchronized (t) { return t.getCustomizedObject(reference); } } /** * Return an array of service objects for all services being tracked by this * {@code ServiceTracker}. * * <p> * This implementation calls {@link #getServiceReferences()} to get the list * of references for the tracked services and then calls * {@link #getService(ServiceReference)} for each reference to get the * tracked service object. * * @return An array of service objects or {@code null} if no services are * being tracked. */ public Object[] getServices() { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return null; } synchronized (t) { ServiceReference<S>[] references = getServiceReferences(); int length = (references == null) ? 
0 : references.length; if (length == 0) { return null; } Object[] objects = new Object[length]; for (int i = 0; i < length; i++) { objects[i] = getService(references[i]); } return objects; } } /** * Returns a service object for one of the services being tracked by this * {@code ServiceTracker}. * * <p> * If any services are being tracked, this implementation returns the result * of calling {@code getService(getServiceReference())}. * * @return A service object or {@code null} if no services are being * tracked. */ public T getService() { T service = cachedService; if (service != null) { if (DEBUG) { LOGGER.debug("ServiceTracker.getService[cached]: " + filter); } return service; } if (DEBUG) { LOGGER.debug("ServiceTracker.getService: " + filter); } ServiceReference<S> reference = getServiceReference(); if (reference == null) { return null; } return cachedService = getService(reference); } /** * Remove a service from this {@code ServiceTracker}. * * The specified service will be removed from this {@code ServiceTracker}. * If the specified service was being tracked then the * {@code ServiceTrackerCustomizer.removedService} method will be called for * that service. * * @param reference The reference to the service to be removed. */ public void remove(ServiceReference<S> reference) { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return; } t.untrack(reference, null); } /** * Return the number of services being tracked by this * {@code ServiceTracker}. * * @return The number of services being tracked. */ public int size() { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return 0; } synchronized (t) { return t.size(); } } /** * Returns the tracking count for this {@code ServiceTracker}. * * The tracking count is initialized to 0 when this {@code ServiceTracker} * is opened. Every time a service is added, modified or removed from this * {@code ServiceTracker}, the tracking count is incremented. 
* * <p> * The tracking count can be used to determine if this * {@code ServiceTracker} has added, modified or removed a service by * comparing a tracking count value previously collected with the current * tracking count value. If the value has not changed, then no service has * been added, modified or removed from this {@code ServiceTracker} since * the previous tracking count was collected. * * @return The tracking count for this {@code ServiceTracker} or -1 if this * {@code ServiceTracker} is not open. */ public int getTrackingCount() { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return -1; } synchronized (t) { return t.getTrackingCount(); } } /** * Called by the Tracked object whenever the set of tracked services is * modified. Clears the cache. */ /* * This method must not be synchronized since it is called by Tracked while * Tracked is synchronized. We don't want synchronization interactions * between the listener thread and the user thread. */ void modified() { cachedReference = null; /* clear cached value */ cachedService = null; /* clear cached value */ if (DEBUG) { LOGGER.debug("ServiceTracker.modified: " + filter); } } /** * Return a {@code SortedMap} of the {@code ServiceReference}s and service * objects for all services being tracked by this {@code ServiceTracker}. * The map is sorted in reverse natural order of {@code ServiceReference}. * That is, the first entry is the service with the highest ranking and the * lowest service id. * * @return A {@code SortedMap} with the {@code ServiceReference}s and * service objects for all services being tracked by this * {@code ServiceTracker}. If no services are being tracked, then * the returned map is empty. 
*/ public SortedMap<ServiceReference<S>, T> getTracked() { SortedMap<ServiceReference<S>, T> map = new TreeMap<ServiceReference<S>, T>(Collections.reverseOrder()); final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return map; } synchronized (t) { return t.copyEntries(map); } } /** * Return if this {@code ServiceTracker} is empty. * * @return {@code true} if this {@code ServiceTracker} is not tracking any * services. */ public boolean isEmpty() { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ return true; } synchronized (t) { return t.isEmpty(); } } /** * Return an array of service objects for all services being tracked by this * {@code ServiceTracker}. The runtime type of the returned array is that of * the specified array. * * <p> * This implementation calls {@link #getServiceReferences()} to get the list * of references for the tracked services and then calls * {@link #getService(ServiceReference)} for each reference to get the * tracked service object. * * @param array An array into which the tracked service objects will be * stored, if the array is large enough. * @return An array of service objects being tracked. If the specified array * is large enough to hold the result, then the specified array is * returned. If the specified array is longer then necessary to hold * the result, the array element after the last service object is * set to {@code null}. If the specified array is not large enough * to hold the result, a new array is created and returned. */ @SuppressWarnings("unchecked") public T[] getServices(T[] array) { final Tracked t = tracked(); if (t == null) { /* if ServiceTracker is not open */ if (array.length > 0) { array[0] = null; } return array; } synchronized (t) { ServiceReference<S>[] references = getServiceReferences(); int length = (references == null) ? 
0 : references.length; if (length == 0) { if (array.length > 0) { array[0] = null; } return array; } if (length > array.length) { array = (T[]) Array.newInstance(array.getClass().getComponentType(), length); } for (int i = 0; i < length; i++) { array[i] = getService(references[i]); } if (array.length > length) { array[length] = null; } return array; } } /** * Inner class which subclasses AbstractTracked. This class is the * {@code ServiceListener} object for the tracker. * * @ThreadSafe */ private class Tracked extends AbstractTracked<ServiceReference<S>, T, ServiceEvent> implements ServiceListener { /** * Tracked constructor. */ Tracked() { super(); } /** * {@code ServiceListener} method for the {@code ServiceTracker} class. * This method must NOT be synchronized to avoid deadlock potential. * * @param event {@code ServiceEvent} object from the framework. */ @SuppressWarnings("unchecked") final public void serviceChanged(final ServiceEvent event) { /* * Check if we had a delayed call (which could happen when we * close). */ if (closed) { return; } final ServiceReference<S> reference = (ServiceReference<S>) event.getServiceReference(); if (DEBUG) { LOGGER.debug("ServiceTracker.Tracked.serviceChanged[" + event.getType() + "]: " + reference); } switch (event.getType()) { case ServiceEvent.REGISTERED: case ServiceEvent.MODIFIED: track(reference, event); /* * If the customizer throws an unchecked exception, it is * safe to let it propagate */ break; case ServiceEvent.MODIFIED_ENDMATCH: case ServiceEvent.UNREGISTERING: untrack(reference, event); /* * If the customizer throws an unchecked exception, it is * safe to let it propagate */ break; } } /** * Increment the tracking count and tell the tracker there was a * modification. * * @GuardedBy this */ final void modified() { super.modified(); /* increment the modification count */ ServiceTracker.this.modified(); } /** * Call the specific customizer adding method. 
This method must not be * called while synchronized on this object. * * @param item Item to be tracked. * @param related Action related object. * @return Customized object for the tracked item or {@code null} if the * item is not to be tracked. */ final T customizerAdding(final ServiceReference<S> item, final ServiceEvent related) { return customizer.addingService(item); } /** * Call the specific customizer modified method. This method must not be * called while synchronized on this object. * * @param item Tracked item. * @param related Action related object. * @param object Customized object for the tracked item. */ final void customizerModified(final ServiceReference<S> item, final ServiceEvent related, final T object) { customizer.modifiedService(item, object); } /** * Call the specific customizer removed method. This method must not be * called while synchronized on this object. * * @param item Tracked item. * @param related Action related object. * @param object Customized object for the tracked item. */ final void customizerRemoved(final ServiceReference<S> item, final ServiceEvent related, final T object) { customizer.removedService(item, object); } } /** * Subclass of Tracked which implements the AllServiceListener interface. * This class is used by the ServiceTracker if open is called with true. * * @ThreadSafe */ private class AllTracked extends Tracked implements AllServiceListener { /** * AllTracked constructor. */ AllTracked() { super(); } } }
/** * Copyright 2005-2007 Xue Yong Zhi, Ye Zheng * Distributed under the Apache License */ package com.xruby.runtime.lang; import com.xruby.runtime.builtin.RubyArray; public abstract class RubyBlock extends MethodBlockBase { // if run finished, and __break__ is not true, it indicated break happend //e.g. // loop do // break 'xxx' // end protected boolean __break__ = false; protected boolean __return__ = false; protected boolean __retry__ = false; // Normally RubyBlock was created in one place, invoked(yield) later in another // place. The block needs to keep the context of its creator(self, arg, block arg, // scope etc). protected RubyValue selfOfCurrentMethod_;//need this for {self} TODO why do we need 'receiver' for run method? protected RubyArray argsOfCurrentMethod_;//need this for {super} protected final RubyValue argOfCurrentMethod_;//need this for {super} protected final RubyBlock blockOfCurrentMethod_;//need this for {yield} private final boolean definedInAnotherBlock_;//not null if defined in another block private boolean createdByLambda_ = false; private RubyMethod currentMethod_; public RubyBlock(int argc, boolean has_asterisk_parameter, int default_argc, RubyValue self, RubyValue arg,//not null for one arg method RubyArray args,//not null for var arg method RubyBlock block, RubyModule scope, RubyMethod currentMethod, boolean definedInAnotherBlock) { super(argc, has_asterisk_parameter, default_argc); selfOfCurrentMethod_ = self; argOfCurrentMethod_ = arg; argsOfCurrentMethod_ = args; blockOfCurrentMethod_ = block; setScope(scope); currentMethod_ = currentMethod; definedInAnotherBlock_ = definedInAnotherBlock; } public void setArgsOfCurrentMethod(RubyArray args) { argsOfCurrentMethod_ = args; } public void setCurrentMethod(RubyMethod m) { currentMethod_ = m; } public RubyMethod getCurrentMethod() { return currentMethod_; } public RubyID getID() { return currentMethod_.getID(); } public void setSelf(RubyValue v) { selfOfCurrentMethod_ = v; } public 
RubyValue getSelf() { return selfOfCurrentMethod_; } public boolean isDefinedInAnotherBlock() { return definedInAnotherBlock_; } public boolean breakedOrReturned() { return __break__ || __return__; } public boolean returned() { return createdByLambda_ ? false : __return__; } public boolean shouldRetry() { return __retry__; } public void setCreatedByLambda() { createdByLambda_ = true; } public boolean createdByLambda() { return createdByLambda_; } private void validateParameterForProcCall(int actual_args_length) { if (argc_ >= 0 && !has_asterisk_parameter_) { int required_args_length = argc_ - default_argc_; if (actual_args_length != required_args_length) { throw new RubyException(RubyRuntime.ArgumentErrorClass, "wrong number of arguments (" + actual_args_length + " for " + required_args_length + ")"); } } } public RubyValue invoke(RubyValue receiver, RubyArray args) { if (createdByLambda_) { validateParameterForProcCall((null == args) ? 0 : args.size()); } __break__ = false; __return__ = false; __retry__ = false; RubyValue v = run(receiver, null != args ? 
args : new RubyArray(0)); //TODO Maybe we can just use the fields in BlockCallStatus, remove the //__break__, __return__, __retry__ here if (v.returnedInBlock()) { __return__ = true; } return v; } protected abstract RubyValue run(RubyValue receiver, RubyArray args); // no arg invocation public RubyValue invoke(RubyValue receiver) { if (createdByLambda_) { validateParameterForProcCall(0); } __break__ = false; __return__ = false; __retry__ = false; RubyValue v = run(receiver); //TODO Maybe we can just use the fields in BlockCallStatus, remove the //__break__, __return__, __retry__ here if (v.returnedInBlock()) { __return__ = true; } return v; } // no arg run protected RubyValue run(RubyValue receiver) { return this.run(receiver, new RubyArray(0)); } // one arg invocation public RubyValue invoke(RubyValue receiver, RubyValue arg) { if (createdByLambda_) { validateParameterForProcCall(1); } __break__ = false; __return__ = false; __retry__ = false; RubyValue v = run(receiver, arg); //TODO Maybe we can just use the fields in BlockCallStatus, remove the //__break__, __return__, __retry__ here if (v.returnedInBlock()) { __return__ = true; } return v; } // one arg run protected RubyValue run(RubyValue receiver, RubyValue arg) { return this.run(receiver, new RubyArray(arg)); } // two args invocation public RubyValue invoke(RubyValue receiver, RubyValue arg1, RubyValue arg2) { if (createdByLambda_) { validateParameterForProcCall(2); } __break__ = false; __return__ = false; __retry__ = false; RubyValue v = run(receiver, arg1, arg2); //TODO Maybe we can just use the fields in BlockCallStatus, remove the //__break__, __return__, __retry__ here if (v.returnedInBlock()) { __return__ = true; } return v; } // two args run protected RubyValue run(RubyValue receiver, RubyValue arg0, RubyValue arg1) { return this.run(receiver, new RubyArray(arg0, arg1)); } }
/* * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at * http://creativecommons.org/licenses/publicdomain */ package java.util.concurrent; import java.util.AbstractSet; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.Map; import java.util.NavigableSet; import java.util.NoSuchElementException; import java.util.Set; import java.util.SortedSet; import sun.misc.Unsafe; /** * A scalable concurrent {@link NavigableSet} implementation based on * a {@link ConcurrentSkipListMap}. The elements of the set are kept * sorted according to their {@linkplain Comparable natural ordering}, * or by a {@link Comparator} provided at set creation time, depending * on which constructor is used. * * <p>This implementation provides expected average <i>log(n)</i> time * cost for the <tt>contains</tt>, <tt>add</tt>, and <tt>remove</tt> * operations and their variants. Insertion, removal, and access * operations safely execute concurrently by multiple threads. * Iterators are <i>weakly consistent</i>, returning elements * reflecting the state of the set at some point at or since the * creation of the iterator. They do <em>not</em> throw {@link * ConcurrentModificationException}, and may proceed concurrently with * other operations. Ascending ordered views and their iterators are * faster than descending ones. * * <p>Beware that, unlike in most collections, the <tt>size</tt> * method is <em>not</em> a constant-time operation. Because of the * asynchronous nature of these sets, determining the current number * of elements requires a traversal of the elements. Additionally, the * bulk operations <tt>addAll</tt>, <tt>removeAll</tt>, * <tt>retainAll</tt>, and <tt>containsAll</tt> are <em>not</em> * guaranteed to be performed atomically. 
For example, an iterator * operating concurrently with an <tt>addAll</tt> operation might view * only some of the added elements. * * <p>This class and its iterators implement all of the * <em>optional</em> methods of the {@link Set} and {@link Iterator} * interfaces. Like most other concurrent collection implementations, * this class does not permit the use of <tt>null</tt> elements, * because <tt>null</tt> arguments and return values cannot be reliably * distinguished from the absence of elements. * * <p>This class is a member of the * <a href="{@docRoot}/../technotes/guides/collections/index.html"> * Java Collections Framework</a>. * * @author Doug Lea * @param <E> the type of elements maintained by this set * @since 1.6 */ public class ConcurrentSkipListSet<E> extends AbstractSet<E> implements NavigableSet<E>, Cloneable, java.io.Serializable { private static final long serialVersionUID = -2479143111061671589L; /** * The underlying map. Uses Boolean.TRUE as value for each * element. This field is declared final for the sake of thread * safety, which entails some ugliness in clone() */ private final ConcurrentNavigableMap<E,Object> m; /** * Constructs a new, empty set that orders its elements according to * their {@linkplain Comparable natural ordering}. */ public ConcurrentSkipListSet() { m = new ConcurrentSkipListMap<E,Object>(); } /** * Constructs a new, empty set that orders its elements according to * the specified comparator. * * @param comparator the comparator that will be used to order this set. * If <tt>null</tt>, the {@linkplain Comparable natural * ordering} of the elements will be used. */ public ConcurrentSkipListSet(Comparator<? super E> comparator) { m = new ConcurrentSkipListMap<E,Object>(comparator); } /** * Constructs a new set containing the elements in the specified * collection, that orders its elements according to their * {@linkplain Comparable natural ordering}. 
* * @param c The elements that will comprise the new set * @throws ClassCastException if the elements in <tt>c</tt> are * not {@link Comparable}, or are not mutually comparable * @throws NullPointerException if the specified collection or any * of its elements are null */ public ConcurrentSkipListSet(Collection<? extends E> c) { m = new ConcurrentSkipListMap<E,Object>(); addAll(c); } /** * Constructs a new set containing the same elements and using the * same ordering as the specified sorted set. * * @param s sorted set whose elements will comprise the new set * @throws NullPointerException if the specified sorted set or any * of its elements are null */ public ConcurrentSkipListSet(SortedSet<E> s) { m = new ConcurrentSkipListMap<E,Object>(s.comparator()); addAll(s); } /** * For use by submaps */ ConcurrentSkipListSet(ConcurrentNavigableMap<E,Object> m) { this.m = m; } /** * Returns a shallow copy of this <tt>ConcurrentSkipListSet</tt> * instance. (The elements themselves are not cloned.) * * @return a shallow copy of this set */ public ConcurrentSkipListSet<E> clone() { ConcurrentSkipListSet<E> clone = null; try { clone = (ConcurrentSkipListSet<E>) super.clone(); clone.setMap(new ConcurrentSkipListMap(m)); } catch (CloneNotSupportedException e) { throw new InternalError(); } return clone; } /* ---------------- Set operations -------------- */ /** * Returns the number of elements in this set. If this set * contains more than <tt>Integer.MAX_VALUE</tt> elements, it * returns <tt>Integer.MAX_VALUE</tt>. * * <p>Beware that, unlike in most collections, this method is * <em>NOT</em> a constant-time operation. Because of the * asynchronous nature of these sets, determining the current * number of elements requires traversing them all to count them. * Additionally, it is possible for the size to change during * execution of this method, in which case the returned result * will be inaccurate. Thus, this method is typically not very * useful in concurrent applications. 
* * @return the number of elements in this set */ public int size() { return m.size(); } /** * Returns <tt>true</tt> if this set contains no elements. * @return <tt>true</tt> if this set contains no elements */ public boolean isEmpty() { return m.isEmpty(); } /** * Returns <tt>true</tt> if this set contains the specified element. * More formally, returns <tt>true</tt> if and only if this set * contains an element <tt>e</tt> such that <tt>o.equals(e)</tt>. * * @param o object to be checked for containment in this set * @return <tt>true</tt> if this set contains the specified element * @throws ClassCastException if the specified element cannot be * compared with the elements currently in this set * @throws NullPointerException if the specified element is null */ public boolean contains(Object o) { return m.containsKey(o); } /** * Adds the specified element to this set if it is not already present. * More formally, adds the specified element <tt>e</tt> to this set if * the set contains no element <tt>e2</tt> such that <tt>e.equals(e2)</tt>. * If this set already contains the element, the call leaves the set * unchanged and returns <tt>false</tt>. * * @param e element to be added to this set * @return <tt>true</tt> if this set did not already contain the * specified element * @throws ClassCastException if <tt>e</tt> cannot be compared * with the elements currently in this set * @throws NullPointerException if the specified element is null */ public boolean add(E e) { return m.putIfAbsent(e, Boolean.TRUE) == null; } /** * Removes the specified element from this set if it is present. * More formally, removes an element <tt>e</tt> such that * <tt>o.equals(e)</tt>, if this set contains such an element. * Returns <tt>true</tt> if this set contained the element (or * equivalently, if this set changed as a result of the call). * (This set will not contain the element once the call returns.) 
* * @param o object to be removed from this set, if present * @return <tt>true</tt> if this set contained the specified element * @throws ClassCastException if <tt>o</tt> cannot be compared * with the elements currently in this set * @throws NullPointerException if the specified element is null */ public boolean remove(Object o) { return m.remove(o, Boolean.TRUE); } /** * Removes all of the elements from this set. */ public void clear() { m.clear(); } /** * Returns an iterator over the elements in this set in ascending order. * * @return an iterator over the elements in this set in ascending order */ public Iterator<E> iterator() { return m.navigableKeySet().iterator(); } /** * Returns an iterator over the elements in this set in descending order. * * @return an iterator over the elements in this set in descending order */ public Iterator<E> descendingIterator() { return m.descendingKeySet().iterator(); } /* ---------------- AbstractSet Overrides -------------- */ /** * Compares the specified object with this set for equality. Returns * <tt>true</tt> if the specified object is also a set, the two sets * have the same size, and every member of the specified set is * contained in this set (or equivalently, every member of this set is * contained in the specified set). This definition ensures that the * equals method works properly across different implementations of the * set interface. * * @param o the object to be compared for equality with this set * @return <tt>true</tt> if the specified object is equal to this set */ public boolean equals(Object o) { // Override AbstractSet version to avoid calling size() if (o == this) return true; if (!(o instanceof Set)) return false; Collection<?> c = (Collection<?>) o; try { return containsAll(c) && c.containsAll(this); } catch (ClassCastException unused) { return false; } catch (NullPointerException unused) { return false; } } /** * Removes from this set all of its elements that are contained in * the specified collection. 
If the specified collection is also * a set, this operation effectively modifies this set so that its * value is the <i>asymmetric set difference</i> of the two sets. * * @param c collection containing elements to be removed from this set * @return <tt>true</tt> if this set changed as a result of the call * @throws ClassCastException if the types of one or more elements in this * set are incompatible with the specified collection * @throws NullPointerException if the specified collection or any * of its elements are null */ public boolean removeAll(Collection<?> c) { // Override AbstractSet version to avoid unnecessary call to size() boolean modified = false; for (Iterator<?> i = c.iterator(); i.hasNext(); ) if (remove(i.next())) modified = true; return modified; } /* ---------------- Relational operations -------------- */ /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if the specified element is null */ public E lower(E e) { return m.lowerKey(e); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if the specified element is null */ public E floor(E e) { return m.floorKey(e); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if the specified element is null */ public E ceiling(E e) { return m.ceilingKey(e); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if the specified element is null */ public E higher(E e) { return m.higherKey(e); } public E pollFirst() { Map.Entry<E,Object> e = m.pollFirstEntry(); return e == null? null : e.getKey(); } public E pollLast() { Map.Entry<E,Object> e = m.pollLastEntry(); return e == null? null : e.getKey(); } /* ---------------- SortedSet operations -------------- */ public Comparator<? 
super E> comparator() { return m.comparator(); } /** * @throws NoSuchElementException {@inheritDoc} */ public E first() { return m.firstKey(); } /** * @throws NoSuchElementException {@inheritDoc} */ public E last() { return m.lastKey(); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromElement} or * {@code toElement} is null * @throws IllegalArgumentException {@inheritDoc} */ public NavigableSet<E> subSet(E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) { return new ConcurrentSkipListSet<E> (m.subMap(fromElement, fromInclusive, toElement, toInclusive)); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code toElement} is null * @throws IllegalArgumentException {@inheritDoc} */ public NavigableSet<E> headSet(E toElement, boolean inclusive) { return new ConcurrentSkipListSet<E>(m.headMap(toElement, inclusive)); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromElement} is null * @throws IllegalArgumentException {@inheritDoc} */ public NavigableSet<E> tailSet(E fromElement, boolean inclusive) { return new ConcurrentSkipListSet<E>(m.tailMap(fromElement, inclusive)); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromElement} or * {@code toElement} is null * @throws IllegalArgumentException {@inheritDoc} */ public NavigableSet<E> subSet(E fromElement, E toElement) { return subSet(fromElement, true, toElement, false); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code toElement} is null * @throws IllegalArgumentException {@inheritDoc} */ public NavigableSet<E> headSet(E toElement) { return headSet(toElement, false); } /** * @throws ClassCastException {@inheritDoc} * @throws NullPointerException if {@code fromElement} is null * @throws IllegalArgumentException {@inheritDoc} */ public NavigableSet<E> tailSet(E fromElement) { return 
tailSet(fromElement, true); } /** * Returns a reverse order view of the elements contained in this set. * The descending set is backed by this set, so changes to the set are * reflected in the descending set, and vice-versa. * * <p>The returned set has an ordering equivalent to * <tt>{@link Collections#reverseOrder(Comparator) Collections.reverseOrder}(comparator())</tt>. * The expression {@code s.descendingSet().descendingSet()} returns a * view of {@code s} essentially equivalent to {@code s}. * * @return a reverse order view of this set */ public NavigableSet<E> descendingSet() { return new ConcurrentSkipListSet(m.descendingMap()); } // Support for resetting map in clone private static final Unsafe unsafe = Unsafe.getUnsafe(); private static final long mapOffset; static { try { mapOffset = unsafe.objectFieldOffset (ConcurrentSkipListSet.class.getDeclaredField("m")); } catch (Exception ex) { throw new Error(ex); } } private void setMap(ConcurrentNavigableMap<E,Object> map) { unsafe.putObjectVolatile(this, mapOffset, map); } }
package org.radargun.stages.test; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.radargun.DistStageAck; import org.radargun.StageResult; import org.radargun.config.Init; import org.radargun.config.Path; import org.radargun.config.Property; import org.radargun.config.PropertyHelper; import org.radargun.config.Stage; import org.radargun.reporting.Report; import org.radargun.stages.AbstractDistStage; import org.radargun.state.SlaveState; import org.radargun.stats.DefaultStatistics; import org.radargun.reporting.IterationData; import org.radargun.stats.Statistics; import org.radargun.traits.InjectTrait; import org.radargun.traits.Transactional; import org.radargun.utils.Projections; import org.radargun.utils.TimeConverter; import org.radargun.utils.Utils; /** * @author Radim Vansa &lt;rvansa@redhat.com&gt; */ @Stage(doc = "Base for test spawning several threads and benchmark of operations executed in those.") public abstract class TestStage extends AbstractDistStage { @Property(doc = "Name of the test as used for reporting. Default is 'Test'.") protected String testName = "Test"; @Property(doc = "By default, each stage creates a new test. If this property is set to true," + "results are amended to existing test (as iterations). Default is false.") protected boolean amendTest = false; @Property(doc = "Number of operations after which a log entry should be written. Default is 10000.") protected int logPeriod = 10000; @Property(doc = "Total number of request to be made against this session: reads + writes. If duration " + "is specified this value is ignored. Default is 50000.") protected long numRequests = 50000; @Property(doc = "The number of threads executing on each node. You have to set either this or 'total-threads'. No default.") protected int numThreadsPerNode = 0; @Property(doc = "Total number of threads across whole cluster. 
You have to set either this or 'num-threads-per-node'. No default.") protected int totalThreads = 0; @Property(doc = "Specifies if the requests should be explicitly wrapped in transactions. " + "Options are NEVER, ALWAYS and IF_TRANSACTIONAL: transactions are used only if " + "the cache configuration is transactional and transactionSize > 0. Default is IF_TRANSACTIONAL.") protected TransactionMode useTransactions = TransactionMode.IF_TRANSACTIONAL; @Property(doc = "Specifies whether the transactions should be committed (true) or rolled back (false). " + "Default is true") protected boolean commitTransactions = true; @Property(doc = "Number of requests in one transaction. Default is 1.") protected int transactionSize = 1; @Property(converter = TimeConverter.class, doc = "Benchmark duration. This takes precedence over numRequests. By default switched off.") protected long duration = 0; @Property(converter = TimeConverter.class, doc = "Target period of requests - e.g. when this is set to 10 ms" + "the benchmark will try to do one request every 10 ms. By default the requests are executed at maximum speed.") protected long requestPeriod = 0; @Property(doc = "Local threads synchronize on starting each round of requests. Note that with requestPeriod > 0, " + "there is still the random ramp-up delay. Default is false.") protected boolean synchronousRequests = false; @Property(doc = "Max duration of the test. Default is infinite.", converter = TimeConverter.class) protected long timeout = 0; @Property(name = "statistics", doc = "Type of gathered statistics. Default are the 'default' statistics " + "(fixed size memory footprint for each operation).", complexConverter = Statistics.Converter.class) protected Statistics statisticsPrototype = new DefaultStatistics(); @Property(doc = "Property, which value will be used to identify individual iterations (e.g. 
num-threads).") protected String iterationProperty; @Property(doc = "If this performance condition was not satisfied during this test, the current repeat will be exited. Default is none.", complexConverter = PerformanceCondition.Converter.class) protected PerformanceCondition repeatCondition; @Property(doc = "Merge statistics from all threads on single node to one record, instead of storing them all in-memory. Default is false.") protected boolean mergeThreadStats = false; @InjectTrait protected Transactional transactional; protected CountDownLatch startLatch; protected CountDownLatch finishLatch; protected volatile Completion completion; protected volatile boolean finished = false; protected volatile boolean terminated = false; protected int testIteration; // first iteration we should use for setting the statistics @Init public void init() { if (totalThreads <= 0 && numThreadsPerNode <= 0) throw new IllegalStateException("You have to set either total-threads or num-threads-per-node."); if (totalThreads > 0 && numThreadsPerNode > 0) throw new IllegalStateException("You have to set only one ot total-threads, num-threads-per-node"); if (totalThreads < 0 || numThreadsPerNode < 0) throw new IllegalStateException("Number of threads can't be < 0"); } protected static void avoidJit(Object result) { //this line was added just to make sure JIT doesn't skip call to cacheWrapper.get if (result != null && System.identityHashCode(result) == result.hashCode()) System.out.print(""); } public DistStageAck executeOnSlave() { if (!isServiceRunning()) { log.info("Not running test on this slave as service is not running."); return successfulResponse(); } try { long startNanos = System.nanoTime(); log.info("Starting test " + testName); List<Stressor> stressors = execute(); log.info("Finished test. 
Test duration is: " + Utils.getNanosDurationString(System.nanoTime() - startNanos)); return newStatisticsAck(stressors); } catch (Exception e) { return errorResponse("Exception while initializing the test", e); } } public StageResult processAckOnMaster(List<DistStageAck> acks) { StageResult result = super.processAckOnMaster(acks); if (result.isError()) return result; Report.Test test = getTest(amendTest); testIteration = test == null ? 0 : test.getIterations().size(); // we cannot use aggregated = createStatistics() since with PeriodicStatistics the merge would fail Statistics aggregated = null; int threads = 0; for (StatisticsAck ack : Projections.instancesOf(acks, StatisticsAck.class)) { if (ack.iterations != null) { int i = getTestIteration(); for (List<Statistics> threadStats : ack.iterations) { if (test != null) { // TODO: this looks like we could get same iteration value for all iterations reported String iterationValue = resolveIterationValue(); if (iterationValue != null) { test.setIterationValue(i, iterationValue); } test.addStatistics(i++, ack.getSlaveIndex(), threadStats); } threads = Math.max(threads, threadStats.size()); for (Statistics s : threadStats) { if (aggregated == null) { aggregated = s.copy(); } else { aggregated.merge(s); } } } } else { log.trace("No statistics received from slave: " + ack.getSlaveIndex()); } } if (repeatCondition == null) { return StageResult.SUCCESS; } else { try { if (repeatCondition.evaluate(threads, aggregated)) { log.info("Loop-condition condition was satisfied, continuing the loop."); return StageResult.SUCCESS; } else { log.info("Loop-condition condition not satisfied, terminating the loop"); return StageResult.BREAK; } } catch (Exception e) { log.info("Loop-condition has thrown exception, terminating the loop", e); return StageResult.BREAK; } } } protected Report.Test getTest(boolean allowExisting) { if (testName == null || testName.isEmpty()) { log.warn("No test name - results are not recorded"); return null; } 
else if (testName.equalsIgnoreCase("warmup")) { log.info("This test was executed as a warmup"); return null; } else { Report report = masterState.getReport(); return report.createTest(testName, iterationProperty, allowExisting); } } public List<Stressor> execute() { long startTime = System.currentTimeMillis(); int myFirstThread = getFirstThreadOn(slaveState.getSlaveIndex()); int myNumThreads = getNumThreadsOn(slaveState.getSlaveIndex()); Completion completion; if (duration > 0) { completion = new TimeStressorCompletion(duration, requestPeriod); } else { completion = new OperationCountCompletion(numRequests, requestPeriod, logPeriod); } if (synchronousRequests) { completion = new SynchronousCompletion(completion, myNumThreads); } setCompletion(completion); startLatch = new CountDownLatch(1); finishLatch = new CountDownLatch(myNumThreads); List<Stressor> stressors = new ArrayList<>(); for (int threadIndex = stressors.size(); threadIndex < myNumThreads; threadIndex++) { Stressor stressor = new Stressor(this, getLogic(), myFirstThread + threadIndex, threadIndex); stressors.add(stressor); stressor.start(); } log.info("Started " + stressors.size() + " stressor threads."); startLatch.countDown(); try { if (timeout > 0) { long waitTime = getWaitTime(startTime); if (waitTime <= 0 || !finishLatch.await(waitTime, TimeUnit.MILLISECONDS)) { throw new TestTimeoutException(); } } else { finishLatch.await(); } } catch (InterruptedException e) { throw new IllegalStateException("Unexpected interruption", e); } finally { finished = true; } for (Stressor stressor : stressors) { try { if (timeout > 0) { long waitTime = getWaitTime(startTime); if (waitTime <= 0) throw new TestTimeoutException(); stressor.join(waitTime); } else { stressor.join(); } } catch (InterruptedException e) { throw new TestTimeoutException(e); } } return stressors; } protected DistStageAck newStatisticsAck(List<Stressor> stressors) { List<List<Statistics>> results = gatherResults(stressors, new 
StatisticsResultRetriever()); return new StatisticsAck(slaveState, results); } protected <T> List<List<T>> gatherResults(List<Stressor> stressors, ResultRetriever<T> retriever) { List<T> results = new ArrayList<>(stressors.size()); for (Stressor stressor : stressors) { T result = retriever.getResult(stressor); if (result != null) { // stressor could have crashed during initialization results.add(result); } } List<List<T>> all = new ArrayList<>(); all.add(new ArrayList<T>()); /* expand the iteration statistics into iterations */ for (T result : results) { if (result instanceof IterationData) { int iteration = 0; for (IterationData.Iteration<T> it : ((IterationData<T>) result).getIterations()) { while (iteration >= all.size()) { all.add(new ArrayList<T>(results.size())); } addResult(all.get(iteration++), it.data, retriever); } } else { addResult(all.get(0), result, retriever); } } return all; } private <T> void addResult(List<T> results, T result, ResultRetriever<T> retriever) { if (mergeThreadStats) { if (results.isEmpty()) { results.add(result); } else { retriever.mergeResult(results.get(0), result); } } else { results.add(result); } } private long getWaitTime(long startTime) { return startTime + timeout - System.currentTimeMillis(); } public int getTotalThreads() { if (totalThreads > 0) { return totalThreads; } else if (numThreadsPerNode > 0) { return getExecutingSlaves().size() * numThreadsPerNode; } else throw new IllegalStateException(); } public int getFirstThreadOn(int slave) { List<Integer> executingSlaves = getExecutingSlaves(); int execId = executingSlaves.indexOf(slave); if (numThreadsPerNode > 0) { return execId * numThreadsPerNode; } else if (totalThreads > 0) { return execId * totalThreads / executingSlaves.size(); } else { throw new IllegalStateException(); } } public int getNumThreadsOn(int slave) { List<Integer> executingSlaves = getExecutingSlaves(); if (numThreadsPerNode > 0) { return executingSlaves.contains(slaveState.getSlaveIndex()) ? 
numThreadsPerNode : 0; } else if (totalThreads > 0) { int execId = executingSlaves.indexOf(slave); return (execId + 1) * totalThreads / executingSlaves.size() - execId * totalThreads / executingSlaves.size(); } else { throw new IllegalStateException(); } } protected String resolveIterationValue() { if (iterationProperty != null) { Map<String, Path> properties = PropertyHelper.getProperties(getClass(), true, false, true); String propertyString = PropertyHelper.getPropertyString(properties.get(iterationProperty), this); if (propertyString == null) { throw new IllegalStateException("Unable to resolve iteration property '" + iterationProperty + "'."); } return propertyString; } return null; } protected Statistics createStatistics() { return statisticsPrototype.copy(); } protected boolean isFinished() { return finished; } protected boolean isTerminated() { return terminated; } public void setTerminated() { this.terminated = true; } protected void setCompletion(Completion completion) { this.completion = completion; } public Completion getCompletion() { return completion; } public CountDownLatch getStartLatch() { return startLatch; } public CountDownLatch getFinishLatch() { return finishLatch; } public boolean useTransactions(String cacheName) { return useTransactions.use(transactional, cacheName, transactionSize); } public abstract OperationLogic getLogic(); protected int getTestIteration() { return testIteration; } protected static class StatisticsAck extends DistStageAck { private final List<List<Statistics>> iterations; protected StatisticsAck(SlaveState slaveState, List<List<Statistics>> iterations) { super(slaveState); this.iterations = iterations; } } protected interface ResultRetriever<T> { T getResult(Stressor stressor); void mergeResult(T into, T that); } protected static class StatisticsResultRetriever implements ResultRetriever<Statistics> { public StatisticsResultRetriever() {} @Override public Statistics getResult(Stressor stressor) { return 
stressor.getStats(); } @Override public void mergeResult(Statistics into, Statistics that) { into.merge(that); } } private class TestTimeoutException extends RuntimeException { public TestTimeoutException() { } public TestTimeoutException(Throwable cause) { super(cause); } } }
/* * Copyright 2008 Marc Boorshtein * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sourceforge.myvd.inserts.jdbc; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.Properties; import java.util.StringTokenizer; import com.novell.ldap.LDAPConstraints; import com.novell.ldap.LDAPException; import com.novell.ldap.LDAPModification; import com.novell.ldap.LDAPSearchConstraints; import net.sourceforge.myvd.chain.AddInterceptorChain; import net.sourceforge.myvd.chain.BindInterceptorChain; import net.sourceforge.myvd.chain.CompareInterceptorChain; import net.sourceforge.myvd.chain.DeleteInterceptorChain; import net.sourceforge.myvd.chain.ExetendedOperationInterceptorChain; import net.sourceforge.myvd.chain.ModifyInterceptorChain; import net.sourceforge.myvd.chain.PostSearchCompleteInterceptorChain; import net.sourceforge.myvd.chain.PostSearchEntryInterceptorChain; import net.sourceforge.myvd.chain.RenameInterceptorChain; import net.sourceforge.myvd.chain.SearchInterceptorChain; import net.sourceforge.myvd.core.NameSpace; import net.sourceforge.myvd.inserts.Insert; import net.sourceforge.myvd.types.Attribute; import net.sourceforge.myvd.types.Bool; import net.sourceforge.myvd.types.DistinguishedName; import net.sourceforge.myvd.types.Entry; import net.sourceforge.myvd.types.ExtendedOperation; import net.sourceforge.myvd.types.Filter; import net.sourceforge.myvd.types.FilterNode; import net.sourceforge.myvd.types.Int; import 
net.sourceforge.myvd.types.Password; import net.sourceforge.myvd.types.Results; public class MapDBObjectClass implements Insert { HashSet<String> inboundOcs; String dbOc; NameSpace ns; Bool inititalized; private String name; public void add(AddInterceptorChain chain, Entry entry, LDAPConstraints constraints) throws LDAPException { chain.nextAdd(entry, constraints); } public void bind(BindInterceptorChain chain, DistinguishedName dn, Password pwd, LDAPConstraints constraints) throws LDAPException { chain.nextBind(dn, pwd, constraints); } public void compare(CompareInterceptorChain chain, DistinguishedName dn, Attribute attrib, LDAPConstraints constraints) throws LDAPException { chain.nextCompare(dn, attrib, constraints); } public void configure(String name, Properties props, NameSpace nameSpace) throws LDAPException { String ocs = props.getProperty("inboundObjectClasses",""); StringTokenizer toker = new StringTokenizer(ocs,",",false); this.inboundOcs = new HashSet<String>(); while (toker.hasMoreTokens()) { this.inboundOcs.add(toker.nextToken().toLowerCase()); } this.inititalized = new Bool(false); this.ns = nameSpace; this.name = name; } public void delete(DeleteInterceptorChain chain, DistinguishedName dn, LDAPConstraints constraints) throws LDAPException { chain.nextDelete(dn, constraints); } public void extendedOperation(ExetendedOperationInterceptorChain chain, ExtendedOperation op, LDAPConstraints constraints) throws LDAPException { chain.nextExtendedOperations(op, constraints); } public String getName() { return this.name; } public void modify(ModifyInterceptorChain chain, DistinguishedName dn, ArrayList<LDAPModification> mods, LDAPConstraints constraints) throws LDAPException { chain.nextModify(dn, mods, constraints); } public void postSearchComplete(PostSearchCompleteInterceptorChain chain, DistinguishedName base, Int scope, Filter filter, ArrayList<Attribute> attributes, Bool typesOnly, LDAPSearchConstraints constraints) throws LDAPException { 
chain.nextPostSearchComplete(base, scope, filter, attributes, typesOnly, constraints); } public void postSearchEntry(PostSearchEntryInterceptorChain chain, Entry entry, DistinguishedName base, Int scope, Filter filter, ArrayList<Attribute> attributes, Bool typesOnly, LDAPSearchConstraints constraints) throws LDAPException { chain.nextPostSearchEntry(entry, base, scope, filter, attributes, typesOnly, constraints); } public void rename(RenameInterceptorChain chain, DistinguishedName dn, DistinguishedName newRdn, Bool deleteOldRdn, LDAPConstraints constraints) throws LDAPException { chain.nextRename(dn, newRdn, deleteOldRdn, constraints); } public void rename(RenameInterceptorChain chain, DistinguishedName dn, DistinguishedName newRdn, DistinguishedName newParentDN, Bool deleteOldRdn, LDAPConstraints constraints) throws LDAPException { chain.nextRename(dn, newRdn, newParentDN, deleteOldRdn, constraints); } public void search(SearchInterceptorChain chain, DistinguishedName base, Int scope, Filter filter, ArrayList<Attribute> attributes, Bool typesOnly, Results results, LDAPSearchConstraints constraints) throws LDAPException { if (! 
this.inititalized.getValue()) { synchronized (this.inititalized) { for (int i=0;i<this.ns.getChain().getLength();i++) { if (ns.getChain().getInsert(i) instanceof net.sourceforge.myvd.inserts.jdbc.JdbcInsert) { this.dbOc = ((JdbcInsert) ns.getChain().getInsert(i)).getObjectClass().toLowerCase(); } } } } Filter newFilter = new Filter(filter.getRoot().toString()); this.updateOcInFilter(newFilter.getRoot()); chain.nextSearch(base, scope, newFilter, attributes, typesOnly, results, constraints); } private void updateOcInFilter(FilterNode root) { switch (root.getType()) { case PRESENCE : case SUBSTR: case EQUALS : case LESS_THEN : case GREATER_THEN : if (root.getName().equalsIgnoreCase("objectclass") && this.inboundOcs.contains(root.getValue().toLowerCase())) { root.setValue(this.dbOc); } break; case AND: case OR: Iterator<FilterNode> it = root.getChildren().iterator(); while (it.hasNext()) { updateOcInFilter(it.next()); } break; case NOT: updateOcInFilter(root.getNot()); break; } } public void shutdown() { } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.mybatis; import java.util.Iterator; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.support.DefaultProducer; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.support.ObjectHelper; import org.apache.ibatis.mapping.MappedStatement; import org.apache.ibatis.session.ExecutorType; import org.apache.ibatis.session.SqlSession; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MyBatisProducer extends DefaultProducer { private static final Logger LOG = LoggerFactory.getLogger(MyBatisProducer.class); private String statement; private MyBatisEndpoint endpoint; public MyBatisProducer(MyBatisEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; this.statement = endpoint.getStatement(); } @Override public void process(Exchange exchange) throws Exception { SqlSession session; ExecutorType executorType = endpoint.getExecutorType(); if (executorType == null) { session = endpoint.getSqlSessionFactory().openSession(); } else { session = endpoint.getSqlSessionFactory().openSession(executorType); } try { switch (endpoint.getStatementType()) { case SelectOne: doSelectOne(exchange, session); 
break; case SelectList: doSelectList(exchange, session); break; case Insert: doInsert(exchange, session); break; case InsertList: doInsertList(exchange, session); break; case Update: doUpdate(exchange, session); break; case UpdateList: doUpdateList(exchange, session); break; case Delete: doDelete(exchange, session); break; case DeleteList: doDeleteList(exchange, session); break; default: throw new IllegalArgumentException("Unsupported statementType: " + endpoint.getStatementType()); } // flush the batch statements and commit the database connection session.commit(); } catch (Exception e) { // discard the pending batch statements and roll the database connection back session.rollback(); throw e; } finally { // and finally close the session as we're done session.close(); } } private void doSelectOne(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { LOG.trace("SelectOne: {} using statement: {}", in, statement); result = session.selectOne(statement, in); } else { LOG.trace("SelectOne using statement: {}", statement); result = session.selectOne(statement); } doProcessResult(exchange, result, session); } private void doSelectList(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { LOG.trace("SelectList: {} using statement: {}", in, statement); result = session.selectList(statement, in); } else { LOG.trace("SelectList using statement: {}", statement); result = session.selectList(statement); } doProcessResult(exchange, result, session); } private void doInsert(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { // lets handle arrays or collections of objects Iterator<?> iter = ObjectHelper.createIterator(in); while (iter.hasNext()) { Object value = iter.next(); LOG.trace("Inserting: {} using statement: {}", value, statement); result = session.insert(statement, 
value); doProcessResult(exchange, result, session); } } else { LOG.trace("Inserting using statement: {}", statement); result = session.insert(statement); doProcessResult(exchange, result, session); } } private void doInsertList(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement LOG.trace("Inserting: {} using statement: {}", in, statement); result = session.insert(statement, in); doProcessResult(exchange, result, session); } else { LOG.trace("Inserting using statement: {}", statement); result = session.insert(statement); doProcessResult(exchange, result, session); } } private void doUpdate(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { // lets handle arrays or collections of objects Iterator<?> iter = ObjectHelper.createIterator(in); while (iter.hasNext()) { Object value = iter.next(); LOG.trace("Updating: {} using statement: {}", value, statement); result = session.update(statement, value); doProcessResult(exchange, result, session); } } else { LOG.trace("Updating using statement: {}", statement); result = session.update(statement); doProcessResult(exchange, result, session); } } private void doUpdateList(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement LOG.trace("Updating: {} using statement: {}", in, statement); result = session.update(statement, in); doProcessResult(exchange, result, session); } else { LOG.trace("Updating using statement: {}", statement); result = session.update(statement); doProcessResult(exchange, result, session); } } private void doDelete(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = 
getInput(exchange); if (in != null) { // lets handle arrays or collections of objects Iterator<?> iter = ObjectHelper.createIterator(in); while (iter.hasNext()) { Object value = iter.next(); LOG.trace("Deleting: {} using statement: {}", value, statement); result = session.delete(statement, value); doProcessResult(exchange, result, session); } } else { LOG.trace("Deleting using statement: {}", statement); result = session.delete(statement); doProcessResult(exchange, result, session); } } private void doDeleteList(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = getInput(exchange); if (in != null) { // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement LOG.trace("Deleting: {} using statement: {}", in, statement); result = session.delete(statement, in); doProcessResult(exchange, result, session); } else { LOG.trace("Deleting using statement: {}", statement); result = session.delete(statement); doProcessResult(exchange, result, session); } } private void doProcessResult(Exchange exchange, Object result, SqlSession session) { final String outputHeader = getEndpoint().getOutputHeader(); Message answer = exchange.getIn(); if (ExchangeHelper.isOutCapable(exchange)) { answer = exchange.getOut(); // preserve headers answer.getHeaders().putAll(exchange.getIn().getHeaders()); if (outputHeader != null) { //if we put the MyBatis result into a header we should preserve the body as well answer.setBody(exchange.getIn().getBody()); } } if (endpoint.getStatementType() == StatementType.SelectList || endpoint.getStatementType() == StatementType.SelectOne) { // we should not set the body if its a stored procedure as the result is already in its OUT parameter MappedStatement ms = session.getConfiguration().getMappedStatement(statement); if (ms != null && ms.getStatementType() == org.apache.ibatis.mapping.StatementType.CALLABLE) { if (result == null) { LOG.trace("Setting result as existing body as MyBatis 
statement type is Callable, and there was no result."); answer.setBody(exchange.getIn().getBody()); } else { if (outputHeader != null) { // set the result as header for insert LOG.trace("Setting result as header [{}]: {}", outputHeader, result); answer.setHeader(outputHeader, result); } else { // set the result as body for insert LOG.trace("Setting result as body: {}", result); answer.setBody(result); answer.setHeader(MyBatisConstants.MYBATIS_RESULT, result); } } } else { if (outputHeader != null) { LOG.trace("Setting result as header [{}]: {}", outputHeader, result); answer.setHeader(outputHeader, result); } else { // set the result as body for insert LOG.trace("Setting result as body: {}", result); answer.setBody(result); answer.setHeader(MyBatisConstants.MYBATIS_RESULT, result); } } } else { final String headerName = (outputHeader != null) ? outputHeader : MyBatisConstants.MYBATIS_RESULT; answer.setHeader(headerName, result); } answer.setHeader(MyBatisConstants.MYBATIS_STATEMENT_NAME, statement); } @Override public MyBatisEndpoint getEndpoint() { return (MyBatisEndpoint) super.getEndpoint(); } private Object getInput(final Exchange exchange) { final String inputHeader = getEndpoint().getInputHeader(); if (inputHeader != null) { return exchange.getIn().getHeader(inputHeader); } else { return exchange.getIn().getBody(); } } }
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.browsing_data; import android.app.Activity; import android.app.ActivityManager; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.CheckBox; import android.widget.ImageView; import android.widget.ListView; import android.widget.TextView; import androidx.annotation.VisibleForTesting; import androidx.appcompat.app.AlertDialog; import androidx.fragment.app.DialogFragment; import org.chromium.base.CollectionUtil; import org.chromium.base.ContextUtils; import org.chromium.chrome.R; import org.chromium.chrome.browser.profiles.Profile; import org.chromium.chrome.browser.ui.favicon.FaviconUtils; import org.chromium.chrome.browser.ui.favicon.IconType; import org.chromium.chrome.browser.ui.favicon.LargeIconBridge; import org.chromium.chrome.browser.ui.favicon.LargeIconBridge.LargeIconCallback; import org.chromium.chrome.browser.webapps.WebappRegistry; import org.chromium.components.browser_ui.util.ConversionUtils; import org.chromium.components.browser_ui.widget.RoundedIconGenerator; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; /** * Modal dialog that shows a list of important domains to the user which they can uncheck. Used to * allow the user to exclude domains from being cleared by the clear browsing data function. 
* We use proper bundle construction (through the
 * {@link #newInstance(String[], int[], String[])} method) and onActivityResult return
 * conventions.
 */
public class ConfirmImportantSitesDialogFragment extends DialogFragment {
    /** Adapter backing the list of important sites; also handles row taps. */
    private class ClearBrowsingDataAdapter
            extends ArrayAdapter<String> implements AdapterView.OnItemClickListener {
        private final String[] mDomains;
        private final int mFaviconSize;
        private RoundedIconGenerator mIconGenerator;

        private ClearBrowsingDataAdapter(
                String[] domains, String[] faviconURLs, Resources resources) {
            super(getActivity(), R.layout.confirm_important_sites_list_row, domains);
            mDomains = domains;
            mFaviconURLs = faviconURLs;
            mFaviconSize = resources.getDimensionPixelSize(R.dimen.default_favicon_size);
            mIconGenerator = FaviconUtils.createRoundedRectangleIconGenerator(getResources());
        }

        @Override
        public boolean hasStableIds() {
            return true;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View row = convertView;
            if (row == null) {
                // First use of this row: inflate it and cache its children in a holder.
                LayoutInflater inflater = LayoutInflater.from(getActivity());
                row = inflater.inflate(R.layout.confirm_important_sites_list_row, parent, false);
                ViewAndFaviconHolder holder = new ViewAndFaviconHolder();
                holder.checkboxView = (CheckBox) row.findViewById(R.id.icon_row_checkbox);
                holder.imageView = (ImageView) row.findViewById(R.id.icon_row_image);
                row.setTag(holder);
            }
            configureChildView(position, (ViewAndFaviconHolder) row.getTag());
            return row;
        }

        private void configureChildView(int position, ViewAndFaviconHolder viewHolder) {
            String domain = mDomains[position];
            viewHolder.checkboxView.setChecked(mCheckedState.get(domain));
            viewHolder.checkboxView.setText(domain);
            loadFavicon(viewHolder, mFaviconURLs[position]);
        }

        /** Toggles the row's checkbox and records the new state for the tapped domain. */
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            String domain = mDomains[position];
            ViewAndFaviconHolder holder = (ViewAndFaviconHolder) view.getTag();
            boolean nowChecked = !mCheckedState.get(domain);
            mCheckedState.put(domain, nowChecked);
            holder.checkboxView.setChecked(nowChecked);
        }

        private void loadFavicon(final ViewAndFaviconHolder viewHolder, final String url) {
            viewHolder.imageCallback = new LargeIconCallback() {
                @Override
                public void onLargeIconAvailable(Bitmap icon, int fallbackColor,
                        boolean isFallbackColorDefault, @IconType int iconType) {
                    // The row may have been recycled for a different URL since this fetch
                    // started; only the most recently registered callback may bind an image.
                    if (this != viewHolder.imageCallback) return;
                    Drawable image = FaviconUtils.getIconDrawableWithoutFilter(icon, url,
                            fallbackColor, mIconGenerator, getResources(), mFaviconSize);
                    viewHolder.imageView.setImageDrawable(image);
                }
            };
            mLargeIconBridge.getLargeIconForStringUrl(url, mFaviconSize, viewHolder.imageCallback);
        }
    }

    /**
     * ViewHolder class optimizes looking up table row fields. findViewById is only called once
     * per row view initialization, and the references are cached here. Also stores a reference
     * to the favicon image callback so that we can make sure we load the correct favicon.
     */
    private static class ViewAndFaviconHolder {
        public CheckBox checkboxView;
        public ImageView imageView;
        public LargeIconCallback imageCallback;
    }

    /**
     * Constructs a new instance of the important sites dialog fragment.
     * @param importantDomains The list of important domains to display.
     * @param importantDomainReasons The reasons for choosing each important domain.
     * @param faviconURLs The list of favicon urls that correspond to each importantDomains.
     * @return An instance of ConfirmImportantSitesDialogFragment with the bundle arguments set.
     */
    public static ConfirmImportantSitesDialogFragment newInstance(
            String[] importantDomains, int[] importantDomainReasons, String[] faviconURLs) {
        ConfirmImportantSitesDialogFragment fragment = new ConfirmImportantSitesDialogFragment();
        Bundle args = new Bundle();
        args.putStringArray(IMPORTANT_DOMAINS_TAG, importantDomains);
        args.putIntArray(IMPORTANT_DOMAIN_REASONS_TAG, importantDomainReasons);
        args.putStringArray(FAVICON_URLS_TAG, faviconURLs);
        fragment.setArguments(args);
        return fragment;
    }

    /** Hard cap for the favicon cache, regardless of device memory class. */
    private static final int FAVICON_MAX_CACHE_SIZE_BYTES =
            100 * ConversionUtils.BYTES_PER_KILOBYTE; // 100KB

    /** The tag used when showing the clear browsing fragment. */
    public static final String FRAGMENT_TAG = "ConfirmImportantSitesDialogFragment";

    /** The tag for the string array of deselected domains. These are meant to NOT be cleared. */
    public static final String DESELECTED_DOMAINS_TAG = "DeselectedDomains";
    /** The tag for the int array of reasons the deselected domains were important. */
    public static final String DESELECTED_DOMAIN_REASONS_TAG = "DeselectedDomainReasons";
    /** The tag for the string array of ignored domains, which will be cleared. */
    public static final String IGNORED_DOMAINS_TAG = "IgnoredDomains";
    /** The tag for the int array of reasons the ignored domains were important. */
    public static final String IGNORED_DOMAIN_REASONS_TAG = "IgnoredDomainReasons";

    /** The tag used for logging. */
    public static final String TAG = "ConfirmImportantSitesDialogFragment";

    /** The tag used to store the important domains in the bundle. */
    private static final String IMPORTANT_DOMAINS_TAG = "ImportantDomains";
    /** The tag used to store the important domain reasons in the bundle. */
    private static final String IMPORTANT_DOMAIN_REASONS_TAG = "ImportantDomainReasons";
    /** The tag used to store the favicon urls corresponding to each important domain. */
    private static final String FAVICON_URLS_TAG = "FaviconURLs";

    /** Array of important registerable domains we're showing to the user. */
    private String[] mImportantDomains;
    /** Map of the reasons the above important domains were chosen. */
    private Map<String, Integer> mImportantDomainsReasons;
    /** Array of favicon urls to use for each important domain above. */
    private String[] mFaviconURLs;
    /** The map of domains to the checked state, where true is checked. */
    private Map<String, Boolean> mCheckedState;
    /** The alert dialog shown to the user. */
    private AlertDialog mDialog;
    /** Our adapter that we use with the list view in the dialog. */
    private ClearBrowsingDataAdapter mAdapter;
    private LargeIconBridge mLargeIconBridge;
    private Profile mProfile;
    /** We store the custom list view for testing */
    private ListView mSitesListView;

    public ConfirmImportantSitesDialogFragment() {
        mImportantDomainsReasons = new HashMap<>();
        mCheckedState = new HashMap<>();
    }

    @Override
    public void setArguments(Bundle args) {
        super.setArguments(args);
        mImportantDomains = args.getStringArray(IMPORTANT_DOMAINS_TAG);
        mFaviconURLs = args.getStringArray(FAVICON_URLS_TAG);
        int[] importantDomainReasons = args.getIntArray(IMPORTANT_DOMAIN_REASONS_TAG);
        // Every domain starts out checked, i.e. it will be spared from clearing.
        for (int i = 0; i < mImportantDomains.length; ++i) {
            mImportantDomainsReasons.put(mImportantDomains[i], importantDomainReasons[i]);
            mCheckedState.put(mImportantDomains[i], true);
        }
    }

    @VisibleForTesting
    public Set<String> getDeselectedDomains() {
        HashSet<String> deselected = new HashSet<>();
        for (Entry<String, Boolean> entry : mCheckedState.entrySet()) {
            if (!entry.getValue()) deselected.add(entry.getKey());
        }
        return deselected;
    }

    @VisibleForTesting
    public ListView getSitesList() {
        return mSitesListView;
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        if (mLargeIconBridge != null) {
            mLargeIconBridge.destroy();
        }
    }

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        // We check the domains and urls as well due to crbug.com/622879.
        if (savedInstanceState != null) {
            // The important domains and favicon URLs aren't currently saved, so if this dialog
            // is recreated from a saved instance they will be null. This method must return a
            // valid dialog, so these two arrays are initialized, then the dialog is dismissed.
            // TODO(dmurph): save mImportantDomains and mFaviconURLs so that they can be restored
            // from a savedInstanceState and the dialog can be properly recreated rather than
            // dismissed.
            mImportantDomains = new String[0];
            mFaviconURLs = new String[0];
            dismiss();
        }
        mProfile = Profile.getLastUsedRegularProfile();
        mLargeIconBridge = new LargeIconBridge(mProfile);
        ActivityManager activityManager =
                (ActivityManager) ContextUtils.getApplicationContext().getSystemService(
                        Context.ACTIVITY_SERVICE);
        // Scale the favicon cache with the device memory class, but never above the fixed cap.
        int maxSize = Math.min(
                activityManager.getMemoryClass() / 16 * 25 * ConversionUtils.BYTES_PER_KILOBYTE,
                FAVICON_MAX_CACHE_SIZE_BYTES);
        mLargeIconBridge.createCache(maxSize);

        mAdapter = new ClearBrowsingDataAdapter(mImportantDomains, mFaviconURLs, getResources());
        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                if (which != AlertDialog.BUTTON_POSITIVE) {
                    getTargetFragment().onActivityResult(getTargetRequestCode(),
                            Activity.RESULT_CANCELED, getActivity().getIntent());
                    return;
                }
                // Partition the domains: still-checked ones are "ignored" (their data will
                // be cleared), unchecked ones are "deselected" (spared from clearing).
                Intent data = new Intent();
                List<String> deselectedDomains = new ArrayList<>();
                List<Integer> deselectedDomainReasons = new ArrayList<>();
                List<String> ignoredDomains = new ArrayList<>();
                List<Integer> ignoredDomainReasons = new ArrayList<>();
                for (Entry<String, Boolean> entry : mCheckedState.entrySet()) {
                    Integer reason = mImportantDomainsReasons.get(entry.getKey());
                    if (entry.getValue()) {
                        ignoredDomains.add(entry.getKey());
                        ignoredDomainReasons.add(reason);
                    } else {
                        deselectedDomains.add(entry.getKey());
                        deselectedDomainReasons.add(reason);
                    }
                }
                data.putExtra(DESELECTED_DOMAINS_TAG, deselectedDomains.toArray(new String[0]));
                data.putExtra(DESELECTED_DOMAIN_REASONS_TAG,
                        CollectionUtil.integerListToIntArray(deselectedDomainReasons));
                data.putExtra(IGNORED_DOMAINS_TAG, ignoredDomains.toArray(new String[0]));
                data.putExtra(IGNORED_DOMAIN_REASONS_TAG,
                        CollectionUtil.integerListToIntArray(ignoredDomainReasons));
                getTargetFragment().onActivityResult(
                        getTargetRequestCode(), Activity.RESULT_OK, data);
            }
        };
        // Mention installed web apps in the copy when any listed domain has one.
        Set<String> originsWithApps = WebappRegistry.getInstance().getOriginsWithInstalledApp();
        boolean includesApp = false;
        for (String domain : mImportantDomains) {
            if (originsWithApps.contains(domain)) {
                includesApp = true;
                break;
            }
        }
        int titleResource = includesApp ? R.string.important_sites_title_with_app
                                        : R.string.important_sites_title;
        int messageResource = includesApp
                ? R.string.clear_browsing_data_important_dialog_text_with_app
                : R.string.clear_browsing_data_important_dialog_text;

        View messageAndListView = getActivity().getLayoutInflater().inflate(
                R.layout.clear_browsing_important_dialog_listview, null);
        mSitesListView = (ListView) messageAndListView.findViewById(R.id.select_dialog_listview);
        mSitesListView.setAdapter(mAdapter);
        mSitesListView.setOnItemClickListener(mAdapter);
        TextView message = messageAndListView.findViewById(R.id.message);
        message.setText(messageResource);

        mDialog = new AlertDialog.Builder(getActivity(), R.style.Theme_Chromium_AlertDialog)
                          .setTitle(titleResource)
                          .setPositiveButton(
                                  R.string.clear_browsing_data_important_dialog_button, listener)
                          .setNegativeButton(R.string.cancel, listener)
                          .setView(messageAndListView)
                          .create();
        return mDialog;
    }
}
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.pivotal.cla.webdriver.pages; import static org.assertj.core.api.Assertions.assertThat; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.ui.Select; import io.pivotal.cla.mvc.SignClaForm; import io.pivotal.cla.webdriver.pages.github.GitHubPullRequestPage; public class SignIclaPage extends BasePage { @FindBy(id = "individual-cla") WebElement individualCla; @FindBy(id = "success") WebElement success; @FindBy(id = "breadcrumb-cla-link") WebElement claLink; @FindBy(id = "pull-request") WebElement pullRequest; Form form; public SignIclaPage(WebDriver driver) { super(driver); } public void assertClaLink(String claName) { assertThat(claLink.getAttribute("href")).endsWith("/sign/"+claName); } public void assertPullRequestLink(String repositoryId, int pullRequestId) { String url = "https://github.com/" + repositoryId + "/pull/" + pullRequestId; assertThat(pullRequest.getAttribute("href")).isEqualTo(url); } public GitHubPullRequestPage pullRequest() { pullRequest.click(); return PageFactory.initElements(getDriver(), GitHubPullRequestPage.class); } public boolean isSigned() { try { return success.getText() != null; } catch(NoSuchElementException missing) { return false; } } 
public String getIndividualCla() { return individualCla.getText(); } public Form form() { if(form != null) { return form; } form = new Form(); PageFactory.initElements(getDriver(), form); return form; } public Form form(SignClaForm form) { return form().form(form); } public void assertAt() { assertThat(getDriver().getTitle()).endsWith("- Sign ICLA"); } public static String url(String cla) { return "/sign/"+ cla + "/icla"; } public static SignIclaPage go(WebDriver driver, String cla) { get(driver, url(cla)); return PageFactory.initElements(driver, SignIclaPage.class); } public static SignIclaPage go(WebDriver driver, String cla, String repositoryId, long pullRequestId) { get(driver, url(cla) + "?repositoryId="+repositoryId+"&pullRequestId="+pullRequestId); return PageFactory.initElements(driver, SignIclaPage.class); } public class Form { WebElement sign; WebElement name; WebElement email; WebElement mailingAddress; WebElement country; WebElement telephone; WebElement confirm; public <T extends BasePage> T sign(Class<T> page) { sign.click(); return PageFactory.initElements(getDriver(), page); } private Form form(SignClaForm form) { return this.name(form.getName()) .email(form.getEmail()) .mailingAddress(form.getMailingAddress()) .country(form.getCountry()) .telephone(form.getTelephone()); } public InputAssert assertName() { return assertInput(name); } public Form name(String name) { if(name == null) { this.name.clear(); return this; } this.name.sendKeys(name); return this; } public SelectAssert assertEmail() { return assertSelect(email); } public Form email(String email) { Select select = new Select(this.email); if(email == null) { select.deselectAll(); return this; } select.selectByValue(email); return this; } public Form email(int index) { new Select(this.email).selectByIndex(index); return this; } public InputAssert assertMailingAddress() { return assertInput(mailingAddress); } public Form mailingAddress(String address) { if(address == null) { 
this.mailingAddress.clear(); return this; } this.mailingAddress.sendKeys(address); return this; } public InputAssert assertCountry() { return assertInput(country); } public Form country(String country) { if(country == null) { this.country.clear(); return this; } this.country.sendKeys(country); return this; } public InputAssert assertTelephone() { return assertInput(telephone); } public Form telephone(String telephone) { this.telephone.sendKeys(telephone); return this; } public Form confirm() { this.confirm.click(); return this; } public CheckboxAssert assertConfirm() { return assertCheckbox(confirm); } } }
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package rx.util.functions;

import rx.Observer;

/**
 * Utility methods for working with {@link Observer}s, actions and functions: a shared
 * no-op action, adapters from Observer callbacks to actions, and adapters that turn an
 * action of any arity into the function of the same arity.
 *
 * @deprecated use the equivalent helpers in the {@code rx.functions} package instead
 */
@Deprecated
public final class Actions {
    /** Utility class; not instantiable. */
    private Actions() {
        throw new IllegalStateException("No instances!");
    }

    /**
     * Returns a shared, stateless action of every arity that ignores its arguments and
     * does nothing.
     *
     * @return the singleton no-op action
     */
    public static final EmptyAction empty() {
        return EMPTY_ACTION;
    }

    private static final EmptyAction EMPTY_ACTION = new EmptyAction();

    /**
     * No-op implementation of every action arity. Raw types are deliberate so that a
     * single stateless instance can serve any type arguments.
     */
    @SuppressWarnings("rawtypes")
    private static final class EmptyAction implements Action0, Action1, Action2, Action3, Action4, Action5, Action6, Action7, Action8, Action9, ActionN {
        @Override
        public void call() {
        }

        @Override
        public void call(Object t1) {
        }

        @Override
        public void call(Object t1, Object t2) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3, Object t4) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3, Object t4, Object t5) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3, Object t4, Object t5, Object t6) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3, Object t4, Object t5, Object t6, Object t7) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3, Object t4, Object t5, Object t6, Object t7, Object t8) {
        }

        @Override
        public void call(Object t1, Object t2, Object t3, Object t4, Object t5, Object t6, Object t7, Object t8, Object t9) {
        }

        @Override
        public void call(Object... args) {
        }
    }

    /**
     * Extracts a method reference to the observer's onNext method
     * in the form of an Action1.
     * <p>Java 8: observer::onNext</p>
     *
     * @param observer the observer to use
     * @return an action which calls the observer's onNext method.
     */
    public static <T> Action1<T> onNextFrom(final Observer<T> observer) {
        return new Action1<T>() {
            @Override
            public void call(T t1) {
                observer.onNext(t1);
            }
        };
    }

    /**
     * Extracts a method reference to the observer's onError method
     * in the form of an Action1.
     * <p>Java 8: observer::onError</p>
     *
     * @param observer the observer to use
     * @return an action which calls the observer's onError method.
     */
    public static <T> Action1<Throwable> onErrorFrom(final Observer<T> observer) {
        return new Action1<Throwable>() {
            @Override
            public void call(Throwable t1) {
                observer.onError(t1);
            }
        };
    }

    /**
     * Extracts a method reference to the observer's onCompleted method
     * in the form of an Action0.
     * <p>Java 8: observer::onCompleted</p>
     *
     * @param observer the observer to use
     * @return an action which calls the observer's onCompleted method.
     */
    public static <T> Action0 onCompletedFrom(final Observer<T> observer) {
        return new Action0() {
            @Override
            public void call() {
                observer.onCompleted();
            }
        };
    }

    /**
     * Converts an {@link Action0} into a {@link Func0} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func0} delegating to {@code action}
     */
    public static Func0<Void> toFunc(final Action0 action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action1} into a {@link Func1} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func1} delegating to {@code action}
     */
    public static <T1> Func1<T1, Void> toFunc(final Action1<T1> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action2} into a {@link Func2} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func2} delegating to {@code action}
     */
    public static <T1, T2> Func2<T1, T2, Void> toFunc(final Action2<T1, T2> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action3} into a {@link Func3} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func3} delegating to {@code action}
     */
    public static <T1, T2, T3> Func3<T1, T2, T3, Void> toFunc(final Action3<T1, T2, T3> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action4} into a {@link Func4} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func4} delegating to {@code action}
     */
    public static <T1, T2, T3, T4> Func4<T1, T2, T3, T4, Void> toFunc(final Action4<T1, T2, T3, T4> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action5} into a {@link Func5} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func5} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5> Func5<T1, T2, T3, T4, T5, Void> toFunc(
            final Action5<T1, T2, T3, T4, T5> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action6} into a {@link Func6} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func6} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6> Func6<T1, T2, T3, T4, T5, T6, Void> toFunc(
            final Action6<T1, T2, T3, T4, T5, T6> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action7} into a {@link Func7} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func7} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, T7> Func7<T1, T2, T3, T4, T5, T6, T7, Void> toFunc(
            final Action7<T1, T2, T3, T4, T5, T6, T7> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action8} into a {@link Func8} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func8} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, T7, T8> Func8<T1, T2, T3, T4, T5, T6, T7, T8, Void> toFunc(
            final Action8<T1, T2, T3, T4, T5, T6, T7, T8> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action9} into a {@link Func9} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link Func9} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Func9<T1, T2, T3, T4, T5, T6, T7, T8, T9, Void> toFunc(
            final Action9<T1, T2, T3, T4, T5, T6, T7, T8, T9> action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link ActionN} into a {@link FuncN} that calls the action and
     * returns {@code null} ({@code Void}).
     *
     * @param action the action to convert
     * @return a {@link FuncN} delegating to {@code action}
     */
    public static FuncN<Void> toFunc(
            final ActionN action) {
        return toFunc(action, (Void) null);
    }

    /**
     * Converts an {@link Action0} into a {@link Func0} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func0} delegating to {@code action}
     */
    public static <R> Func0<R> toFunc(final Action0 action, final R result) {
        return new Func0<R>() {
            @Override
            public R call() {
                action.call();
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action1} into a {@link Func1} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func1} delegating to {@code action}
     */
    public static <T1, R> Func1<T1, R> toFunc(final Action1<T1> action, final R result) {
        return new Func1<T1, R>() {
            @Override
            public R call(T1 t1) {
                action.call(t1);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action2} into a {@link Func2} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func2} delegating to {@code action}
     */
    public static <T1, T2, R> Func2<T1, T2, R> toFunc(final Action2<T1, T2> action, final R result) {
        return new Func2<T1, T2, R>() {
            @Override
            public R call(T1 t1, T2 t2) {
                action.call(t1, t2);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action3} into a {@link Func3} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func3} delegating to {@code action}
     */
    public static <T1, T2, T3, R> Func3<T1, T2, T3, R> toFunc(final Action3<T1, T2, T3> action, final R result) {
        return new Func3<T1, T2, T3, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3) {
                action.call(t1, t2, t3);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action4} into a {@link Func4} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func4} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, R> Func4<T1, T2, T3, T4, R> toFunc(final Action4<T1, T2, T3, T4> action, final R result) {
        return new Func4<T1, T2, T3, T4, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3, T4 t4) {
                action.call(t1, t2, t3, t4);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action5} into a {@link Func5} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func5} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, R> Func5<T1, T2, T3, T4, T5, R> toFunc(
            final Action5<T1, T2, T3, T4, T5> action, final R result) {
        return new Func5<T1, T2, T3, T4, T5, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) {
                action.call(t1, t2, t3, t4, t5);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action6} into a {@link Func6} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func6} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, R> Func6<T1, T2, T3, T4, T5, T6, R> toFunc(
            final Action6<T1, T2, T3, T4, T5, T6> action, final R result) {
        return new Func6<T1, T2, T3, T4, T5, T6, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6) {
                action.call(t1, t2, t3, t4, t5, t6);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action7} into a {@link Func7} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func7} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, T7, R> Func7<T1, T2, T3, T4, T5, T6, T7, R> toFunc(
            final Action7<T1, T2, T3, T4, T5, T6, T7> action, final R result) {
        return new Func7<T1, T2, T3, T4, T5, T6, T7, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7) {
                action.call(t1, t2, t3, t4, t5, t6, t7);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action8} into a {@link Func8} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func8} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, T7, T8, R> Func8<T1, T2, T3, T4, T5, T6, T7, T8, R> toFunc(
            final Action8<T1, T2, T3, T4, T5, T6, T7, T8> action, final R result) {
        return new Func8<T1, T2, T3, T4, T5, T6, T7, T8, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) {
                action.call(t1, t2, t3, t4, t5, t6, t7, t8);
                return result;
            }
        };
    }

    /**
     * Converts an {@link Action9} into a {@link Func9} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link Func9} delegating to {@code action}
     */
    public static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Func9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> toFunc(
            final Action9<T1, T2, T3, T4, T5, T6, T7, T8, T9> action, final R result) {
        return new Func9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R>() {
            @Override
            public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9) {
                action.call(t1, t2, t3, t4, t5, t6, t7, t8, t9);
                return result;
            }
        };
    }

    /**
     * Converts an {@link ActionN} into a {@link FuncN} that calls the action and
     * then returns the supplied {@code result}.
     *
     * @param action the action to convert
     * @param result the value the resulting function returns after invoking the action
     * @return a {@link FuncN} delegating to {@code action}
     */
    public static <R> FuncN<R> toFunc(
            final ActionN action, final R result) {
        return new FuncN<R>() {
            @Override
            public R call(Object... args) {
                action.call(args);
                return result;
            }
        };
    }
}
/* Copyright 2010 Technion - Israel Institute of Technology Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package il.ac.technion.cs.d2rqUpdate; import il.ac.technion.cs.d2rqUpdate.SQLUpdateExecutionEngine.ExecutionEngine; import il.ac.technion.cs.d2rqUpdate.SQLUpdateExecutionEngine.UpdateStatement; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.hp.hpl.jena.graph.Triple; import de.fuberlin.wiwiss.d2rq.D2RQException; import de.fuberlin.wiwiss.d2rq.algebra.Attribute; import de.fuberlin.wiwiss.d2rq.algebra.RelationName; import de.fuberlin.wiwiss.d2rq.algebra.TripleRelation; import de.fuberlin.wiwiss.d2rq.dbschema.DatabaseSchemaInspector; import de.fuberlin.wiwiss.d2rq.sql.ConnectedDB; /** * An auxiliary class for adding triples * * @author Vadim Eisenberg <Vadim.Eisenberg@gmail.com> */ public class TripleDeleter extends TripleUpdater { static private Log log = LogFactory.getLog(TripleDeleter.class); private final Set<Attribute> allMappedAttributes; public TripleDeleter(Collection<Triple> triples, Collection<TripleRelation> propertyBridges, Set<Attribute> allMappedAttributes) { super(triples, propertyBridges); this.allMappedAttributes = allMappedAttributes; } @Override protected void execute(ExecutionEngine engine, ConnectedDB 
database, RelationName table, Map<Attribute, String> subjectValues, Set<ObjectValuesProjectionsPair> objectValuesProjectionsPairs) { log.debug("subjectValues = " + subjectValues); for (ObjectValuesProjectionsPair pair : objectValuesProjectionsPairs) { log.debug("object values = " + pair.getObjectValues() + " projections = " + pair.getProjections()); } DatabaseSchemaInspector inspector = new DatabaseSchemaInspector(database); boolean tableHasPrimaryKeys = !inspector.primaryKeyColumns(table).isEmpty(); if (!tableHasPrimaryKeys) { throw new D2RQException(table.toString() + " has no primary keys " + "update not yet implemented"); } if (multipleValuesPerAttributeExist(objectValuesProjectionsPairs)) { for (ObjectValuesProjectionsPair pair : objectValuesProjectionsPairs) { log.debug("object values = " + pair.getObjectValues() + " projections = " + pair.getProjections()); Map<Attribute, String> objectValues = getObjectValuesToDelete(pair); executePerSubjectsAndObjects(engine, database, table, subjectValues, inspector, objectValues); } } else { Map<Attribute, String> objectValues = getObjectValuesToDelete(objectValuesProjectionsPairs); log.debug("object values to delete = " + objectValues); executePerSubjectsAndObjects(engine, database, table, subjectValues, inspector, objectValues); } } /** * @param objectValuesProjectionsPairs * @param database * @return */ private Map<Attribute, String> getObjectValuesToDelete( Set<ObjectValuesProjectionsPair> objectValuesProjectionsPairs) { Map<Attribute, String> objectValuesToDelete = new HashMap<Attribute, String>(); for (ObjectValuesProjectionsPair pair : objectValuesProjectionsPairs){ objectValuesToDelete.putAll(getObjectValuesToDelete(pair)); } return objectValuesToDelete; } private void executePerSubjectsAndObjects(ExecutionEngine engine, ConnectedDB database, RelationName table, Map<Attribute, String> subjectValues, DatabaseSchemaInspector inspector, Map<Attribute, String> objectValues) { if (objectValues.size() == 0) { return; } 
if (shouldDeleteTheWholeRow(database, table, subjectValues, objectValues.keySet(), inspector)) { Map<Attribute, String> allValues = new HashMap<Attribute, String>(subjectValues); allValues.putAll(objectValues); executeDeleteRow(engine, database, table, allValues, subjectValues, inspector); return; } verifyNonNullablesAreNotDeleted(database, table, subjectValues, objectValues, inspector); executeUpdatesToNULL(engine, database, table, subjectValues, objectValues); } /** * @param objectValuesProjectionsPairs * @param database * @return */ protected Map<Attribute, String> getObjectValuesToDelete( ObjectValuesProjectionsPair pair) { Map<Attribute, String> objectValuesToDelete = new HashMap<Attribute, String>(); Map<Attribute, String> objectValues = pair.getObjectValues(); Set<Attribute> candidateObjectsToDelete = objectValues.keySet(); // if there are more object values to delete - delete only those // that participate in the projections. this will not delete // the column values in conditions (when the column alone does // not determine the value of the triple) // if there is only one object value to delete - delete it, since // (when the column alone determines the value of the triple) if (candidateObjectsToDelete.size() > 1) { candidateObjectsToDelete.retainAll(pair.getProjections()); } for (Attribute object : candidateObjectsToDelete) { objectValuesToDelete.put(object, objectValues.get(object)); } return objectValuesToDelete; } private void executeUpdatesToNULL(ExecutionEngine engine, ConnectedDB database, RelationName table, Map<Attribute, String> subjectValues, Map<Attribute, String> objectValues) { Map<Attribute, List<String>> whereValues = convertMap2Scalar_2_Map2List(subjectValues); for (Attribute attribute : objectValues.keySet()) { Map<Attribute, List<String>> whereValuesPerDeletedAttribute = new HashMap<Attribute, List<String>>(whereValues); whereValuesPerDeletedAttribute.put(attribute, java.util.Collections .singletonList(objectValues.get(attribute))); 
String sqlString = new UpdateStatementBuilder(table, database, java.util.Collections.singletonMap(attribute, "NULL"), whereValuesPerDeletedAttribute) .getSQLStatement(); log.debug("update sqlString = " + sqlString); engine.add(new UpdateStatement(database, table, java.util.Collections.singletonMap(attribute,"NULL"), sqlString, 0, UpdateStatement.Type.UPDATE_TO_NULL, subjectValues)); } } private void executeDeleteRow(ExecutionEngine engine, ConnectedDB database, RelationName table, Map<Attribute, String> allValues, Map<Attribute, String> subjectValues, DatabaseSchemaInspector inspector) { String sqlString = new DeleteStatementBuilder(table, database, allValues).getSQLStatement(); engine.add(new UpdateStatement(database, table, allValues, sqlString, 0, UpdateStatement.Type.DELETE, subjectValues)); } /** * @param database * @param table * @param objectValues * @param objectValues2 * @param inspector * @return */ @SuppressWarnings("unchecked") private boolean shouldDeleteTheWholeRow(ConnectedDB database, RelationName table, Map<Attribute, String> subjectValues, Set<Attribute> objectAttributes, DatabaseSchemaInspector inspector) { log.debug("attributesDelete = " + objectAttributes); // check that all the columns that are not being deleted contain // a NULL value or are not mapped to any property Map<Attribute, String> selectConditionValues = new HashMap<Attribute, String>(subjectValues); boolean allTheColumnsAreBeingDeleted = true; List<Attribute> columnsToCheck = inspector.listColumns(table); // check only columns that are mapped to some property columnsToCheck.retainAll(allMappedAttributes); for (Attribute column : columnsToCheck) { if (!objectAttributes.contains(column)) { log.debug(column + " is not deleted"); allTheColumnsAreBeingDeleted = false; selectConditionValues.put(column, "NULL"); } } if (allTheColumnsAreBeingDeleted) { return true; } log.debug("selectConditionValues = " + selectConditionValues); String selectSQLString = new SelectStatementBuilder(table, 
database, subjectValues .keySet(), selectConditionValues).getSQLStatement() + " LIMIT 1"; log.debug("select sqlString = " + selectSQLString); try { Connection connection = database.connection(); Statement selectStatement = connection.createStatement(); ResultSet resultSet = selectStatement.executeQuery(selectSQLString); return resultSet.next(); } catch (SQLException exceptionFromQuery) { throw new D2RQException(exceptionFromQuery.getMessage() + ": " + selectSQLString); } } private void verifyNonNullablesAreNotDeleted(ConnectedDB database, RelationName table, Map<Attribute, String> subjectValues, Map<Attribute, String> objectValues, DatabaseSchemaInspector inspector) { for (Attribute attribute : objectValues.keySet()) { if (!inspector.isNullable(attribute)) { if (doesTheColumnContainsTheValueToBeDeleted(database, table, subjectValues, attribute, objectValues .get(attribute))) { throw new D2RQUpdateException( "Unable to set a non nullable" + " attribute " + attribute + " to NULL", D2RQUpdateException.DELETE_NOT_NULLABLE_ATTRIBUTE); } } } } private boolean doesTheColumnContainsTheValueToBeDeleted( ConnectedDB database, RelationName table, Map<Attribute, String> subjectValues, Attribute attribute, String valueToBeDeleted) { boolean theAttributeContainsTheValueToBeDeleted = false; Map<Attribute, String> selectConditionValues = new HashMap<Attribute, String>(subjectValues); selectConditionValues.put(attribute, valueToBeDeleted); String selectSQLString = new SelectStatementBuilder(table, database, subjectValues .keySet(), selectConditionValues).getSQLStatement() + " LIMIT 1"; log.debug("select sqlString = " + selectSQLString); try { Connection connection = database.connection(); Statement selectStatement = connection.createStatement(); ResultSet resultSet = selectStatement.executeQuery(selectSQLString); theAttributeContainsTheValueToBeDeleted = resultSet.next(); } catch (SQLException exceptionFromQuery) { throw new D2RQException(exceptionFromQuery.getMessage() + ": " + 
selectSQLString); } return theAttributeContainsTheValueToBeDeleted; } /* * (non-Javadoc) * * @seeil.ac.technion.cs.d2rqUpdate.AbstractTripleUpdater# * allowEmptySubjectsOrObjectsWhileHandlingJoins() */ @Override protected boolean allowEmptySubjectsOrObjectsWhileHandlingJoins() { return false; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.snapshot; import java.io.BufferedInputStream; import java.io.DataInput; import java.io.DataOutput; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.LinkedList; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.function.BiConsumer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.io.FileLink; import org.apache.hadoop.hbase.io.HFileLink; import 
org.apache.hadoop.hbase.io.WALLink; import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; import org.apache.hbase.thirdparty.org.apache.commons.cli.Option; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; /** * Export the specified snapshot to a given FileSystem. * * The .snapshot/name folder is copied to the destination cluster * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location. * When everything is done, the second cluster can restore the snapshot. 
*/ @InterfaceAudience.Public public class ExportSnapshot extends AbstractHBaseTool implements Tool { public static final String NAME = "exportsnapshot"; /** Configuration prefix for overrides for the source filesystem */ public static final String CONF_SOURCE_PREFIX = NAME + ".from."; /** Configuration prefix for overrides for the destination filesystem */ public static final String CONF_DEST_PREFIX = NAME + ".to."; private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class); private static final String MR_NUM_MAPS = "mapreduce.job.maps"; private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits"; private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name"; private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir"; private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user"; private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group"; private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode"; private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify"; private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root"; private static final String CONF_INPUT_ROOT = "snapshot.export.input.root"; private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size"; private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group"; private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb"; private static final String CONF_MR_JOB_NAME = "mapreduce.job.name"; protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp"; private static final String CONF_COPY_MANIFEST_THREADS = "snapshot.export.copy.references.threads"; private static final int DEFAULT_COPY_MANIFEST_THREADS = Runtime.getRuntime().availableProcessors(); static class Testing { static final String CONF_TEST_FAILURE = 
"test.snapshot.export.failure"; static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count"; int failuresCountToInject = 0; int injectedFailureCount = 0; } // Command line options and defaults. static final class Options { static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore."); static final Option TARGET_NAME = new Option(null, "target", true, "Target name for the snapshot."); static final Option COPY_TO = new Option(null, "copy-to", true, "Remote " + "destination hdfs://"); static final Option COPY_FROM = new Option(null, "copy-from", true, "Input folder hdfs:// (default hbase.rootdir)"); static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false, "Do not verify checksum, use name+length only."); static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false, "Do not verify the integrity of the exported snapshot."); static final Option OVERWRITE = new Option(null, "overwrite", false, "Rewrite the snapshot manifest if already exists."); static final Option CHUSER = new Option(null, "chuser", true, "Change the owner of the files to the specified one."); static final Option CHGROUP = new Option(null, "chgroup", true, "Change the group of the files to the specified one."); static final Option CHMOD = new Option(null, "chmod", true, "Change the permission of the files to the specified one."); static final Option MAPPERS = new Option(null, "mappers", true, "Number of mappers to use during the copy (mapreduce.job.maps)."); static final Option BANDWIDTH = new Option(null, "bandwidth", true, "Limit bandwidth to this value in MB/second."); } // Export Map-Reduce Counters, to keep track of the progress public enum Counter { MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED, BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED } private static class ExportMapper extends Mapper<BytesWritable, NullWritable, NullWritable, NullWritable> { private static final Logger LOG = 
LoggerFactory.getLogger(ExportMapper.class); final static int REPORT_SIZE = 1 * 1024 * 1024; final static int BUFFER_SIZE = 64 * 1024; private boolean verifyChecksum; private String filesGroup; private String filesUser; private short filesMode; private int bufferSize; private FileSystem outputFs; private Path outputArchive; private Path outputRoot; private FileSystem inputFs; private Path inputArchive; private Path inputRoot; private static Testing testing = new Testing(); @Override public void setup(Context context) throws IOException { Configuration conf = context.getConfiguration(); Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX); Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX); verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true); filesGroup = conf.get(CONF_FILES_GROUP); filesUser = conf.get(CONF_FILES_USER); filesMode = (short)conf.getInt(CONF_FILES_MODE, 0); outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT)); inputRoot = new Path(conf.get(CONF_INPUT_ROOT)); inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY); outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY); try { srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true); inputFs = FileSystem.get(inputRoot.toUri(), srcConf); } catch (IOException e) { throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e); } try { destConf.setBoolean("fs." 
+ outputRoot.toUri().getScheme() + ".impl.disable.cache", true); outputFs = FileSystem.get(outputRoot.toUri(), destConf); } catch (IOException e) { throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e); } // Use the default block size of the outputFs if bigger int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE); bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize); LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize)); for (Counter c : Counter.values()) { context.getCounter(c).increment(0); } if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) { testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0); // Get number of times we have already injected failure based on attempt number of this // task. testing.injectedFailureCount = context.getTaskAttemptID().getId(); } } @Override protected void cleanup(Context context) { IOUtils.closeStream(inputFs); IOUtils.closeStream(outputFs); } @Override public void map(BytesWritable key, NullWritable value, Context context) throws InterruptedException, IOException { SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes()); Path outputPath = getOutputPath(inputInfo); copyFile(context, inputInfo, outputPath); } /** * Returns the location where the inputPath will be copied. 
*/ private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException { Path path = null; switch (inputInfo.getType()) { case HFILE: Path inputPath = new Path(inputInfo.getHfile()); String family = inputPath.getParent().getName(); TableName table =HFileLink.getReferencedTableName(inputPath.getName()); String region = HFileLink.getReferencedRegionName(inputPath.getName()); String hfile = HFileLink.getReferencedHFileName(inputPath.getName()); path = new Path(CommonFSUtils.getTableDir(new Path("./"), table), new Path(region, new Path(family, hfile))); break; case WAL: LOG.warn("snapshot does not keeps WALs: " + inputInfo); break; default: throw new IOException("Invalid File Type: " + inputInfo.getType().toString()); } return new Path(outputArchive, path); } @SuppressWarnings("checkstyle:linelength") /** * Used by TestExportSnapshot to test for retries when failures happen. * Failure is injected in {@link #copyFile(Mapper.Context, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo, Path)}. */ private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo) throws IOException { if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return; if (testing.injectedFailureCount >= testing.failuresCountToInject) return; testing.injectedFailureCount++; context.getCounter(Counter.COPY_FAILED).increment(1); LOG.debug("Injecting failure. 
Count: " + testing.injectedFailureCount); throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s", testing.injectedFailureCount, testing.failuresCountToInject, inputInfo)); } private void copyFile(final Context context, final SnapshotFileInfo inputInfo, final Path outputPath) throws IOException { // Get the file information FileStatus inputStat = getSourceFileStatus(context, inputInfo); // Verify if the output file exists and is the same that we want to copy if (outputFs.exists(outputPath)) { FileStatus outputStat = outputFs.getFileStatus(outputPath); if (outputStat != null && sameFile(inputStat, outputStat)) { LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file."); context.getCounter(Counter.FILES_SKIPPED).increment(1); context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen()); return; } } InputStream in = openSourceFile(context, inputInfo); int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100); if (Integer.MAX_VALUE != bandwidthMB) { in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L); } try { context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen()); // Ensure that the output folder is there and copy the file createOutputPath(outputPath.getParent()); FSDataOutputStream out = outputFs.create(outputPath, true); try { copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen()); } finally { out.close(); } // Try to Preserve attributes if (!preserveAttributes(outputPath, inputStat)) { LOG.warn("You may have to run manually chown on: " + outputPath); } } finally { in.close(); injectTestFailure(context, inputInfo); } } /** * Create the output folder and optionally set ownership. 
*/ private void createOutputPath(final Path path) throws IOException { if (filesUser == null && filesGroup == null) { outputFs.mkdirs(path); } else { Path parent = path.getParent(); if (!outputFs.exists(parent) && !parent.isRoot()) { createOutputPath(parent); } outputFs.mkdirs(path); if (filesUser != null || filesGroup != null) { // override the owner when non-null user/group is specified outputFs.setOwner(path, filesUser, filesGroup); } if (filesMode > 0) { outputFs.setPermission(path, new FsPermission(filesMode)); } } } /** * Try to Preserve the files attribute selected by the user copying them from the source file * This is only required when you are exporting as a different user than "hbase" or on a system * that doesn't have the "hbase" user. * * This is not considered a blocking failure since the user can force a chmod with the user * that knows is available on the system. */ private boolean preserveAttributes(final Path path, final FileStatus refStat) { FileStatus stat; try { stat = outputFs.getFileStatus(path); } catch (IOException e) { LOG.warn("Unable to get the status for file=" + path); return false; } try { if (filesMode > 0 && stat.getPermission().toShort() != filesMode) { outputFs.setPermission(path, new FsPermission(filesMode)); } else if (refStat != null && !stat.getPermission().equals(refStat.getPermission())) { outputFs.setPermission(path, refStat.getPermission()); } } catch (IOException e) { LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage()); return false; } boolean hasRefStat = (refStat != null); String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner(); String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? 
filesGroup : refStat.getGroup(); if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) { try { if (!(user.equals(stat.getOwner()) && group.equals(stat.getGroup()))) { outputFs.setOwner(path, user, group); } } catch (IOException e) { LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage()); LOG.warn("The user/group may not exist on the destination cluster: user=" + user + " group=" + group); return false; } } return true; } private boolean stringIsNotEmpty(final String str) { return str != null && str.length() > 0; } private void copyData(final Context context, final Path inputPath, final InputStream in, final Path outputPath, final FSDataOutputStream out, final long inputFileSize) throws IOException { final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) + " (%.1f%%)"; try { byte[] buffer = new byte[bufferSize]; long totalBytesWritten = 0; int reportBytes = 0; int bytesRead; long stime = System.currentTimeMillis(); while ((bytesRead = in.read(buffer)) > 0) { out.write(buffer, 0, bytesRead); totalBytesWritten += bytesRead; reportBytes += bytesRead; if (reportBytes >= REPORT_SIZE) { context.getCounter(Counter.BYTES_COPIED).increment(reportBytes); context.setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten), (totalBytesWritten/(float)inputFileSize) * 100.0f) + " from " + inputPath + " to " + outputPath); reportBytes = 0; } } long etime = System.currentTimeMillis(); context.getCounter(Counter.BYTES_COPIED).increment(reportBytes); context.setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten), (totalBytesWritten/(float)inputFileSize) * 100.0f) + " from " + inputPath + " to " + outputPath); // Verify that the written size match if (totalBytesWritten != inputFileSize) { String msg = "number of bytes copied not matching copied=" + totalBytesWritten + " expected=" + inputFileSize + " for file=" + inputPath; throw new IOException(msg); } 
LOG.info("copy completed for input=" + inputPath + " output=" + outputPath); LOG.info("size=" + totalBytesWritten + " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0)); context.getCounter(Counter.FILES_COPIED).increment(1); } catch (IOException e) { LOG.error("Error copying " + inputPath + " to " + outputPath, e); context.getCounter(Counter.COPY_FAILED).increment(1); throw e; } } /** * Try to open the "source" file. * Throws an IOException if the communication with the inputFs fail or * if the file is not found. */ private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo) throws IOException { try { Configuration conf = context.getConfiguration(); FileLink link = null; switch (fileInfo.getType()) { case HFILE: Path inputPath = new Path(fileInfo.getHfile()); link = getFileLink(inputPath, conf); break; case WAL: String serverName = fileInfo.getWalServer(); String logName = fileInfo.getWalName(); link = new WALLink(inputRoot, serverName, logName); break; default: throw new IOException("Invalid File Type: " + fileInfo.getType().toString()); } return link.open(inputFs); } catch (IOException e) { context.getCounter(Counter.MISSING_FILES).increment(1); LOG.error("Unable to open source file=" + fileInfo.toString(), e); throw e; } } private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo) throws IOException { try { Configuration conf = context.getConfiguration(); FileLink link = null; switch (fileInfo.getType()) { case HFILE: Path inputPath = new Path(fileInfo.getHfile()); link = getFileLink(inputPath, conf); break; case WAL: link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName()); break; default: throw new IOException("Invalid File Type: " + fileInfo.getType().toString()); } return link.getFileStatus(inputFs); } catch 
(FileNotFoundException e) { context.getCounter(Counter.MISSING_FILES).increment(1); LOG.error("Unable to get the status for source file=" + fileInfo.toString(), e); throw e; } catch (IOException e) { LOG.error("Unable to get the status for source file=" + fileInfo.toString(), e); throw e; } } private FileLink getFileLink(Path path, Configuration conf) throws IOException{ String regionName = HFileLink.getReferencedRegionName(path.getName()); TableName tableName = HFileLink.getReferencedTableName(path.getName()); if(MobUtils.getMobRegionInfo(tableName).getEncodedName().equals(regionName)) { return HFileLink.buildFromHFileLinkPattern(MobUtils.getQualifiedMobRootDir(conf), HFileArchiveUtil.getArchivePath(conf), path); } return HFileLink.buildFromHFileLinkPattern(inputRoot, inputArchive, path); } private FileChecksum getFileChecksum(final FileSystem fs, final Path path) { try { return fs.getFileChecksum(path); } catch (IOException e) { LOG.warn("Unable to get checksum for file=" + path, e); return null; } } /** * Check if the two files are equal by looking at the file length, * and at the checksum (if user has specified the verifyChecksum flag). */ private boolean sameFile(final FileStatus inputStat, final FileStatus outputStat) { // Not matching length if (inputStat.getLen() != outputStat.getLen()) return false; // Mark files as equals, since user asked for no checksum verification if (!verifyChecksum) return true; // If checksums are not available, files are not the same. 
FileChecksum inChecksum = getFileChecksum(inputFs, inputStat.getPath()); if (inChecksum == null) return false; FileChecksum outChecksum = getFileChecksum(outputFs, outputStat.getPath()); if (outChecksum == null) return false; return inChecksum.equals(outChecksum); } } // ========================================================================== // Input Format // ========================================================================== /** * Extract the list of files (HFiles/WALs) to copy using Map-Reduce. * @return list of files referenced by the snapshot (pair of path and size) */ private static List<Pair<SnapshotFileInfo, Long>> getSnapshotFiles(final Configuration conf, final FileSystem fs, final Path snapshotDir) throws IOException { SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir); final List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<>(); final TableName table = TableName.valueOf(snapshotDesc.getTable()); // Get snapshot files LOG.info("Loading Snapshot '" + snapshotDesc.getName() + "' hfile list"); SnapshotReferenceUtil.visitReferencedFiles(conf, fs, snapshotDir, snapshotDesc, new SnapshotReferenceUtil.SnapshotVisitor() { @Override public void storeFile(final RegionInfo regionInfo, final String family, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { // for storeFile.hasReference() case, copied as part of the manifest if (!storeFile.hasReference()) { String region = regionInfo.getEncodedName(); String hfile = storeFile.getName(); Path path = HFileLink.createPath(table, region, family, hfile); SnapshotFileInfo fileInfo = SnapshotFileInfo.newBuilder() .setType(SnapshotFileInfo.Type.HFILE) .setHfile(path.toString()) .build(); long size; if (storeFile.hasFileSize()) { size = storeFile.getFileSize(); } else { size = HFileLink.buildFromHFileLinkPattern(conf, path).getFileStatus(fs).getLen(); } files.add(new Pair<>(fileInfo, size)); } } }); return files; } /** * Given a list of file paths 
and sizes, create around ngroups in as balanced a way as possible. * The groups created will have similar amounts of bytes. * <p> * The algorithm used is pretty straightforward; the file list is sorted by size, * and then each group fetch the bigger file available, iterating through groups * alternating the direction. */ static List<List<Pair<SnapshotFileInfo, Long>>> getBalancedSplits( final List<Pair<SnapshotFileInfo, Long>> files, final int ngroups) { // Sort files by size, from small to big Collections.sort(files, new Comparator<Pair<SnapshotFileInfo, Long>>() { public int compare(Pair<SnapshotFileInfo, Long> a, Pair<SnapshotFileInfo, Long> b) { long r = a.getSecond() - b.getSecond(); return (r < 0) ? -1 : ((r > 0) ? 1 : 0); } }); // create balanced groups List<List<Pair<SnapshotFileInfo, Long>>> fileGroups = new LinkedList<>(); long[] sizeGroups = new long[ngroups]; int hi = files.size() - 1; int lo = 0; List<Pair<SnapshotFileInfo, Long>> group; int dir = 1; int g = 0; while (hi >= lo) { if (g == fileGroups.size()) { group = new LinkedList<>(); fileGroups.add(group); } else { group = fileGroups.get(g); } Pair<SnapshotFileInfo, Long> fileInfo = files.get(hi--); // add the hi one sizeGroups[g] += fileInfo.getSecond(); group.add(fileInfo); // change direction when at the end or the beginning g += dir; if (g == ngroups) { dir = -1; g = ngroups - 1; } else if (g < 0) { dir = 1; g = 0; } } if (LOG.isDebugEnabled()) { for (int i = 0; i < sizeGroups.length; ++i) { LOG.debug("export split=" + i + " size=" + StringUtils.humanReadableInt(sizeGroups[i])); } } return fileGroups; } private static class ExportSnapshotInputFormat extends InputFormat<BytesWritable, NullWritable> { @Override public RecordReader<BytesWritable, NullWritable> createRecordReader(InputSplit split, TaskAttemptContext tac) throws IOException, InterruptedException { return new ExportSnapshotRecordReader(((ExportSnapshotInputSplit)split).getSplitKeys()); } @Override public List<InputSplit> 
getSplits(JobContext context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); Path snapshotDir = new Path(conf.get(CONF_SNAPSHOT_DIR)); FileSystem fs = FileSystem.get(snapshotDir.toUri(), conf); List<Pair<SnapshotFileInfo, Long>> snapshotFiles = getSnapshotFiles(conf, fs, snapshotDir); int mappers = conf.getInt(CONF_NUM_SPLITS, 0); if (mappers == 0 && snapshotFiles.size() > 0) { mappers = 1 + (snapshotFiles.size() / conf.getInt(CONF_MAP_GROUP, 10)); mappers = Math.min(mappers, snapshotFiles.size()); conf.setInt(CONF_NUM_SPLITS, mappers); conf.setInt(MR_NUM_MAPS, mappers); } List<List<Pair<SnapshotFileInfo, Long>>> groups = getBalancedSplits(snapshotFiles, mappers); List<InputSplit> splits = new ArrayList(groups.size()); for (List<Pair<SnapshotFileInfo, Long>> files: groups) { splits.add(new ExportSnapshotInputSplit(files)); } return splits; } private static class ExportSnapshotInputSplit extends InputSplit implements Writable { private List<Pair<BytesWritable, Long>> files; private long length; public ExportSnapshotInputSplit() { this.files = null; } public ExportSnapshotInputSplit(final List<Pair<SnapshotFileInfo, Long>> snapshotFiles) { this.files = new ArrayList(snapshotFiles.size()); for (Pair<SnapshotFileInfo, Long> fileInfo: snapshotFiles) { this.files.add(new Pair<>( new BytesWritable(fileInfo.getFirst().toByteArray()), fileInfo.getSecond())); this.length += fileInfo.getSecond(); } } private List<Pair<BytesWritable, Long>> getSplitKeys() { return files; } @Override public long getLength() throws IOException, InterruptedException { return length; } @Override public String[] getLocations() throws IOException, InterruptedException { return new String[] {}; } @Override public void readFields(DataInput in) throws IOException { int count = in.readInt(); files = new ArrayList<>(count); length = 0; for (int i = 0; i < count; ++i) { BytesWritable fileInfo = new BytesWritable(); fileInfo.readFields(in); long size = 
in.readLong(); files.add(new Pair<>(fileInfo, size)); length += size; } } @Override public void write(DataOutput out) throws IOException { out.writeInt(files.size()); for (final Pair<BytesWritable, Long> fileInfo: files) { fileInfo.getFirst().write(out); out.writeLong(fileInfo.getSecond()); } } } private static class ExportSnapshotRecordReader extends RecordReader<BytesWritable, NullWritable> { private final List<Pair<BytesWritable, Long>> files; private long totalSize = 0; private long procSize = 0; private int index = -1; ExportSnapshotRecordReader(final List<Pair<BytesWritable, Long>> files) { this.files = files; for (Pair<BytesWritable, Long> fileInfo: files) { totalSize += fileInfo.getSecond(); } } @Override public void close() { } @Override public BytesWritable getCurrentKey() { return files.get(index).getFirst(); } @Override public NullWritable getCurrentValue() { return NullWritable.get(); } @Override public float getProgress() { return (float)procSize / totalSize; } @Override public void initialize(InputSplit split, TaskAttemptContext tac) { } @Override public boolean nextKeyValue() { if (index >= 0) { procSize += files.get(index).getSecond(); } return(++index < files.size()); } } } // ========================================================================== // Tool // ========================================================================== /** * Run Map-Reduce Job to perform the files copy. 
*/ private void runCopyJob(final Path inputRoot, final Path outputRoot, final String snapshotName, final Path snapshotDir, final boolean verifyChecksum, final String filesUser, final String filesGroup, final int filesMode, final int mappers, final int bandwidthMB) throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = getConf(); if (filesGroup != null) conf.set(CONF_FILES_GROUP, filesGroup); if (filesUser != null) conf.set(CONF_FILES_USER, filesUser); if (mappers > 0) { conf.setInt(CONF_NUM_SPLITS, mappers); conf.setInt(MR_NUM_MAPS, mappers); } conf.setInt(CONF_FILES_MODE, filesMode); conf.setBoolean(CONF_CHECKSUM_VERIFY, verifyChecksum); conf.set(CONF_OUTPUT_ROOT, outputRoot.toString()); conf.set(CONF_INPUT_ROOT, inputRoot.toString()); conf.setInt(CONF_BANDWIDTH_MB, bandwidthMB); conf.set(CONF_SNAPSHOT_NAME, snapshotName); conf.set(CONF_SNAPSHOT_DIR, snapshotDir.toString()); String jobname = conf.get(CONF_MR_JOB_NAME, "ExportSnapshot-" + snapshotName); Job job = new Job(conf); job.setJobName(jobname); job.setJarByClass(ExportSnapshot.class); TableMapReduceUtil.addDependencyJars(job); job.setMapperClass(ExportMapper.class); job.setInputFormatClass(ExportSnapshotInputFormat.class); job.setOutputFormatClass(NullOutputFormat.class); job.setMapSpeculativeExecution(false); job.setNumReduceTasks(0); // Acquire the delegation Tokens Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX); TokenCache.obtainTokensForNamenodes(job.getCredentials(), new Path[] { inputRoot }, srcConf); Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX); TokenCache.obtainTokensForNamenodes(job.getCredentials(), new Path[] { outputRoot }, destConf); // Run the MR Job if (!job.waitForCompletion(true)) { throw new ExportSnapshotException(job.getStatus().getFailureInfo()); } } private void verifySnapshot(final Configuration baseConf, final FileSystem fs, final Path rootDir, final Path 
snapshotDir) throws IOException { // Update the conf with the current root dir, since may be a different cluster Configuration conf = new Configuration(baseConf); CommonFSUtils.setRootDir(conf, rootDir); CommonFSUtils.setFsDefault(conf, CommonFSUtils.getRootDir(conf)); SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir); SnapshotReferenceUtil.verifySnapshot(conf, fs, snapshotDir, snapshotDesc); } private void setConfigParallel(FileSystem outputFs, List<Path> traversedPath, BiConsumer<FileSystem, Path> task, Configuration conf) throws IOException { ExecutorService pool = Executors .newFixedThreadPool(conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS)); List<Future<Void>> futures = new ArrayList<>(); for (Path dstPath : traversedPath) { Future<Void> future = (Future<Void>) pool.submit(() -> task.accept(outputFs, dstPath)); futures.add(future); } try { for (Future<Void> future : futures) { future.get(); } } catch (InterruptedException | ExecutionException e) { throw new IOException(e); } finally { pool.shutdownNow(); } } private void setOwnerParallel(FileSystem outputFs, String filesUser, String filesGroup, Configuration conf, List<Path> traversedPath) throws IOException { setConfigParallel(outputFs, traversedPath, (fs, path) -> { try { fs.setOwner(path, filesUser, filesGroup); } catch (IOException e) { throw new RuntimeException( "set owner for file " + path + " to " + filesUser + ":" + filesGroup + " failed", e); } }, conf); } private void setPermissionParallel(final FileSystem outputFs, final short filesMode, final List<Path> traversedPath, final Configuration conf) throws IOException { if (filesMode <= 0) { return; } FsPermission perm = new FsPermission(filesMode); setConfigParallel(outputFs, traversedPath, (fs, path) -> { try { fs.setPermission(path, perm); } catch (IOException e) { throw new RuntimeException( "set permission for file " + path + " to " + filesMode + " failed", e); } }, conf); } private 
boolean verifyTarget = true; private boolean verifyChecksum = true; private String snapshotName = null; private String targetName = null; private boolean overwrite = false; private String filesGroup = null; private String filesUser = null; private Path outputRoot = null; private Path inputRoot = null; private int bandwidthMB = Integer.MAX_VALUE; private int filesMode = 0; private int mappers = 0; @Override protected void processOptions(CommandLine cmd) { snapshotName = cmd.getOptionValue(Options.SNAPSHOT.getLongOpt(), snapshotName); targetName = cmd.getOptionValue(Options.TARGET_NAME.getLongOpt(), targetName); if (cmd.hasOption(Options.COPY_TO.getLongOpt())) { outputRoot = new Path(cmd.getOptionValue(Options.COPY_TO.getLongOpt())); } if (cmd.hasOption(Options.COPY_FROM.getLongOpt())) { inputRoot = new Path(cmd.getOptionValue(Options.COPY_FROM.getLongOpt())); } mappers = getOptionAsInt(cmd, Options.MAPPERS.getLongOpt(), mappers); filesUser = cmd.getOptionValue(Options.CHUSER.getLongOpt(), filesUser); filesGroup = cmd.getOptionValue(Options.CHGROUP.getLongOpt(), filesGroup); filesMode = getOptionAsInt(cmd, Options.CHMOD.getLongOpt(), filesMode, 8); bandwidthMB = getOptionAsInt(cmd, Options.BANDWIDTH.getLongOpt(), bandwidthMB); overwrite = cmd.hasOption(Options.OVERWRITE.getLongOpt()); // And verifyChecksum and verifyTarget with values read from old args in processOldArgs(...). verifyChecksum = !cmd.hasOption(Options.NO_CHECKSUM_VERIFY.getLongOpt()); verifyTarget = !cmd.hasOption(Options.NO_TARGET_VERIFY.getLongOpt()); } /** * Execute the export snapshot by copying the snapshot metadata, hfiles and wals. * @return 0 on success, and != 0 upon failure. 
 */
@Override
public int doWork() throws IOException {
    Configuration conf = getConf();

    // Check user options
    if (snapshotName == null) {
        System.err.println("Snapshot name not provided.");
        LOG.error("Use -h or --help for usage instructions.");
        // NOTE(review): usage errors return 0 here — confirm callers do not
        // rely on a non-zero exit code for missing arguments.
        return 0;
    }
    if (outputRoot == null) {
        System.err.println("Destination file-system (--" + Options.COPY_TO.getLongOpt()
            + ") not provided.");
        LOG.error("Use -h or --help for usage instructions.");
        return 0;
    }
    // Target name defaults to the source snapshot name.
    if (targetName == null) {
        targetName = snapshotName;
    }
    if (inputRoot == null) {
        inputRoot = CommonFSUtils.getRootDir(conf);
    } else {
        CommonFSUtils.setRootDir(conf, inputRoot);
    }

    // Build per-cluster configurations; filesystem caching is disabled so the
    // source and destination do not share (and later close) the same FS object.
    Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);
    srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);
    FileSystem inputFs = FileSystem.get(inputRoot.toUri(), srcConf);
    Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);
    destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);
    FileSystem outputFs = FileSystem.get(outputRoot.toUri(), destConf);

    // Skip the tmp-dir staging step when requested, or when a custom working dir is set.
    boolean skipTmp = conf.getBoolean(CONF_SKIP_TMP, false)
        || conf.get(SnapshotDescriptionUtils.SNAPSHOT_WORKING_DIR) != null;
    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, inputRoot);
    Path snapshotTmpDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(targetName, outputRoot,
        destConf);
    Path outputSnapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(targetName, outputRoot);
    Path initialOutputSnapshotDir = skipTmp ? outputSnapshotDir : snapshotTmpDir;
    LOG.debug("inputFs={}, inputRoot={}", inputFs.getUri().toString(), inputRoot);
    LOG.debug("outputFs={}, outputRoot={}, skipTmp={}, initialOutputSnapshotDir={}", outputFs,
        outputRoot.toString(), skipTmp, initialOutputSnapshotDir);

    // Find the necessary directory which need to change owner and group
    Path needSetOwnerDir = SnapshotDescriptionUtils.getSnapshotRootDir(outputRoot);
    if (outputFs.exists(needSetOwnerDir)) {
        if (skipTmp) {
            needSetOwnerDir = outputSnapshotDir;
        } else {
            needSetOwnerDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(outputRoot, destConf);
            if (outputFs.exists(needSetOwnerDir)) {
                needSetOwnerDir = snapshotTmpDir;
            }
        }
    }

    // Check if the snapshot already exists
    if (outputFs.exists(outputSnapshotDir)) {
        if (overwrite) {
            if (!outputFs.delete(outputSnapshotDir, true)) {
                System.err.println("Unable to remove existing snapshot directory: " + outputSnapshotDir);
                return 1;
            }
        } else {
            System.err.println("The snapshot '" + targetName + "' already exists in the destination: "
                + outputSnapshotDir);
            return 1;
        }
    }

    if (!skipTmp) {
        // Check if the snapshot already in-progress
        if (outputFs.exists(snapshotTmpDir)) {
            if (overwrite) {
                if (!outputFs.delete(snapshotTmpDir, true)) {
                    System.err.println("Unable to remove existing snapshot tmp directory: "+snapshotTmpDir);
                    return 1;
                }
            } else {
                System.err.println("A snapshot with the same name '"+ targetName +"' may be in-progress");
                System.err.println("Please check "+snapshotTmpDir+". If the snapshot has completed, ");
                System.err.println("consider removing "+snapshotTmpDir+" by using the -overwrite option");
                return 1;
            }
        }
    }

    // Step 1 - Copy fs1:/.snapshot/<snapshot> to  fs2:/.snapshot/.tmp/<snapshot>
    // The snapshot references must be copied before the hfiles otherwise the cleaner
    // will remove them because they are unreferenced.
    List<Path> travesedPaths = new ArrayList<>();
    boolean copySucceeded = false;
    try {
        LOG.info("Copy Snapshot Manifest from " + snapshotDir + " to " + initialOutputSnapshotDir);
        travesedPaths =
            FSUtils.copyFilesParallel(inputFs, snapshotDir, outputFs, initialOutputSnapshotDir, conf,
                conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS));
        copySucceeded = true;
    } catch (IOException e) {
        throw new ExportSnapshotException("Failed to copy the snapshot directory: from=" + snapshotDir
            + " to=" + initialOutputSnapshotDir, e);
    } finally {
        // Only chown/chmod what was actually copied; a failed copy skips this.
        if (copySucceeded) {
            if (filesUser != null || filesGroup != null) {
                LOG.warn((filesUser == null ? "" : "Change the owner of " + needSetOwnerDir + " to "
                    + filesUser)
                    + (filesGroup == null ? "" : ", Change the group of " + needSetOwnerDir + " to "
                        + filesGroup));
                setOwnerParallel(outputFs, filesUser, filesGroup, conf, travesedPaths);
            }
            if (filesMode > 0) {
                LOG.warn("Change the permission of " + needSetOwnerDir + " to " + filesMode);
                setPermissionParallel(outputFs, (short)filesMode, travesedPaths, conf);
            }
        }
    }

    // Write a new .snapshotinfo if the target name is different from the source name
    if (!targetName.equals(snapshotName)) {
        SnapshotDescription snapshotDesc =
            SnapshotDescriptionUtils.readSnapshotInfo(inputFs, snapshotDir)
                .toBuilder()
                .setName(targetName)
                .build();
        SnapshotDescriptionUtils.writeSnapshotInfo(snapshotDesc, initialOutputSnapshotDir, outputFs);
        if (filesUser != null || filesGroup != null) {
            outputFs.setOwner(new Path(initialOutputSnapshotDir,
                SnapshotDescriptionUtils.SNAPSHOTINFO_FILE), filesUser, filesGroup);
        }
        if (filesMode > 0) {
            outputFs.setPermission(new Path(initialOutputSnapshotDir,
                SnapshotDescriptionUtils.SNAPSHOTINFO_FILE), new FsPermission((short)filesMode));
        }
    }

    // Step 2 - Start MR Job to copy files
    // The snapshot references must be copied before the files otherwise the files gets removed
    // by the HFileArchiver, since they have no references.
    try {
        runCopyJob(inputRoot, outputRoot, snapshotName, snapshotDir, verifyChecksum,
                   filesUser, filesGroup, filesMode, mappers, bandwidthMB);

        LOG.info("Finalize the Snapshot Export");
        if (!skipTmp) {
            // Step 3 - Rename fs2:/.snapshot/.tmp/<snapshot> fs2:/.snapshot/<snapshot>
            if (!outputFs.rename(snapshotTmpDir, outputSnapshotDir)) {
                throw new ExportSnapshotException("Unable to rename snapshot directory from=" +
                    snapshotTmpDir + " to=" + outputSnapshotDir);
            }
        }

        // Step 4 - Verify snapshot integrity
        if (verifyTarget) {
            LOG.info("Verify snapshot integrity");
            verifySnapshot(destConf, outputFs, outputRoot, outputSnapshotDir);
        }

        LOG.info("Export Completed: " + targetName);
        return 0;
    } catch (Exception e) {
        LOG.error("Snapshot export failed", e);
        // Best-effort cleanup of the partially exported snapshot.
        if (!skipTmp) {
            outputFs.delete(snapshotTmpDir, true);
        }
        outputFs.delete(outputSnapshotDir, true);
        return 1;
    } finally {
        IOUtils.closeStream(inputFs);
        IOUtils.closeStream(outputFs);
    }
}

@Override
protected void printUsage() {
    super.printUsage();
    System.out.println("\n"
        + "Examples:\n"
        + " hbase snapshot export \\\n"
        + " --snapshot MySnapshot --copy-to hdfs://srv2:8082/hbase \\\n"
        + " --chuser MyUser --chgroup MyGroup --chmod 700 --mappers 16\n"
        + "\n"
        + " hbase snapshot export \\\n"
        + " --snapshot MySnapshot --copy-from hdfs://srv2:8082/hbase \\\n"
        + " --copy-to hdfs://srv1:50070/hbase");
}

@Override
protected void addOptions() {
    addRequiredOption(Options.SNAPSHOT);
    addOption(Options.COPY_TO);
    addOption(Options.COPY_FROM);
    addOption(Options.TARGET_NAME);
    addOption(Options.NO_CHECKSUM_VERIFY);
    addOption(Options.NO_TARGET_VERIFY);
    addOption(Options.OVERWRITE);
    addOption(Options.CHUSER);
    addOption(Options.CHGROUP);
    addOption(Options.CHMOD);
    addOption(Options.MAPPERS);
    addOption(Options.BANDWIDTH);
}

public static void main(String[] args) {
    new ExportSnapshot().doStaticMain(args);
}
}
package com.viesis.viescraft.configs;

import java.io.File;

import com.viesis.viescraft.api.util.LogHelper;

import net.minecraft.util.text.TextFormatting;
import net.minecraftforge.common.config.Configuration;

/**
 * Forge configuration for ViesCraft: airship speeds, names, fuel rules and
 * HUD placement. {@link #init(File)} loads the config file and
 * {@link #syncConfig()} reads every option into the public static fields.
 */
public class ViesCraftConfig {

    // NOTE(review): worldRestart/mcRestart are never assigned in this class —
    // confirm they are set elsewhere or remove them.
    public static boolean worldRestart;
    public static boolean mcRestart;

    public static Configuration config;
    public static final String CATEGORY_VC = "ViesCraft Config";

    //public static boolean v1AirshipEnabled;
    //public static boolean v2AirshipEnabled;
    //public static boolean v3AirshipEnabled;
    //public static boolean v4AirshipEnabled;
    //public static boolean v5AirshipEnabled;
    //public static boolean v6AirshipEnabled;

    public static int v1AirshipSpeed;
    public static int v2AirshipSpeed;
    public static int v3AirshipSpeed;
    public static int v4AirshipSpeed;
    // NOTE(review): v5/v6 speed and name fields are never read from the config
    // (their syncConfig() lines are commented out below), so they stay 0/null.
    public static int v5AirshipSpeed;
    public static int v6AirshipSpeed;

    public static boolean vanillaFuel;
    public static boolean outsideModFuel;
    public static int viesolineBurnTime;

    public static String v1AirshipName;
    public static String v2AirshipName;
    public static String v3AirshipName;
    public static String v4AirshipName;
    public static String v5AirshipName;
    public static String v6AirshipName;

    public static boolean recipeDismounterPlayer;
    //public static boolean recipeMythic;

    public static boolean engineSounds;

    public static int HUDX;
    public static int HUDY;
    public static int HUDScale;

    // ---- Option defaults, config-entry names and comments --------------------

    public static final boolean V1AIRSHIPENABLED_DEFAULT = true;
    public static final String V1AIRSHIPENABLED_NAME = "Enable V1 Viesdenburg Airships?";
    public static final String V1AIRSHIPENABLED_COMMENT = "Should Viesdenburg airships be globally enabled?";

    public static final boolean V2AIRSHIPENABLED_DEFAULT = true;
    public static final String V2AIRSHIPENABLED_NAME = "Enable V2 Viesigible Airships?";
    public static final String V2AIRSHIPENABLED_COMMENT = "Should Viesigible airships be globally enabled?";

    public static final boolean V3AIRSHIPENABLED_DEFAULT = true;
    public static final String V3AIRSHIPENABLED_NAME = "Enable V3 Viesepelin Airships?";
    public static final String V3AIRSHIPENABLED_COMMENT = "Should Viesepelin airships be globally enabled?";

    public static final boolean V4AIRSHIPENABLED_DEFAULT = true;
    public static final String V4AIRSHIPENABLED_NAME = "Enable V4 Viesakron Airships?";
    public static final String V4AIRSHIPENABLED_COMMENT = "Should Viesakron airships be globally enabled?";

    public static final boolean V5AIRSHIPENABLED_DEFAULT = true;
    public static final String V5AIRSHIPENABLED_NAME = "Enable V5 Viesindus Airships?";
    public static final String V5AIRSHIPENABLED_COMMENT = "Should Viesindus airships be globally enabled?";

    public static final boolean V6AIRSHIPENABLED_DEFAULT = true;
    public static final String V6AIRSHIPENABLED_NAME = "Enable V6 Viesamune Airships?";
    public static final String V6AIRSHIPENABLED_COMMENT = "Should Viesamune airships be globally enabled?";

    public static final int V1AIRSHIPSPEED_DEFAULT = 100;
    public static final String V1AIRSHIPSPEED_NAME = "Airship Speed - Viesdenburg";
    public static final String V1AIRSHIPSPEED_COMMENT = "How fast do airships move based on walkspeed %? ";

    public static final int V2AIRSHIPSPEED_DEFAULT = 100;
    public static final String V2AIRSHIPSPEED_NAME = "Airship Speed - Viesigible";
    public static final String V2AIRSHIPSPEED_COMMENT = "How fast do airships move based on walkspeed %? ";

    public static final int V3AIRSHIPSPEED_DEFAULT = 100;
    public static final String V3AIRSHIPSPEED_NAME = "Airship Speed - Viesepelin";
    public static final String V3AIRSHIPSPEED_COMMENT = "How fast do airships move based on walkspeed %? ";

    public static final int V4AIRSHIPSPEED_DEFAULT = 100;
    public static final String V4AIRSHIPSPEED_NAME = "Airship Speed - Viesakron";
    public static final String V4AIRSHIPSPEED_COMMENT = "How fast do airships move based on walkspeed %? ";

    public static final int V5AIRSHIPSPEED_DEFAULT = 100;
    public static final String V5AIRSHIPSPEED_NAME = "Airship Speed - Viesindus";
    public static final String V5AIRSHIPSPEED_COMMENT = "How fast do airships move based on walkspeed %? ";

    public static final int V6AIRSHIPSPEED_DEFAULT = 100;
    public static final String V6AIRSHIPSPEED_NAME = "Airship Speed - Viesamune";
    public static final String V6AIRSHIPSPEED_COMMENT = "How fast do airships move based on walkspeed %? ";

    public static final String V1AIRSHIPNAME_DEFAULT = "Viesdenburg";
    public static final String V1AIRSHIPNAME_NAME = "Airship Name - Viesdenburg";
    public static final String V1AIRSHIPNAME_COMMENT = "Change the name of Viesdenburg Airships?";

    public static final String V2AIRSHIPNAME_DEFAULT = "Viesigible";
    public static final String V2AIRSHIPNAME_NAME = "Airship Name - Viesigible";
    public static final String V2AIRSHIPNAME_COMMENT = "Change the name of Viesigible Airships?";

    public static final String V3AIRSHIPNAME_DEFAULT = "Viesepelin";
    public static final String V3AIRSHIPNAME_NAME = "Airship Name - Viesepelin";
    // Fixed: comment previously misspelled the airship as "Viespelin".
    public static final String V3AIRSHIPNAME_COMMENT = "Change the name of Viesepelin Airships?";

    public static final String V4AIRSHIPNAME_DEFAULT = "Viesakron";
    public static final String V4AIRSHIPNAME_NAME = "Airship Name - Viesakron";
    public static final String V4AIRSHIPNAME_COMMENT = "Change the name of Viesakron Airships?";

    public static final String V5AIRSHIPNAME_DEFAULT = "Viesindus";
    public static final String V5AIRSHIPNAME_NAME = "Airship Name - Viesindus";
    public static final String V5AIRSHIPNAME_COMMENT = "Change the name of Viesindus Airships?";

    public static final String V6AIRSHIPNAME_DEFAULT = "Viesamune";
    public static final String V6AIRSHIPNAME_NAME = "Airship Name - Viesamune";
    public static final String V6AIRSHIPNAME_COMMENT = "Change the name of Viesamune Airships?";

    public static final boolean VANILLAFUEL_DEFAULT = true;
    public static final String VANILLAFUEL_NAME = "Enable using vanilla fuel?";
    public static final String VANILLAFUEL_COMMENT = "Should airships be able to use vanilla fuel?";

    public static final boolean OUTSIDEMODFUEL_DEFAULT = true;
    public static final String OUTSIDEMODFUEL_NAME = "Enable using other mod's fuel?";
    public static final String OUTSIDEMODFUEL_COMMENT = "Should airships be able to use other mod's fuel?";

    public static final int VIESOLINEBURNTIME_DEFAULT = 90;
    public static final String VIESOLINEBURNTIME_NAME = "Viesoline Burn Time?";
    public static final String VIESOLINEBURNTIME_COMMENT = "How long does Viesoline burn for in seconds? ";

    public static final boolean RECIPEDISMOUNTERPLAYER_DEFAULT = true;
    public static final String RECIPEDISMOUNTERPLAYER_NAME = "Enable Player Dismounter Recipe?";
    public static final String RECIPEDISMOUNTERPLAYER_COMMENT = "Should Player Dismounter be craftable?";

    public static final boolean ENGINESOUND_DEFAULT = true;
    public static final String ENGINESOUND_NAME = "Enable airship engine sounds?";
    public static final String ENGINESOUND_COMMENT = "Should airships have engine sounds when powered?";

    public static final int HUDX_DEFAULT = 0;
    public static final String HUDX_NAME = "Set the X position of the HUD.";
    public static final String HUDX_COMMENT = "0 puts it back in the original position.";

    public static final int HUDY_DEFAULT = 0;
    public static final String HUDY_NAME = "Set the Y position of the HUD.";
    public static final String HUDY_COMMENT = "0 puts it back in the original position.";

    public static final int HUDSCALE_DEFAULT = 100;
    public static final String HUDSCALE_NAME = "Set the Scale of the HUD.";
    public static final String HUDSCALE_COMMENT = "100 puts it back in the original scale.";

    /**
     * Load the configuration from disk and populate all option fields.
     * @param file the config file handed over by the mod's pre-init event
     */
    public static void init(File file) {
        config = new Configuration(file);
        syncConfig();
        LogHelper.info("Good news everyone! The config has been loaded!");
    }

    /** Read every option into its static field and persist the config file. */
    public static void syncConfig() {
        // Main settings
        // Fixed: CATEGORY_SPLITTER is a static member — access it via the
        // Configuration class, not through the 'config' instance.
        final String category1 = CATEGORY_VC + Configuration.CATEGORY_SPLITTER
            + TextFormatting.LIGHT_PURPLE + "Global Settings";
        config.addCustomCategoryComment(category1, "Global settings.");
        recipeDismounterPlayer = config.getBoolean(TextFormatting.WHITE + RECIPEDISMOUNTERPLAYER_NAME, category1, RECIPEDISMOUNTERPLAYER_DEFAULT, RECIPEDISMOUNTERPLAYER_COMMENT);

        // General settings
        final String category2 = CATEGORY_VC + Configuration.CATEGORY_SPLITTER
            + TextFormatting.GREEN + "General Settings";
        config.addCustomCategoryComment(category2, "General airship options.");
        v1AirshipSpeed = config.getInt(TextFormatting.WHITE + V1AIRSHIPSPEED_NAME, category2, V1AIRSHIPSPEED_DEFAULT, 100, 400, V1AIRSHIPSPEED_COMMENT);
        v2AirshipSpeed = config.getInt(TextFormatting.WHITE + V2AIRSHIPSPEED_NAME, category2, V2AIRSHIPSPEED_DEFAULT, 100, 400, V2AIRSHIPSPEED_COMMENT);
        v3AirshipSpeed = config.getInt(TextFormatting.WHITE + V3AIRSHIPSPEED_NAME, category2, V3AIRSHIPSPEED_DEFAULT, 100, 400, V3AIRSHIPSPEED_COMMENT);
        v4AirshipSpeed = config.getInt(TextFormatting.WHITE + V4AIRSHIPSPEED_NAME, category2, V4AIRSHIPSPEED_DEFAULT, 100, 400, V4AIRSHIPSPEED_COMMENT);
        ///////v5AirshipSpeed = config.getInt(TextFormatting.WHITE + V5AIRSHIPSPEED_NAME, category2, V5AIRSHIPSPEED_DEFAULT, 100, 400, V5AIRSHIPSPEED_COMMENT);
        ///////v6AirshipSpeed = config.getInt(TextFormatting.WHITE + V6AIRSHIPSPEED_NAME, category2, V6AIRSHIPSPEED_DEFAULT, 100, 400, V6AIRSHIPSPEED_COMMENT);

        // Fuel settings
        final String category3 = CATEGORY_VC + Configuration.CATEGORY_SPLITTER
            + TextFormatting.AQUA + "Fuel Settings";
        config.addCustomCategoryComment(category3, "Airship fuel options.");
        vanillaFuel = config.getBoolean(TextFormatting.WHITE + VANILLAFUEL_NAME, category3, VANILLAFUEL_DEFAULT, VANILLAFUEL_COMMENT);
        outsideModFuel = config.getBoolean(TextFormatting.WHITE + OUTSIDEMODFUEL_NAME, category3, OUTSIDEMODFUEL_DEFAULT, OUTSIDEMODFUEL_COMMENT);
        viesolineBurnTime = config.getInt(TextFormatting.WHITE + VIESOLINEBURNTIME_NAME, category3, VIESOLINEBURNTIME_DEFAULT, 20, 500, VIESOLINEBURNTIME_COMMENT);

        // Client settings
        final String category4 = CATEGORY_VC + Configuration.CATEGORY_SPLITTER
            + TextFormatting.GOLD + "Client Side Settings";
        config.addCustomCategoryComment(category4, "Client side airship options.");
        v1AirshipName = config.getString(TextFormatting.WHITE + V1AIRSHIPNAME_NAME, category4, V1AIRSHIPNAME_DEFAULT, V1AIRSHIPNAME_COMMENT);
        v2AirshipName = config.getString(TextFormatting.WHITE + V2AIRSHIPNAME_NAME, category4, V2AIRSHIPNAME_DEFAULT, V2AIRSHIPNAME_COMMENT);
        v3AirshipName = config.getString(TextFormatting.WHITE + V3AIRSHIPNAME_NAME, category4, V3AIRSHIPNAME_DEFAULT, V3AIRSHIPNAME_COMMENT);
        v4AirshipName = config.getString(TextFormatting.WHITE + V4AIRSHIPNAME_NAME, category4, V4AIRSHIPNAME_DEFAULT, V4AIRSHIPNAME_COMMENT);
        ///////v5AirshipName = config.getString(TextFormatting.WHITE + V5AIRSHIPNAME_NAME, category4, V5AIRSHIPNAME_DEFAULT, V5AIRSHIPNAME_COMMENT);
        ///////v6AirshipName = config.getString(TextFormatting.WHITE + V6AIRSHIPNAME_NAME, category4, V6AIRSHIPNAME_DEFAULT, V6AIRSHIPNAME_COMMENT);
        engineSounds = config.getBoolean(TextFormatting.WHITE + ENGINESOUND_NAME, category4, ENGINESOUND_DEFAULT, ENGINESOUND_COMMENT);

        // HUD settings
        final String category5 = CATEGORY_VC + Configuration.CATEGORY_SPLITTER
            + TextFormatting.GOLD + "HUD Settings";
        config.addCustomCategoryComment(category5, "HUD airship options.");
        HUDX = config.getInt(TextFormatting.WHITE + HUDX_NAME, category5, HUDX_DEFAULT, -200, 200, HUDX_COMMENT);
        HUDY = config.getInt(TextFormatting.WHITE + HUDY_NAME, category5, HUDY_DEFAULT, -200, 200, HUDY_COMMENT);
        HUDScale = config.getInt(TextFormatting.WHITE + HUDSCALE_NAME, category5, HUDSCALE_DEFAULT, 25, 150, HUDSCALE_COMMENT);

        // Save the config
        config.save();
    }
}
package com.philihp.weblabora.model;

import com.google.common.collect.ImmutableMap;
import com.philihp.weblabora.model.Scorecard.PlayerScore;
import com.philihp.weblabora.model.building.*;

import org.junit.Before;
import org.junit.Test;

import java.util.Map;
import java.util.Map.Entry;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;

/**
 * Runs through game 53955 and checks ending board.
 *
 * <p>NOTE(review): the class name says game 53772 while this comment says 53955 — one of
 * the two ids is presumably a copy-paste slip from a sibling test; confirm against the
 * original game log before relying on either number.
 *
 * <p>The fixture replays the full recorded move log for a 3-player long game (France
 * board) move-for-move, then asserts the final scorecard for every player.
 */
public class Game53772Test {

	// Board under test; fully replayed by setUp() before each test method runs.
	Board board;

	/**
	 * Replays the complete game transcript. Each string is one command in the game's
	 * move grammar; "commit" ends the current player's action. The exact order and
	 * spelling of every command is significant — do not reformat or reorder.
	 */
	@Before
	public void setUp() throws Exception {
		board = new Board();
		// Game configuration: 3 players, long variant, France side of the board.
		MoveProcessor.processMove(board, "config PLAYERS 3");
		MoveProcessor.processMove(board, "config LENGTH LONG");
		MoveProcessor.processMove(board, "config COUNTRY FRANCE");
		MoveProcessor.processMove(board, "start");
		MoveProcessor.processMove(board, "fell_trees 1 1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LG3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB3 Jo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build G02 3 1");
		MoveProcessor.processMove(board, "use G02 ClPnGn Pn *");
		MoveProcessor.processMove(board, "buy_district 2 PLAINS");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F09 3 1");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "buy_district 2 PLAINS");
		MoveProcessor.processMove(board, "build G01 3 1");
		MoveProcessor.processMove(board, "use G01 *");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "buy_plot 1 COAST");
		MoveProcessor.processMove(board, "use LR1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "cut_peat 0 1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB1 Jo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LR3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build G12 3 0");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "fell_trees 0 2");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G12 Pn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G12 ShPnPnPt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G01 Pn");
		MoveProcessor.processMove(board, "use G01");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LR2 Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use G12 PtPtShShShSh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F04 4 2");
		MoveProcessor.processMove(board, "use F04 GnGnGnGn *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert Gn");
		MoveProcessor.processMove(board, "use G02 GnSwPn Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F08 0 1");
		MoveProcessor.processMove(board, "use F08 ClPtGnSh *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S02 1 2 ShPnPtSwSw");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S03 1 2 ShShShSh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "buy_district 2 PLAINS");
		MoveProcessor.processMove(board, "settle S03 2 2 ShShBr");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build G16 3 0");
		MoveProcessor.processMove(board, "use G16 *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F11 -1 1");
		MoveProcessor.processMove(board, "use F11 *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "cut_peat 0 0");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G01");
		MoveProcessor.processMove(board, "use F11");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LR3 Jo");
		MoveProcessor.processMove(board, "buy_district 3 HILLS");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LG3");
		MoveProcessor.processMove(board, "buy_plot 0 COAST");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "fell_trees 1 1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F11");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LR1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert Gn");
		MoveProcessor.processMove(board, "build F17 3 2");
		MoveProcessor.processMove(board, "use F17 PnPnPn Bo *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G16");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F14 4 2");
		MoveProcessor.processMove(board, "use F14 Jo *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert Gn");
		MoveProcessor.processMove(board, "use F08 PnPtGnSw");
		MoveProcessor.processMove(board, "buy_plot 1 MOUNTAIN");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G01");
		MoveProcessor.processMove(board, "use F17 PnPnPn Bo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "cut_peat 0 1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LG1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F05 2 2");
		MoveProcessor.processMove(board, "use F05 PtFlFlFlFlBrBr *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G02 ClGnPn Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S04 -1 1 PtPtMtBr");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S05 3 2 WoMt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S05 3 2 ShShPnWo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert GnGn");
		MoveProcessor.processMove(board, "build F21 1 2");
		MoveProcessor.processMove(board, "use F21 GpGpGpGp Wn *");
		MoveProcessor.processMove(board, "buy_district 3 HILLS");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "fell_trees 0 2");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build G22 6 1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G22 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G22 Jo");
		MoveProcessor.processMove(board, "buy_district 3 HILLS");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert GnGn");
		MoveProcessor.processMove(board, "build F20 2 2");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G16 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G16");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G01");
		MoveProcessor.processMove(board, "use F14");
		MoveProcessor.processMove(board, "buy_plot 1 MOUNTAIN");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LR2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F21 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F21 GpGpGpGpGpGp Wn");
		MoveProcessor.processMove(board, "buy_plot 1 COAST");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G16 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G16");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "cut_peat 0 3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G22 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G22 Jo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F20 ShShShWn");
		MoveProcessor.processMove(board, "buy_plot 3 COAST");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "fell_trees 2 0");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S04 -1 2 ShShShShPtWo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S04 -1 2 ShShShShPtWo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S02 0 0 PtWoShGp");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F11");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG3");
		MoveProcessor.processMove(board, "buy_plot 2 COAST");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert Ni");
		MoveProcessor.processMove(board, "build F32 5 1");
		MoveProcessor.processMove(board, "use F32 Pn *");
		MoveProcessor.processMove(board, "cut_peat 0 1 Jo");
		MoveProcessor.processMove(board, "fell_trees 2 3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F33 -1 3");
		MoveProcessor.processMove(board, "use F33 WoPt Mt *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G22");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert GnGn");
		MoveProcessor.processMove(board, "build F30 0 2");
		MoveProcessor.processMove(board, "use F30 Po *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G01 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G01");
		MoveProcessor.processMove(board, "use F30 Po");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build G07 2 0");
		MoveProcessor.processMove(board, "use G07 PtPtPtPtPtPtPtPt *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F20 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F20 ShShBrWn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F14");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract LR2 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use LR2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G22 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G22 Jo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F32 PnPn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F32 Pn");
		MoveProcessor.processMove(board, "cut_peat 0 3");
		MoveProcessor.processMove(board, "fell_trees 2 3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LG1");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F11 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F11");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build G26 -1 4");
		MoveProcessor.processMove(board, "use G26 WoWo *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert GnGnGnGnGn");
		MoveProcessor.processMove(board, "build G19 5 1");
		MoveProcessor.processMove(board, "use G19 ShSwShSwShSwShSwShSw *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F27 3 3");
		MoveProcessor.processMove(board, "use F27 Wn *");
		MoveProcessor.processMove(board, "use F11");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LR2 Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S07 -1 2 CoCoCoMtMtMt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S03 2 3 ShBrPnPn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S02 0 1 PtWoPnPnPn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F38 -1 0");
		MoveProcessor.processMove(board, "use F38 1 0 1 1 0 2 2 3 *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F30 Po");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "build F36 0 2");
		MoveProcessor.processMove(board, "use F36 Or Po *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F33 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F33 Co Mt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F32 Pn");
		MoveProcessor.processMove(board, "fell_trees 2 0");
		MoveProcessor.processMove(board, "cut_peat 0 3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F21 PnPn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F21 GpGpGpGpGpGpGpGpGp Wn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F11 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F11");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F33 PtPt Mt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "fell_trees 1 3");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "buy_plot 3 MOUNTAIN");
		MoveProcessor.processMove(board, "build G28 5 3");
		MoveProcessor.processMove(board, "use G28 *");
		MoveProcessor.processMove(board, "settle S01 4 3 WoPn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G01 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G01");
		MoveProcessor.processMove(board, "use G28");
		MoveProcessor.processMove(board, "settle S08 3 3 PtWoMtMtMtMtMtNi");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use F17 PnPnPn Bo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use LB2 Gn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract G07 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use G07 PtPtPtPtPt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert Gn");
		MoveProcessor.processMove(board, "build F40 3 3");
		MoveProcessor.processMove(board, "use F40 *");
		MoveProcessor.processMove(board, "use G18 ClClSnCo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G28");
		MoveProcessor.processMove(board, "settle S06 5 2 NiPtPtPt");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert Gn");
		MoveProcessor.processMove(board, "build G06 1 1");
		MoveProcessor.processMove(board, "use G06 Co *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "work_contract F27 Wn");
		MoveProcessor.processMove(board, "with LAYBROTHER");
		MoveProcessor.processMove(board, "use F27 Wn");
		MoveProcessor.processMove(board, "use G28");
		MoveProcessor.processMove(board, "settle S08 4 2 MtMtMtMtMtMtCo");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "convert GnGnGnGnGn");
		MoveProcessor.processMove(board, "build F37 2 0");
		MoveProcessor.processMove(board, "use F37 WoSwWoSwWoSwWoSwWoSw *");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use G28");
		MoveProcessor.processMove(board, "settle S06 2 3 CoCoGnGnGnGnGn");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S05 0 2 CoShShGp");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "use F09");
		MoveProcessor.processMove(board, "use LG2 Sh");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S07 4 3 CoCoCoPnPnPnPnPnGnGnGnGnGnNi");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S06 4 3 CoCoShShGp");
		MoveProcessor.processMove(board, "commit");
		MoveProcessor.processMove(board, "settle S07 -1 1 ShShShShGpNiPnPtPtPtWoWoWo");
		MoveProcessor.processMove(board, "commit");
	}

	/**
	 * Asserts the final scorecard after the full replay: the game must be over (and not
	 * mid-settling), and each player's item score, shield score, settlement total, and
	 * per-settlement-type scores must match the recorded game's outcome exactly.
	 */
	@Test
	public void testEndingScores() throws WeblaboraException {
		assertThat(board.isGameOver(), is(true));
		assertThat(board.isSettling(), is(false));
		Map<Color, PlayerScore> scores = board.getScorecard().getScores();

		// --- RED player ---
		assertThat(scores, hasKey(Color.RED));
		assertThat(scores.get(Color.RED), hasProperty("itemScore", is(25)));
		assertThat(scores.get(Color.RED), hasProperty("shieldScore", is(69)));
		assertThat(scores.get(Color.RED), hasProperty("settlementTotalScore", is(148)));
		// Expected score contributed by each settlement type RED built.
		Map<Class<? extends Settlement>, Integer> redScores =
				ImmutableMap.<Class<? extends Settlement>, Integer>builder()
						.put(FarmingVillage.class, 16)
						.put(FishingVillage.class, 26)
						.put(MarketTown.class, 16)
						.put(ArtistsColony.class, 29)
						.put(Hamlet.class, 18)
						.put(HilltopVillage.class, 23)
						.put(Village.class, 20)
						.build();
		for (Entry<Class<? extends Settlement>, Integer> entry : redScores.entrySet()) {
			assertThat(
					scores.get(Color.RED).getSettlementScores(),
					hasItem(allOf(
							hasProperty("settlement", instanceOf(entry.getKey())),
							hasProperty("score", is(entry.getValue())))));
		}

		// --- GREEN player ---
		assertThat(scores, hasKey(Color.GREEN));
		assertThat(scores.get(Color.GREEN), hasProperty("itemScore", is(26)));
		assertThat(scores.get(Color.GREEN), hasProperty("shieldScore", is(85)));
		assertThat(scores.get(Color.GREEN), hasProperty("settlementTotalScore", is(140)));
		Map<Class<? extends Settlement>, Integer> greenScores =
				ImmutableMap.<Class<? extends Settlement>, Integer>builder()
						.put(FarmingVillage.class, 14)
						.put(FishingVillage.class, 28)
						.put(Village.class, 20)
						.put(ArtistsColony.class, 24)
						.put(MarketTown.class, 16)
						.put(HilltopVillage.class, 21)
						.put(Hamlet.class, 17)
						.build();
		for (Entry<Class<? extends Settlement>, Integer> entry : greenScores.entrySet()) {
			assertThat(
					scores.get(Color.GREEN).getSettlementScores(),
					hasItem(allOf(
							hasProperty("settlement", instanceOf(entry.getKey())),
							hasProperty("score", is(entry.getValue())))));
		}

		// --- BLUE player ---
		assertThat(scores, hasKey(Color.BLUE));
		assertThat(scores.get(Color.BLUE), hasProperty("itemScore", is(24)));
		assertThat(scores.get(Color.BLUE), hasProperty("shieldScore", is(88)));
		assertThat(scores.get(Color.BLUE), hasProperty("settlementTotalScore", is(128)));
		Map<Class<? extends Settlement>, Integer> blueScores =
				ImmutableMap.<Class<? extends Settlement>, Integer>builder()
						.put(Village.class, 15)
						.put(FishingVillage.class, 20)
						.put(FarmingVillage.class, 11)
						.put(ArtistsColony.class, 27)
						.put(Hamlet.class, 22)
						.put(MarketTown.class, 15)
						.put(ShantyTown.class, 18)
						.build();
		for (Entry<Class<? extends Settlement>, Integer> entry : blueScores.entrySet()) {
			assertThat(
					scores.get(Color.BLUE).getSettlementScores(),
					hasItem(allOf(
							hasProperty("settlement", instanceOf(entry.getKey())),
							hasProperty("score", is(entry.getValue())))));
		}
	}
}
package org.renjin.primitives;

import org.renjin.eval.Context;
import org.renjin.eval.EvalException;
import org.renjin.primitives.annotations.processor.ArgumentException;
import org.renjin.primitives.annotations.processor.ArgumentIterator;
import org.renjin.primitives.annotations.processor.WrapperRuntime;
import org.renjin.primitives.vector.DeferredComputation;
import org.renjin.sexp.AbstractSEXP;
import org.renjin.sexp.AttributeMap;
import org.renjin.sexp.BuiltinFunction;
import org.renjin.sexp.ComplexVector;
import org.renjin.sexp.DoubleVector;
import org.renjin.sexp.Environment;
import org.renjin.sexp.FunctionCall;
import org.renjin.sexp.IntVector;
import org.renjin.sexp.PairList;
import org.renjin.sexp.SEXP;
import org.renjin.sexp.Vector;

/**
 * Builtin wrapper for the R unary/binary {@code -} operator.
 *
 * <p>NOTE(review): the {@code R$primitive$} name mangling and the highly repetitive
 * specialization below strongly suggest this class is emitted by Renjin's annotation
 * processor — confirm, and prefer regenerating over hand-editing if so.
 *
 * <p>Dispatch order: S3 "Ops" group dispatch for object arguments, then type-specialized
 * element-wise loops (integer, double, complex) with R recycling semantics, deferring
 * large (>100 elements) or already-deferred operands to deferred-computation vectors.
 */
public class R$primitive$$minus extends BuiltinFunction {

    public R$primitive$$minus() {
        // Register under the R operator name "-".
        super("-");
    }

    /**
     * Evaluates 1 or 2 arguments from the call, tries S3 "Ops" group dispatch when any
     * argument is an object, and otherwise falls through to the typed doApply overloads.
     * Throws EvalException for zero-match signatures or more than 2 arguments.
     */
    public SEXP apply(Context context, Environment environment, FunctionCall call, PairList args) {
        try {
            ArgumentIterator argIt = new ArgumentIterator(context, environment, args);
            SEXP s0 = argIt.evalNext();
            if (!argIt.hasNext()) {
                // Unary minus: objects get a chance at S3 group-generic dispatch first.
                if (((AbstractSEXP) s0).isObject()) {
                    SEXP genericResult = S3.tryDispatchGroupFromPrimitive(context, environment, call, "Ops", "-", s0);
                    if (genericResult!= null) {
                        return genericResult;
                    }
                }
                return this.doApply(context, environment, s0);
            }
            SEXP s1 = argIt.evalNext();
            if (!argIt.hasNext()) {
                // Binary minus: dispatch if either operand is an object.
                if (((AbstractSEXP) s0).isObject()||((AbstractSEXP) s1).isObject()) {
                    SEXP genericResult = S3.tryDispatchGroupFromPrimitive(context, environment, call, "Ops", "-", s0, s1);
                    if (genericResult!= null) {
                        return genericResult;
                    }
                }
                return this.doApply(context, environment, s0, s1);
            }
            throw new EvalException("-: too many arguments, expected at most 2.");
        } catch (ArgumentException e) {
            throw new EvalException(context, "Invalid argument: %s. Expected:\n\t-(integer)\n\t-(double, double)\n\t-(double)\n\t-(Complex, Complex)", e.getMessage());
        } catch (EvalException e) {
            // Attach the evaluation context before rethrowing.
            e.initContext(context);
            throw e;
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            // Wrap checked exceptions from the typed overloads.
            throw new EvalException(e);
        }
    }

    /**
     * Arity-based static dispatcher used by the SEXP[]-style apply entry point.
     * Argument names are accepted but unused by this primitive.
     */
    public static SEXP doApply(Context context, Environment environment, FunctionCall call, String[] argNames, SEXP[] args) {
        try {
            if ((args.length) == 1) {
                return doApply(context, environment, args[ 0 ]);
            }
            if ((args.length) == 2) {
                return doApply(context, environment, args[ 0 ], args[ 1 ]);
            }
        } catch (EvalException e) {
            e.initContext(context);
            throw e;
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new EvalException(e);
        }
        throw new EvalException("-: max arity is 2");
    }

    /** Instance entry point for the array-style calling convention; delegates to the static dispatcher. */
    public SEXP apply(Context context, Environment environment, FunctionCall call, String[] argNames, SEXP[] args) {
        return R$primitive$$minus.doApply(context, environment, call, argNames, args);
    }

    /**
     * Unary minus. Integer vectors stay integer; anything double-or-narrower is computed
     * as double; other types are rejected. Results over 100 elements (or deferred inputs)
     * are returned as deferred computations rather than materialized.
     */
    public static SEXP doApply(Context context, Environment environment, SEXP arg0)
        throws Exception
    {
        if ((arg0 .length() == 0)||(arg0 instanceof IntVector)) {
            // Integer specialization.
            Vector vector0 = ((Vector) WrapperRuntime.convertToVector(arg0));
            int length0 = vector0 .length();
            int currentElementIndex0 = 0;
            int cycles = 0;
            if (length0 == 0) {
                return IntVector.EMPTY;
            }
            if (length0 >cycles) {
                cycles = length0;
            }
            if ((cycles > 100)||(vector0 instanceof DeferredComputation)) {
                // Defer large or already-deferred computations.
                return new R$primitive$$minus$deferred_i(vector0, vector0 .getAttributes());
            }
            org.renjin.sexp.IntArrayVector.Builder builder = new org.renjin.sexp.IntArrayVector.Builder(cycles);
            for (int i = 0; (i!= cycles); i ++) {
                if (vector0 .isElementNA(currentElementIndex0)) {
                    builder.setNA(i);
                } else {
                    builder.set(i, Ops.minus(vector0 .getElementAsInt(currentElementIndex0)));
                }
                // Recycle the operand index (R recycling rule).
                currentElementIndex0 += 1;
                if (currentElementIndex0 == length0) {
                    currentElementIndex0 = 0;
                }
            }
            if (length0 == cycles) {
                builder.copyAttributesFrom(vector0);
            }
            return builder.build();
        } else {
            if ((arg0 .length() == 0)||((arg0 instanceof Vector)&&DoubleVector.VECTOR_TYPE.isWiderThanOrEqualTo(((Vector) arg0)))) {
                // Double specialization.
                Vector vector0 = ((Vector) WrapperRuntime.convertToVector(arg0));
                int length0 = vector0 .length();
                int currentElementIndex0 = 0;
                int cycles = 0;
                if (length0 == 0) {
                    return DoubleVector.EMPTY;
                }
                if (length0 >cycles) {
                    cycles = length0;
                }
                if ((cycles > 100)||(vector0 instanceof DeferredComputation)) {
                    return new R$primitive$$minus$deferred_d(vector0, vector0 .getAttributes());
                }
                org.renjin.sexp.DoubleArrayVector.Builder builder = new org.renjin.sexp.DoubleArrayVector.Builder(cycles);
                for (int i = 0; (i!= cycles); i ++) {
                    if (vector0 .isElementNA(currentElementIndex0)) {
                        builder.setNA(i);
                    } else {
                        builder.set(i, Ops.minus(vector0 .getElementAsDouble(currentElementIndex0)));
                    }
                    currentElementIndex0 += 1;
                    if (currentElementIndex0 == length0) {
                        currentElementIndex0 = 0;
                    }
                }
                if (length0 == cycles) {
                    builder.copyAttributesFrom(vector0);
                }
                return builder.build();
            } else {
                throw new EvalException(String.format("Invalid argument:\n\t-(%s)\n\tExpected:\n\t-(integer)\n\t-(double, double)\n\t-(double)\n\t-(Complex, Complex)", arg0 .getTypeName()));
            }
        }
    }

    /**
     * Binary minus with R recycling: the result length is the longer operand's length and
     * the shorter operand's index wraps. Double pairs are computed as double; otherwise
     * complex pairs as complex; anything else is rejected. Attributes are copied from any
     * operand whose length equals the result length (vector1 first, then vector0, so
     * vector0's attributes win on ties). Large or deferred inputs return deferred results.
     */
    public static SEXP doApply(Context context, Environment environment, SEXP arg0, SEXP arg1)
        throws Exception
    {
        if (((arg0 .length() == 0)||((arg0 instanceof Vector)&&DoubleVector.VECTOR_TYPE.isWiderThanOrEqualTo(((Vector) arg0))))&&((arg1 .length() == 0)||((arg1 instanceof Vector)&&DoubleVector.VECTOR_TYPE.isWiderThanOrEqualTo(((Vector) arg1))))) {
            // Double/double specialization.
            Vector vector0 = ((Vector) WrapperRuntime.convertToVector(arg0));
            int length0 = vector0 .length();
            int currentElementIndex0 = 0;
            Vector vector1 = ((Vector) WrapperRuntime.convertToVector(arg1));
            int length1 = vector1 .length();
            int currentElementIndex1 = 0;
            int cycles = 0;
            if (length0 == 0) {
                // Any zero-length operand yields an empty result.
                return DoubleVector.EMPTY;
            }
            if (length0 >cycles) {
                cycles = length0;
            }
            if (length1 == 0) {
                return DoubleVector.EMPTY;
            }
            if (length1 >cycles) {
                cycles = length1;
            }
            if (((cycles > 100)||(vector0 instanceof DeferredComputation))||(vector1 instanceof DeferredComputation)) {
                return new R$primitive$$minus$deferred_dd(vector0, vector1, AttributeMap.combineAttributes(vector0, vector1));
            }
            org.renjin.sexp.DoubleArrayVector.Builder builder = new org.renjin.sexp.DoubleArrayVector.Builder(cycles);
            for (int i = 0; (i!= cycles); i ++) {
                if (vector0 .isElementNA(currentElementIndex0)||vector1 .isElementNA(currentElementIndex1)) {
                    // NA in either operand produces NA.
                    builder.setNA(i);
                } else {
                    builder.set(i, Ops.minus(vector0 .getElementAsDouble(currentElementIndex0), vector1 .getElementAsDouble(currentElementIndex1)));
                }
                currentElementIndex0 += 1;
                if (currentElementIndex0 == length0) {
                    currentElementIndex0 = 0;
                }
                currentElementIndex1 += 1;
                if (currentElementIndex1 == length1) {
                    currentElementIndex1 = 0;
                }
            }
            if (length1 == cycles) {
                builder.copyAttributesFrom(vector1);
            }
            if (length0 == cycles) {
                builder.copyAttributesFrom(vector0);
            }
            return builder.build();
        } else {
            if (((arg0 .length() == 0)||((arg0 instanceof Vector)&&ComplexVector.VECTOR_TYPE.isWiderThanOrEqualTo(((Vector) arg0))))&&((arg1 .length() == 0)||((arg1 instanceof Vector)&&ComplexVector.VECTOR_TYPE.isWiderThanOrEqualTo(((Vector) arg1))))) {
                // Complex/complex specialization; no deferred path here.
                Vector vector0 = ((Vector) WrapperRuntime.convertToVector(arg0));
                int length0 = vector0 .length();
                int currentElementIndex0 = 0;
                Vector vector1 = ((Vector) WrapperRuntime.convertToVector(arg1));
                int length1 = vector1 .length();
                int currentElementIndex1 = 0;
                int cycles = 0;
                if (length0 == 0) {
                    return ComplexVector.EMPTY;
                }
                if (length0 >cycles) {
                    cycles = length0;
                }
                if (length1 == 0) {
                    return ComplexVector.EMPTY;
                }
                if (length1 >cycles) {
                    cycles = length1;
                }
                ComplexVector.Builder builder = new ComplexVector.Builder(cycles);
                for (int i = 0; (i!= cycles); i ++) {
                    if (vector0 .isElementNA(currentElementIndex0)||vector1 .isElementNA(currentElementIndex1)) {
                        builder.setNA(i);
                    } else {
                        builder.set(i, Ops.minus(vector0 .getElementAsComplex(currentElementIndex0), vector1 .getElementAsComplex(currentElementIndex1)));
                    }
                    currentElementIndex0 += 1;
                    if (currentElementIndex0 == length0) {
                        currentElementIndex0 = 0;
                    }
                    currentElementIndex1 += 1;
                    if (currentElementIndex1 == length1) {
                        currentElementIndex1 = 0;
                    }
                }
                if (length1 == cycles) {
                    builder.copyAttributesFrom(vector1);
                }
                if (length0 == cycles) {
                    builder.copyAttributesFrom(vector0);
                }
                return builder.build();
            } else {
                throw new EvalException(String.format("Invalid argument:\n\t-(%s, %s)\n\tExpected:\n\t-(integer)\n\t-(double, double)\n\t-(double)\n\t-(Complex, Complex)", arg0 .getTypeName(), arg1 .getTypeName()));
            }
        }
    }

}
package io.grpc.clientcacheexample; import android.util.Log; import android.util.LruCache; import com.google.common.base.Splitter; import com.google.protobuf.MessageLite; import io.grpc.CallOptions; import io.grpc.Channel; import io.grpc.ClientCall; import io.grpc.ClientInterceptor; import io.grpc.Deadline; import io.grpc.ForwardingClientCall; import io.grpc.ForwardingClientCallListener; import io.grpc.Metadata; import io.grpc.MethodDescriptor; import io.grpc.Status; import java.util.Locale; import java.util.Objects; import java.util.concurrent.TimeUnit; /** * An example of an on-device cache for Android implemented using the {@link ClientInterceptor} API. * * <p>Client-side cache-control directives are not directly supported. Instead, two call options can * be added to the call: no-cache (always go to the network) or only-if-cached (never use network; * if response is not in cache, the request fails). * * <p>This interceptor respects the cache-control directives in the server's response: max-age * determines when the cache entry goes stale. no-cache, no-store, and no-transform entirely skip * caching of the response. must-revalidate is ignored, as the cache does not support returning * stale responses. * * <p>Note: other response headers besides cache-control (such as Expiration, Varies) are ignored by * this implementation. 
*/
final class SafeMethodCachingInterceptor implements ClientInterceptor {

  // Per-call option: when true, skip the cache entirely and always go to the network.
  static CallOptions.Key<Boolean> NO_CACHE_CALL_OPTION = CallOptions.Key.of("no-cache", false);
  // Per-call option: when true, never use the network; the call fails if the value is not cached.
  static CallOptions.Key<Boolean> ONLY_IF_CACHED_CALL_OPTION =
      CallOptions.Key.of("only-if-cached", false);

  private static final String TAG = "grpcCacheExample";

  /** Cache key: a request is identified by the full method name plus the request message. */
  public static final class Key {
    private final String fullMethodName;
    private final MessageLite request;

    public Key(String fullMethodName, MessageLite request) {
      this.fullMethodName = fullMethodName;
      this.request = request;
    }

    @Override
    public boolean equals(Object object) {
      if (object instanceof Key) {
        Key other = (Key) object;
        return Objects.equals(this.fullMethodName, other.fullMethodName)
            && Objects.equals(this.request, other.request);
      }
      return false;
    }

    @Override
    public int hashCode() {
      return Objects.hash(fullMethodName, request);
    }
  }

  /** Cache value: the cached response plus the deadline after which the entry is stale. */
  public static final class Value {
    private final MessageLite response;
    private final Deadline maxAgeDeadline;

    public Value(MessageLite response, Deadline maxAgeDeadline) {
      this.response = response;
      this.maxAgeDeadline = maxAgeDeadline;
    }

    @Override
    public boolean equals(Object object) {
      if (object instanceof Value) {
        Value other = (Value) object;
        return Objects.equals(this.response, other.response)
            && Objects.equals(this.maxAgeDeadline, other.maxAgeDeadline);
      }
      return false;
    }

    @Override
    public int hashCode() {
      return Objects.hash(response, maxAgeDeadline);
    }
  }

  /** Minimal cache abstraction used by the interceptor; see {@link #newLruCache(int)}. */
  public interface Cache {
    void put(Key key, Value value);

    Value get(Key key);

    void remove(Key key);

    void clear();
  }

  /**
   * Obtain a new cache with a least-recently used eviction policy and the specified size limit. The
   * backing caching implementation is provided by {@link LruCache}. It is safe for a single cache
   * to be shared across multiple {@link SafeMethodCachingInterceptor}s without synchronization.
   */
  public static Cache newLruCache(final int cacheSizeInBytes) {
    return new Cache() {
      private final LruCache<Key, Value> lruCache =
          new LruCache<Key, Value>(cacheSizeInBytes) {
            protected int sizeOf(Key key, Value value) {
              // Entries are measured by the serialized size of the cached response message.
              return value.response.getSerializedSize();
            }
          };

      @Override
      public void put(Key key, Value value) {
        lruCache.put(key, value);
      }

      @Override
      public Value get(Key key) {
        return lruCache.get(key);
      }

      @Override
      public void remove(Key key) {
        lruCache.remove(key);
      }

      @Override
      public void clear() {
        lruCache.evictAll();
      }
    };
  }

  /** Creates an interceptor that uses {@code DEFAULT_MAX_AGE_SECONDS} when the server sets none. */
  public static SafeMethodCachingInterceptor newSafeMethodCachingInterceptor(Cache cache) {
    return newSafeMethodCachingInterceptor(cache, DEFAULT_MAX_AGE_SECONDS);
  }

  /** Creates an interceptor with an explicit default max-age, in seconds. */
  public static SafeMethodCachingInterceptor newSafeMethodCachingInterceptor(
      Cache cache, int defaultMaxAge) {
    return new SafeMethodCachingInterceptor(cache, defaultMaxAge);
  }

  private static int DEFAULT_MAX_AGE_SECONDS = 3600;
  private static final Metadata.Key<String> CACHE_CONTROL_KEY =
      Metadata.Key.of("cache-control", Metadata.ASCII_STRING_MARSHALLER);
  private static final Splitter CACHE_CONTROL_SPLITTER =
      Splitter.on(',').trimResults().omitEmptyStrings();

  private final Cache internalCache;
  private final int defaultMaxAge;

  private SafeMethodCachingInterceptor(Cache cache, int defaultMaxAge) {
    this.internalCache = cache;
    this.defaultMaxAge = defaultMaxAge;
  }

  @Override
  public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
      final MethodDescriptor<ReqT, RespT> method, final CallOptions callOptions, Channel next) {
    // Currently only unary methods can be marked safe, but check anyways.
    if (!method.isSafe() || method.getType() != MethodDescriptor.MethodType.UNARY) {
      return next.newCall(method, callOptions);
    }
    final String fullMethodName = method.getFullMethodName();
    return new ForwardingClientCall.SimpleForwardingClientCall<ReqT, RespT>(
        next.newCall(method, callOptions)) {
      private Listener<RespT> interceptedListener;
      private Key requestKey;
      // Flips to false once we know the response must not (or need not) be cached.
      private boolean cacheResponse = true;
      // Non-null when the cache call options could not be satisfied; reported via onClose.
      private volatile String cacheOptionsErrorMsg;

      @Override
      public void start(Listener<RespT> responseListener, Metadata headers) {
        interceptedListener =
            new ForwardingClientCallListener.SimpleForwardingClientCallListener<RespT>(
                responseListener) {
              // Freshness deadline for the entry; assigned in onHeaders when caching is allowed.
              private Deadline deadline;
              // -1 means the server sent no parseable max-age directive.
              private int maxAge = -1;

              @Override
              public void onHeaders(Metadata headers) {
                // Honor the server's cache-control directives before deciding to cache.
                Iterable<String> cacheControlHeaders = headers.getAll(CACHE_CONTROL_KEY);
                if (cacheResponse && cacheControlHeaders != null) {
                  for (String cacheControlHeader : cacheControlHeaders) {
                    for (String directive : CACHE_CONTROL_SPLITTER.split(cacheControlHeader)) {
                      if (directive.equalsIgnoreCase("no-cache")) {
                        cacheResponse = false;
                        break;
                      } else if (directive.equalsIgnoreCase("no-store")) {
                        cacheResponse = false;
                        break;
                      } else if (directive.equalsIgnoreCase("no-transform")) {
                        cacheResponse = false;
                        break;
                      } else if (directive.toLowerCase(Locale.US).startsWith("max-age")) {
                        String[] parts = directive.split("=");
                        if (parts.length == 2) {
                          try {
                            maxAge = Integer.parseInt(parts[1]);
                          } catch (NumberFormatException e) {
                            // A bad max-age only loses the explicit TTL; other directives
                            // in the same header are still processed.
                            Log.e(TAG, "max-age directive failed to parse", e);
                            continue;
                          }
                        }
                      }
                    }
                  }
                }
                if (cacheResponse) {
                  // Entry goes stale after max-age (if provided) or the interceptor default.
                  if (maxAge > -1) {
                    deadline = Deadline.after(maxAge, TimeUnit.SECONDS);
                  } else {
                    deadline = Deadline.after(defaultMaxAge, TimeUnit.SECONDS);
                  }
                }
                super.onHeaders(headers);
              }

              @Override
              public void onMessage(RespT message) {
                if (cacheResponse && !deadline.isExpired()) {
                  Value value = new Value((MessageLite) message, deadline);
                  internalCache.put(requestKey, value);
                }
                super.onMessage(message);
              }

              @Override
              public void onClose(Status status, Metadata trailers) {
                if (cacheOptionsErrorMsg != null) {
                  // UNAVAILABLE is the canonical gRPC mapping for HTTP response code 504 (as used
                  // by the built-in Android HTTP request cache).
                  super.onClose(
                      Status.UNAVAILABLE.withDescription(cacheOptionsErrorMsg), new Metadata());
                } else {
                  super.onClose(status, trailers);
                }
              }
            };
        delegate().start(interceptedListener, headers);
      }

      @Override
      public void sendMessage(ReqT message) {
        boolean noCache = callOptions.getOption(NO_CACHE_CALL_OPTION);
        boolean onlyIfCached = callOptions.getOption(ONLY_IF_CACHED_CALL_OPTION);
        if (noCache) {
          if (onlyIfCached) {
            // The two options contradict each other; cancel and surface the error in onClose.
            cacheOptionsErrorMsg = "Unsatisfiable Request (no-cache and only-if-cached conflict)";
            super.cancel(cacheOptionsErrorMsg, null);
            return;
          }
          cacheResponse = false;
          super.sendMessage(message);
          return;
        }
        // Check the cache
        requestKey = new Key(fullMethodName, (MessageLite) message);
        Value cachedResponse = internalCache.get(requestKey);
        if (cachedResponse != null) {
          if (cachedResponse.maxAgeDeadline.isExpired()) {
            // Stale entry: evict and fall through to the network path.
            internalCache.remove(requestKey);
          } else {
            cacheResponse = false; // already cached
            // Serve from cache: deliver the cached message and complete the call locally.
            interceptedListener.onMessage((RespT) cachedResponse.response);
            Metadata metadata = new Metadata();
            interceptedListener.onClose(Status.OK, metadata);
            return;
          }
        }
        if (onlyIfCached) {
          cacheOptionsErrorMsg =
              "Unsatisfiable Request (only-if-cached set, but value not in cache)";
          super.cancel(cacheOptionsErrorMsg, null);
          return;
        }
        super.sendMessage(message);
      }

      @Override
      public void halfClose() {
        if (cacheOptionsErrorMsg != null) {
          // already canceled
          return;
        }
        super.halfClose();
      }
    };
  }
}
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.gateway.transport.ws.util;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;

import org.apache.mina.util.Base64;
import org.kaazing.gateway.transport.http.HttpAcceptSession;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.util.ws.WebSocketWireProtocol;

/**
 * Static helpers for the WebSocket handshake: key digests for the legacy Hixie drafts,
 * the RFC 6455 accept hash, extension length encoding, wire-protocol version guessing
 * and sub-protocol negotiation.
 */
public class WsUtils {

    private static final int DIGEST_LENGTH = 16;

    public static final String SEC_WEB_SOCKET_VERSION = "Sec-WebSocket-Version";
    public static final String SEC_WEB_SOCKET_KEY = "Sec-WebSocket-Key";
    public static final String SEC_WEB_SOCKET_KEY1 = "Sec-WebSocket-Key1";
    public static final String SEC_WEB_SOCKET_KEY2 = "Sec-WebSocket-Key2";

    // Fixed GUID appended to the client key when computing Sec-WebSocket-Accept (RFC 6455, 1.3).
    private static final String WEBSOCKET_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";

    private WsUtils() {
        // no instances
    }

    /*
     * Parse a string as an integer according to the algorithm defined in the WebSocket protocol
     * (draft Hixie-76): keep the decimal digits, then divide the resulting number by the count
     * of space characters in the key.
     *
     * @param key Sec-WebSocket-Key[1|2] key value
     * @return the decoded key value
     * @throws WsDigestException if the key contains no spaces or no digits (malformed handshake);
     *         previously such keys surfaced as ArithmeticException / NumberFormatException
     */
    private static int parseIntKey(CharSequence key) throws WsDigestException {
        int numSpaces = 0;
        StringBuilder digits = new StringBuilder();
        // build a string from the numerical characters
        for (int i = 0; i < key.length(); i++) {
            char c = key.charAt(i);
            if (' ' == c) {
                numSpaces++;
            } else if (Character.isDigit(c)) {
                digits.append(c);
            }
        }
        if (numSpaces == 0 || digits.length() == 0) {
            // Per Hixie-76 a key must contain at least one space and one digit; reject the
            // handshake cleanly instead of dividing by zero or failing to parse an empty string.
            throw new WsDigestException("Malformed WebSocket key: \"" + key + "\"");
        }
        String s = digits.toString();
        // This may be greater than the max value for signed integers
        long n = Long.parseLong(s);
        // result is the numerical value divided by the number of spaces
        return (int) (n / numSpaces);
    }

    /*
     * Compute the MD5 sum of the three WebSocket keys
     * (draft Hixie-76)
     *
     * @param key1 Sec-WebSocket-Key1 value
     * @param key2 Sec-WebSocket-Key2 value
     * @param key3 8 bytes immediately following WebSocket upgrade request
     * @return 16-byte MD5 digest of key1 || key2 || key3
     * @throws WsDigestException if any key is malformed
     * @throws NoSuchAlgorithmException if MD5 is unavailable
     */
    public static ByteBuffer computeHash(CharSequence key1, CharSequence key2, ByteBuffer key3)
            throws WsDigestException, NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        ByteBuffer buf = ByteBuffer.allocate(DIGEST_LENGTH);
        buf.putInt(parseIntKey(key1));
        buf.putInt(parseIntKey(key2));
        // key3 must be exactly 8 bytes
        if (key3.remaining() != 8) {
            throw new WsDigestException("WebSocket key3 must be exactly 8 bytes");
        }
        buf.put(key3);
        buf.flip();
        byte[] input = new byte[DIGEST_LENGTH];
        buf.get(input, 0, DIGEST_LENGTH);
        byte[] digest = md5.digest(input);
        return ByteBuffer.wrap(digest);
    }

    /*
     * Compute the Sec-WebSocket-Accept key (RFC-6455):
     * base64(SHA1(key + GUID)).
     *
     * @param key the client's Sec-WebSocket-Key header value
     * @return the base64-encoded accept hash
     * @throws Exception if SHA-1 is unavailable
     */
    public static String AcceptHash(String key) throws Exception {
        String input = key + WEBSOCKET_GUID;
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        // RFC 6455 hashes the ASCII bytes of key+GUID; using an explicit charset avoids
        // depending on the platform default encoding.
        byte[] hash = sha1.digest(input.getBytes(StandardCharsets.US_ASCII));
        byte[] output = Base64.encodeBase64(hash);
        return new String(output, StandardCharsets.US_ASCII);
    }

    /**
     * Returns how many bytes {@link #encodeLength} will emit for the given value:
     * one byte per 7 bits of significance.
     */
    public static int calculateEncodedLengthSize(int lengthValue) {
        int ceilLog2LengthPlus1 = 32 - Integer.numberOfLeadingZeros(lengthValue);
        switch (ceilLog2LengthPlus1) {
        case 0:
        case 1:
        case 2:
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
            return 1;
        case 8:
        case 9:
        case 10:
        case 11:
        case 12:
        case 13:
        case 14:
            return 2;
        case 15:
        case 16:
        case 17:
        case 18:
        case 19:
        case 20:
        case 21:
            return 3;
        case 22:
        case 23:
        case 24:
        case 25:
        case 26:
        case 27:
        case 28:
            return 4;
        case 29:
        case 30:
        case 31:
            return 5;
        default:
            throw new IllegalArgumentException("Negative length is not supported");
        }
    }

    /**
     * Writes {@code lengthValue} to {@code buf} as a variable-length quantity:
     * 7 bits per byte, most-significant group first, high bit set on every byte
     * except the last.
     */
    public static void encodeLength(ByteBuffer buf, int lengthValue) {
        int lengthInProgress = lengthValue;

        // Length-bytes are written out in order from most to
        // least significant, but are computed most efficiently (using
        // bit shifts) from least to most significant.  An integer serves
        // as a temporary storage, which is then written out in reversed
        // order.
        int howMany = 0;
        long byteHolder = 0;
        do {
            byteHolder <<= 8;
            byte lv = (byte) (lengthInProgress & 0x7F);
            byteHolder |= lv;
            lengthInProgress >>= 7;
            howMany++;
        } while (lengthInProgress > 0);

        do {
            byte bv = (byte) (byteHolder & 0xFF);
            byteHolder >>= 8;
            // The last length byte does not have the highest bit set
            if (howMany != 1) {
                bv |= (byte) 0x80;
            }
            buf.put(bv);
        } while (--howMany > 0);
    }

    /**
     * Guesses the WebSocket wire-protocol version of a request. A missing/empty
     * Sec-WebSocket-Version header means a Hixie draft (76 when both Hixie keys are
     * present, otherwise 75); an unparseable version yields {@code null}.
     */
    public static WebSocketWireProtocol guessWireProtocolVersion(HttpRequestMessage httpRequest) {
        String httpRequestVersionHeader = httpRequest.getHeader(SEC_WEB_SOCKET_VERSION);
        if (httpRequestVersionHeader == null || httpRequestVersionHeader.length() == 0) {
            // Let's see if the request looks like Hixie 75 or 76
            if (httpRequest.getHeader(SEC_WEB_SOCKET_KEY1) != null &&
                httpRequest.getHeader(SEC_WEB_SOCKET_KEY2) != null) {
                return WebSocketWireProtocol.HIXIE_76;
            } else {
                return WebSocketWireProtocol.HIXIE_75;
            }
        } else {
            try {
                return WebSocketWireProtocol.valueOf(Integer.parseInt(httpRequestVersionHeader));
            } catch (NumberFormatException e) {
                return null;
            }
        }
    }

    /**
     * Negotiates the WebSocket sub-protocol: picks the first server protocol also requested
     * by the client and writes it to the response header. Returns {@code null} when the client
     * requested nothing or the server tolerates no match ({@code null} in its list); otherwise
     * a failed negotiation closes the session and throws.
     *
     * @throws WsHandshakeNegotiationException if no common protocol exists and the server
     *         does not accept the absence of one
     */
    public static String negotiateWebSocketProtocol(HttpAcceptSession session,
                                                    String protocolHeaderName,
                                                    List<String> clientRequestedWsProtocols,
                                                    List<String> serverWsProtocols)
            throws WsHandshakeNegotiationException {
        if (clientRequestedWsProtocols != null) {
            List<String> wsCandidateProtocols = new ArrayList<String>(serverWsProtocols);
            wsCandidateProtocols.retainAll(clientRequestedWsProtocols);
            if (wsCandidateProtocols.isEmpty()) {
                if (!serverWsProtocols.contains(null)) {
                    session.setStatus(HttpStatus.CLIENT_NOT_FOUND);
                    session.setReason("WebSocket SubProtocol Not Found");
                    session.close(false);
                    throw new WsHandshakeNegotiationException("WebSocket SubProtocol Not Found");
                }
            } else {
                final String chosenProtocol = wsCandidateProtocols.get(0);
                session.addWriteHeader(protocolHeaderName, chosenProtocol);
                return chosenProtocol;
            }
        }
        return null;
    }
}
/* * Copyright 2016 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server.grpc; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.linecorp.armeria.common.stream.SubscriptionOption.WITH_POOLED_OBJECTS; import static java.util.Objects.requireNonNull; import java.time.Duration; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; import javax.annotation.Nullable; import org.curioswitch.common.protobuf.json.MessageMarshaller; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ImmutableList; import com.linecorp.armeria.common.HttpHeaders; import com.linecorp.armeria.common.HttpRequest; import com.linecorp.armeria.common.HttpResponse; import com.linecorp.armeria.common.HttpResponseWriter; import com.linecorp.armeria.common.HttpStatus; import com.linecorp.armeria.common.MediaType; import com.linecorp.armeria.common.ResponseHeaders; import com.linecorp.armeria.common.ResponseHeadersBuilder; import com.linecorp.armeria.common.SerializationFormat; import com.linecorp.armeria.common.grpc.GrpcSerializationFormats; import 
com.linecorp.armeria.common.grpc.protocol.ArmeriaMessageDeframer; import com.linecorp.armeria.common.grpc.protocol.GrpcHeaderNames; import com.linecorp.armeria.common.util.SafeCloseable; import com.linecorp.armeria.common.util.TimeoutMode; import com.linecorp.armeria.internal.common.grpc.GrpcJsonUtil; import com.linecorp.armeria.internal.common.grpc.GrpcStatus; import com.linecorp.armeria.internal.common.grpc.MetadataUtil; import com.linecorp.armeria.internal.common.grpc.TimeoutHeaderUtil; import com.linecorp.armeria.server.AbstractHttpService; import com.linecorp.armeria.server.Route; import com.linecorp.armeria.server.ServiceConfig; import com.linecorp.armeria.server.ServiceRequestContext; import io.grpc.Codec.Identity; import io.grpc.CompressorRegistry; import io.grpc.DecompressorRegistry; import io.grpc.Metadata; import io.grpc.MethodDescriptor; import io.grpc.Server; import io.grpc.ServerCall; import io.grpc.ServerMethodDefinition; import io.grpc.ServerServiceDefinition; import io.grpc.Status; import io.grpc.protobuf.services.ProtoReflectionService; /** * The framed {@link GrpcService} implementation. 
*/ final class FramedGrpcService extends AbstractHttpService implements GrpcService { private static final Logger logger = LoggerFactory.getLogger(FramedGrpcService.class); private final HandlerRegistry registry; private final Set<Route> routes; private final DecompressorRegistry decompressorRegistry; private final CompressorRegistry compressorRegistry; private final Set<SerializationFormat> supportedSerializationFormats; @Nullable private final MessageMarshaller jsonMarshaller; private final int maxOutboundMessageSizeBytes; private final boolean useBlockingTaskExecutor; private final boolean unsafeWrapRequestBuffers; private final boolean useClientTimeoutHeader; private final String advertisedEncodingsHeader; @Nullable private final ProtoReflectionService protoReflectionService; private final Map<SerializationFormat, ResponseHeaders> defaultHeaders; private int maxInboundMessageSizeBytes; FramedGrpcService(HandlerRegistry registry, Set<Route> routes, DecompressorRegistry decompressorRegistry, CompressorRegistry compressorRegistry, Set<SerializationFormat> supportedSerializationFormats, Consumer<MessageMarshaller.Builder> jsonMarshallerCustomizer, int maxOutboundMessageSizeBytes, boolean useBlockingTaskExecutor, boolean unsafeWrapRequestBuffers, boolean useClientTimeoutHeader, @Nullable ProtoReflectionService protoReflectionService, int maxInboundMessageSizeBytes) { this.registry = requireNonNull(registry, "registry"); this.routes = requireNonNull(routes, "routes"); this.decompressorRegistry = requireNonNull(decompressorRegistry, "decompressorRegistry"); this.compressorRegistry = requireNonNull(compressorRegistry, "compressorRegistry"); this.supportedSerializationFormats = supportedSerializationFormats; this.useClientTimeoutHeader = useClientTimeoutHeader; this.protoReflectionService = protoReflectionService; jsonMarshaller = jsonMarshaller(registry, supportedSerializationFormats, jsonMarshallerCustomizer); this.maxOutboundMessageSizeBytes = 
maxOutboundMessageSizeBytes; this.useBlockingTaskExecutor = useBlockingTaskExecutor; this.unsafeWrapRequestBuffers = unsafeWrapRequestBuffers; this.maxInboundMessageSizeBytes = maxInboundMessageSizeBytes; advertisedEncodingsHeader = String.join(",", decompressorRegistry.getAdvertisedMessageEncodings()); defaultHeaders = supportedSerializationFormats .stream() .map(format -> { final ResponseHeadersBuilder builder = ResponseHeaders .builder(HttpStatus.OK) .contentType(format.mediaType()) .add(GrpcHeaderNames.GRPC_ENCODING, Identity.NONE.getMessageEncoding()); if (!advertisedEncodingsHeader.isEmpty()) { builder.add(GrpcHeaderNames.GRPC_ACCEPT_ENCODING, advertisedEncodingsHeader); } return new SimpleImmutableEntry<>(format, builder.build()); }) .collect(toImmutableMap(Entry::getKey, Entry::getValue)); } @Override protected HttpResponse doPost(ServiceRequestContext ctx, HttpRequest req) throws Exception { final MediaType contentType = req.contentType(); final SerializationFormat serializationFormat = findSerializationFormat(contentType); if (serializationFormat == null) { return HttpResponse.of(HttpStatus.UNSUPPORTED_MEDIA_TYPE, MediaType.PLAIN_TEXT_UTF_8, "Missing or invalid Content-Type header."); } ctx.logBuilder().serializationFormat(serializationFormat); final String methodName = GrpcRequestUtil.determineMethod(ctx); if (methodName == null) { return HttpResponse.of(HttpStatus.BAD_REQUEST, MediaType.PLAIN_TEXT_UTF_8, "Invalid path."); } final ServerMethodDefinition<?, ?> method = registry.lookupMethod(methodName); if (method == null) { return HttpResponse.of( (ResponseHeaders) ArmeriaServerCall.statusToTrailers( ctx, Status.UNIMPLEMENTED.withDescription("Method not found: " + methodName), new Metadata(), false)); } if (useClientTimeoutHeader) { final String timeoutHeader = req.headers().get(GrpcHeaderNames.GRPC_TIMEOUT); if (timeoutHeader != null) { try { final long timeout = TimeoutHeaderUtil.fromHeaderValue(timeoutHeader); if (timeout == 0) { 
ctx.clearRequestTimeout(); } else { ctx.setRequestTimeout(TimeoutMode.SET_FROM_NOW, Duration.ofNanos(timeout)); } } catch (IllegalArgumentException e) { return HttpResponse.of( (ResponseHeaders) ArmeriaServerCall.statusToTrailers( ctx, GrpcStatus.fromThrowable(e), new Metadata(), false)); } } } ctx.logBuilder().name(methodName); ctx.logBuilder().deferRequestContent(); ctx.logBuilder().deferResponseContent(); final HttpResponseWriter res = HttpResponse.streaming(); final ArmeriaServerCall<?, ?> call = startCall( methodName, method, ctx, req.headers(), res, serializationFormat); if (call != null) { ctx.setRequestTimeoutHandler(() -> call.close(Status.CANCELLED, new Metadata())); req.subscribe(call.messageReader(), ctx.eventLoop(), WITH_POOLED_OBJECTS); req.whenComplete().handleAsync(call.messageReader(), ctx.eventLoop()); } return res; } @Nullable private <I, O> ArmeriaServerCall<I, O> startCall( String fullMethodName, ServerMethodDefinition<I, O> methodDef, ServiceRequestContext ctx, HttpHeaders headers, HttpResponseWriter res, SerializationFormat serializationFormat) { final ArmeriaServerCall<I, O> call = new ArmeriaServerCall<>( headers, methodDef.getMethodDescriptor(), compressorRegistry, decompressorRegistry, res, maxInboundMessageSizeBytes, maxOutboundMessageSizeBytes, ctx, serializationFormat, jsonMarshaller, unsafeWrapRequestBuffers, useBlockingTaskExecutor, defaultHeaders.get(serializationFormat)); final ServerCall.Listener<I> listener; try (SafeCloseable ignored = ctx.push()) { listener = methodDef.getServerCallHandler().startCall(call, MetadataUtil.copyFromHeaders(headers)); } catch (Throwable t) { call.setListener(new EmptyListener<>()); call.close(GrpcStatus.fromThrowable(t), new Metadata()); logger.warn( "Exception thrown from streaming request stub method before processing any request data" + " - this is likely a bug in the stub implementation."); return null; } if (listener == null) { // This will never happen for normal generated stubs but could 
conceivably happen for manually // constructed ones. throw new NullPointerException( "startCall() returned a null listener for method " + fullMethodName); } call.setListener(listener); return call; } @Override public void serviceAdded(ServiceConfig cfg) { if (maxInboundMessageSizeBytes == ArmeriaMessageDeframer.NO_MAX_INBOUND_MESSAGE_SIZE) { maxInboundMessageSizeBytes = (int) Math.min(cfg.maxRequestLength(), Integer.MAX_VALUE); } if (protoReflectionService != null) { final Map<String, ServerServiceDefinition> grpcServices = cfg.server().config().virtualHosts().stream() .flatMap(host -> host.serviceConfigs().stream()) .map(serviceConfig -> serviceConfig.service().as(FramedGrpcService.class)) .filter(Objects::nonNull) .flatMap(service -> service.services().stream()) // Armeria allows the same service to be registered multiple times at different // paths, but proto reflection service only supports a single instance of each // service so we dedupe here. .collect(toImmutableMap(def -> def.getServiceDescriptor().getName(), Function.identity(), (a, b) -> a)); protoReflectionService.notifyOnBuild(new Server() { @Override public Server start() { throw new UnsupportedOperationException(); } @Override public List<ServerServiceDefinition> getServices() { return ImmutableList.copyOf(grpcServices.values()); } @Override public List<ServerServiceDefinition> getImmutableServices() { // NB: This will probably go away in favor of just getServices above, so we // implement both the same. // https://github.com/grpc/grpc-java/issues/4600 return getServices(); } @Override public List<ServerServiceDefinition> getMutableServices() { // Armeria does not have the concept of mutable services. 
return ImmutableList.of(); } @Override public Server shutdown() { throw new UnsupportedOperationException(); } @Override public Server shutdownNow() { throw new UnsupportedOperationException(); } @Override public boolean isShutdown() { throw new UnsupportedOperationException(); } @Override public boolean isTerminated() { throw new UnsupportedOperationException(); } @Override public boolean awaitTermination(long timeout, TimeUnit unit) { throw new UnsupportedOperationException(); } @Override public void awaitTermination() { throw new UnsupportedOperationException(); } }); } } @Override public boolean isFramed() { return true; } @Override public List<ServerServiceDefinition> services() { return registry.services(); } @Override public Set<SerializationFormat> supportedSerializationFormats() { return supportedSerializationFormats; } @Nullable private SerializationFormat findSerializationFormat(@Nullable MediaType contentType) { if (contentType == null) { return null; } for (SerializationFormat format : supportedSerializationFormats) { if (format.isAccepted(contentType)) { return format; } } return null; } @Nullable private static MessageMarshaller jsonMarshaller( HandlerRegistry registry, Set<SerializationFormat> supportedSerializationFormats, Consumer<MessageMarshaller.Builder> jsonMarshallerCustomizer) { if (supportedSerializationFormats.stream().noneMatch(GrpcSerializationFormats::isJson)) { return null; } final List<MethodDescriptor<?, ?>> methods = registry.services().stream() .flatMap(service -> service.getMethods().stream()) .map(ServerMethodDefinition::getMethodDescriptor) .collect(toImmutableList()); return GrpcJsonUtil.jsonMarshaller(methods, jsonMarshallerCustomizer); } @Override public Set<Route> routes() { return routes; } private static class EmptyListener<T> extends ServerCall.Listener<T> {} }
package com.gps.capstone.traceroute.GLFiles;

import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.text.InputType;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Animation;
import android.view.animation.RotateAnimation;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.github.amlcurran.showcaseview.OnShowcaseEventListener;
import com.github.amlcurran.showcaseview.ShowcaseView;
import com.github.amlcurran.showcaseview.targets.ViewTarget;
import com.github.clans.fab.FloatingActionButton;
import com.gps.capstone.traceroute.AboutActivity;
import com.gps.capstone.traceroute.BasicActivity;
import com.gps.capstone.traceroute.R;
import com.gps.capstone.traceroute.UserInfoActivity;
import com.gps.capstone.traceroute.Utils.BusProvider;
import com.gps.capstone.traceroute.Utils.SensorUtil.EventType;
import com.gps.capstone.traceroute.Utils.SharedPrefUtil;
import com.gps.capstone.traceroute.sensors.SensorDataProvider;
import com.gps.capstone.traceroute.sensors.events.NewDataEvent;
import com.gps.capstone.traceroute.sensors.events.NewLocationEvent;
import com.gps.capstone.traceroute.sensors.events.PathCompletion;
import com.squareup.otto.Subscribe;

/**
 * Activity hosting the OpenGL path view. Wires the floating action buttons
 * (start / stop / save path), the compass pointer, an Otto event bus for
 * sensor data, and a first-run ShowcaseView tutorial. Rendering mode is
 * controlled through the public static flags below, which the GL code
 * elsewhere in this package reads.
 */
public class OpenGLActivity extends BasicActivity
        implements OnClickListener, OnShowcaseEventListener {
    // Tag for debugging
    private final String TAG = getClass().getSimpleName();
    // Defines whether the user is in control of the map or not
    public static boolean USER_CONTROL;
    // Defines whether the camera follows path
    public static boolean FOLLOW_PATH;
    // Whether the "shape" visualization is active (set true on resume, false while recording a path)
    public static boolean USE_SHAPE;
    // Whether orientation comes from the gyroscope (toggled by tapping the compass)
    public static boolean USE_GYROSCOPE;
    // The source of our sensor data
    private SensorDataProvider mDataProvider;
    // Number of step rows appended to the debug list so far
    private int mStepCount;
    // Tutorial overlay; non-null only while the showcase flow is active
    private ShowcaseView mSV;
    // Compass pointer image, rotated by heading-change animations
    private ImageView mPointer;
    private FloatingActionButton mFabStart;
    private FloatingActionButton mFabStop;
    private FloatingActionButton mFabSave;
    // Swipe-dismissable summary card shown when a path completes
    private View mCard;
    // Root frame the summary card is added to / removed from
    private FrameLayout mFrame;
    // Showcase step counter: 1 means the first tutorial page is up and a
    // second page should follow when it hides
    int n;

    /**
     * Binds views and puts the FABs in their initial state
     * (only "start" visible, stop/save hidden without animation).
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_open_gl);
        mStepCount = 0;
        mFrame = (FrameLayout) findViewById(R.id.frame);
        mPointer = (ImageView) findViewById(R.id.compass);
        mFabStart = (FloatingActionButton) findViewById(R.id.fab_start);
        mFabStop = (FloatingActionButton) findViewById(R.id.fab_stop);
        mFabSave = (FloatingActionButton) findViewById(R.id.fab_save);
        mFabStop.hide(false);
        mFabSave.hide(false);
        n = 0;
    }

    /**
     * Re-registers listeners and the sensor provider, resets the mode flags
     * to their defaults, and redirects to {@link UserInfoActivity} if user
     * info was never collected (or launches the tutorial on first run).
     */
    @Override
    protected void onResume() {
        super.onResume();
        // Keep the screen awake while the GL view is in front
        getWindow().addFlags(LayoutParams.FLAG_KEEP_SCREEN_ON);
        mFabStart.setOnClickListener(this);
        mFabStop.setOnClickListener(this);
        mFabSave.setOnClickListener(this);
        // A fresh provider every resume; unregistered again in onPause
        mDataProvider = new SensorDataProvider(this);
        mPointer.setOnClickListener(this);
        USE_SHAPE = true;
        FOLLOW_PATH = false;
        USER_CONTROL = false;
        USE_GYROSCOPE = true;
        mDataProvider.register();
        mDataProvider.rotateModeFromGyroscope(USE_GYROSCOPE);
        BusProvider.getInstance().register(this);
        if (!SharedPrefUtil.getBoolean(this, R.string.pref_key_got_user_info, false)) {
            // No user profile yet: collect it first, drop this activity from history
            Intent i = new Intent(this, UserInfoActivity.class);
            i.addFlags(Intent.FLAG_ACTIVITY_NO_HISTORY);
            startActivity(i);
            finish();
        } else if (SharedPrefUtil.getBoolean(this, R.string.pref_key_first_run, true)) {
            SharedPrefUtil.putBoolean(this, R.string.pref_key_first_run, false);
            firstRun();
        }
    }

    /**
     * Handles the "about" menu item.
     * NOTE(review): when super handles the item this still returns false,
     * which tells the framework the event was unhandled — confirm intended.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (!super.onOptionsItemSelected(item) && item.getItemId() == R.id.about) {
            about();
            return true;
        }
        return false;
    }

    /** Launches the About screen. */
    private void about() {
        Intent i = new Intent(this, AboutActivity.class);
        startActivity(i);
    }

    /**
     * Shows the first tutorial page pointing at the start FAB and bumps the
     * showcase step counter so onShowcaseViewHide shows the second page.
     */
    private void firstRun() {
        mSV = new ShowcaseView.Builder(this)
                .setContentTitle(R.string.showcase_path_start_text)
                .setContentText(R.string.showcase_path_start_description)
                .setTarget(new ViewTarget(mFabStart))
                .doNotBlockTouches()
                .hideOnTouchOutside()
                .setStyle(com.github.amlcurran.showcaseview.R.style.ShowcaseButton)
                .setShowcaseEventListener(this).build();
        mSV.hideButton();
        n++;
    }

    /** Mirror of onResume: unregister sensors/bus and release the wake flag. */
    @Override
    protected void onPause() {
        super.onPause();
        mDataProvider.unregister();
        BusProvider.getInstance().unregister(this);
        getWindow().clearFlags(LayoutParams.FLAG_KEEP_SCREEN_ON);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_open_gl, menu);
        return true;
    }

    /**
     * Central click dispatcher for the three FABs and the compass pointer.
     * A visible showcase page swallows the first tap. The summary card (if
     * shown) is dismissed before most actions.
     */
    @Override
    public void onClick(View v) {
        // Don't accept input when this is shown for now
        if (mSV != null && mSV.isShown()) {
            mSV.hide();
            return;
        }
        int id = v.getId();
        if (id == R.id.fab_start) {
            if (mCard != null && mCard.isShown()) {
                mFrame.removeView(mCard);
            }
            // Swap FABs: only "stop" visible while recording
            mFabStart.hide(true);
            mFabSave.hide(true);
            mFabStop.show(true);
            startPath();
        } else if (id == R.id.fab_stop) {
            mFabStart.show(true);
            mFabSave.show(true);
            mFabStop.hide(true);
            stopPath();
        } else if (id == R.id.fab_save) {
            if (mCard != null && mCard.isShown()) {
                mFrame.removeView(mCard);
            }
            saveAction();
        } else if (id == R.id.compass) {
            if (mCard != null && mCard.isShown()) {
                mFrame.removeView(mCard);
            }
            // Switch using the gyroscope
            USE_GYROSCOPE = !USE_GYROSCOPE;
            USER_CONTROL = !USER_CONTROL;
            mDataProvider.rotateModeFromGyroscope(USE_GYROSCOPE);
        } else {
            Log.e(TAG, "WHAT THE HELL DID WE CLICK?");
        }
    }

    /**
     * Starts the path drawing and listening
     */
    private void startPath() {
        FOLLOW_PATH = true;
        USE_SHAPE = false;
        USE_GYROSCOPE = false;
        USER_CONTROL = false;
        mDataProvider.rotateModeFromGyroscope(false);
        mDataProvider.startPath();
    }

    /**
     * Stops the path listening and allows for the 3d moving of the path
     */
    private void stopPath() {
        FOLLOW_PATH = false;
        USE_GYROSCOPE = false;
        USER_CONTROL = true;
        mDataProvider.rotateModeFromGyroscope(false);
        // No longer want to be getting data?
        mDataProvider.stopPath();
    }

    /**
     * Dialog interface that will get the path name and save it to file.
     * Clears the not-focusable flags and forces the soft keyboard so the
     * EditText is typeable inside the dialog window.
     */
    private void saveAction() {
        AlertDialog.Builder builder = new Builder(this);
        builder.setTitle(R.string.save_path_dialog_title);
        final EditText editText = new EditText(this);
        editText.setInputType(InputType.TYPE_CLASS_TEXT);
        builder.setView(editText);
        builder.setPositiveButton(R.string.save_path_positive_button, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                String pathName = editText.getText().toString();
                String message;
                // Report success or failure of the save with the chosen name
                if (mDataProvider.saveCurrentPath(pathName)) {
                    message = String.format(getString(R.string.successful_save), pathName);
                } else {
                    message = String.format(getString(R.string.unsuccessful_save), pathName);
                }
                Toast.makeText(OpenGLActivity.this, message, Toast.LENGTH_SHORT).show();
            }
        }).setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
            }
        });
        AlertDialog dialog = builder.create();
        dialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE
                | WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
        dialog.getWindow().setSoftInputMode(LayoutParams.SOFT_INPUT_STATE_VISIBLE);
        dialog.show();
    }

    /* Data change listeners */
    // Last animated compass heading, in degrees [0, 360)
    private float mHeading = 0;
    // Last altitude value received from an ALTITUDE_CHANGE event
    private float mAltitude;

    /**
     * Bus callback for sensor events. Direction changes (> 1 degree) animate
     * the compass pointer; altitude changes are just cached.
     */
    @Subscribe
    public void onDataChange(NewDataEvent newDataEvent) {
        if (newDataEvent.type == EventType.DIRECTION_CHANGE) {
            // values[0] is in radians here (converted with toDegrees), then
            // normalized into [0, 360)
            float heading = ((float) (Math.round(Math.toDegrees(newDataEvent.values[0]) + 360) % 360));
            // Ignore sub-degree jitter
            if (Math.abs(heading - mHeading) > 1) {
                RotateAnimation ra;
                float end = heading;
                // Special case for when it is switching from greater to smaller
                // (crossing 0/360) so the needle takes the short way around
                if (heading < 20 && mHeading > 340) {
                    end = heading + 360;
                } else if (heading > 340 && mHeading < 20) {
                    end = heading - 360;
                }
                ra = new RotateAnimation(
                        mHeading,
                        end,
                        Animation.RELATIVE_TO_SELF, 0.5f,
                        Animation.RELATIVE_TO_SELF, 0.5f
                );
                ra.setInterpolator(new AccelerateDecelerateInterpolator());
                ra.setDuration(250);
                ra.setFillAfter(true);
                mPointer.startAnimation(ra);
                mHeading = heading;
                ((TextView) findViewById(R.id.heading_direction)).setText("Heading Direction : " + mHeading);
            }
        } else if (newDataEvent.type == EventType.ALTITUDE_CHANGE) {
            mAltitude = newDataEvent.values[0];
        }
    }

    /**
     * Shows a swipe-to-dismiss summary card (steps, distance, altitudes)
     * when a recorded path completes.
     * NOTE(review): path.distance appears to be in inches — Math.round(...)
    *  returns long so "/ 12" truncates to whole feet; confirm intended.
     */
    @Subscribe
    public void onPathEnd(PathCompletion path) {
        mCard = LayoutInflater.from(this).inflate(R.layout.card, null);
        // Horizontal-drag handler: fade the card with drag distance and
        // remove it once dragged past a third of the parent width
        View.OnTouchListener touchListener = new OnTouchListener() {
            float startX = 0;
            float deltaX = 0;
            float originalX = 0;

            @Override
            public boolean onTouch(View v, MotionEvent event) {
                switch (event.getActionMasked()) {
                    case MotionEvent.ACTION_DOWN:
                        startX = event.getRawX();
                        originalX = v.getX();
                        break;
                    case MotionEvent.ACTION_MOVE:
                        deltaX = startX - event.getRawX();
                        v.setX(originalX - deltaX);
                        deltaX = Math.abs(deltaX);
                        // Fully transparent at half the card width
                        v.setAlpha(1 - (deltaX / (v.getWidth() / 2)));
                        break;
                    case MotionEvent.ACTION_UP:
                        if (deltaX > ((View) v.getParent()).getWidth() / 3) {
                            ((FrameLayout) findViewById(R.id.frame)).removeView(v);
                        } else {
                            // Not far enough: snap back and restore opacity
                            v.setX(originalX);
                            v.setAlpha(1);
                        }
                        break;
                }
                // Only claim touches landing in the top half of the card
                float y = event.getRawY();
                float height = v.getHeight() / 2;
                float viewY = v.getY();
                return y >= viewY && y <= viewY + height;
            }
        };
        mCard.setOnTouchListener(touchListener);
        ((TextView) mCard.findViewById(R.id.total_steps)).setText(String.valueOf(path.steps));
        double distanceFT = Math.round(path.distance) / 12;
        double distanceIN = Math.round(path.distance) % 12;
        ((TextView) mCard.findViewById(R.id.total_distance)).setText(distanceFT + " feet " + distanceIN + " inches");
        int roundedInitialAlt = Math.round(path.initialAltitude);
        int roundedFinalAlt = Math.round(path.finalAltitude);
        ((TextView) mCard.findViewById(R.id.init_alt)).setText(roundedInitialAlt + " feet");
        ((TextView) mCard.findViewById(R.id.final_alt)).setText(roundedFinalAlt + " feet");
        ((TextView) mCard.findViewById(R.id.alt_change)).setText(roundedFinalAlt - roundedInitialAlt + " feet");
        ((ViewGroup) findViewById(R.id.frame)).addView(mCard);
    }

    /**
     * This was used for debugging and can probably be removed...
     * NOTE(review): mHeading is in degrees but Math.sin/Math.cos expect
     * radians — debug output only, but confirm before relying on it.
     */
    @Subscribe
    public void onData(NewLocationEvent locationEvent) {
        LinearLayout linearLayout = (LinearLayout) findViewById(R.id.prev_step_values);
        if (locationEvent.location != null) {
            TextView tv = new TextView(this);
            // Another reference issue
            tv.setText(String.format("Step %d at <%f, %f, %f> XY diff (%f, %f) with heading at the moment %f and altitude of %f",
                    mStepCount, locationEvent.otherLocation[0], locationEvent.otherLocation[1], locationEvent.otherLocation[2],
                    Math.sin(mHeading), Math.cos(mHeading), mHeading, mAltitude));
            linearLayout.addView(tv, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));
            mStepCount++;
        }
    }

    /**
     * After the first tutorial page hides, show the second page (VR mode)
     * pointing at the compass, exactly once.
     */
    @Override
    public void onShowcaseViewHide(ShowcaseView showcaseView) {
        if (n == 1) {
            n = 0;
            mSV = new ShowcaseView.Builder(this)
                    .setContentTitle(R.string.showcase_vr_mode)
                    .setContentText(R.string.showcase_vr_mode_description)
                    .setTarget(new ViewTarget(mPointer))
                    .setStyle(com.github.amlcurran.showcaseview.R.style.ShowcaseButton)
                    .hideOnTouchOutside()
                    .doNotBlockTouches()
                    .setShowcaseEventListener(this)
                    .build();
            mSV.hideButton();
        }
    }

    @Override
    public void onShowcaseViewDidHide(ShowcaseView showcaseView) {
        // Currently not used
    }

    @Override
    public void onShowcaseViewShow(ShowcaseView showcaseView) {
        // Currently not used
    }
}
/*
 * FactSet.java
 *
 * Created on April 3, 2008, 10:44 AM
 *
 * To change this template, choose Tools | Template Manager
 * and open the template in the editor.
 */

package kobdig.agent;

import kobdig.logic.*;

import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.SortedSet;

/**
 * A set of facts, represented by formulas of some suitable logic language.
 * A fact set may or may not be consistent.
 * <p>The semantics, or modality, of the facts is implicit and depends on the
 * context in which a fact set is used. For example, if a fact set is
 * intended to be a "desire set", the facts it contains will have to be construed
 * as "facts the agent would like to be true", i.e., <em>desires</em>; if,
 * on the other hand, the fact set is taken to be a "knowledge set", the facts
 * it contains are understood to be true, i.e., <em>knowledge</em> items.</p>
 *
 * @see Fact
 *
 * @author Andrea G. B. Tettamanzi
 */
public class FactSet
{
    /** The fuzzy set of facts. */
    protected FuzzySet<Fact> facts;

    /** Creates a new empty set of facts */
    public FactSet()
    {
        facts = new FuzzySet<Fact>();
    }

    /** Creates a new fact set from a given fuzzy set of facts */
    protected FactSet(FuzzySet<Fact> fs)
    {
        facts = fs;
    }

    /**
     * Returns the cardinality of the support of the set.
     *
     * @return the number of facts whose membership is greater than zero.
     */
    public int size()
    {
        return facts.size();
    }

    /**
     * Parse the facts of this fact set from an agent program source file.
     * Expects a brace-delimited, comma-separated list of formulas, each
     * optionally followed by ": d" where d is a membership degree in [0, 1]
     * (degree defaults to 1 when omitted).
     *
     * @param source the tokenizer positioned just before the opening brace
     * @throws IOException if the underlying source cannot be read
     */
    public void parse(AplTokenizer source) throws IOException
    {
        source.nextToken();
        source.require('{');
        while(source.nextToken()!='}')
        {
            PropositionalFormula phi = new PropositionalFormula(source);
            TruthDegree t = TruthDegree.TRUE;
            if(source.ttype==':')
            {
                source.nextToken();
                source.requireNumber("membership degree in [0, 1]");
                t = new TruthDegree(source.nval);
                source.nextToken();
            }
            tell(new Fact(phi), t);
            // Anything other than a separator ends the list
            if(source.ttype!=',')
                break;
        }
        source.require('}');
    }

    /**
     * Return the level set of this fact set, i.e., an ordered set
     * of truth degrees that are used in the set.
     */
    public SortedSet<TruthDegree> levelSet()
    {
        return facts.levelSet();
    }

    /**
     * Return an &alpha;-cut of this fact set.
     */
    public FactSet cut(TruthDegree alpha)
    {
        return new FactSet(facts.cut(alpha));
    }

    /**
     * Returns a propositional interpretation containing all the atomic symbols occurring
     * in the formulas of this fact set. The returned interpretation sets
     * the truth of all atoms that occur in a literal formula according to
     * the membership of the relevant literal, and of all the remaining
     * atoms to <code>0.5</code>.
     */
    public PropositionalInterpretation interpretation()
    {
        PropositionalInterpretation itp = new PropositionalInterpretation();
        Iterator<Atom> i = atomSet().iterator();
        while(i.hasNext())
        {
            PropositionalAtom atom = (PropositionalAtom) i.next();
            PropositionalFormula positive = new PropositionalFormula(atom);
            PropositionalFormula negative = new PropositionalFormula(Operator.NOT, positive);
            TruthDegree mupos = membership(new Fact(positive));
            TruthDegree muneg = membership(new Fact(negative)).negated();
            // When the two memberships give no usable signal (both absent, or
            // both present and conflicting), fall back to their average.
            if((mupos.isFalse() && muneg.isTrue()) || (!mupos.isFalse() && !muneg.isTrue()))
            {
                double sum = mupos.doubleValue() + muneg.doubleValue();
                itp.assign(atom, new TruthDegree(0.5*sum));
            }
            else if(mupos.isFalse())
                itp.assign(atom, muneg);
            else
                itp.assign(atom, mupos);
        }
        return itp;
    }

    /**
     * Returns the propositional interpretation which satisfies this fact set
     * to the maximum degree (ideally 1.0). The returned interpretation contains
     * all the atomic symbols occurring in the formulas of this fact set and
     * sets their truth degree in such a way as to maximize the degree of
     * satisfaction of the fact set.
     *
     * Finding such an interpretation requires solving an optimization problem
     * which, in general, can be very hard.
     * The provisional implementation of this method performs an exhaustive search
     * of all fuzzy interpretations with truth degrees that are multiples of 0.1.
     * While this has a complexity of 10<sup><var>n</var></sup>, where <var>n</var>
     * is the number of atomic symbols occurring in the fact set, i.e., the
     * cardinality of the interpretation, it guarantees to return an approximated
     * solution within 0.05 from the real one.
     */
    public PropositionalInterpretation satisfyingInterpretation()
    {
        final double INCREMENT = 0.1;
        PropositionalInterpretation itp = new PropositionalInterpretation();
        // Bug fix: use a zero-length array so toArray() never pads the result
        // with null. The previous "new Atom[1]" left a spurious null element
        // when the atom set was empty, which made the search below cast and
        // assign a null atom.
        Atom[] atoms = atomSet().toArray(new Atom[0]);
        // With no atoms there is nothing to optimize: every interpretation is
        // equivalent, so return the empty one (also prevents the enumeration
        // loop below from never terminating on a zero-length degree vector).
        if(atoms.length == 0)
            return itp;
        double[] t = new double[atoms.length];
        double[] best = new double[atoms.length];
        for(int i = 0; i<t.length; i++)
            best[i] = t[i] = 0.0;
        TruthDegree maxsat = TruthDegree.FALSE;
        boolean done = false;
        do
        {
            // Assign the degrees t to the atoms:
            for(int i = 0; i<atoms.length; i++)
                itp.assign((PropositionalAtom) atoms[i], new TruthDegree(t[i]));
            TruthDegree sat = truth(itp);
            // Check whether there is an improvement and, if so, update the best so far:
            if(sat.compareTo(maxsat)>0)
            {
                for(int i = 0; i<atoms.length; i++)
                    best[i] = t[i];
                maxsat = sat;
            }
            // Consider the next assignment (odometer-style increment with carry):
            for(int i = 0; i<t.length; i++)
            {
                t[i] += INCREMENT;
                done = t[i]>1.0;
                if(!done)
                    break;
                else
                    t[i] = 0.0;
            }
        }
        while(!done);
        // Assign the solution degrees t to the atoms:
        for(int i = 0; i<atoms.length; i++)
            itp.assign((PropositionalAtom) atoms[i], new TruthDegree(best[i]));
        return itp;
    }

    /**
     * Checks the degree to which this fact set is logically consistent.
     * <p>A classical fact set is consistent if it does not contain contradictions, i.e.,
     * if there exists an interpretation that satisfies it.</p>
     * <p>The consistency of a fuzzy fact set is the maximum degree to which an
     * interpretation satisfies it.</p>
     * <p>Instead of checking all infinite fuzzy interpretations, this method
     * uses all crisp interpretations, which are a finite number. Of course,
     * the maximum degree of satisfaction over all crisp interpretations is
     * but a lower bound on the consistency of the fact set.</p>
     *
     * @return the degree to which the facts contained in the fact set are logically
     *         consistent.
     */
    public TruthDegree consistency()
    {
        TruthDegree t = TruthDegree.FALSE;
        Iterator<Interpretation> itps = interpretation().iterator();
        while(itps.hasNext() && !t.isTrue())
        {
            Interpretation j = itps.next();
            t = TruthDegree.snorm(t, truth(j));
            if(t.isTrue())
                break; // No need to continue...
        }
        return t;
    }

    /**
     * Add a new fact to the fact set with membership 1.
     *
     * @param newFact the new fact that has to be added.
     */
    public void tell(Fact newFact)
    {
        facts.member(newFact, TruthDegree.TRUE);
    }

    /**
     * Add a new fact to the fact set with the given membership.
     *
     * @param newFact the new fact that has to be added.
     * @param mu the degree of membership of the new fact.
     */
    public void tell(Fact newFact, TruthDegree mu)
    {
        facts.member(newFact, mu);
    }

    /**
     * Removes completely a fact from the fact set.
     * If the fact does not belong to the fact set, this method has no effect.
     *
     * @param fact the fact that has to be removed.
     */
    public void untell(Fact fact)
    {
        facts.member(fact, TruthDegree.FALSE);
    }

    /**
     * Returns the degree to which this fact set contains the specified fact.
     */
    public TruthDegree membership(Fact fact)
    {
        return facts.member(fact);
    }

    /**
     * Returns the degree to which the given interpretation satisfies this fact set.
     * The degree to which an interpretation satisfies a fact set is the minimum
     * of the degrees to which it satisfies every individual fact belonging to
     * the set. Since facts have a fuzzy membership in the set, that degree of
     * satisfaction is the maximum between the degree to which the interpretation
     * satisfies the fact and the degree to which the fact does not belong into
     * the fact set.
     */
    public TruthDegree truth(Interpretation itp)
    {
        TruthDegree t = TruthDegree.TRUE;
        Iterator<Fact> i = facts.iterator();
        while(i.hasNext())
        {
            Fact fact = i.next();
            TruthDegree mu = facts.member(fact);
            TruthDegree sat = fact.formula().truth(itp);
            t = TruthDegree.tnorm(t, TruthDegree.snorm(mu.negated(), sat));
        }
        return t;
    }

    /**
     * Returns the set of all atoms occurring in this fact set.
     */
    public Set<Atom> atomSet()
    {
        Set<Atom> atoms = new HashSet<Atom>();
        Iterator<Fact> i = facts.iterator();
        while(i.hasNext())
            atoms.addAll(i.next().formula().atomSet());
        return atoms;
    }

    /**
     * Returns an iterator on all facts whose membership in this
     * fact set is non null.
     */
    public Iterator<Fact> factIterator()
    {
        return facts.iterator();
    }

    /**
     * Tells whether this fact set equals the given object.
     * A fact set equals another fact set if the two fact sets contain
     * exactly the same facts to the same degree.
     *
     * @param o
     * @return true if the given object is a fact set and equals this one
     */
    @Override
    public boolean equals(Object o)
    {
        if(!(o instanceof FactSet))
            return false;
        FactSet that = (FactSet) o;
        return facts.equals(that.facts);
    }

    /**
     * Returns the hash code for this fact set.
     *
     * @return the hash code for this fact set.
     */
    @Override
    public int hashCode()
    {
        return facts.hashCode();
    }

    /**
     * Returns a string representation of this fact set, e.g.
     * <code>{ p, q : 0.7 }</code>; degrees equal to 1 are omitted.
     */
    @Override
    public String toString()
    {
        // Use a StringBuilder instead of repeated String concatenation
        StringBuilder s = new StringBuilder("{ ");
        Iterator<Fact> i = facts.iterator();
        while(i.hasNext())
        {
            Fact fact = i.next();
            if(s.length()>2)
                s.append(", ");
            s.append(fact.formula());
            TruthDegree mu = membership(fact);
            if(!mu.isTrue())
                s.append(" : ").append(mu);
        }
        return s.append(" }").toString();
    }
}
/*
 * Copyright (c) [2016] [ <ether.camp> ]
 * This file is part of the ethereumJ library.
 *
 * The ethereumJ library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The ethereumJ library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with the ethereumJ library. If not, see <http://www.gnu.org/licenses/>.
 */
package org.ethereum.util;

import com.cedarsoftware.util.DeepEquals;
import org.ethereum.crypto.HashUtil;
import org.spongycastle.util.encoders.Hex;

import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;

/**
 * Class to encapsulate an object and provide utilities for conversion.
 *
 * <p>A Value either wraps an already-decoded object, or holds raw RLP bytes
 * that are decoded lazily: every as*/is* accessor calls {@link #decode()}
 * first, which populates {@code value} from {@code rlp} exactly once
 * (tracked by the {@code decoded} flag).</p>
 */
public class Value {

    // Decoded payload: byte[], String, Integer, Long, BigInteger, or Object[] (list)
    private Object value;
    // Raw RLP encoding; source for lazy decode and cached by encode()
    private byte[] rlp;
    // Cached SHA3 hash of the encoding; computed on first hash() call
    private byte[] sha3;

    // True once `value` is valid (set eagerly by the Object constructor,
    // lazily by decode() for the fromRlpEncoded path)
    private boolean decoded = false;

    /**
     * Wraps raw RLP bytes without decoding them yet.
     * Returns null for a null or empty input.
     */
    public static Value fromRlpEncoded(byte[] data) {

        if (data != null && data.length != 0) {
            Value v = new Value();
            v.init(data);
            return v;
        }
        return null;
    }

    public Value() {
    }

    // Stores the raw RLP bytes; decoding is deferred until first access
    public void init(byte[] rlp) {
        this.rlp = rlp;
    }

    /**
     * Wraps an already-decoded object. A nested Value is unwrapped so the
     * payload is never a Value itself.
     */
    public Value(Object obj) {

        this.decoded = true;
        if (obj == null) return;

        if (obj instanceof Value) {
            this.value = ((Value) obj).asObj();
        } else {
            this.value = obj;
        }
    }

    // Attaches a precomputed hash so hash() does not need to recompute it
    public Value withHash(byte[] hash) {
        sha3 = hash;
        return this;
    }

    /* *****************
     *      Convert
     * *****************/

    /** Returns the decoded payload object. */
    public Object asObj() {
        decode();
        return value;
    }

    /**
     * Returns the payload as a list.
     * NOTE(review): assumes the decoded value is an Object[] — throws
     * ClassCastException otherwise; callers appear to guard with isList().
     */
    public List<Object> asList() {
        decode();
        Object[] valueArray = (Object[]) value;
        return Arrays.asList(valueArray);
    }

    /**
     * Returns the payload as an int: directly for Integer, interpreted as an
     * unsigned big-endian number for byte[], and 0 for anything else.
     */
    public int asInt() {
        decode();
        if (isInt()) {
            return (Integer) value;
        } else if (isBytes()) {
            return new BigInteger(1, asBytes()).intValue();
        }
        return 0;
    }

    /** Long variant of {@link #asInt()}; 0 for non-numeric payloads. */
    public long asLong() {
        decode();
        if (isLong()) {
            return (Long) value;
        } else if (isBytes()) {
            return new BigInteger(1, asBytes()).longValue();
        }
        return 0;
    }

    public BigInteger asBigInt() {
        decode();
        return (BigInteger) value;
    }

    /**
     * Returns the payload as a String; "" for non-text payloads.
     * NOTE(review): new String(byte[]) uses the platform default charset —
     * behavior differs across JVMs; confirm UTF-8 is not required here.
     */
    public String asString() {
        decode();
        if (isBytes()) {
            return new String((byte[]) value);
        } else if (isString()) {
            return (String) value;
        }
        return "";
    }

    /**
     * Returns the payload as bytes; an empty array for payloads that are
     * neither byte[] nor String.
     */
    public byte[] asBytes() {
        decode();
        if (isBytes()) {
            return (byte[]) value;
        } else if (isString()) {
            return asString().getBytes();
        }
        return ByteUtil.EMPTY_BYTE_ARRAY;
    }

    /** Hex string of the RLP encoding. */
    public String getHex() {
        return Hex.toHexString(this.encode());
    }

    /** Raw RLP encoding (alias of encode()). */
    public byte[] getData() {
        return this.encode();
    }

    // NOTE(review): unlike the other accessors this does not call decode()
    // and casts unconditionally — confirm callers only use it on int[] payloads.
    public int[] asSlice() {
        return (int[]) value;
    }

    /**
     * Returns the list element at the given index wrapped in a Value.
     * Out-of-range indices yield Value(null); negative indices throw.
     */
    public Value get(int index) {
        if (isList()) {
            // Guard for OutOfBounds
            if (asList().size() <= index) {
                return new Value(null);
            }
            if (index < 0) {
                throw new RuntimeException("Negative index not allowed");
            }
            return new Value(asList().get(index));
        }
        // If this wasn't a slice you probably shouldn't be using this function
        return new Value(null);
    }

    /* *****************
     *      Utility
     * *****************/

    /** Lazily decodes the RLP bytes into `value` (idempotent). */
    public void decode() {
        if (!this.decoded) {
            this.value = RLP.decode(rlp, 0).getDecoded();
            this.decoded = true;
        }
    }

    /** Returns (encoding lazily and caching) the RLP bytes for this value. */
    public byte[] encode() {
        if (rlp == null)
            rlp = RLP.encode(value);
        return rlp;
    }

    /** Returns (computing lazily and caching) the SHA3 hash of the encoding. */
    public byte[] hash() {
        if (sha3 == null)
            sha3 = HashUtil.sha3(encode());
        return sha3;
    }

    // Reflective deep comparison (DeepEquals) rather than equals()
    public boolean cmp(Value o) {
        return DeepEquals.deepEquals(this, o);
    }

    /* *****************
     *      Checks
     * *****************/

    // A "list" is any non-primitive array (RLP decodes lists to Object[])
    public boolean isList() {
        decode();
        return value != null && value.getClass().isArray() && !value.getClass().getComponentType().isPrimitive();
    }

    public boolean isString() {
        decode();
        return value instanceof String;
    }

    public boolean isInt() {
        decode();
        return value instanceof Integer;
    }

    public boolean isLong() {
        decode();
        return value instanceof Long;
    }

    public boolean isBigInt() {
        decode();
        return value instanceof BigInteger;
    }

    public boolean isBytes() {
        decode();
        return value instanceof byte[];
    }

    // it's only if the isBytes() = true;
    // Heuristic: >55% of bytes in the printable ASCII range counts as text
    public boolean isReadableString() {

        decode();
        int readableChars = 0;
        byte[] data = (byte[]) value;

        if (data.length == 1 && data[0] > 31 && data[0] < 126) {
            return true;
        }

        for (byte aData : data) {
            if (aData > 32 && aData < 126) ++readableChars;
        }

        return (double) readableChars / (double) data.length > 0.55;
    }

    // it's only if the isBytes() = true;
    // Heuristic: >90% of bytes are ASCII [0-9a-f]
    public boolean isHexString() {

        decode();
        int hexChars = 0;
        byte[] data = (byte[]) value;

        for (byte aData : data) {

            if ((aData >= 48 && aData <= 57)
                    || (aData >= 97 && aData <= 102))
                ++hexChars;
        }

        return (double) hexChars / (double) data.length > 0.9;
    }

    // 32 bytes = SHA3 hash length
    public boolean isHashCode() {
        decode();
        return this.asBytes().length == 32;
    }

    public boolean isNull() {
        decode();
        return value == null;
    }

    public boolean isEmpty() {
        decode();
        if (isNull()) return true;
        if (isBytes() && asBytes().length == 0) return true;
        if (isList() && asList().isEmpty()) return true;
        if (isString() && asString().equals("")) return true;

        return false;
    }

    /** Element count for lists, byte count for bytes, char count for strings; 0 otherwise. */
    public int length() {
        decode();
        if (isList()) {
            return asList().size();
        } else if (isBytes()) {
            return asBytes().length;
        } else if (isString()) {
            return asString().length();
        }
        return 0;
    }

    /**
     * Human-readable rendering: two-element lists are printed as key/value
     * trie nodes, byte payloads as hex or quoted text (escaping bytes < 16).
     */
    public String toString() {

        decode();

        StringBuilder stringBuilder = new StringBuilder();

        if (isList()) {

            Object[] list = (Object[]) value;

            // special case - key/value node
            if (list.length == 2) {

                stringBuilder.append("[ ");

                Value key = new Value(list[0]);

                byte[] keyNibbles = CompactEncoder.binToNibblesNoTerminator(key.asBytes());
                String keyString = ByteUtil.nibblesToPrettyString(keyNibbles);
                stringBuilder.append(keyString);

                stringBuilder.append(",");

                Value val = new Value(list[1]);
                stringBuilder.append(val.toString());

                stringBuilder.append(" ]");
                return stringBuilder.toString();
            }
            stringBuilder.append(" [");

            for (int i = 0; i < list.length; ++i) {
                Value val = new Value(list[i]);
                if (val.isString() || val.isEmpty()) {
                    stringBuilder.append("'").append(val.toString()).append("'");
                } else {
                    stringBuilder.append(val.toString());
                }
                if (i < list.length - 1)
                    stringBuilder.append(", ");
            }
            stringBuilder.append("] ");

            return stringBuilder.toString();
        } else if (isEmpty()) {
            return "";
        } else if (isBytes()) {

            StringBuilder output = new StringBuilder();

            if (isHashCode()) {
                // NOTE(review): this branch appends to `output` but falls
                // through to the final hex return below, which yields the same
                // hex string — the append itself is effectively dead.
                output.append(Hex.toHexString(asBytes()));
            } else if (isReadableString()) {
                output.append("'");
                for (byte oneByte : asBytes()) {
                    if (oneByte < 16) {
                        output.append("\\x").append(ByteUtil.oneByteToHexString(oneByte));
                    } else {
                        output.append(Character.valueOf((char) oneByte));
                    }
                }
                output.append("'");
                return output.toString();
            }
            return Hex.toHexString(this.asBytes());
        } else if (isString()) {
            return asString();
        }
        return "Unexpected type";
    }

    /**
     * Recursively counts leaf (byte) nodes reachable from this value.
     * NOTE(review): the isBytes() branch calls asBytes() for its side effect
     * only and still returns 0 — confirm leaves are intentionally not counted.
     */
    public int countBranchNodes() {
        decode();
        if (this.isList()) {
            List<Object> objList = this.asList();
            int i = 0;
            for (Object obj : objList) {
                i += (new Value(obj)).countBranchNodes();
            }
            return i;
        } else if (this.isBytes()) {
            this.asBytes();
        }
        return 0;
    }
}
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http2;

import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;

/**
 * An immutable collection of headers sent or received via HTTP/2.
 */
public abstract class Http2Headers implements Iterable<Entry<String, String>> {

    /** A shared instance representing an empty header collection. */
    public static final Http2Headers EMPTY_HEADERS = new Http2Headers() {

        @Override
        public Set<String> names() {
            return Collections.emptySet();
        }

        @Override
        public String get(String name) {
            return null;
        }

        @Override
        public List<String> getAll(String name) {
            return Collections.emptyList();
        }

        @Override
        public List<Entry<String, String>> entries() {
            return Collections.emptyList();
        }

        @Override
        public boolean contains(String name) {
            return false;
        }

        @Override
        public boolean isEmpty() {
            return true;
        }

        @Override
        public Iterator<Entry<String, String>> iterator() {
            return entries().iterator();
        }
    };

    /**
     * HTTP2 header names.
     */
    public enum HttpName {
        /**
         * {@code :method}.
         */
        METHOD(":method"),

        /**
         * {@code :scheme}.
         */
        SCHEME(":scheme"),

        /**
         * {@code :authority}.
         */
        AUTHORITY(":authority"),

        /**
         * {@code :path}.
         */
        PATH(":path"),

        /**
         * {@code :status}.
         */
        STATUS(":status");

        // The literal pseudo-header name, including the leading colon.
        private final String value;

        HttpName(String value) {
            this.value = value;
        }

        /** Returns the wire representation of this pseudo-header name. */
        public String value() {
            return value;
        }
    }

    /**
     * Returns the {@link Set} of all header names.
     */
    public abstract Set<String> names();

    /**
     * Returns the header value with the specified header name. If there is more than one header
     * value for the specified header name, the first value is returned.
     *
     * @return the header value or {@code null} if there is no such header
     */
    public abstract String get(String name);

    /**
     * Returns the header values with the specified header name.
     *
     * @return the {@link List} of header values. An empty list if there is no such header.
     */
    public abstract List<String> getAll(String name);

    /**
     * Returns all header names and values that this frame contains.
     *
     * @return the {@link List} of the header name-value pairs. An empty list if there is no header
     *         in this message.
     */
    public abstract List<Map.Entry<String, String>> entries();

    /**
     * Returns {@code true} if and only if there is a header with the specified header name.
     */
    public abstract boolean contains(String name);

    /**
     * Checks if no header exists.
     */
    public abstract boolean isEmpty();

    /**
     * Gets the {@link HttpName#METHOD} header or {@code null} if there is no such header
     */
    public final String method() {
        return get(HttpName.METHOD.value());
    }

    /**
     * Gets the {@link HttpName#SCHEME} header or {@code null} if there is no such header
     */
    public final String scheme() {
        return get(HttpName.SCHEME.value());
    }

    /**
     * Gets the {@link HttpName#AUTHORITY} header or {@code null} if there is no such header
     */
    public final String authority() {
        return get(HttpName.AUTHORITY.value());
    }

    /**
     * Gets the {@link HttpName#PATH} header or {@code null} if there is no such header
     */
    public final String path() {
        return get(HttpName.PATH.value());
    }

    /**
     * Gets the {@link HttpName#STATUS} header or {@code null} if there is no such header
     */
    public final String status() {
        return get(HttpName.STATUS.value());
    }

    /**
     * Combines every header name with its values (values sorted so the hash
     * does not depend on their stored order) into a single hash code.
     */
    @Override
    public int hashCode() {
        int hash = 1;
        for (String name : names()) {
            hash = 31 * hash + name.hashCode();
            for (String value : new TreeSet<String>(getAll(name))) {
                hash = 31 * hash + value.hashCode();
            }
        }
        return hash;
    }

    /**
     * Two header collections are equal when they expose the same name set
     * and, per name, value lists of equal size containing the same values
     * (value order is ignored).
     */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Http2Headers)) {
            return false;
        }
        Http2Headers that = (Http2Headers) o;

        // The name sets must match exactly.
        Set<String> myNames = names();
        if (!myNames.equals(that.names())) {
            return false;
        }

        // Per name: equal cardinality, then order-insensitive value comparison.
        for (String name : myNames) {
            List<String> mine = getAll(name);
            List<String> theirs = that.getAll(name);
            if (mine.size() != theirs.size()) {
                return false;
            }
            Set<String> leftover = new HashSet<String>(mine);
            leftover.removeAll(theirs);
            if (!leftover.isEmpty()) {
                return false;
            }
        }
        return true;
    }

    /** Renders every header as {@code name:value,} inside {@code Http2Headers[...]}. */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("Http2Headers[");
        for (Entry<String, String> header : this) {
            out.append(header.getKey()).append(':').append(header.getValue()).append(',');
        }
        return out.append(']').toString();
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * ContentPage.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */
// NOTE(review): generated code — keep edits to comments only; the Axis
// serializer/deserializer below depends on the exact field and metadata layout.

package com.google.api.ads.admanager.axis.v202111;


/**
 * Captures a page of {@code Content} objects.
 */
public class ContentPage implements java.io.Serializable , Iterable<com.google.api.ads.admanager.axis.v202111.Content>{
    /* The size of the total result set to which this page belongs. */
    private java.lang.Integer totalResultSetSize;

    /* The absolute index in the total result set on which this page
     * begins. */
    private java.lang.Integer startIndex;

    /* The collection of content contained within this page. */
    private com.google.api.ads.admanager.axis.v202111.Content[] results;

    public ContentPage() {
    }

    public ContentPage(
           java.lang.Integer totalResultSetSize,
           java.lang.Integer startIndex,
           com.google.api.ads.admanager.axis.v202111.Content[] results) {
           this.totalResultSetSize = totalResultSetSize;
           this.startIndex = startIndex;
           this.results = results;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            // Only include length of results to avoid overly verbose output
            .add("results.length", getResults() == null ? 0 : getResults().length)
            .add("startIndex", getStartIndex())
            .add("totalResultSetSize", getTotalResultSetSize())
            .toString();
    }

    /**
     * Gets the totalResultSetSize value for this ContentPage.
     *
     * @return totalResultSetSize   * The size of the total result set to which this page belongs.
     */
    public java.lang.Integer getTotalResultSetSize() {
        return totalResultSetSize;
    }


    /**
     * Sets the totalResultSetSize value for this ContentPage.
     *
     * @param totalResultSetSize   * The size of the total result set to which this page belongs.
     */
    public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
        this.totalResultSetSize = totalResultSetSize;
    }


    /**
     * Gets the startIndex value for this ContentPage.
     *
     * @return startIndex   * The absolute index in the total result set on which this page
     * begins.
     */
    public java.lang.Integer getStartIndex() {
        return startIndex;
    }


    /**
     * Sets the startIndex value for this ContentPage.
     *
     * @param startIndex   * The absolute index in the total result set on which this page
     * begins.
     */
    public void setStartIndex(java.lang.Integer startIndex) {
        this.startIndex = startIndex;
    }


    /**
     * Gets the results value for this ContentPage.
     *
     * @return results   * The collection of content contained within this page.
     */
    public com.google.api.ads.admanager.axis.v202111.Content[] getResults() {
        return results;
    }


    /**
     * Sets the results value for this ContentPage.
     *
     * @param results   * The collection of content contained within this page.
     */
    public void setResults(com.google.api.ads.admanager.axis.v202111.Content[] results) {
        this.results = results;
    }

    // Indexed accessor pair required by the Axis bean (de)serializer.
    public com.google.api.ads.admanager.axis.v202111.Content getResults(int i) {
        return this.results[i];
    }

    public void setResults(int i, com.google.api.ads.admanager.axis.v202111.Content _value) {
        this.results[i] = _value;
    }

    /**
     * Returns an iterator over this page's {@code results} that:
     * <ul>
     * <li>Will not be {@code null}.</li>
     * <li>Will not support {@link java.util.Iterator#remove()}.</li>
     * </ul>
     *
     * @return a non-null iterator.
     */
    @Override
    public java.util.Iterator<com.google.api.ads.admanager.axis.v202111.Content> iterator() {
        if (results == null) {
            return java.util.Collections.<com.google.api.ads.admanager.axis.v202111.Content>emptyIterator();
        }
        // Arrays.asList is a fixed-size view, so its iterator rejects remove().
        return java.util.Arrays.<com.google.api.ads.admanager.axis.v202111.Content>asList(results).iterator();
    }

    // Cycle guard used by the generated equals(): holds the object currently
    // being compared so self-referential object graphs terminate.
    // NOTE(review): per-instance state — equals() is synchronized but the guard
    // is still not safe for concurrent comparisons against the same instance.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ContentPage)) return false;
        ContentPage other = (ContentPage) obj;
        // NOTE(review): dead check — a null obj already failed the instanceof
        // test above. Kept as emitted by the WSDL2Java generator.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
             (this.totalResultSetSize!=null &&
              this.totalResultSetSize.equals(other.getTotalResultSetSize()))) &&
            ((this.startIndex==null && other.getStartIndex()==null) ||
             (this.startIndex!=null &&
              this.startIndex.equals(other.getStartIndex()))) &&
            ((this.results==null && other.getResults()==null) ||
             (this.results!=null &&
              java.util.Arrays.equals(this.results, other.getResults())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard for the generated hashCode(); see __equalsCalc above.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getTotalResultSetSize() != null) {
            _hashCode += getTotalResultSetSize().hashCode();
        }
        if (getStartIndex() != null) {
            _hashCode += getStartIndex().hashCode();
        }
        if (getResults() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getResults());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata: maps this bean's fields to the Ad Manager v202111 SOAP
    // schema for the Axis serializer/deserializer.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ContentPage.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ContentPage"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("totalResultSetSize");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "totalResultSetSize"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("startIndex");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "startIndex"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("results");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "results"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Content"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.execution;

import com.facebook.presto.Session;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.QualifiedObjectName;
import com.facebook.presto.metadata.TableMetadata;
import com.facebook.presto.security.AccessControl;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorId;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.sql.analyzer.SemanticException;
import com.facebook.presto.sql.tree.ColumnDefinition;
import com.facebook.presto.sql.tree.CreateTable;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.LikeClause;
import com.facebook.presto.sql.tree.TableElement;
import com.facebook.presto.transaction.TransactionManager;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import static com.facebook.presto.common.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.metadata.MetadataUtil.createQualifiedObjectName;
import static com.facebook.presto.spi.StandardErrorCode.ALREADY_EXISTS;
import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static com.facebook.presto.spi.StandardErrorCode.NOT_FOUND;
import static com.facebook.presto.spi.connector.ConnectorCapabilities.NOT_NULL_COLUMN_CONSTRAINT;
import static com.facebook.presto.sql.NodeUtils.mapFromProperties;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_COLUMN_NAME;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_CATALOG;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TABLE_ALREADY_EXISTS;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TYPE_MISMATCH;
import static com.facebook.presto.type.UnknownType.UNKNOWN;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.util.concurrent.Futures.immediateFuture;

/**
 * Executes a {@code CREATE TABLE} statement: resolves the target catalog,
 * builds the column list from explicit column definitions and LIKE clauses,
 * checks access control, resolves table/column properties, and asks the
 * connector to create the table.
 */
public class CreateTableTask
        implements DataDefinitionTask<CreateTable>
{
    @Override
    public String getName()
    {
        return "CREATE TABLE";
    }

    @Override
    public String explain(CreateTable statement, List<Expression> parameters)
    {
        return "CREATE TABLE " + statement.getName();
    }

    @Override
    public ListenableFuture<?> execute(CreateTable statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine, List<Expression> parameters)
    {
        return internalExecute(statement, metadata, accessControl, stateMachine.getSession(), parameters);
    }

    /**
     * Does the actual work of {@link #execute}; exposed separately so tests can
     * supply a {@link Session} directly instead of a {@code QueryStateMachine}.
     *
     * @throws SemanticException on user errors (duplicate columns, unknown
     *         types, bad LIKE targets, pre-existing table without IF NOT EXISTS)
     * @throws PrestoException on missing catalogs or connector failures
     */
    @VisibleForTesting
    public ListenableFuture<?> internalExecute(CreateTable statement, Metadata metadata, AccessControl accessControl, Session session, List<Expression> parameters)
    {
        checkArgument(!statement.getElements().isEmpty(), "no columns for table");

        QualifiedObjectName tableName = createQualifiedObjectName(session, statement, statement.getName());
        Optional<TableHandle> tableHandle = metadata.getTableHandle(session, tableName);
        if (tableHandle.isPresent()) {
            // IF NOT EXISTS turns an existing table into a silent no-op.
            if (!statement.isNotExists()) {
                throw new SemanticException(TABLE_ALREADY_EXISTS, statement, "Table '%s' already exists", tableName);
            }
            return immediateFuture(null);
        }

        ConnectorId connectorId = metadata.getCatalogHandle(session, tableName.getCatalogName())
                .orElseThrow(() -> new PrestoException(NOT_FOUND, "Catalog does not exist: " + tableName.getCatalogName()));

        // LinkedHashMap preserves declaration order; keys are lower-cased names
        // so duplicate detection is case-insensitive.
        LinkedHashMap<String, ColumnMetadata> columns = new LinkedHashMap<>();
        Map<String, Object> inheritedProperties = ImmutableMap.of();
        boolean includingProperties = false;
        for (TableElement element : statement.getElements()) {
            if (element instanceof ColumnDefinition) {
                ColumnDefinition column = (ColumnDefinition) element;
                String name = column.getName().getValue().toLowerCase(Locale.ENGLISH);
                Type type;
                try {
                    type = metadata.getType(parseTypeSignature(column.getType()));
                }
                catch (IllegalArgumentException e) {
                    throw new SemanticException(TYPE_MISMATCH, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
                }
                if (type.equals(UNKNOWN)) {
                    throw new SemanticException(TYPE_MISMATCH, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
                }
                if (columns.containsKey(name)) {
                    throw new SemanticException(DUPLICATE_COLUMN_NAME, column, "Column name '%s' specified more than once", column.getName());
                }
                // NOT NULL is only legal when the connector advertises support.
                if (!column.isNullable() && !metadata.getConnectorCapabilities(session, connectorId).contains(NOT_NULL_COLUMN_CONSTRAINT)) {
                    throw new SemanticException(NOT_SUPPORTED, column, "Catalog '%s' does not support non-null column for column name '%s'", connectorId.getCatalogName(), column.getName());
                }
                Map<String, Expression> sqlProperties = mapFromProperties(column.getProperties());
                Map<String, Object> columnProperties = metadata.getColumnPropertyManager().getProperties(
                        connectorId,
                        tableName.getCatalogName(),
                        sqlProperties,
                        session,
                        metadata,
                        parameters);

                columns.put(name, new ColumnMetadata(
                        name,
                        type,
                        column.isNullable(),
                        column.getComment().orElse(null),
                        null,
                        false,
                        columnProperties));
            }
            else if (element instanceof LikeClause) {
                LikeClause likeClause = (LikeClause) element;
                QualifiedObjectName likeTableName = createQualifiedObjectName(session, statement, likeClause.getTableName());
                if (!metadata.getCatalogHandle(session, likeTableName.getCatalogName()).isPresent()) {
                    throw new SemanticException(MISSING_CATALOG, statement, "LIKE table catalog '%s' does not exist", likeTableName.getCatalogName());
                }
                if (!tableName.getCatalogName().equals(likeTableName.getCatalogName())) {
                    throw new SemanticException(NOT_SUPPORTED, statement, "LIKE table across catalogs is not supported");
                }
                TableHandle likeTable = metadata.getTableHandle(session, likeTableName)
                        .orElseThrow(() -> new SemanticException(MISSING_TABLE, statement, "LIKE table '%s' does not exist", likeTableName));
                TableMetadata likeTableMetadata = metadata.getTableMetadata(session, likeTable);

                // At most one LIKE clause may carry INCLUDING PROPERTIES;
                // its table properties are inherited into the new table.
                Optional<LikeClause.PropertiesOption> propertiesOption = likeClause.getPropertiesOption();
                if (propertiesOption.isPresent() && propertiesOption.get().equals(LikeClause.PropertiesOption.INCLUDING)) {
                    if (includingProperties) {
                        throw new SemanticException(NOT_SUPPORTED, statement, "Only one LIKE clause can specify INCLUDING PROPERTIES");
                    }
                    includingProperties = true;
                    inheritedProperties = likeTableMetadata.getMetadata().getProperties();
                }

                // Copy the source table's visible columns, rejecting clashes
                // with columns already declared.
                likeTableMetadata.getColumns().stream()
                        .filter(column -> !column.isHidden())
                        .forEach(column -> {
                            if (columns.containsKey(column.getName().toLowerCase(Locale.ENGLISH))) {
                                throw new SemanticException(DUPLICATE_COLUMN_NAME, element, "Column name '%s' specified more than once", column.getName());
                            }
                            columns.put(column.getName().toLowerCase(Locale.ENGLISH), column);
                        });
            }
            else {
                throw new PrestoException(GENERIC_INTERNAL_ERROR, "Invalid TableElement: " + element.getClass().getName());
            }
        }

        accessControl.checkCanCreateTable(session.getRequiredTransactionId(), session.getIdentity(), session.getAccessControlContext(), tableName);

        Map<String, Expression> sqlProperties = mapFromProperties(statement.getProperties());
        Map<String, Object> properties = metadata.getTablePropertyManager().getProperties(
                connectorId,
                tableName.getCatalogName(),
                sqlProperties,
                session,
                metadata,
                parameters);

        Map<String, Object> finalProperties = combineProperties(sqlProperties.keySet(), properties, inheritedProperties);

        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName.asSchemaTableName(), ImmutableList.copyOf(columns.values()), finalProperties, statement.getComment());
        try {
            metadata.createTable(session, tableName.getCatalogName(), tableMetadata, statement.isNotExists());
        }
        catch (PrestoException e) {
            // connectors are not required to handle the ignoreExisting flag
            if (!e.getErrorCode().equals(ALREADY_EXISTS.toErrorCode()) || !statement.isNotExists()) {
                throw e;
            }
        }
        return immediateFuture(null);
    }

    /**
     * Merges property maps with precedence: a property explicitly written in
     * the CREATE TABLE statement always wins; otherwise an inherited (LIKE …
     * INCLUDING PROPERTIES) value wins over the catalog default.
     */
    private static Map<String, Object> combineProperties(Set<String> specifiedPropertyKeys, Map<String, Object> defaultProperties, Map<String, Object> inheritedProperties)
    {
        Map<String, Object> finalProperties = new HashMap<>(inheritedProperties);
        for (Map.Entry<String, Object> entry : defaultProperties.entrySet()) {
            if (specifiedPropertyKeys.contains(entry.getKey()) || !finalProperties.containsKey(entry.getKey())) {
                finalProperties.put(entry.getKey(), entry.getValue());
            }
        }
        return finalProperties;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.junit.Assert;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import org.apache.hadoop.yarn.util.resource.Resources;

/**
 * In-memory NodeManager stub used by ResourceManager tests: it registers
 * itself with the RM's ResourceTrackerService on construction and then tracks
 * container starts/stops and node resource accounting entirely in local maps,
 * without launching any real containers.
 */
@Private
public class NodeManager implements ContainerManagementProtocol {
  private static final Log LOG = LogFactory.getLog(NodeManager.class);
  private static final RecordFactory recordFactory = RecordFactoryProvider
      .getRecordFactory(null);

  // "host:port" of the (simulated) container manager endpoint.
  final private String containerManagerAddress;
  final private String nodeHttpAddress;
  final private String rackName;
  final private NodeId nodeId;
  final private Resource capability;
  final private ResourceManager resourceManager;
  // Running totals; updated on every start/stop and cross-checked against the
  // scheduler's node report in checkResourceUsage().
  Resource available = recordFactory.newRecordInstance(Resource.class);
  Resource used = recordFactory.newRecordInstance(Resource.class);

  final ResourceTrackerService resourceTrackerService;
  // Live containers per application, plus the latest status for each container.
  final Map<ApplicationId, List<Container>> containers =
    new HashMap<ApplicationId, List<Container>>();

  final Map<Container, ContainerStatus> containerStatusMap =
      new HashMap<Container, ContainerStatus>();

  /**
   * Builds the stub node and immediately registers it with the RM's resource
   * tracker (so the scheduler starts producing node reports for it).
   */
  public NodeManager(String hostName, int containerManagerPort, int httpPort,
      String rackName, Resource capability,
      ResourceManager resourceManager)
      throws IOException, YarnException {
    this.containerManagerAddress = hostName + ":" + containerManagerPort;
    this.nodeHttpAddress = hostName + ":" + httpPort;
    this.rackName = rackName;
    this.resourceTrackerService = resourceManager.getResourceTrackerService();
    this.capability = capability;
    Resources.addTo(available, capability);
    this.nodeId = NodeId.newInstance(hostName, containerManagerPort);
    RegisterNodeManagerRequest request = recordFactory
        .newRecordInstance(RegisterNodeManagerRequest.class);
    request.setHttpPort(httpPort);
    request.setResource(capability);
    request.setNodeId(this.nodeId);
    request.setNMVersion(YarnVersionInfo.getVersion());
    resourceTrackerService.registerNodeManager(request);
    this.resourceManager = resourceManager;
    resourceManager.getResourceScheduler().getNodeReport(this.nodeId);
  }

  // NOTE(review): despite the name this returns "host:port", matching how the
  // rest of this class keys log messages; callers appear to rely on that.
  public String getHostName() {
    return containerManagerAddress;
  }

  public String getRackName() {
    return rackName;
  }

  public NodeId getNodeId() {
    return nodeId;
  }

  public Resource getCapability() {
    return capability;
  }

  public Resource getAvailable() {
    return available;
  }

  public Resource getUsed() {
    return used;
  }

  // Echoed back to the RM on each heartbeat so it can detect duplicates.
  int responseID = 0;

  // Flattens the per-application container lists into one status list.
  // (Parameter intentionally shadows the field of the same name.)
  private List<ContainerStatus> getContainerStatuses(Map<ApplicationId, List<Container>> containers) {
    List<ContainerStatus> containerStatuses = new ArrayList<ContainerStatus>();
    for (List<Container> appContainers : containers.values()) {
      for (Container container : appContainers) {
        containerStatuses.add(containerStatusMap.get(container));
      }
    }
    return containerStatuses;
  }

  /**
   * Sends one node heartbeat (current container statuses + response id) to the
   * RM and records the response id for the next round.
   */
  public void heartbeat() throws IOException, YarnException {
    NodeStatus nodeStatus =
      org.apache.hadoop.yarn.server.resourcemanager.NodeManager.createNodeStatus(
          nodeId, getContainerStatuses(containers));
    nodeStatus.setResponseId(responseID);
    NodeHeartbeatRequest request = recordFactory
        .newRecordInstance(NodeHeartbeatRequest.class);
    request.setNodeStatus(nodeStatus);
    NodeHeartbeatResponse response = resourceTrackerService
        .nodeHeartbeat(request);
    responseID = response.getResponseId();
  }

  /**
   * Simulates starting containers: validates the container token, rejects
   * duplicate container ids on this node, records a NEW-state status, and
   * moves the container's resource from 'available' to 'used'.
   */
  @Override
  synchronized public StartContainersResponse startContainers(
      StartContainersRequest requests)
  throws YarnException {
    for (StartContainerRequest request : requests.getStartContainerRequests()) {
      Token containerToken = request.getContainerToken();
      ContainerTokenIdentifier tokenId = null;

      try {
        tokenId = BuilderUtils.newContainerTokenIdentifier(containerToken);
      } catch (IOException e) {
        throw RPCUtil.getRemoteException(e);
      }

      ContainerId containerID = tokenId.getContainerID();
      ApplicationId applicationId =
          containerID.getApplicationAttemptId().getApplicationId();

      List<Container> applicationContainers = containers.get(applicationId);
      if (applicationContainers == null) {
        applicationContainers = new ArrayList<Container>();
        containers.put(applicationId, applicationContainers);
      }

      // Sanity check
      for (Container container : applicationContainers) {
        if (container.getId().compareTo(containerID) == 0) {
          throw new IllegalStateException("Container " + containerID
              + " already setup on node " + containerManagerAddress);
        }
      }

      Container container =
          BuilderUtils.newContainer(containerID, this.nodeId, nodeHttpAddress,
              tokenId.getResource(), null, null // DKDC - Doesn't matter
              );

      ContainerStatus containerStatus =
          BuilderUtils.newContainerStatus(container.getId(),
              ContainerState.NEW, "", -1000);
      applicationContainers.add(container);
      containerStatusMap.put(container, containerStatus);
      Resources.subtractFrom(available, tokenId.getResource());
      Resources.addTo(used, tokenId.getResource());

      if (LOG.isDebugEnabled()) {
        LOG.debug("startContainer:" + " node=" + containerManagerAddress
            + " application=" + applicationId + " container=" + container
            + " available=" + available + " used=" + used);
      }

    }
    StartContainersResponse response =
        StartContainersResponse.newInstance(null, null, null);
    return response;
  }

  /**
   * Asserts that this stub's local available/used memory accounting agrees
   * with the scheduler's node report for this node.
   */
  synchronized public void checkResourceUsage() {
    LOG.info("Checking resource usage for " + containerManagerAddress);
    Assert.assertEquals(available.getMemory(),
        resourceManager.getResourceScheduler().getNodeReport(
            this.nodeId).getAvailableResource().getMemory());
    Assert.assertEquals(used.getMemory(),
        resourceManager.getResourceScheduler().getNodeReport(
            this.nodeId).getUsedResource().getMemory());
  }

  /**
   * Simulates stopping containers: marks each COMPLETE, heartbeats the new
   * status to the RM, removes the container, and returns its resource from
   * 'used' back to 'available'.
   *
   * NOTE(review): if a requested container id belongs to an application this
   * node never saw, 'applicationContainers' is null and this NPEs — acceptable
   * for a test stub, but callers must only stop containers they started here.
   * Also note the containerStatusMap entry is never removed (it leaks), so the
   * COMPLETE status remains reportable afterwards — possibly intentional.
   */
  @Override
  synchronized public StopContainersResponse stopContainers(StopContainersRequest request)
  throws YarnException {
    for (ContainerId containerID : request.getContainerIds()) {
      String applicationId =
          String.valueOf(containerID.getApplicationAttemptId()
              .getApplicationId().getId());
      // Mark the container as COMPLETE
      List<Container> applicationContainers =
          containers.get(containerID.getApplicationAttemptId()
              .getApplicationId());
      for (Container c : applicationContainers) {
        if (c.getId().compareTo(containerID) == 0) {
          ContainerStatus containerStatus = containerStatusMap.get(c);
          containerStatus.setState(ContainerState.COMPLETE);
          containerStatusMap.put(c, containerStatus);
        }
      }

      // Send a heartbeat
      try {
        heartbeat();
      } catch (IOException ioe) {
        throw RPCUtil.getRemoteException(ioe);
      }

      // Remove container and update status
      int ctr = 0;
      Container container = null;
      for (Iterator<Container> i = applicationContainers.iterator(); i
          .hasNext();) {
        container = i.next();
        if (container.getId().compareTo(containerID) == 0) {
          i.remove();
          ++ctr;
        }
      }

      if (ctr != 1) {
        throw new IllegalStateException("Container " + containerID
            + " stopped " + ctr + " times!");
      }

      Resources.addTo(available, container.getResource());
      Resources.subtractFrom(used, container.getResource());

      if (LOG.isDebugEnabled()) {
        LOG.debug("stopContainer:" + " node=" + containerManagerAddress
            + " application=" + applicationId + " container=" + containerID
            + " available=" + available + " used=" + used);
      }
    }
    return StopContainersResponse.newInstance(null,null);
  }

  /**
   * Returns the recorded status for each requested container id that this
   * node knows about; silently skips unknown ids.
   */
  @Override
  synchronized public GetContainerStatusesResponse getContainerStatuses(
      GetContainerStatusesRequest request) throws YarnException {
    List<ContainerStatus> statuses = new ArrayList<ContainerStatus>();
    for (ContainerId containerId : request.getContainerIds()) {
      List<Container> appContainers =
          containers.get(containerId.getApplicationAttemptId()
              .getApplicationId());
      Container container = null;
      for (Container c : appContainers) {
        if (c.getId().equals(containerId)) {
          container = c;
        }
      }
      if (container != null
          && containerStatusMap.get(container).getState() != null) {
        statuses.add(containerStatusMap.get(container));
      }
    }
    return GetContainerStatusesResponse.newInstance(statuses, null);
  }

  /**
   * Builds a NodeStatus record (healthy, with the given container statuses)
   * suitable for a node heartbeat.
   */
  public static org.apache.hadoop.yarn.server.api.records.NodeStatus
  createNodeStatus(NodeId nodeId, List<ContainerStatus> containers) {
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
    org.apache.hadoop.yarn.server.api.records.NodeStatus nodeStatus =
        recordFactory.newRecordInstance(org.apache.hadoop.yarn.server.api.records.NodeStatus.class);
    nodeStatus.setNodeId(nodeId);
    nodeStatus.setContainersStatuses(containers);
    NodeHealthStatus nodeHealthStatus =
      recordFactory.newRecordInstance(NodeHealthStatus.class);
    nodeHealthStatus.setIsNodeHealthy(true);
    nodeStatus.setNodeHealthStatus(nodeHealthStatus);
    return nodeStatus;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.hadoop.impl;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Error simulator: test hook invoked from map-reduce stage callbacks to inject
 * failures. The base class is a no-op; the nested subclasses throw a
 * RuntimeException, IOException or java.lang.Error on the stages selected by a
 * bit mask (bit 0 = mapConfigure, 1 = mapSetup, 2 = map, 3 = mapCleanup,
 * 4..7 = the combine stages, 8..11 = the reduce stages).
 */
public class HadoopErrorSimulator {
    /** No-op singleton instance. */
    public static final HadoopErrorSimulator noopInstance = new HadoopErrorSimulator();

    /** Instance ref. Globally shared, swapped via CAS in setInstance(). */
    private static final AtomicReference<HadoopErrorSimulator> ref = new AtomicReference<>(noopInstance);

    /**
     * Creates simulator of given kind with given stage bits.
     *
     * @param kind The kind.
     * @param bits The stage bits.
     * @return The simulator.
     */
    public static HadoopErrorSimulator create(Kind kind, int bits) {
        switch (kind) {
            case Noop:
                return noopInstance;

            case Runtime:
                return new RuntimeExceptionBitHadoopErrorSimulator(bits);

            case IOException:
                return new IOExceptionBitHadoopErrorSimulator(bits);

            case Error:
                return new ErrorBitHadoopErrorSimulator(bits);

            default:
                throw new IllegalStateException("Unknown kind: " + kind);
        }
    }

    /**
     * Gets the error simulator instance.
     */
    public static HadoopErrorSimulator instance() {
        return ref.get();
    }

    /**
     * Sets instance. Compare-and-set: only succeeds if the current instance
     * is {@code expect}.
     */
    public static boolean setInstance(HadoopErrorSimulator expect, HadoopErrorSimulator update) {
        return ref.compareAndSet(expect, update);
    }

    /**
     * Constructor. Private: external code uses {@link #create} / the
     * singleton; only the nested subclasses extend this class.
     */
    private HadoopErrorSimulator() {
        // no-op
    }

    /**
     * Invoked on the named stage (mapper configure). No-op in the base class.
     */
    public void onMapConfigure() {
        // no-op
    }

    /**
     * Invoked on the named stage (mapper setup).
     */
    public void onMapSetup()  throws IOException, InterruptedException {
        // no-op
    }

    /**
     * Invoked on the named stage (map call).
     */
    public void onMap() throws IOException {
        // no-op
    }

    /**
     * Invoked on the named stage (mapper cleanup).
     */
    public void onMapCleanup()  throws IOException, InterruptedException {
        // no-op
    }

    /**
     * Invoked on the named stage (mapper close).
     */
    public void onMapClose()  throws IOException {
        // no-op
    }

    /**
     * setConf() does not declare IOException to be thrown.
     */
    public void onCombineConfigure() {
        // no-op
    }

    /**
     * Invoked on the named stage (combiner setup).
     */
    public void onCombineSetup() throws IOException, InterruptedException {
        // no-op
    }

    /**
     * Invoked on the named stage (combine call).
     */
    public void onCombine() throws IOException {
        // no-op
    }

    /**
     * Invoked on the named stage (combiner cleanup).
     */
    public void onCombineCleanup() throws IOException, InterruptedException {
        // no-op
    }

    /**
     * Invoked on the named stage (reducer configure).
     */
    public void onReduceConfigure() {
        // no-op
    }

    /**
     * Invoked on the named stage (reducer setup).
     */
    public void onReduceSetup()  throws IOException, InterruptedException {
        // no-op
    }

    /**
     * Invoked on the named stage (reduce call).
     */
    public void onReduce()  throws IOException {
        // no-op
    }

    /**
     * Invoked on the named stage (reducer cleanup).
     */
    public void onReduceCleanup()  throws IOException, InterruptedException {
        // no-op
    }

    /**
     * Error kind.
     */
    public enum Kind {
        /** No error. */
        Noop,

        /** Runtime. */
        Runtime,

        /** IOException. */
        IOException,

        /** java.lang.Error. */
        Error
    }

    /**
     * Runtime error simulator: throws RuntimeException on every stage whose
     * bit is set. Also serves as the base class for the IOException and Error
     * variants, which only override {@link #simulateError()}.
     */
    public static class RuntimeExceptionBitHadoopErrorSimulator extends HadoopErrorSimulator {
        /** Stage bits: defines what map-reduce stages will cause errors. */
        private final int bits;

        /**
         * Constructor.
         */
        protected RuntimeExceptionBitHadoopErrorSimulator(int b) {
            bits = b;
        }

        /**
         * Simulates an error. Declares IOException so that the IOException
         * subclass can throw a checked exception from the same override point.
         */
        protected void simulateError() throws IOException {
            throw new RuntimeException("An error simulated by " + getClass().getSimpleName());
        }

        /** {@inheritDoc} */
        // configure() cannot throw IOException, so a checked IOException from
        // simulateError() is swallowed here (only unchecked ones propagate).
        @Override public final void onMapConfigure() {
            try {
                if ((bits & 1) != 0)
                    simulateError();
            }
            catch (IOException e) {
                // ignore
            }
        }

        /** {@inheritDoc} */
        @Override public final void onMapSetup() throws IOException, InterruptedException {
            if ((bits & 2) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onMap() throws IOException {
            if ((bits & 4) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onMapCleanup() throws IOException, InterruptedException {
            if ((bits & 8) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onCombineConfigure() {
            try {
                if ((bits & 16) != 0)
                    simulateError();
            }
            catch (IOException e) {
                // ignore
            }
        }

        /** {@inheritDoc} */
        @Override public final void onCombineSetup() throws IOException, InterruptedException {
            if ((bits & 32) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onCombine() throws IOException {
            if ((bits & 64) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onCombineCleanup() throws IOException, InterruptedException {
            if ((bits & 128) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onReduceConfigure() {
            try {
                if ((bits & 256) != 0)
                    simulateError();
            }
            catch (IOException e) {
                // ignore
            }
        }

        /** {@inheritDoc} */
        @Override public final void onReduceSetup() throws IOException, InterruptedException {
            if ((bits & 512) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onReduce() throws IOException {
            if ((bits & 1024) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onReduceCleanup() throws IOException, InterruptedException {
            if ((bits & 2048) != 0)
                simulateError();
        }
    }

    /**
     * java.lang.Error simulator: like the runtime variant, but throws Error.
     */
    public static class ErrorBitHadoopErrorSimulator extends RuntimeExceptionBitHadoopErrorSimulator {
        /**
         * Constructor.
         */
        public ErrorBitHadoopErrorSimulator(int bits) {
            super(bits);
        }

        /** {@inheritDoc} */
        @Override protected void simulateError() {
            throw new Error("An error simulated by " + getClass().getSimpleName());
        }
    }

    /**
     * IOException simulator: like the runtime variant, but throws a checked
     * IOException (swallowed on the configure stages, which cannot throw it).
     */
    public static class IOExceptionBitHadoopErrorSimulator extends RuntimeExceptionBitHadoopErrorSimulator {
        /**
         * Constructor.
         */
        public IOExceptionBitHadoopErrorSimulator(int bits) {
            super(bits);
        }

        /** {@inheritDoc} */
        @Override protected void simulateError() throws IOException {
            throw new IOException("An IOException simulated by " + getClass().getSimpleName());
        }
    }
}
package backend.resource;

import backend.UpdateSignature;
import backend.interfaces.IBaseModel;
import backend.resource.serialization.SerializableModel;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * An immutable snapshot of all the resources (issues, labels, milestones,
 * users) of a single repository, tagged with the {@link UpdateSignature}
 * of the update that produced it.
 *
 * <p>All getters return defensive copies, so callers can never mutate the
 * model's internal state through them.
 */
@SuppressWarnings("unused")
public class Model implements IBaseModel {

    private final UpdateSignature updateSignature;
    private final String repoId;
    private final List<TurboIssue> issues;
    private final List<TurboLabel> labels;
    private final List<TurboMilestone> milestones;
    private final List<TurboUser> users;

    /**
     * Standard constructor.
     *
     * @param repoId id of the repository this model describes
     * @param issues issues of the repository
     * @param labels labels of the repository
     * @param milestones milestones of the repository
     * @param users users (collaborators) of the repository
     * @param updateSignature signature of the update that produced this model
     */
    public Model(String repoId, List<TurboIssue> issues, List<TurboLabel> labels,
                 List<TurboMilestone> milestones, List<TurboUser> users,
                 UpdateSignature updateSignature) {
        this.updateSignature = updateSignature;
        this.repoId = repoId;
        this.issues = issues;
        this.labels = labels;
        this.milestones = milestones;
        this.users = users;
    }

    /**
     * Standard constructor with empty update signature -- for use when
     * a model is first downloaded.
     */
    public Model(String repoId, List<TurboIssue> issues, List<TurboLabel> labels,
                 List<TurboMilestone> milestones, List<TurboUser> users) {
        // Delegate to the full constructor instead of duplicating the
        // field assignments.
        this(repoId, issues, labels, milestones, users, UpdateSignature.EMPTY);
    }

    /**
     * Constructor for the empty model.
     */
    public Model(String repoId) {
        this(repoId, new ArrayList<>(), new ArrayList<>(), new ArrayList<>(),
                new ArrayList<>(), UpdateSignature.EMPTY);
    }

    /**
     * Copy constructor. The getters already return fresh copies, so the new
     * model's collections are independent of the source model's.
     */
    public Model(Model model) {
        this(model.getRepoId(), model.getIssues(), model.getLabels(),
                model.getMilestones(), model.getUsers(), model.updateSignature);
    }

    /**
     * Deserialization constructor: rebuilds each resource from its serialized
     * form, tagging it with this model's repo id.
     */
    public Model(SerializableModel model) {
        this(model.repoId,
                model.issues.stream()
                        .map(i -> new TurboIssue(model.repoId, i))
                        .collect(Collectors.toList()),
                model.labels.stream()
                        .map(l -> new TurboLabel(model.repoId, l))
                        .collect(Collectors.toList()),
                model.milestones.stream()
                        .map(m -> new TurboMilestone(model.repoId, m))
                        .collect(Collectors.toList()),
                model.users.stream()
                        .map(u -> new TurboUser(model.repoId, u))
                        .collect(Collectors.toList()),
                model.updateSignature);
    }

    public String getRepoId() {
        return repoId;
    }

    public UpdateSignature getUpdateSignature() {
        return updateSignature;
    }

    /** @return a defensive copy of this model's issues */
    @Override
    public List<TurboIssue> getIssues() {
        return new ArrayList<>(issues);
    }

    /** @return a defensive copy of this model's labels */
    @Override
    public List<TurboLabel> getLabels() {
        return new ArrayList<>(labels);
    }

    /** @return a defensive copy of this model's milestones */
    @Override
    public List<TurboMilestone> getMilestones() {
        return new ArrayList<>(milestones);
    }

    /** @return a defensive copy of this model's users */
    @Override
    public List<TurboUser> getUsers() {
        return new ArrayList<>(users);
    }

    private void ______OPERATIONS_____() {
    }

    /**
     * Looks up an issue by its id.
     *
     * @param issueId the issue's id; must be >= 1
     * @return the issue, or {@link Optional#empty()} if not present
     */
    public Optional<TurboIssue> getIssueById(int issueId) {
        assert issueId >= 1 : "Invalid issue id " + issueId;
        // Iterate over the field directly; copying the list (via getIssues())
        // for a read-only scan is unnecessary work.
        for (TurboIssue issue : issues) {
            if (issue.getId() == issueId) {
                return Optional.of(issue);
            }
        }
        return Optional.empty();
    }

    /**
     * Looks up a label by its actual (full) name.
     *
     * @param labelName the label's actual name; must be non-null and non-empty
     * @return the label, or {@link Optional#empty()} if not present
     */
    public Optional<TurboLabel> getLabelByActualName(String labelName) {
        assert labelName != null && !labelName.isEmpty() : "Invalid label name " + labelName;
        for (TurboLabel label : labels) {
            if (label.getActualName().equals(labelName)) {
                return Optional.of(label);
            }
        }
        return Optional.empty();
    }

    /**
     * Looks up a user by login name.
     *
     * @param login the user's login; must be non-null and non-empty
     * @return the user, or {@link Optional#empty()} if not present
     */
    public Optional<TurboUser> getUserByLogin(String login) {
        assert login != null && !login.isEmpty() : "Invalid user name " + login;
        for (TurboUser user : users) {
            if (user.getLoginName().equals(login)) {
                return Optional.of(user);
            }
        }
        return Optional.empty();
    }

    /**
     * Looks up a milestone by its title.
     *
     * @param title the milestone's title; must be non-null and non-empty
     * @return the milestone, or {@link Optional#empty()} if not present
     */
    public Optional<TurboMilestone> getMilestoneByTitle(String title) {
        assert title != null && !title.isEmpty() : "Invalid milestone title " + title;
        for (TurboMilestone milestone : milestones) {
            if (milestone.getTitle().equals(title)) {
                return Optional.of(milestone);
            }
        }
        return Optional.empty();
    }

    /**
     * Looks up a milestone by its id.
     *
     * @param id the milestone's id; must be >= 1
     * @return the milestone, or {@link Optional#empty()} if not present
     */
    public Optional<TurboMilestone> getMilestoneById(int id) {
        assert id >= 1 : "Invalid milestone id " + id;
        for (TurboMilestone milestone : milestones) {
            if (milestone.getId() == id) {
                return Optional.of(milestone);
            }
        }
        return Optional.empty();
    }

    /** @return the milestone assigned to the given issue, if any */
    public Optional<TurboMilestone> getMilestoneOfIssue(TurboIssue issue) {
        return issue.getMilestone().flatMap(this::getMilestoneById);
    }

    /** @return the user assigned to the given issue, if any */
    public Optional<TurboUser> getAssigneeOfIssue(TurboIssue issue) {
        return issue.getAssignee().flatMap(this::getUserByLogin);
    }

    /**
     * Resolves the issue's label names to label objects, silently dropping
     * names that no longer exist in this model.
     */
    public List<TurboLabel> getLabelsOfIssue(TurboIssue issue) {
        return issue.getLabels().stream()
                .map(this::getLabelByActualName)
                .filter(Optional::isPresent).map(Optional::get)
                .collect(Collectors.toList());
    }

    private void ______BOILERPLATE______() {
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Model model = (Model) o;
        return issues.equals(model.issues) &&
                labels.equals(model.labels) &&
                milestones.equals(model.milestones) &&
                repoId.equals(model.repoId) &&
                updateSignature.equals(model.updateSignature) &&
                users.equals(model.users);
    }

    @Override
    public int hashCode() {
        int result = updateSignature.hashCode();
        result = 31 * result + repoId.hashCode();
        result = 31 * result + issues.hashCode();
        result = 31 * result + labels.hashCode();
        result = 31 * result + milestones.hashCode();
        result = 31 * result + users.hashCode();
        return result;
    }
}
package com.daimajia.swipe.implments;

import android.view.View;
import android.widget.BaseAdapter;

import com.daimajia.swipe.SimpleSwipeListener;
import com.daimajia.swipe.SwipeLayout;
import com.daimajia.swipe.interfaces.SwipeAdapterInterface;
import com.daimajia.swipe.interfaces.SwipeItemMangerInterface;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * SwipeItemMangerImpl is a helper class to help all the adapters to maintain
 * open status. It remembers which positions are currently swiped open (one
 * position in {@link Mode#Single}, a set of positions in
 * {@link Mode#Multiple}) and re-applies that state when list item views are
 * created or recycled.
 */
public class SwipeItemMangerImpl implements SwipeItemMangerInterface {

    private Mode mode = Mode.Single;
    public final int INVALID_POSITION = -1;

    // Open position in Single mode; INVALID_POSITION when nothing is open.
    protected int mOpenPosition = INVALID_POSITION;
    // Open positions in Multiple mode.
    protected Set<Integer> mOpenPositions = new HashSet<Integer>();
    // All SwipeLayouts this manager has ever been asked to initialize,
    // used by closeAllExcept().
    protected Set<SwipeLayout> mShownLayouts = new HashSet<SwipeLayout>();

    protected BaseAdapter mAdapter;

    /**
     * @param adapter the adapter whose item views contain SwipeLayouts; must
     *                implement {@link SwipeAdapterInterface}
     * @throws IllegalArgumentException if the adapter is null or does not
     *                                  implement SwipeAdapterInterface
     */
    public SwipeItemMangerImpl(BaseAdapter adapter) {
        if (adapter == null)
            throw new IllegalArgumentException("Adapter can not be null");
        // Bug fix: getSwipeLayoutId() casts the adapter to
        // SwipeAdapterInterface, so that is the interface that must be
        // required here. The original checked SwipeItemMangerInterface
        // (despite the error message), letting non-conforming adapters
        // through to a ClassCastException later.
        if (!(adapter instanceof SwipeAdapterInterface))
            throw new IllegalArgumentException("adapter should implement the SwipeAdapterInterface");
        this.mAdapter = adapter;
    }

    /** Whether one item or many items may be open at a time. */
    public enum Mode {
        Single, Multiple
    }

    public Mode getMode() {
        return mode;
    }

    /**
     * Switches mode and resets all open/shown state.
     */
    public void setMode(Mode mode) {
        this.mode = mode;
        mOpenPositions.clear();
        mShownLayouts.clear();
        mOpenPosition = INVALID_POSITION;
    }

    /**
     * Wires a freshly created item view: finds its SwipeLayout, attaches the
     * listeners that track open state, and tags the layout with a ValueBox so
     * updateConvertView() can later re-point the listeners at a new position.
     *
     * @throws IllegalStateException if the view has no SwipeLayout with the
     *                               adapter-provided resource id
     */
    public void initialize(View target, int position) {
        int resId = getSwipeLayoutId(position);
        OnLayoutListener onLayoutListener = new OnLayoutListener(position);
        SwipeLayout swipeLayout = (SwipeLayout) target.findViewById(resId);
        if (swipeLayout == null)
            throw new IllegalStateException("can not find SwipeLayout in target view");
        SwipeMemory swipeMemory = new SwipeMemory(position);
        swipeLayout.addSwipeListener(swipeMemory);
        swipeLayout.addOnLayoutListener(onLayoutListener);
        swipeLayout.setTag(resId, new ValueBox(position, swipeMemory, onLayoutListener));
        mShownLayouts.add(swipeLayout);
    }

    /**
     * Re-binds a recycled item view to a new position by updating the
     * listeners stored in the layout's ValueBox tag.
     *
     * @throws IllegalStateException if the view has no SwipeLayout with the
     *                               adapter-provided resource id
     */
    public void updateConvertView(View target, int position) {
        int resId = getSwipeLayoutId(position);
        SwipeLayout swipeLayout = (SwipeLayout) target.findViewById(resId);
        if (swipeLayout == null)
            throw new IllegalStateException("can not find SwipeLayout in target view");
        ValueBox valueBox = (ValueBox) swipeLayout.getTag(resId);
        valueBox.swipeMemory.setPosition(position);
        valueBox.onLayoutListener.setPosition(position);
        valueBox.position = position;
    }

    // Asks the adapter which view id inside the item layout is the SwipeLayout.
    private int getSwipeLayoutId(int position) {
        return ((SwipeAdapterInterface) (mAdapter)).getSwipeLayoutResourceId(position);
    }

    @Override
    public void openItem(int position) {
        if (mode == Mode.Multiple) {
            if (!mOpenPositions.contains(position))
                mOpenPositions.add(position);
        } else {
            mOpenPosition = position;
        }
        mAdapter.notifyDataSetChanged();
    }

    @Override
    public void closeItem(int position) {
        if (mode == Mode.Multiple) {
            mOpenPositions.remove(position);
        } else {
            if (mOpenPosition == position)
                mOpenPosition = INVALID_POSITION;
        }
        mAdapter.notifyDataSetChanged();
    }

    @Override
    public void closeAllExcept(SwipeLayout layout) {
        for (SwipeLayout s : mShownLayouts) {
            if (s != layout)
                s.close();
        }
    }

    @Override
    public void removeShownLayouts(SwipeLayout layout) {
        mShownLayouts.remove(layout);
    }

    @Override
    public List<Integer> getOpenItems() {
        if (mode == Mode.Multiple) {
            return new ArrayList<Integer>(mOpenPositions);
        } else {
            // Single mode: a one-element list holding mOpenPosition
            // (possibly INVALID_POSITION).
            return Arrays.asList(mOpenPosition);
        }
    }

    @Override
    public List<SwipeLayout> getOpenLayouts() {
        return new ArrayList<SwipeLayout>(mShownLayouts);
    }

    @Override
    public boolean isOpen(int position) {
        if (mode == Mode.Multiple) {
            return mOpenPositions.contains(position);
        } else {
            return mOpenPosition == position;
        }
    }

    /** Per-layout tag bundling the listeners bound to a list position. */
    class ValueBox {
        OnLayoutListener onLayoutListener;
        SwipeMemory swipeMemory;
        int position;

        ValueBox(int position, SwipeMemory swipeMemory, OnLayoutListener onLayoutListener) {
            this.swipeMemory = swipeMemory;
            this.onLayoutListener = onLayoutListener;
            this.position = position;
        }
    }

    /**
     * Re-applies the remembered open/closed state whenever the SwipeLayout
     * is laid out (e.g. after view recycling).
     */
    class OnLayoutListener implements SwipeLayout.OnLayout {

        private int position;

        OnLayoutListener(int position) {
            this.position = position;
        }

        public void setPosition(int position) {
            this.position = position;
        }

        @Override
        public void onLayout(SwipeLayout v) {
            if (isOpen(position)) {
                v.open(false, false);
            } else {
                v.close(false, false);
            }
        }
    }

    /**
     * Records open/close gestures into the manager's state, and in Single
     * mode closes every other layout when one starts to open.
     */
    class SwipeMemory extends SimpleSwipeListener {

        private int position;

        SwipeMemory(int position) {
            this.position = position;
        }

        @Override
        public void onClose(SwipeLayout layout) {
            if (mode == Mode.Multiple) {
                mOpenPositions.remove(position);
            } else {
                mOpenPosition = INVALID_POSITION;
            }
        }

        @Override
        public void onStartOpen(SwipeLayout layout) {
            if (mode == Mode.Single) {
                closeAllExcept(layout);
            }
        }

        @Override
        public void onOpen(SwipeLayout layout) {
            if (mode == Mode.Multiple)
                mOpenPositions.add(position);
            else {
                closeAllExcept(layout);
                mOpenPosition = position;
            }
        }

        public void setPosition(int position) {
            this.position = position;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.mover; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSOutputStream; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; 
import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol; import org.apache.hadoop.hdfs.server.balancer.Dispatcher; import org.apache.hadoop.hdfs.server.balancer.ExitStatus; import org.apache.hadoop.hdfs.server.balancer.TestBalancer; import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl; import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Test; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; /** * Test the data migration tool (for Archival Storage) */ public class TestStorageMover { static final Log LOG = LogFactory.getLog(TestStorageMover.class); static { GenericTestUtils.setLogLevel(LogFactory.getLog(BlockPlacementPolicy.class), Level.ALL); GenericTestUtils.setLogLevel(LogFactory.getLog(Dispatcher.class), Level.ALL); GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL); } private static final int BLOCK_SIZE = 1024; private static final short REPL = 3; private static final int NUM_DATANODES = 6; private static final Configuration DEFAULT_CONF = new HdfsConfiguration(); private static final BlockStoragePolicySuite DEFAULT_POLICIES; private static final BlockStoragePolicy HOT; private static final BlockStoragePolicy WARM; private static final BlockStoragePolicy COLD; static { DEFAULT_CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE); 
DEFAULT_CONF.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L); DEFAULT_CONF.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 2L); DEFAULT_CONF.setLong(DFSConfigKeys.DFS_MOVER_MOVEDWINWIDTH_KEY, 2000L); DEFAULT_POLICIES = BlockStoragePolicySuite.createDefaultSuite(); HOT = DEFAULT_POLICIES.getPolicy(HdfsConstants.HOT_STORAGE_POLICY_NAME); WARM = DEFAULT_POLICIES.getPolicy(HdfsConstants.WARM_STORAGE_POLICY_NAME); COLD = DEFAULT_POLICIES.getPolicy(HdfsConstants.COLD_STORAGE_POLICY_NAME); TestBalancer.initTestSetup(); Dispatcher.setDelayAfterErrors(1000L); } /** * This scheme defines files/directories and their block storage policies. It * also defines snapshots. */ static class NamespaceScheme { final List<Path> dirs; final List<Path> files; final long fileSize; final Map<Path, List<String>> snapshotMap; final Map<Path, BlockStoragePolicy> policyMap; NamespaceScheme(List<Path> dirs, List<Path> files, long fileSize, Map<Path,List<String>> snapshotMap, Map<Path, BlockStoragePolicy> policyMap) { this.dirs = dirs == null? Collections.<Path>emptyList(): dirs; this.files = files == null? Collections.<Path>emptyList(): files; this.fileSize = fileSize; this.snapshotMap = snapshotMap == null ? Collections.<Path, List<String>>emptyMap() : snapshotMap; this.policyMap = policyMap; } /** * Create files/directories/snapshots. */ void prepare(DistributedFileSystem dfs, short repl) throws Exception { for (Path d : dirs) { dfs.mkdirs(d); } for (Path file : files) { DFSTestUtil.createFile(dfs, file, fileSize, repl, 0L); } for (Map.Entry<Path, List<String>> entry : snapshotMap.entrySet()) { for (String snapshot : entry.getValue()) { SnapshotTestHelper.createSnapshot(dfs, entry.getKey(), snapshot); } } } /** * Set storage policies according to the corresponding scheme. 
*/ void setStoragePolicy(DistributedFileSystem dfs) throws Exception { for (Map.Entry<Path, BlockStoragePolicy> entry : policyMap.entrySet()) { dfs.setStoragePolicy(entry.getKey(), entry.getValue().getName()); } } } /** * This scheme defines DataNodes and their storage, including storage types * and remaining capacities. */ static class ClusterScheme { final Configuration conf; final int numDataNodes; final short repl; final StorageType[][] storageTypes; final long[][] storageCapacities; ClusterScheme() { this(DEFAULT_CONF, NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null); } ClusterScheme(Configuration conf, int numDataNodes, short repl, StorageType[][] types, long[][] capacities) { Preconditions.checkArgument(types == null || types.length == numDataNodes); Preconditions.checkArgument(capacities == null || capacities.length == numDataNodes); this.conf = conf; this.numDataNodes = numDataNodes; this.repl = repl; this.storageTypes = types; this.storageCapacities = capacities; } } class MigrationTest { private final ClusterScheme clusterScheme; private final NamespaceScheme nsScheme; private final Configuration conf; private MiniDFSCluster cluster; private DistributedFileSystem dfs; private final BlockStoragePolicySuite policies; MigrationTest(ClusterScheme cScheme, NamespaceScheme nsScheme) { this.clusterScheme = cScheme; this.nsScheme = nsScheme; this.conf = clusterScheme.conf; this.policies = DEFAULT_POLICIES; } /** * Set up the cluster and start NameNode and DataNodes according to the * corresponding scheme. 
*/ void setupCluster() throws Exception { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(clusterScheme .numDataNodes).storageTypes(clusterScheme.storageTypes) .storageCapacities(clusterScheme.storageCapacities).build(); cluster.waitActive(); dfs = cluster.getFileSystem(); } private void runBasicTest(boolean shutdown) throws Exception { setupCluster(); try { prepareNamespace(); verify(true); setStoragePolicy(); migrate(ExitStatus.SUCCESS); verify(true); } finally { if (shutdown) { shutdownCluster(); } } } void shutdownCluster() throws Exception { IOUtils.cleanup(null, dfs); if (cluster != null) { cluster.shutdown(); } } /** * Create files/directories and set their storage policies according to the * corresponding scheme. */ void prepareNamespace() throws Exception { nsScheme.prepare(dfs, clusterScheme.repl); } void setStoragePolicy() throws Exception { nsScheme.setStoragePolicy(dfs); } /** * Run the migration tool. */ void migrate(ExitStatus expectedExitCode) throws Exception { runMover(expectedExitCode); Thread.sleep(5000); // let the NN finish deletion } /** * Verify block locations after running the migration tool. */ void verify(boolean verifyAll) throws Exception { for (DataNode dn : cluster.getDataNodes()) { DataNodeTestUtils.triggerBlockReport(dn); } if (verifyAll) { verifyNamespace(); } } private void runMover(ExitStatus expectedExitCode) throws Exception { Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf); Map<URI, List<Path>> nnMap = Maps.newHashMap(); for (URI nn : namenodes) { nnMap.put(nn, null); } int result = Mover.run(nnMap, conf); Assert.assertEquals(expectedExitCode.getExitCode(), result); } private void verifyNamespace() throws Exception { HdfsFileStatus status = dfs.getClient().getFileInfo("/"); verifyRecursively(null, status); } private void verifyRecursively(final Path parent, final HdfsFileStatus status) throws Exception { if (status.isDir()) { Path fullPath = parent == null ? 
new Path("/") : status.getFullPath(parent); DirectoryListing children = dfs.getClient().listPaths( fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true); for (HdfsFileStatus child : children.getPartialListing()) { verifyRecursively(fullPath, child); } } else if (!status.isSymlink()) { // is file verifyFile(parent, status, null); } } void verifyFile(final Path file, final Byte expectedPolicyId) throws Exception { final Path parent = file.getParent(); DirectoryListing children = dfs.getClient().listPaths( parent.toString(), HdfsFileStatus.EMPTY_NAME, true); for (HdfsFileStatus child : children.getPartialListing()) { if (child.getLocalName().equals(file.getName())) { verifyFile(parent, child, expectedPolicyId); return; } } Assert.fail("File " + file + " not found."); } private void verifyFile(final Path parent, final HdfsFileStatus status, final Byte expectedPolicyId) throws Exception { HdfsLocatedFileStatus fileStatus = (HdfsLocatedFileStatus) status; byte policyId = fileStatus.getStoragePolicy(); BlockStoragePolicy policy = policies.getPolicy(policyId); if (expectedPolicyId != null) { Assert.assertEquals((byte)expectedPolicyId, policy.getId()); } final List<StorageType> types = policy.chooseStorageTypes( status.getReplication()); for(LocatedBlock lb : fileStatus.getBlockLocations().getLocatedBlocks()) { final Mover.StorageTypeDiff diff = new Mover.StorageTypeDiff(types, lb.getStorageTypes()); Assert.assertTrue(fileStatus.getFullName(parent.toString()) + " with policy " + policy + " has non-empty overlap: " + diff + ", the corresponding block is " + lb.getBlock().getLocalBlock(), diff.removeOverlap(true)); } } Replication getReplication(Path file) throws IOException { return getOrVerifyReplication(file, null); } Replication verifyReplication(Path file, int expectedDiskCount, int expectedArchiveCount) throws IOException { final Replication r = new Replication(); r.disk = expectedDiskCount; r.archive = expectedArchiveCount; return getOrVerifyReplication(file, r); } 
private Replication getOrVerifyReplication(Path file, Replication expected) throws IOException { final List<LocatedBlock> lbs = dfs.getClient().getLocatedBlocks( file.toString(), 0).getLocatedBlocks(); Assert.assertEquals(1, lbs.size()); LocatedBlock lb = lbs.get(0); StringBuilder types = new StringBuilder(); final Replication r = new Replication(); for(StorageType t : lb.getStorageTypes()) { types.append(t).append(", "); if (t == StorageType.DISK) { r.disk++; } else if (t == StorageType.ARCHIVE) { r.archive++; } else { Assert.fail("Unexpected storage type " + t); } } if (expected != null) { final String s = "file = " + file + "\n types = [" + types + "]"; Assert.assertEquals(s, expected, r); } return r; } } static class Replication { int disk; int archive; @Override public int hashCode() { return disk ^ archive; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } else if (obj == null || !(obj instanceof Replication)) { return false; } final Replication that = (Replication)obj; return this.disk == that.disk && this.archive == that.archive; } @Override public String toString() { return "[disk=" + disk + ", archive=" + archive + "]"; } } private static StorageType[][] genStorageTypes(int numDataNodes) { return genStorageTypes(numDataNodes, 0, 0, 0); } private static StorageType[][] genStorageTypes(int numDataNodes, int numAllDisk, int numAllArchive, int numRamDisk) { Preconditions.checkArgument( (numAllDisk + numAllArchive + numRamDisk) <= numDataNodes); StorageType[][] types = new StorageType[numDataNodes][]; int i = 0; for (; i < numRamDisk; i++) { types[i] = new StorageType[]{StorageType.RAM_DISK, StorageType.DISK}; } for (; i < numRamDisk + numAllDisk; i++) { types[i] = new StorageType[]{StorageType.DISK, StorageType.DISK}; } for (; i < numRamDisk + numAllDisk + numAllArchive; i++) { types[i] = new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE}; } for (; i < types.length; i++) { types[i] = new StorageType[]{StorageType.DISK, 
StorageType.ARCHIVE}; } return types; } private static class PathPolicyMap { final Map<Path, BlockStoragePolicy> map = Maps.newHashMap(); final Path hot = new Path("/hot"); final Path warm = new Path("/warm"); final Path cold = new Path("/cold"); final List<Path> files; PathPolicyMap(int filesPerDir){ map.put(hot, HOT); map.put(warm, WARM); map.put(cold, COLD); files = new ArrayList<Path>(); for(Path dir : map.keySet()) { for(int i = 0; i < filesPerDir; i++) { files.add(new Path(dir, "file" + i)); } } } NamespaceScheme newNamespaceScheme() { return new NamespaceScheme(Arrays.asList(hot, warm, cold), files, BLOCK_SIZE/2, null, map); } /** * Move hot files to warm and cold, warm files to hot and cold, * and cold files to hot and warm. */ void moveAround(DistributedFileSystem dfs) throws Exception { for(Path srcDir : map.keySet()) { int i = 0; for(Path dstDir : map.keySet()) { if (!srcDir.equals(dstDir)) { final Path src = new Path(srcDir, "file" + i++); final Path dst = new Path(dstDir, srcDir.getName() + "2" + dstDir.getName()); LOG.info("rename " + src + " to " + dst); dfs.rename(src, dst); } } } } } /** * A normal case for Mover: move a file into archival storage */ @Test public void testMigrateFileToArchival() throws Exception { LOG.info("testMigrateFileToArchival"); final Path foo = new Path("/foo"); Map<Path, BlockStoragePolicy> policyMap = Maps.newHashMap(); policyMap.put(foo, COLD); NamespaceScheme nsScheme = new NamespaceScheme(null, Arrays.asList(foo), 2*BLOCK_SIZE, null, policyMap); ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF, NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null); new MigrationTest(clusterScheme, nsScheme).runBasicTest(true); } /** * Print a big banner in the test log to make debug easier. 
*/ static void banner(String string) { LOG.info("\n\n\n\n================================================\n" + string + "\n" + "==================================================\n\n"); } /** * Run Mover with arguments specifying files and directories */ @Test public void testMoveSpecificPaths() throws Exception { LOG.info("testMoveSpecificPaths"); final Path foo = new Path("/foo"); final Path barFile = new Path(foo, "bar"); final Path foo2 = new Path("/foo2"); final Path bar2File = new Path(foo2, "bar2"); Map<Path, BlockStoragePolicy> policyMap = Maps.newHashMap(); policyMap.put(foo, COLD); policyMap.put(foo2, WARM); NamespaceScheme nsScheme = new NamespaceScheme(Arrays.asList(foo, foo2), Arrays.asList(barFile, bar2File), BLOCK_SIZE, null, policyMap); ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF, NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null); MigrationTest test = new MigrationTest(clusterScheme, nsScheme); test.setupCluster(); try { test.prepareNamespace(); test.setStoragePolicy(); Map<URI, List<Path>> map = Mover.Cli.getNameNodePathsToMove(test.conf, "-p", "/foo/bar", "/foo2"); int result = Mover.run(map, test.conf); Assert.assertEquals(ExitStatus.SUCCESS.getExitCode(), result); Thread.sleep(5000); test.verify(true); } finally { test.shutdownCluster(); } } /** * Move an open file into archival storage */ @Test public void testMigrateOpenFileToArchival() throws Exception { LOG.info("testMigrateOpenFileToArchival"); final Path fooDir = new Path("/foo"); Map<Path, BlockStoragePolicy> policyMap = Maps.newHashMap(); policyMap.put(fooDir, COLD); NamespaceScheme nsScheme = new NamespaceScheme(Arrays.asList(fooDir), null, BLOCK_SIZE, null, policyMap); ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF, NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null); MigrationTest test = new MigrationTest(clusterScheme, nsScheme); test.setupCluster(); // create an open file banner("writing to file /foo/bar"); final Path barFile = new 
Path(fooDir, "bar"); DFSTestUtil.createFile(test.dfs, barFile, BLOCK_SIZE, (short) 1, 0L); FSDataOutputStream out = test.dfs.append(barFile); out.writeBytes("hello, "); ((DFSOutputStream) out.getWrappedStream()).hsync(); try { banner("start data migration"); test.setStoragePolicy(); // set /foo to COLD test.migrate(ExitStatus.SUCCESS); // make sure the under construction block has not been migrated LocatedBlocks lbs = test.dfs.getClient().getLocatedBlocks( barFile.toString(), BLOCK_SIZE); LOG.info("Locations: " + lbs); List<LocatedBlock> blks = lbs.getLocatedBlocks(); Assert.assertEquals(1, blks.size()); Assert.assertEquals(1, blks.get(0).getLocations().length); banner("finish the migration, continue writing"); // make sure the writing can continue out.writeBytes("world!"); ((DFSOutputStream) out.getWrappedStream()).hsync(); IOUtils.cleanup(LOG, out); lbs = test.dfs.getClient().getLocatedBlocks( barFile.toString(), BLOCK_SIZE); LOG.info("Locations: " + lbs); blks = lbs.getLocatedBlocks(); Assert.assertEquals(1, blks.size()); Assert.assertEquals(1, blks.get(0).getLocations().length); banner("finish writing, starting reading"); // check the content of /foo/bar FSDataInputStream in = test.dfs.open(barFile); byte[] buf = new byte[13]; // read from offset 1024 in.readFully(BLOCK_SIZE, buf, 0, buf.length); IOUtils.cleanup(LOG, in); Assert.assertEquals("hello, world!", new String(buf)); } finally { test.shutdownCluster(); } } /** * Test directories with Hot, Warm and Cold polices. 
*/ @Test public void testHotWarmColdDirs() throws Exception { LOG.info("testHotWarmColdDirs"); PathPolicyMap pathPolicyMap = new PathPolicyMap(3); NamespaceScheme nsScheme = pathPolicyMap.newNamespaceScheme(); ClusterScheme clusterScheme = new ClusterScheme(); MigrationTest test = new MigrationTest(clusterScheme, nsScheme); try { test.runBasicTest(false); pathPolicyMap.moveAround(test.dfs); test.migrate(ExitStatus.SUCCESS); test.verify(true); } finally { test.shutdownCluster(); } } private void waitForAllReplicas(int expectedReplicaNum, Path file, DistributedFileSystem dfs) throws Exception { for (int i = 0; i < 5; i++) { LocatedBlocks lbs = dfs.getClient().getLocatedBlocks(file.toString(), 0, BLOCK_SIZE); LocatedBlock lb = lbs.get(0); if (lb.getLocations().length >= expectedReplicaNum) { return; } else { Thread.sleep(1000); } } } private void setVolumeFull(DataNode dn, StorageType type) { try (FsDatasetSpi.FsVolumeReferences refs = dn.getFSDataset() .getFsVolumeReferences()) { for (FsVolumeSpi fvs : refs) { FsVolumeImpl volume = (FsVolumeImpl) fvs; if (volume.getStorageType() == type) { LOG.info("setCapacity to 0 for [" + volume.getStorageType() + "]" + volume.getStorageID()); volume.setCapacityForTesting(0); } } } catch (IOException e) { LOG.error("Unexpected exception by closing FsVolumeReference", e); } } /** * Test DISK is running out of spaces. 
*/ @Test public void testNoSpaceDisk() throws Exception { LOG.info("testNoSpaceDisk"); final PathPolicyMap pathPolicyMap = new PathPolicyMap(0); final NamespaceScheme nsScheme = pathPolicyMap.newNamespaceScheme(); Configuration conf = new Configuration(DEFAULT_CONF); final ClusterScheme clusterScheme = new ClusterScheme(conf, NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null); final MigrationTest test = new MigrationTest(clusterScheme, nsScheme); try { test.runBasicTest(false); // create 2 hot files with replication 3 final short replication = 3; for (int i = 0; i < 2; i++) { final Path p = new Path(pathPolicyMap.hot, "file" + i); DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, replication, 0L); waitForAllReplicas(replication, p, test.dfs); } // set all the DISK volume to full for (DataNode dn : test.cluster.getDataNodes()) { setVolumeFull(dn, StorageType.DISK); DataNodeTestUtils.triggerHeartbeat(dn); } // test increasing replication. Since DISK is full, // new replicas should be stored in ARCHIVE as a fallback storage. final Path file0 = new Path(pathPolicyMap.hot, "file0"); final Replication r = test.getReplication(file0); final short newReplication = (short) 5; test.dfs.setReplication(file0, newReplication); Thread.sleep(10000); test.verifyReplication(file0, r.disk, newReplication - r.disk); // test creating a cold file and then increase replication final Path p = new Path(pathPolicyMap.cold, "foo"); DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, replication, 0L); test.verifyReplication(p, 0, replication); test.dfs.setReplication(p, newReplication); Thread.sleep(10000); test.verifyReplication(p, 0, newReplication); //test move a hot file to warm final Path file1 = new Path(pathPolicyMap.hot, "file1"); test.dfs.rename(file1, pathPolicyMap.warm); test.migrate(ExitStatus.NO_MOVE_BLOCK); test.verifyFile(new Path(pathPolicyMap.warm, "file1"), WARM.getId()); } finally { test.shutdownCluster(); } } /** * Test ARCHIVE is running out of spaces. 
*/
@Test
public void testNoSpaceArchive() throws Exception {
  LOG.info("testNoSpaceArchive");
  final PathPolicyMap pathPolicyMap = new PathPolicyMap(0);
  final NamespaceScheme nsScheme = pathPolicyMap.newNamespaceScheme();

  final ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF,
      NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null);
  final MigrationTest test = new MigrationTest(clusterScheme, nsScheme);

  try {
    test.runBasicTest(false);

    // create 2 cold files with replication 3
    final short replication = 3;
    for (int i = 0; i < 2; i++) {
      final Path p = new Path(pathPolicyMap.cold, "file" + i);
      DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, replication, 0L);
      waitForAllReplicas(replication, p, test.dfs);
    }

    // set all the ARCHIVE volume to full
    for (DataNode dn : test.cluster.getDataNodes()) {
      setVolumeFull(dn, StorageType.ARCHIVE);
      DataNodeTestUtils.triggerHeartbeat(dn);
    }

    { // test increasing replication but new replicas cannot be created
      // since no more ARCHIVE space.
      final Path file0 = new Path(pathPolicyMap.cold, "file0");
      final Replication r = test.getReplication(file0);
      // A COLD file must have no DISK replicas to begin with.
      Assert.assertEquals(0, r.disk);

      final short newReplication = (short) 5;
      test.dfs.setReplication(file0, newReplication);
      // NOTE(review): fixed 10s sleep — flaky under load.
      Thread.sleep(10000);

      // Replica count should be unchanged: ARCHIVE is full and COLD has no
      // fallback storage type.
      test.verifyReplication(file0, 0, r.archive);
    }

    { // test creating a hot file
      final Path p = new Path(pathPolicyMap.hot, "foo");
      DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, (short) 3, 0L);
    }

    { //test move a cold file to warm
      // The WARM file only needs one DISK replica; the rest may stay on the
      // (full) ARCHIVE, so the mover can still succeed.
      final Path file1 = new Path(pathPolicyMap.cold, "file1");
      test.dfs.rename(file1, pathPolicyMap.warm);
      test.migrate(ExitStatus.SUCCESS);
      test.verify(true);
    }
  } finally {
    test.shutdownCluster();
  }
}
}
/* * Copyright 2006-2016 Dave Griffith, Bas Leijdekkers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.siyeh.ig.dependency; import com.intellij.codeInspection.reference.*; import com.intellij.openapi.util.Key; import java.util.*; public final class DependencyUtils { private static final Key<Set<RefClass>> DEPENDENT_CLASSES_KEY = new Key<>("DEPENDENT_CLASSES"); private static final Key<Set<RefClass>> DEPENDENCY_CLASSES_KEY = new Key<>("DEPENDENCY_CLASSES"); private static final Key<Set<RefClass>> TRANSITIVE_DEPENDENT_CLASSES_KEY = new Key<>("TRANSITIVE_DEPENDENT_CLASSES"); private static final Key<Set<RefClass>> TRANSITIVE_DEPENDENCY_CLASSES_KEY = new Key<>("TRANSITIVE_DEPENDENCY_CLASSES"); private static final Key<Set<RefPackage>> DEPENDENT_PACKAGES_KEY = new Key<>("DEPENDENT_PACKAGES"); private static final Key<Set<RefPackage>> DEPENDENCY_PACKAGES_KEY = new Key<>("DEPENDENCY_PACKAGES"); private static final Key<Set<RefPackage>> TRANSITIVE_DEPENDENT_PACKAGES_KEY = new Key<>("TRANSITIVE_DEPENDENT_PACKAGES"); private static final Key<Set<RefPackage>> TRANSITIVE_DEPENDENCY_PACKAGES_KEY = new Key<>("TRANSITIVE_DEPENDENCY_PACKAGES"); private DependencyUtils() { } public static Set<RefClass> calculateDependenciesForClass(RefClass refClass) { final Set<RefClass> dependencies = refClass.getUserData(DEPENDENCY_CLASSES_KEY); if (dependencies != null) { return dependencies; } final Set<RefClass> newDependencies = new HashSet<>(); tabulateDependencyClasses(refClass, 
newDependencies); newDependencies.remove(refClass); refClass.putUserData(DEPENDENCY_CLASSES_KEY, newDependencies); return newDependencies; } private static void tabulateDependencyClasses(RefJavaElement element, Set<? super RefClass> dependencies) { addOwnerClassesToSet(element.getOutReferences(), dependencies); addOwnerClassesToSet(element.getOutTypeReferences(), dependencies); final List<RefEntity> children = element.getChildren(); for (RefEntity child : children) { if (child instanceof RefJavaElement && !(child instanceof RefClass)) { tabulateDependencyClasses((RefJavaElement)child, dependencies); } } } private static void addOwnerClassesToSet(Collection<? extends RefElement> references, Set<? super RefClass> set) { final RefJavaUtil refUtil = RefJavaUtil.getInstance(); for (RefElement reference : references) { final RefClass refClass = reference instanceof RefClass ? (RefClass)reference : refUtil.getOwnerClass(reference); if (refClass != null && !refClass.isAnonymous() && !refClass.isLocalClass()) { set.add(refClass); } } } static Set<RefClass> calculateTransitiveDependenciesForClass(RefClass refClass) { final Set<RefClass> dependencies = refClass.getUserData(TRANSITIVE_DEPENDENCY_CLASSES_KEY); if (dependencies != null) { return dependencies; } final Set<RefClass> newDependencies = new HashSet<>(); tabulateTransitiveDependencyClasses(refClass, newDependencies); refClass.putUserData(TRANSITIVE_DEPENDENCY_CLASSES_KEY, newDependencies); return newDependencies; } private static void tabulateTransitiveDependencyClasses( RefClass refClass, Set<? 
super RefClass> newDependencies) { final LinkedList<RefClass> pendingClasses = new LinkedList<>(); pendingClasses.addLast(refClass); final Set<RefClass> processedClasses = new HashSet<>(); while (!pendingClasses.isEmpty()) { final RefClass classToProcess = pendingClasses.removeFirst(); newDependencies.add(classToProcess); processedClasses.add(classToProcess); final Set<RefClass> dependencies = calculateDependenciesForClass(classToProcess); for (RefClass dependency : dependencies) { if (!pendingClasses.contains(dependency) && !processedClasses.contains(dependency)) { pendingClasses.addLast(dependency); } } } newDependencies.remove(refClass); } public static Set<RefClass> calculateDependentsForClass(RefClass refClass) { final Set<RefClass> dependents = refClass.getUserData(DEPENDENT_CLASSES_KEY); if (dependents != null) { return dependents; } final Set<RefClass> newDependents = new HashSet<>(); tabulateDependentClasses(refClass, newDependents); newDependents.remove(refClass); refClass.putUserData(DEPENDENT_CLASSES_KEY, newDependents); return newDependents; } private static void tabulateDependentClasses(RefElement element, Set<? 
super RefClass> dependents) { addOwnerClassesToSet(element.getInReferences(), dependents); if (element instanceof RefClass) { final RefClass refClass = (RefClass)element; addOwnerClassesToSet(refClass.getInTypeReferences(), dependents); } final List<RefEntity> children = element.getChildren(); for (RefEntity child : children) { if (child instanceof RefElement && !(child instanceof RefClass)) { tabulateDependentClasses((RefElement)child, dependents); } } } static Set<RefClass> calculateTransitiveDependentsForClass(RefClass refClass) { final Set<RefClass> dependents = refClass.getUserData(TRANSITIVE_DEPENDENT_CLASSES_KEY); if (dependents != null) { return dependents; } final Set<RefClass> newDependents = new HashSet<>(); tabulateTransitiveDependentClasses(refClass, newDependents); refClass.putUserData(TRANSITIVE_DEPENDENT_CLASSES_KEY, newDependents); return newDependents; } private static void tabulateTransitiveDependentClasses( RefClass refClass, Set<? super RefClass> newDependents) { final LinkedList<RefClass> pendingClasses = new LinkedList<>(); pendingClasses.addLast(refClass); final Set<RefClass> processedClasses = new HashSet<>(); while (!pendingClasses.isEmpty()) { final RefClass classToProcess = pendingClasses.removeFirst(); newDependents.add(classToProcess); processedClasses.add(classToProcess); final Set<RefClass> dependents = calculateDependentsForClass(classToProcess); for (RefClass dependent : dependents) { if (!pendingClasses.contains(dependent) && !processedClasses.contains(dependent)) { pendingClasses.addLast(dependent); } } } newDependents.remove(refClass); } private static Set<RefPackage> calculateDependenciesForPackage( RefPackage refPackage) { final Set<RefPackage> dependencies = refPackage.getUserData(DEPENDENCY_PACKAGES_KEY); if (dependencies != null) { return dependencies; } final Set<RefPackage> newDependencies = new HashSet<>(); tabulateDependencyPackages(refPackage, newDependencies); newDependencies.remove(refPackage); 
refPackage.putUserData(DEPENDENCY_PACKAGES_KEY, newDependencies); return newDependencies; } private static void tabulateDependencyPackages(RefEntity entity, Set<? super RefPackage> dependencies) { if (entity instanceof RefElement) { final RefElement element = (RefElement)entity; final Collection<RefElement> references = element.getOutReferences(); for (RefElement reference : references) { final RefPackage refPackage = RefJavaUtil.getPackage(reference); if (refPackage != null) { dependencies.add(refPackage); } } } final List<RefEntity> children = entity.getChildren(); for (RefEntity child : children) { if (!(child instanceof RefPackage)) { tabulateDependencyPackages(child, dependencies); } } } private static Set<RefPackage> calculateDependentsForPackage( RefPackage refPackage) { final Set<RefPackage> dependents = refPackage.getUserData(DEPENDENT_PACKAGES_KEY); if (dependents != null) { return dependents; } final Set<RefPackage> newDependents = new HashSet<>(); tabulateDependentPackages(refPackage, newDependents); newDependents.remove(refPackage); refPackage.putUserData(DEPENDENT_PACKAGES_KEY, newDependents); return newDependents; } private static void tabulateDependentPackages(RefEntity entity, Set<? 
super RefPackage> dependents) { if (entity instanceof RefElement) { final RefElement element = (RefElement)entity; final Collection<RefElement> references = element.getInReferences(); for (RefElement reference : references) { final RefPackage refPackage = RefJavaUtil.getPackage(reference); if (refPackage != null) { dependents.add(refPackage); } } } final List<RefEntity> children = entity.getChildren(); for (RefEntity child : children) { if (!(child instanceof RefPackage)) { tabulateDependentPackages(child, dependents); } } } static Set<RefPackage> calculateTransitiveDependentsForPackage( RefPackage refPackage) { final Set<RefPackage> dependents = refPackage.getUserData(TRANSITIVE_DEPENDENT_PACKAGES_KEY); if (dependents != null) { return dependents; } final Set<RefPackage> newDependents = new HashSet<>(); tabulateTransitiveDependentPackages(refPackage, newDependents); refPackage.putUserData(TRANSITIVE_DEPENDENT_PACKAGES_KEY, newDependents); return newDependents; } private static void tabulateTransitiveDependentPackages( RefPackage refPackage, Set<? 
super RefPackage> newDependents) { final LinkedList<RefPackage> pendingPackages = new LinkedList<>(); pendingPackages.addLast(refPackage); final Set<RefPackage> processedPackages = new HashSet<>(); while (!pendingPackages.isEmpty()) { final RefPackage packageToProcess = pendingPackages.removeFirst(); newDependents.add(packageToProcess); processedPackages.add(packageToProcess); final Set<RefPackage> dependents = calculateDependentsForPackage(packageToProcess); for (RefPackage dependent : dependents) { if (!pendingPackages.contains(dependent) && !processedPackages.contains(dependent)) { pendingPackages.addLast(dependent); } } } newDependents.remove(refPackage); } static Set<RefPackage> calculateTransitiveDependenciesForPackage( RefPackage refPackage) { final Set<RefPackage> dependencies = refPackage.getUserData(TRANSITIVE_DEPENDENCY_PACKAGES_KEY); if (dependencies != null) { return dependencies; } final Set<RefPackage> newDependencies = new HashSet<>(); tabulateTransitiveDependencyPackages(refPackage, newDependencies); refPackage.putUserData(TRANSITIVE_DEPENDENCY_PACKAGES_KEY, newDependencies); return newDependencies; } private static void tabulateTransitiveDependencyPackages( RefPackage refPackage, Set<? super RefPackage> newDependencies) { final LinkedList<RefPackage> pendingPackages = new LinkedList<>(); pendingPackages.addLast(refPackage); final Set<RefPackage> processedPackages = new HashSet<>(); while (!pendingPackages.isEmpty()) { final RefPackage packageToProcess = pendingPackages.removeFirst(); newDependencies.add(packageToProcess); processedPackages.add(packageToProcess); final Set<RefPackage> dependencies = calculateDependenciesForPackage(packageToProcess); for (RefPackage dependency : dependencies) { if (!pendingPackages.contains(dependency) && !processedPackages.contains(dependency)) { pendingPackages.addLast(dependency); } } } newDependencies.remove(refPackage); } }
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */
package io.reactivex.parallel;

import java.util.*;
import java.util.concurrent.Callable;

import io.reactivex.*;
import io.reactivex.annotations.*;
import io.reactivex.exceptions.Exceptions;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.*;
import io.reactivex.internal.operators.parallel.*;
import io.reactivex.internal.subscriptions.EmptySubscription;
import io.reactivex.internal.util.*;
import io.reactivex.plugins.RxJavaPlugins;

import org.reactivestreams.*;

/**
 * Abstract base class for Parallel publishers that take an array of Subscribers.
 * <p>
 * Use {@code from()} to start processing a regular Publisher in 'rails'.
 * Use {@code runOn()} to introduce where each 'rail' should run thread-wise.
 * Use {@code sequential()} to merge the sources back into a single Flowable.
 *
 * <p>History: 2.0.5 - experimental; 2.1 - beta
 * @param <T> the value type
 * @since 2.2
 */
public abstract class ParallelFlowable<T> {

    /**
     * Subscribes an array of Subscribers to this ParallelFlowable and triggers
     * the execution chain for all 'rails'.
     *
     * @param subscribers the subscribers array to run in parallel, the number
     * of items must be equal to the parallelism level of this ParallelFlowable
     * @see #parallelism()
     */
    public abstract void subscribe(@NonNull Subscriber<? super T>[] subscribers);

    /**
     * Returns the number of expected parallel Subscribers.
* @return the number of expected parallel Subscribers
     */
    public abstract int parallelism();

    /**
     * Validates the number of subscribers and returns true if their number
     * matches the parallelism level of this ParallelFlowable.
     *
     * @param subscribers the array of Subscribers
     * @return true if the number of subscribers equals to the parallelism level
     */
    protected final boolean validate(@NonNull Subscriber<?>[] subscribers) {
        int p = parallelism();
        if (subscribers.length != p) {
            Throwable iae = new IllegalArgumentException("parallelism = " + p + ", subscribers = " + subscribers.length);
            // Signal the same error to every Subscriber so none is left hanging.
            for (Subscriber<?> s : subscribers) {
                EmptySubscription.error(iae, s);
            }
            return false;
        }
        return true;
    }

    /**
     * Take a Publisher and prepare to consume it on multiple 'rails' (number of CPUs)
     * in a round-robin fashion.
     * @param <T> the value type
     * @param source the source Publisher
     * @return the ParallelFlowable instance
     */
    @CheckReturnValue
    public static <T> ParallelFlowable<T> from(@NonNull Publisher<? extends T> source) {
        return from(source, Runtime.getRuntime().availableProcessors(), Flowable.bufferSize());
    }

    /**
     * Take a Publisher and prepare to consume it on parallelism number of 'rails' in a round-robin fashion.
     * @param <T> the value type
     * @param source the source Publisher
     * @param parallelism the number of parallel rails
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    public static <T> ParallelFlowable<T> from(@NonNull Publisher<? extends T> source, int parallelism) {
        return from(source, parallelism, Flowable.bufferSize());
    }

    /**
     * Take a Publisher and prepare to consume it on {@code parallelism} number of 'rails',
     * possibly ordered, in a round-robin fashion, using a custom prefetch amount and queue
     * for dealing with the source Publisher's values.
* @param <T> the value type
     * @param source the source Publisher
     * @param parallelism the number of parallel rails
     * @param prefetch the number of values to prefetch from the source;
     * values are held in the queue until there is a rail ready to process them
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    @NonNull
    public static <T> ParallelFlowable<T> from(@NonNull Publisher<? extends T> source,
            int parallelism, int prefetch) {
        ObjectHelper.requireNonNull(source, "source");
        ObjectHelper.verifyPositive(parallelism, "parallelism");
        ObjectHelper.verifyPositive(prefetch, "prefetch");

        return RxJavaPlugins.onAssembly(new ParallelFromPublisher<T>(source, parallelism, prefetch));
    }

    /**
     * Calls the specified converter function during assembly time and returns its resulting value.
     * <p>
     * This allows fluent conversion to any other type.
     * <p>History: 2.1.7 - experimental
     * @param <R> the resulting object type
     * @param converter the function that receives the current ParallelFlowable instance and returns a value
     * @return the converted value
     * @throws NullPointerException if converter is null
     * @since 2.2
     */
    @CheckReturnValue
    @NonNull
    public final <R> R as(@NonNull ParallelFlowableConverter<T, R> converter) {
        return ObjectHelper.requireNonNull(converter, "converter is null").apply(this);
    }

    /**
     * Maps the source values on each 'rail' to another value.
     * <p>
     * Note that the same mapper function may be called from multiple threads concurrently.
     * @param <R> the output value type
     * @param mapper the mapper function turning Ts into Us.
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    @NonNull
    public final <R> ParallelFlowable<R> map(@NonNull Function<? super T, ? extends R> mapper) {
        ObjectHelper.requireNonNull(mapper, "mapper");
        return RxJavaPlugins.onAssembly(new ParallelMap<T, R>(this, mapper));
    }

    /**
     * Maps the source values on each 'rail' to another value and
     * handles errors based on the given {@link ParallelFailureHandling} enumeration value.
* <p>
     * Note that the same mapper function may be called from multiple threads concurrently.
     * <p>History: 2.0.8 - experimental
     * @param <R> the output value type
     * @param mapper the mapper function turning Ts into Us.
     * @param errorHandler the enumeration that defines how to handle errors thrown
     *                     from the mapper function
     * @return the new ParallelFlowable instance
     * @since 2.2
     */
    @CheckReturnValue
    @NonNull
    public final <R> ParallelFlowable<R> map(@NonNull Function<? super T, ? extends R> mapper, @NonNull ParallelFailureHandling errorHandler) {
        ObjectHelper.requireNonNull(mapper, "mapper");
        ObjectHelper.requireNonNull(errorHandler, "errorHandler is null");
        return RxJavaPlugins.onAssembly(new ParallelMapTry<T, R>(this, mapper, errorHandler));
    }

    /**
     * Maps the source values on each 'rail' to another value and
     * handles errors based on the returned value by the handler function.
     * <p>
     * Note that the same mapper function may be called from multiple threads concurrently.
     * <p>History: 2.0.8 - experimental
     * @param <R> the output value type
     * @param mapper the mapper function turning Ts into Us.
     * @param errorHandler the function called with the current repeat count and
     *                     failure Throwable and should return one of the {@link ParallelFailureHandling}
     *                     enumeration values to indicate how to proceed.
     * @return the new ParallelFlowable instance
     * @since 2.2
     */
    @CheckReturnValue
    @NonNull
    public final <R> ParallelFlowable<R> map(@NonNull Function<? super T, ? extends R> mapper,
            @NonNull BiFunction<? super Long, ? super Throwable, ParallelFailureHandling> errorHandler) {
        ObjectHelper.requireNonNull(mapper, "mapper");
        ObjectHelper.requireNonNull(errorHandler, "errorHandler is null");
        return RxJavaPlugins.onAssembly(new ParallelMapTry<T, R>(this, mapper, errorHandler));
    }

    /**
     * Filters the source values on each 'rail'.
     * <p>
     * Note that the same predicate may be called from multiple threads concurrently.
* @param predicate the function returning true to keep a value or false to drop a value
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    public final ParallelFlowable<T> filter(@NonNull Predicate<? super T> predicate) {
        ObjectHelper.requireNonNull(predicate, "predicate");
        return RxJavaPlugins.onAssembly(new ParallelFilter<T>(this, predicate));
    }

    /**
     * Filters the source values on each 'rail' and
     * handles errors based on the given {@link ParallelFailureHandling} enumeration value.
     * <p>
     * Note that the same predicate may be called from multiple threads concurrently.
     * <p>History: 2.0.8 - experimental
     * @param predicate the function returning true to keep a value or false to drop a value
     * @param errorHandler the enumeration that defines how to handle errors thrown
     *                     from the predicate
     * @return the new ParallelFlowable instance
     * @since 2.2
     */
    @CheckReturnValue
    public final ParallelFlowable<T> filter(@NonNull Predicate<? super T> predicate, @NonNull ParallelFailureHandling errorHandler) {
        ObjectHelper.requireNonNull(predicate, "predicate");
        ObjectHelper.requireNonNull(errorHandler, "errorHandler is null");
        return RxJavaPlugins.onAssembly(new ParallelFilterTry<T>(this, predicate, errorHandler));
    }

    /**
     * Filters the source values on each 'rail' and
     * handles errors based on the returned value by the handler function.
     * <p>
     * Note that the same predicate may be called from multiple threads concurrently.
     * <p>History: 2.0.8 - experimental
     * @param predicate the function returning true to keep a value or false to drop a value
     * @param errorHandler the function called with the current repeat count and
     *                     failure Throwable and should return one of the {@link ParallelFailureHandling}
     *                     enumeration values to indicate how to proceed.
     * @return the new ParallelFlowable instance
     * @since 2.2
     */
    @CheckReturnValue
    public final ParallelFlowable<T> filter(@NonNull Predicate<? super T> predicate,
            @NonNull BiFunction<? super Long, ? super Throwable, ParallelFailureHandling> errorHandler) {
        ObjectHelper.requireNonNull(predicate, "predicate");
        ObjectHelper.requireNonNull(errorHandler, "errorHandler is null");
        return RxJavaPlugins.onAssembly(new ParallelFilterTry<T>(this, predicate, errorHandler));
    }

    /**
     * Specifies where each 'rail' will observe its incoming values with
     * no work-stealing and default prefetch amount.
     * <p>
     * This operator uses the default prefetch size returned by {@code Flowable.bufferSize()}.
     * <p>
     * The operator will call {@code Scheduler.createWorker()} as many
     * times as this ParallelFlowable's parallelism level is.
     * <p>
     * No assumptions are made about the Scheduler's parallelism level,
     * if the Scheduler's parallelism level is lower than the ParallelFlowable's,
     * some rails may end up on the same thread/worker.
     * <p>
     * This operator doesn't require the Scheduler to be trampolining as it
     * does its own built-in trampolining logic.
     *
     * @param scheduler the scheduler to use
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    @NonNull
    public final ParallelFlowable<T> runOn(@NonNull Scheduler scheduler) {
        return runOn(scheduler, Flowable.bufferSize());
    }

    /**
     * Specifies where each 'rail' will observe its incoming values with
     * possibly work-stealing and a given prefetch amount.
     * <p>
     * The prefetch amount is given explicitly by the {@code prefetch} parameter.
     * <p>
     * The operator will call {@code Scheduler.createWorker()} as many
     * times as this ParallelFlowable's parallelism level is.
     * <p>
     * No assumptions are made about the Scheduler's parallelism level,
     * if the Scheduler's parallelism level is lower than the ParallelFlowable's,
     * some rails may end up on the same thread/worker.
     * <p>
     * This operator doesn't require the Scheduler to be trampolining as it
     * does its own built-in trampolining logic.
     *
     * @param scheduler the scheduler to use
* @param prefetch the number of values to request on each 'rail' from the source
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    @NonNull
    public final ParallelFlowable<T> runOn(@NonNull Scheduler scheduler, int prefetch) {
        ObjectHelper.requireNonNull(scheduler, "scheduler");
        ObjectHelper.verifyPositive(prefetch, "prefetch");
        return RxJavaPlugins.onAssembly(new ParallelRunOn<T>(this, scheduler, prefetch));
    }

    /**
     * Reduces all values within a 'rail' and across 'rails' with a reducer function into a single
     * sequential value.
     * <p>
     * Note that the same reducer function may be called from multiple threads concurrently.
     * @param reducer the function to reduce two values into one.
     * @return the new Flowable instance emitting the reduced value or empty if the ParallelFlowable was empty
     */
    @CheckReturnValue
    @NonNull
    public final Flowable<T> reduce(@NonNull BiFunction<T, T, T> reducer) {
        ObjectHelper.requireNonNull(reducer, "reducer");
        return RxJavaPlugins.onAssembly(new ParallelReduceFull<T>(this, reducer));
    }

    /**
     * Reduces all values within a 'rail' to a single value (with a possibly different type) via
     * a reducer function that is initialized on each rail from an initialSupplier value.
     * <p>
     * Note that the same mapper function may be called from multiple threads concurrently.
     * @param <R> the reduced output type
     * @param initialSupplier the supplier for the initial value
     * @param reducer the function to reduce a previous output of reduce (or the initial value supplied)
     *                with a current source value.
     * @return the new ParallelFlowable instance
     */
    @CheckReturnValue
    @NonNull
    public final <R> ParallelFlowable<R> reduce(@NonNull Callable<R> initialSupplier, @NonNull BiFunction<R, ? super T, R> reducer) {
        ObjectHelper.requireNonNull(initialSupplier, "initialSupplier");
        ObjectHelper.requireNonNull(reducer, "reducer");
        return RxJavaPlugins.onAssembly(new ParallelReduce<T, R>(this, initialSupplier, reducer));
    }

    /**
     * Merges the values from each 'rail' in a round-robin or same-order fashion and
     * exposes it as a regular Publisher sequence, running with a default prefetch value
     * for the rails.
     * <p>
     * This operator uses the default prefetch size returned by {@code Flowable.bufferSize()}.
     * <img width="640" height="602" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/parallelflowable.sequential.png" alt="">
     * <dl>
     *  <dt><b>Backpressure:</b></dt>
     *  <dd>The operator honors backpressure.</dd>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>{@code sequential} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @return the new Flowable instance
     * @see ParallelFlowable#sequential(int)
     * @see ParallelFlowable#sequentialDelayError()
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @SchedulerSupport(SchedulerSupport.NONE)
    @CheckReturnValue
    public final Flowable<T> sequential() {
        return sequential(Flowable.bufferSize());
    }

    /**
     * Merges the values from each 'rail' in a round-robin or same-order fashion and
     * exposes it as a regular Publisher sequence, running with a given prefetch value
     * for the rails.
* <img width="640" height="602" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/parallelflowable.sequential.png" alt="">
     * <dl>
     *  <dt><b>Backpressure:</b></dt>
     *  <dd>The operator honors backpressure.</dd>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>{@code sequential} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @param prefetch the prefetch amount to use for each rail
     * @return the new Flowable instance
     * @see ParallelFlowable#sequential()
     * @see ParallelFlowable#sequentialDelayError(int)
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @SchedulerSupport(SchedulerSupport.NONE)
    @CheckReturnValue
    @NonNull
    public final Flowable<T> sequential(int prefetch) {
        ObjectHelper.verifyPositive(prefetch, "prefetch");
        // 'false' = do not delay errors; see sequentialDelayError for the delaying variant.
        return RxJavaPlugins.onAssembly(new ParallelJoin<T>(this, prefetch, false));
    }

    /**
     * Merges the values from each 'rail' in a round-robin or same-order fashion and
     * exposes it as a regular Flowable sequence, running with a default prefetch value
     * for the rails and delaying errors from all rails till all terminate.
     * <p>
     * This operator uses the default prefetch size returned by {@code Flowable.bufferSize()}.
* <img width="640" height="602" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/parallelflowable.sequential.png" alt="">
     * <dl>
     *  <dt><b>Backpressure:</b></dt>
     *  <dd>The operator honors backpressure.</dd>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>{@code sequentialDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * <p>History: 2.0.7 - experimental
     * @return the new Flowable instance
     * @see ParallelFlowable#sequentialDelayError(int)
     * @see ParallelFlowable#sequential()
     * @since 2.2
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @SchedulerSupport(SchedulerSupport.NONE)
    @CheckReturnValue
    @NonNull
    public final Flowable<T> sequentialDelayError() {
        return sequentialDelayError(Flowable.bufferSize());
    }

    /**
     * Merges the values from each 'rail' in a round-robin or same-order fashion and
     * exposes it as a regular Publisher sequence, running with a give prefetch value
     * for the rails and delaying errors from all rails till all terminate.
     * <img width="640" height="602" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/parallelflowable.sequential.png" alt="">
     * <dl>
     *  <dt><b>Backpressure:</b></dt>
     *  <dd>The operator honors backpressure.</dd>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>{@code sequentialDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * <p>History: 2.0.7 - experimental
     * @param prefetch the prefetch amount to use for each rail
     * @return the new Flowable instance
     * @see ParallelFlowable#sequential()
     * @see ParallelFlowable#sequentialDelayError()
     * @since 2.2
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @SchedulerSupport(SchedulerSupport.NONE)
    @CheckReturnValue
    @NonNull
    public final Flowable<T> sequentialDelayError(int prefetch) {
        ObjectHelper.verifyPositive(prefetch, "prefetch");
        // 'true' = delay errors until all rails have terminated.
        return RxJavaPlugins.onAssembly(new ParallelJoin<T>(this, prefetch, true));
    }

    /**
     * Sorts the 'rails' of this ParallelFlowable and returns a Publisher that sequentially
     * picks the
smallest next value from the rails.
     * <p>
     * This operator requires a finite source ParallelFlowable.
     *
     * @param comparator the comparator to use
     * @return the new Flowable instance
     */
    @CheckReturnValue
    @NonNull
    public final Flowable<T> sorted(@NonNull Comparator<? super T> comparator) {
        // 16 is the default capacity hint for the expected total element count.
        return sorted(comparator, 16);
    }

    /**
     * Sorts the 'rails' of this ParallelFlowable and returns a Publisher that sequentially
     * picks the smallest next value from the rails.
     * <p>
     * This operator requires a finite source ParallelFlowable.
     *
     * @param comparator the comparator to use
     * @param capacityHint the expected number of total elements
     * @return the new Flowable instance
     */
    @CheckReturnValue
    @NonNull
    public final Flowable<T> sorted(@NonNull Comparator<? super T> comparator, int capacityHint) {
        ObjectHelper.requireNonNull(comparator, "comparator is null");
        ObjectHelper.verifyPositive(capacityHint, "capacityHint");
        // Distribute the capacity hint evenly across the rails.
        int ch = capacityHint / parallelism() + 1;
        // Each rail collects into a list, sorts it, then the rails are merge-joined.
        ParallelFlowable<List<T>> railReduced = reduce(Functions.<T>createArrayList(ch), ListAddBiConsumer.<T>instance());
        ParallelFlowable<List<T>> railSorted = railReduced.map(new SorterFunction<T>(comparator));

        return RxJavaPlugins.onAssembly(new ParallelSortedJoin<T>(railSorted, comparator));
    }

    /**
     * Sorts the 'rails' according to the comparator and returns a full sorted list as a Publisher.
     * <p>
     * This operator requires a finite source ParallelFlowable.
     *
     * @param comparator the comparator to compare elements
     * @return the new Flowable instance
     */
    @CheckReturnValue
    @NonNull
    public final Flowable<List<T>> toSortedList(@NonNull Comparator<? super T> comparator) {
        // 16 is the default capacity hint for the expected total element count.
        return toSortedList(comparator, 16);
    }

    /**
     * Sorts the 'rails' according to the comparator and returns a full sorted list as a Publisher.
     * <p>
     * This operator requires a finite source ParallelFlowable.
* * @param comparator the comparator to compare elements * @param capacityHint the expected number of total elements * @return the new Flowable instance */ @CheckReturnValue @NonNull public final Flowable<List<T>> toSortedList(@NonNull Comparator<? super T> comparator, int capacityHint) { ObjectHelper.requireNonNull(comparator, "comparator is null"); ObjectHelper.verifyPositive(capacityHint, "capacityHint"); int ch = capacityHint / parallelism() + 1; ParallelFlowable<List<T>> railReduced = reduce(Functions.<T>createArrayList(ch), ListAddBiConsumer.<T>instance()); ParallelFlowable<List<T>> railSorted = railReduced.map(new SorterFunction<T>(comparator)); Flowable<List<T>> merged = railSorted.reduce(new MergerBiFunction<T>(comparator)); return RxJavaPlugins.onAssembly(merged); } /** * Call the specified consumer with the current element passing through any 'rail'. * * @param onNext the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnNext(@NonNull Consumer<? super T> onNext) { ObjectHelper.requireNonNull(onNext, "onNext is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, onNext, Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.EMPTY_ACTION, Functions.EMPTY_ACTION, Functions.emptyConsumer(), Functions.EMPTY_LONG_CONSUMER, Functions.EMPTY_ACTION )); } /** * Call the specified consumer with the current element passing through any 'rail' and * handles errors based on the given {@link ParallelFailureHandling} enumeration value. * <p>History: 2.0.8 - experimental * @param onNext the callback * @param errorHandler the enumeration that defines how to handle errors thrown * from the onNext consumer * @return the new ParallelFlowable instance * @since 2.2 */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnNext(@NonNull Consumer<? 
super T> onNext, @NonNull ParallelFailureHandling errorHandler) { ObjectHelper.requireNonNull(onNext, "onNext is null"); ObjectHelper.requireNonNull(errorHandler, "errorHandler is null"); return RxJavaPlugins.onAssembly(new ParallelDoOnNextTry<T>(this, onNext, errorHandler)); } /** * Call the specified consumer with the current element passing through any 'rail' and * handles errors based on the returned value by the handler function. * <p>History: 2.0.8 - experimental * @param onNext the callback * @param errorHandler the function called with the current repeat count and * failure Throwable and should return one of the {@link ParallelFailureHandling} * enumeration values to indicate how to proceed. * @return the new ParallelFlowable instance * @since 2.2 */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnNext(@NonNull Consumer<? super T> onNext, @NonNull BiFunction<? super Long, ? super Throwable, ParallelFailureHandling> errorHandler) { ObjectHelper.requireNonNull(onNext, "onNext is null"); ObjectHelper.requireNonNull(errorHandler, "errorHandler is null"); return RxJavaPlugins.onAssembly(new ParallelDoOnNextTry<T>(this, onNext, errorHandler)); } /** * Call the specified consumer with the current element passing through any 'rail' * after it has been delivered to downstream within the rail. * * @param onAfterNext the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doAfterNext(@NonNull Consumer<? super T> onAfterNext) { ObjectHelper.requireNonNull(onAfterNext, "onAfterNext is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), onAfterNext, Functions.emptyConsumer(), Functions.EMPTY_ACTION, Functions.EMPTY_ACTION, Functions.emptyConsumer(), Functions.EMPTY_LONG_CONSUMER, Functions.EMPTY_ACTION )); } /** * Call the specified consumer with the exception passing through any 'rail'. 
* * @param onError the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnError(@NonNull Consumer<Throwable> onError) { ObjectHelper.requireNonNull(onError, "onError is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), Functions.emptyConsumer(), onError, Functions.EMPTY_ACTION, Functions.EMPTY_ACTION, Functions.emptyConsumer(), Functions.EMPTY_LONG_CONSUMER, Functions.EMPTY_ACTION )); } /** * Run the specified Action when a 'rail' completes. * * @param onComplete the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnComplete(@NonNull Action onComplete) { ObjectHelper.requireNonNull(onComplete, "onComplete is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.emptyConsumer(), onComplete, Functions.EMPTY_ACTION, Functions.emptyConsumer(), Functions.EMPTY_LONG_CONSUMER, Functions.EMPTY_ACTION )); } /** * Run the specified Action when a 'rail' completes or signals an error. * * @param onAfterTerminate the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doAfterTerminated(@NonNull Action onAfterTerminate) { ObjectHelper.requireNonNull(onAfterTerminate, "onAfterTerminate is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.EMPTY_ACTION, onAfterTerminate, Functions.emptyConsumer(), Functions.EMPTY_LONG_CONSUMER, Functions.EMPTY_ACTION )); } /** * Call the specified callback when a 'rail' receives a Subscription from its upstream. * * @param onSubscribe the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnSubscribe(@NonNull Consumer<? 
super Subscription> onSubscribe) { ObjectHelper.requireNonNull(onSubscribe, "onSubscribe is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.EMPTY_ACTION, Functions.EMPTY_ACTION, onSubscribe, Functions.EMPTY_LONG_CONSUMER, Functions.EMPTY_ACTION )); } /** * Call the specified consumer with the request amount if any rail receives a request. * * @param onRequest the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnRequest(@NonNull LongConsumer onRequest) { ObjectHelper.requireNonNull(onRequest, "onRequest is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.EMPTY_ACTION, Functions.EMPTY_ACTION, Functions.emptyConsumer(), onRequest, Functions.EMPTY_ACTION )); } /** * Run the specified Action when a 'rail' receives a cancellation. * * @param onCancel the callback * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final ParallelFlowable<T> doOnCancel(@NonNull Action onCancel) { ObjectHelper.requireNonNull(onCancel, "onCancel is null"); return RxJavaPlugins.onAssembly(new ParallelPeek<T>(this, Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.EMPTY_ACTION, Functions.EMPTY_ACTION, Functions.emptyConsumer(), Functions.EMPTY_LONG_CONSUMER, onCancel )); } /** * Collect the elements in each rail into a collection supplied via a collectionSupplier * and collected into with a collector action, emitting the collection at the end. 
* * @param <C> the collection type * @param collectionSupplier the supplier of the collection in each rail * @param collector the collector, taking the per-rail collection and the current item * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <C> ParallelFlowable<C> collect(@NonNull Callable<? extends C> collectionSupplier, @NonNull BiConsumer<? super C, ? super T> collector) { ObjectHelper.requireNonNull(collectionSupplier, "collectionSupplier is null"); ObjectHelper.requireNonNull(collector, "collector is null"); return RxJavaPlugins.onAssembly(new ParallelCollect<T, C>(this, collectionSupplier, collector)); } /** * Wraps multiple Publishers into a ParallelFlowable which runs them * in parallel and unordered. * * @param <T> the value type * @param publishers the array of publishers * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public static <T> ParallelFlowable<T> fromArray(@NonNull Publisher<T>... publishers) { if (publishers.length == 0) { throw new IllegalArgumentException("Zero publishers not supported"); } return RxJavaPlugins.onAssembly(new ParallelFromArray<T>(publishers)); } /** * Perform a fluent transformation to a value via a converter function which * receives this ParallelFlowable. * * @param <U> the output value type * @param converter the converter function from ParallelFlowable to some type * @return the value returned by the converter function */ @CheckReturnValue @NonNull public final <U> U to(@NonNull Function<? super ParallelFlowable<T>, U> converter) { try { return ObjectHelper.requireNonNull(converter, "converter is null").apply(this); } catch (Throwable ex) { Exceptions.throwIfFatal(ex); throw ExceptionHelper.wrapOrThrow(ex); } } /** * Allows composing operators, in assembly time, on top of this ParallelFlowable * and returns another ParallelFlowable with composed features. 
* * @param <U> the output value type * @param composer the composer function from ParallelFlowable (this) to another ParallelFlowable * @return the ParallelFlowable returned by the function */ @CheckReturnValue @NonNull public final <U> ParallelFlowable<U> compose(@NonNull ParallelTransformer<T, U> composer) { return RxJavaPlugins.onAssembly(ObjectHelper.requireNonNull(composer, "composer is null").apply(this)); } /** * Generates and flattens Publishers on each 'rail'. * <p> * Errors are not delayed and uses unbounded concurrency along with default inner prefetch. * * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> flatMap(@NonNull Function<? super T, ? extends Publisher<? extends R>> mapper) { return flatMap(mapper, false, Integer.MAX_VALUE, Flowable.bufferSize()); } /** * Generates and flattens Publishers on each 'rail', optionally delaying errors. * <p> * It uses unbounded concurrency along with default inner prefetch. * * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @param delayError should the errors from the main and the inner sources delayed till everybody terminates? * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> flatMap( @NonNull Function<? super T, ? extends Publisher<? extends R>> mapper, boolean delayError) { return flatMap(mapper, delayError, Integer.MAX_VALUE, Flowable.bufferSize()); } /** * Generates and flattens Publishers on each 'rail', optionally delaying errors * and having a total number of simultaneous subscriptions to the inner Publishers. * <p> * It uses a default inner prefetch. 
* * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @param delayError should the errors from the main and the inner sources delayed till everybody terminates? * @param maxConcurrency the maximum number of simultaneous subscriptions to the generated inner Publishers * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> flatMap( @NonNull Function<? super T, ? extends Publisher<? extends R>> mapper, boolean delayError, int maxConcurrency) { return flatMap(mapper, delayError, maxConcurrency, Flowable.bufferSize()); } /** * Generates and flattens Publishers on each 'rail', optionally delaying errors, * having a total number of simultaneous subscriptions to the inner Publishers * and using the given prefetch amount for the inner Publishers. * * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @param delayError should the errors from the main and the inner sources delayed till everybody terminates? * @param maxConcurrency the maximum number of simultaneous subscriptions to the generated inner Publishers * @param prefetch the number of items to prefetch from each inner Publisher * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> flatMap( @NonNull Function<? super T, ? extends Publisher<? extends R>> mapper, boolean delayError, int maxConcurrency, int prefetch) { ObjectHelper.requireNonNull(mapper, "mapper is null"); ObjectHelper.verifyPositive(maxConcurrency, "maxConcurrency"); ObjectHelper.verifyPositive(prefetch, "prefetch"); return RxJavaPlugins.onAssembly(new ParallelFlatMap<T, R>(this, mapper, delayError, maxConcurrency, prefetch)); } /** * Generates and concatenates Publishers on each 'rail', signalling errors immediately * and generating 2 publishers upfront. 
* * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * source and the inner Publishers (immediate, boundary, end) * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> concatMap( @NonNull Function<? super T, ? extends Publisher<? extends R>> mapper) { return concatMap(mapper, 2); } /** * Generates and concatenates Publishers on each 'rail', signalling errors immediately * and using the given prefetch amount for generating Publishers upfront. * * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @param prefetch the number of items to prefetch from each inner Publisher * source and the inner Publishers (immediate, boundary, end) * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> concatMap( @NonNull Function<? super T, ? extends Publisher<? extends R>> mapper, int prefetch) { ObjectHelper.requireNonNull(mapper, "mapper is null"); ObjectHelper.verifyPositive(prefetch, "prefetch"); return RxJavaPlugins.onAssembly(new ParallelConcatMap<T, R>(this, mapper, prefetch, ErrorMode.IMMEDIATE)); } /** * Generates and concatenates Publishers on each 'rail', optionally delaying errors * and generating 2 publishers upfront. * * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @param tillTheEnd if true all errors from the upstream and inner Publishers are delayed * till all of them terminate, if false, the error is emitted when an inner Publisher terminates. * source and the inner Publishers (immediate, boundary, end) * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> concatMapDelayError( @NonNull Function<? super T, ? extends Publisher<? 
extends R>> mapper, boolean tillTheEnd) { return concatMapDelayError(mapper, 2, tillTheEnd); } /** * Generates and concatenates Publishers on each 'rail', optionally delaying errors * and using the given prefetch amount for generating Publishers upfront. * * @param <R> the result type * @param mapper the function to map each rail's value into a Publisher * @param prefetch the number of items to prefetch from each inner Publisher * @param tillTheEnd if true all errors from the upstream and inner Publishers are delayed * till all of them terminate, if false, the error is emitted when an inner Publisher terminates. * @return the new ParallelFlowable instance */ @CheckReturnValue @NonNull public final <R> ParallelFlowable<R> concatMapDelayError( @NonNull Function<? super T, ? extends Publisher<? extends R>> mapper, int prefetch, boolean tillTheEnd) { ObjectHelper.requireNonNull(mapper, "mapper is null"); ObjectHelper.verifyPositive(prefetch, "prefetch"); return RxJavaPlugins.onAssembly(new ParallelConcatMap<T, R>( this, mapper, prefetch, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY)); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.metron.stellar.zeppelin; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.metron.stellar.common.shell.DefaultStellarAutoCompleter; import org.apache.metron.stellar.common.shell.DefaultStellarShellExecutor; import org.apache.metron.stellar.common.shell.StellarAutoCompleter; import org.apache.metron.stellar.common.shell.StellarResult; import org.apache.metron.stellar.common.shell.StellarShellExecutor; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Properties; import static org.apache.zeppelin.interpreter.InterpreterResult.Code.ERROR; import static org.apache.zeppelin.interpreter.InterpreterResult.Code.SUCCESS; import static org.apache.zeppelin.interpreter.InterpreterResult.Type.TEXT; /** * A Zeppelin Interpreter for Stellar. 
*/ public class StellarInterpreter extends Interpreter { private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); /** * Executes the Stellar expressions. * * <p>Zeppelin will handle isolation and how the same executor is or is not used across * multiple notebooks. This is configurable by the user. * * <p>See https://zeppelin.apache.org/docs/latest/manual/interpreters.html#interpreter-binding-mode. */ private StellarShellExecutor executor; /** * Handles auto-completion for Stellar expressions. */ private StellarAutoCompleter autoCompleter; public StellarInterpreter(Properties properties) { super(properties); } @Override public void open() { try { // create the auto-completer this.autoCompleter = new DefaultStellarAutoCompleter(); // create the stellar executor Properties props = getProperty(); this.executor = createExecutor(props); } catch (Exception e) { LOG.error("Unable to create a StellarShellExecutor", e); throw new RuntimeException(e); } } @Override public void close() { // nothing to do } @Override public InterpreterResult interpret(final String input, InterpreterContext context) { InterpreterResult result = new InterpreterResult(SUCCESS, TEXT, ""); try { // allow separate expressions on each line String[] expressions = input.split(System.lineSeparator()); for (String expression : expressions) { result = execute(expression); } } catch(Throwable t){ // unexpected exception String message = getErrorMessage(Optional.of(t), input); result = new InterpreterResult(ERROR, TEXT, message); } // result is from the last expression that was executed return result; } /** * Execute a single Stellar expression. * @param expression The Stellar expression to execute. * @return The result of execution. 
*/ private InterpreterResult execute(final String expression) { InterpreterResult result; // execute the expression StellarResult stellarResult = executor.execute(expression); if (stellarResult.isSuccess()) { // on success - if no result, use a blank value Object value = stellarResult.getValue().orElse(""); String text = value.toString(); result = new InterpreterResult(SUCCESS, TEXT, text); } else if (stellarResult.isError()) { // an error occurred Optional<Throwable> e = stellarResult.getException(); String message = getErrorMessage(e, expression); result = new InterpreterResult(ERROR, TEXT, message); } else { // should never happen throw new IllegalStateException("Unexpected error. result=" + stellarResult); } return result; } @Override public void cancel(InterpreterContext context) { // there is no way to cancel the execution of a Stellar expression } @Override public FormType getFormType() { return FormType.SIMPLE; } @Override public int getProgress(InterpreterContext context) { // unable to provide progress return 0; } @Override public List<InterpreterCompletion> completion(String buf, int cursor) { // use the autoCompleter to return a list of completes to Zeppelin List<InterpreterCompletion> completes = new ArrayList<>(); for(String candidate : autoCompleter.autoComplete(buf)) { completes.add(new InterpreterCompletion(candidate, candidate)); } return completes; } /** * Generates an error message that is shown to the user. * * @param e An optional exception that occurred. * @param input The user input that led to the error condition. * @return An error message for the user. 
*/ private String getErrorMessage(Optional<Throwable> e, String input) { String message; if(e.isPresent()) { // base the error message on the exception String error = ExceptionUtils.getRootCauseMessage(e.get()); String trace = ExceptionUtils.getStackTrace(e.get()); message = error + System.lineSeparator() + trace; } else { // no exception provided; create generic error message message = "Invalid expression: " + input; } return message; } /** * Create an executor that will run the Stellar code for the Zeppelin Notebook. * @return The stellar executor. */ private StellarShellExecutor createExecutor(Properties properties) throws Exception { // a zookeeper URL may be defined String zookeeperURL = StellarInterpreterProperty.ZOOKEEPER_URL.get(properties, String.class); StellarShellExecutor executor = new DefaultStellarShellExecutor(properties, Optional.ofNullable(zookeeperURL)); // register the auto-completer to be notified executor.addSpecialListener((magic) -> autoCompleter.addCandidateFunction(magic.getCommand())); executor.addFunctionListener((fn) -> autoCompleter.addCandidateFunction(fn.getName())); executor.addVariableListener((name, val) -> autoCompleter.addCandidateVariable(name)); executor.init(); return executor; } /** * Returns the executor used to execute Stellar expressions. * @return The executor of Stellar expressions. */ public StellarShellExecutor getExecutor() { return executor; } }
package dumplingyzr.hearthtracker; import android.Manifest; import android.annotation.TargetApi; import android.content.Intent; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.support.annotation.NonNull; import android.support.design.widget.NavigationView; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.view.MenuItem; import android.widget.Toast; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import butterknife.BindView; import butterknife.ButterKnife; import dumplingyzr.hearthtracker.activities.ClassSelectActivity; import dumplingyzr.hearthtracker.fragments.DeckListAdapter; import dumplingyzr.hearthtracker.fragments.DeckListFragment; import dumplingyzr.hearthtracker.tracker_window.TrackerWindow; public class MainActivity extends AppCompatActivity { private static final int REQUEST_CODE_PERMISSIONS = 1; private static final int REQUEST_CODE_GET_OVERLAY_PERMISSIONS = 2; public static final String HEARTHSTONE_FILES_DIR = Environment.getExternalStorageDirectory().getPath()+ "/Android/data/com.blizzard.wtcg.hearthstone/files/"; public static final String HEARTHSTONE_PACKAGE_ID = "com.blizzard.wtcg.hearthstone"; private File mFile = new File(HEARTHSTONE_FILES_DIR + "log.config"); private ActionBarDrawerToggle mDrawerToggle; private DeckListAdapter mDeckListAdapter; @BindView(R.id.toolbar) Toolbar toolbar; @BindView(R.id.drawer_layout) DrawerLayout drawerLayout; @BindView(R.id.drawer) NavigationView navigationView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ButterKnife.bind(this); setSupportActionBar(toolbar); 
if(getSupportActionBar() != null) { getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setHomeButtonEnabled(true); } setupDrawer(); if (savedInstanceState == null){ new CardAPI().init(this); } } @Override public void onResume(){ super.onResume(); if(mDeckListAdapter != null){ mDeckListAdapter.updateDeckList(); } } @TargetApi(Build.VERSION_CODES.M) @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (!android.provider.Settings.canDrawOverlays(this)) { Toast.makeText(MainActivity.this, "Please Enable Permissions", Toast.LENGTH_LONG).show(); } else { LaunchLogWindow(); } } @TargetApi(Build.VERSION_CODES.M) @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (checkCallingOrSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || checkCallingOrSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { Toast.makeText(MainActivity.this, "Please Enable Permissions", Toast.LENGTH_LONG).show(); } else { LaunchLogWindow(); } } public void LaunchLogWindow() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (checkCallingOrSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || checkCallingOrSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CODE_PERMISSIONS); return; } else if (!android.provider.Settings.canDrawOverlays(this)) { Intent intent = new Intent(android.provider.Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + getPackageName())); startActivityForResult(intent, 
REQUEST_CODE_GET_OVERLAY_PERMISSIONS); return; } } if (!mFile.exists()){ try { InputStream inputStream = getResources().openRawResource(R.raw.config); FileOutputStream outputStream = new FileOutputStream(mFile); byte buffer[] = new byte[8192]; while (true) { int read = inputStream.read(buffer); if (read == -1) { break; } else if (read > 0) { outputStream.write(buffer, 0, read); } } } catch (Exception e) { e.printStackTrace(); } Toast.makeText(this, "HearthTracker setup completed. Please kill and restart HearthStone.", Toast.LENGTH_LONG).show(); } else { Toast.makeText(this, "HearthTracker is started.\nPlease open Hearthstone game. Enjoy!", Toast.LENGTH_LONG).show(); } Intent serviceIntent = new Intent(); serviceIntent.setClass(MainActivity.this, TrackerWindow.class); startService(serviceIntent); } public void LaunchClassSelectActivity() { Intent newIntent = new Intent(); newIntent.setClass(this, ClassSelectActivity.class); startActivity(newIntent); } public void onCardsReady(){ Utils.getUserMetrics(this); DeckListFragment deckListFragment = DeckListFragment.newInstance(); deckListFragment.setContext(this); mDeckListAdapter = deckListFragment.getAdapter(); getSupportFragmentManager() .beginTransaction() .add(R.id.fragment_container, deckListFragment) .commit(); } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } @Override public boolean onOptionsItemSelected(MenuItem item) { return mDrawerToggle.onOptionsItemSelected(item) || super.onOptionsItemSelected(item); } private void setupDrawer() { mDrawerToggle = new ActionBarDrawerToggle( this, /* host Activity */ drawerLayout, /* DrawerLayout object */ R.string.open_drawer, /* "open drawer" description */ R.string.close_drawer /* "close drawer" description */ ); 
drawerLayout.setDrawerListener(mDrawerToggle); navigationView.setNavigationItemSelectedListener(new NavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.start_tracker: drawerLayout.closeDrawers(); LaunchLogWindow(); return true; case R.id.new_deck: drawerLayout.closeDrawers(); LaunchClassSelectActivity(); return true; } return false; } }); } @Override public void onDestroy(){ super.onDestroy(); Utils.saveUserMetrics(this); } }
package com.wavefront.agent;

import com.google.common.util.concurrent.RateLimiter;
import com.wavefront.agent.api.ForceQueueEnabledAgentAPI;
import com.wavefront.api.agent.Constants;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.MetricName;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.core.TimerContext;

import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Periodically-run task that drains buffered point lines and posts them in batches to the
 * agent API. Producers add lines via {@link #addPoint(String)} / {@link #addPoints(List)};
 * each {@link #run()} invocation sends up to one batch, and falls back to force-queueing
 * (retry-queue) mode when the backlog exceeds {@link #getQueuedPointLimit()}.
 *
 * <p>Thread-safety: all mutation of {@code points} and {@code blockedSamples} happens under
 * {@code writeLock}. NOTE(review): {@code run()} reads {@code points.size()} and
 * {@link #getBlockedSampleSize()} reads {@code blockedSamples.size()} without holding the
 * lock — presumably acceptable as approximate values; confirm.
 *
 * @author Andrew Kao (andrew@wavefront.com)
 */
public class PostPushDataTimedTask implements Runnable {

  private static final Logger logger = Logger.getLogger(PostPushDataTimedTask.class.getCanonicalName());

  // Log-verbosity levels accepted in the 'logLevel' constructor argument.
  // TODO: enum
  public static final String LOG_NONE = "NONE";
  public static final String LOG_SUMMARY = "SUMMARY";
  public static final String LOG_DETAILED = "DETAILED";

  // Emit a summary log line (and flush blocked samples) every N batch intervals.
  // NOTE(review): effectively a constant — could be declared final.
  private static long INTERVALS_PER_SUMMARY = 60;

  // Pending point lines awaiting transmission; guarded by writeLock.
  private List<String> points = new ArrayList<String>();
  // Sample lines that were rejected/blocked, retained for periodic summary logging; guarded by writeLock.
  private List<String> blockedSamples = new ArrayList<String>();
  // Only the write side of the lock is ever used; it is reentrant, which the drain loop in run() relies on.
  private ReentrantReadWriteLock.WriteLock writeLock;
  // Limits the "too many pending points" warning to one every 5 seconds (0.2 permits/sec).
  private RateLimiter warningMessageRateLimiter = RateLimiter.create(0.2);

  private final Counter pointsReceived;
  private final Counter pointsSent;
  private final Counter pointsQueued;
  private final Counter pointsBlocked;
  private final Timer batchSendTime;

  // Number of batches assembled so far (drives the every-Nth-interval summary logging).
  private long numIntervals = 0;
  // Number of calls made to the agent API (both direct posts and force-queued posts).
  private long numApiCalls = 0;

  private UUID daemonId;
  private int port;
  private int pointsPerBatch;
  private String logLevel;
  private ForceQueueEnabledAgentAPI agentAPI;

  /**
   * Buffers a single point line for the next batch.
   *
   * @param metricString one point in the push-data line format
   */
  public void addPoint(String metricString) {
    writeLock.lock();
    try {
      pointsReceived.inc();
      this.points.add(metricString);
    } finally {
      writeLock.unlock();
    }
  }

  /**
   * Buffers multiple point lines for the next batch.
   *
   * @param metricStrings point lines in the push-data line format
   */
  public void addPoints(List<String> metricStrings) {
    writeLock.lock();
    try {
      pointsReceived.inc(metricStrings.size());
      this.points.addAll(metricStrings);
    } finally {
      writeLock.unlock();
    }
  }

  /**
   * @return number of blocked samples currently retained for summary logging.
   *         NOTE(review): read without the lock — value is approximate under concurrency.
   */
  public int getBlockedSampleSize() {
    return blockedSamples.size();
  }

  /**
   * Retains a blocked/rejected line so it can be echoed in the next summary log.
   *
   * @param blockedSample the rejected input line
   */
  public void addBlockedSample(String blockedSample) {
    writeLock.lock();
    try {
      blockedSamples.add(blockedSample);
    } finally {
      writeLock.unlock();
    }
  }

  /** Increments the blocked-points counter (metrics only; does not retain the line). */
  public void incrementBlockedPoints() {
    this.pointsBlocked.inc();
  }

  /** @return total points handed to the API so far (including ones that were then queued). */
  public long getNumPointsSent() {
    return this.pointsSent.count();
  }

  /** @return total points that ended up in the retry queue. */
  public long getNumPointsQueued() {
    return this.pointsQueued.count();
  }

  /** @return total calls made against the agent API. */
  public long getNumApiCalls() {
    return numApiCalls;
  }

  /** @return the daemon UUID used to identify this agent to the API. */
  public UUID getDaemonId() {
    return daemonId;
  }

  /**
   * Creates the task and registers its per-port metrics.
   *
   * @param agentAPI       API client; supports forced queueing of failed batches
   * @param pointsPerBatch maximum number of points sent per API call
   * @param logLevel       one of {@link #LOG_NONE}, {@link #LOG_SUMMARY}, {@link #LOG_DETAILED}
   * @param daemonId       UUID identifying this agent daemon
   * @param port           listener port, used only to namespace metrics and log lines
   */
  public PostPushDataTimedTask(ForceQueueEnabledAgentAPI agentAPI, int pointsPerBatch,
                               String logLevel, UUID daemonId, int port) {
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    writeLock = lock.writeLock();
    this.pointsPerBatch = pointsPerBatch;
    this.logLevel = logLevel;
    this.daemonId = daemonId;
    this.port = port;
    this.agentAPI = agentAPI;
    this.pointsSent = Metrics.newCounter(new MetricName("points." + String.valueOf(port), "", "sent"));
    this.pointsQueued = Metrics.newCounter(new MetricName("points." + String.valueOf(port), "", "queued"));
    this.pointsBlocked = Metrics.newCounter(new MetricName("points." + String.valueOf(port), "", "blocked"));
    this.pointsReceived = Metrics.newCounter(new MetricName("points." + String.valueOf(port), "", "received"));
    this.batchSendTime = Metrics.newTimer(new MetricName("push." + String.valueOf(port), "", "duration"),
        TimeUnit.MILLISECONDS, TimeUnit.MINUTES);
  }

  /**
   * Sends one batch to the API; if the backlog is still above the queued-point limit afterwards,
   * drains ALL remaining points straight to the retry queue (disk) via forced queueing.
   * Never lets an exception escape — errors are logged and the next scheduled run proceeds.
   */
  @Override
  public void run() {
    try {
      List<String> current = createAgentPostBatch();
      if (current.size() != 0) {
        TimerContext timerContext = this.batchSendTime.time();
        try {
          Response response = agentAPI.postPushData(daemonId,
              Constants.GRAPHITE_BLOCK_WORK_UNIT,
              System.currentTimeMillis(), Constants.PUSH_FORMAT_GRAPHITE_V2,
              GraphiteStringHandler.joinPushData(current));
          int pointsInList = current.size();
          this.pointsSent.inc(pointsInList);
          // 406 NOT_ACCEPTABLE signals the API queued the batch rather than accepting it.
          if (response.getStatus() == Response.Status.NOT_ACCEPTABLE.getStatusCode()) {
            this.pointsQueued.inc(pointsInList);
          }
        } finally {
          numApiCalls++;
          timerContext.stop();
        }
        if (points.size() > getQueuedPointLimit()) {
          if (warningMessageRateLimiter.tryAcquire()) {
            logger.warning("too many pending points (" + points.size() + "), block size: "
                + pointsPerBatch + ". flushing to retry queue");
          }
          // there are going to be too many points to be able to flush w/o the agent blowing up
          // drain the leftovers straight to the retry queue (i.e. to disk)
          // (the write lock is reentrant, so the createAgentPostBatch() calls below may re-acquire it)
          writeLock.lock(); // don't let anyone add any more to points while we're draining it.
          try {
            while (points.size() > 0) {
              List<String> pushData = createAgentPostBatch();
              int pushDataPointCount = pushData.size();
              if (pushDataPointCount > 0) {
                // trailing 'true' forces the batch into the retry queue instead of a live send
                agentAPI.postPushData(daemonId,
                    Constants.GRAPHITE_BLOCK_WORK_UNIT,
                    System.currentTimeMillis(), Constants.PUSH_FORMAT_GRAPHITE_V2,
                    GraphiteStringHandler.joinPushData(pushData), true);

                // update the counters as if this was a failed call to the API
                this.pointsSent.inc(pushDataPointCount);
                this.pointsQueued.inc(pushDataPointCount);
                numApiCalls++;
              } else {
                // this is probably unnecessary
                break;
              }
            }
          } finally {
            writeLock.unlock();
          }
        }
      }
    } catch (Throwable t) {
      logger.log(Level.SEVERE, "Unexpected error in flush loop", t);
    }
  }

  /**
   * Atomically removes up to {@code pointsPerBatch} points from the head of the buffer and
   * returns them; also handles the periodic (every {@code INTERVALS_PER_SUMMARY}-th call)
   * flushing of blocked samples into summary log output.
   *
   * <p>Note: {@code current} is a subList view of the OLD points list; this is safe because
   * {@code points} is immediately reassigned to a fresh copy of the remainder, so the backing
   * list of the view is never mutated again.
   *
   * @return the batch to send (possibly empty, never null)
   */
  private List<String> createAgentPostBatch() {
    List<String> current;
    List<String> currentBlockedSamples;
    int blockSize;
    writeLock.lock();
    try {
      blockSize = Math.min(points.size(), pointsPerBatch);
      current = points.subList(0, blockSize);
      currentBlockedSamples = null;
      numIntervals += 1;
      points = new ArrayList<>(points.subList(blockSize, points.size()));

      if (((numIntervals % INTERVALS_PER_SUMMARY) == 0) && !blockedSamples.isEmpty()) {
        // Copy this to a temp structure that we can iterate over for printing below
        if ((!logLevel.equals(LOG_NONE))) {
          currentBlockedSamples = new ArrayList<>(blockedSamples);
        }
        blockedSamples.clear();
      }
    } finally {
      writeLock.unlock();
    }
    if (logLevel.equals(LOG_DETAILED)) {
      logger.warning(port + " (DETAILED): Will send " + current.size() + " valid points in this interval, with "
          + points.size() + " in backlog; in total, have sent " + getNumPointsSent() + " valid, "
          + this.pointsBlocked.count() + " " + "blocked.");
    }
    if (((numIntervals % INTERVALS_PER_SUMMARY) == 0) && (!logLevel.equals(LOG_NONE))) {
      logger.warning(port + " (SUMMARY): Have sent " + getNumPointsSent() + " valid points; blocked "
          + this.pointsBlocked.count() + ".");
      if (currentBlockedSamples != null) {
        for (String blockedLine : currentBlockedSamples) {
          logger.warning(port + ": Blocked line [" + blockedLine + "]");
        }
      }
    }
    return current;
  }

  /**
   * Backlog threshold above which run() switches to draining directly into the retry queue.
   * NOTE(review): the comment below says "2 batches worth", but the formula is
   * batchSize * cores * 2 — confirm which is intended.
   */
  private long getQueuedPointLimit() {
    // if there's more than 2 batches worth of points, that's going to be too much
    return pointsPerBatch * Runtime.getRuntime().availableProcessors() * 2;
  }
}
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.spdy;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.MessageBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import io.netty.handler.codec.TooLongFrameException;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpMessage;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;

import java.util.HashMap;
import java.util.Map;

/**
 * Decodes {@link SpdySynStreamFrame}s, {@link SpdySynReplyFrame}s,
 * and {@link SpdyDataFrame}s into {@link FullHttpRequest}s and {@link FullHttpResponse}s.
 *
 * <p>Partially received messages (header frame seen, body data frames still pending) are
 * accumulated in {@code messageMap} keyed by SPDY stream id until the last frame of the
 * stream arrives.
 *
 * <p>NOTE(review): the default message map is a plain {@link HashMap}; this assumes the
 * handler is confined to a single channel/event-loop thread — confirm against pipeline usage.
 */
public class SpdyHttpDecoder extends MessageToMessageDecoder<SpdyDataOrControlFrame> {

    // SPDY protocol version this decoder speaks (validated in the constructor).
    private final int spdyVersion;
    // Upper bound on the accumulated HTTP content of a single stream.
    private final int maxContentLength;
    // Stream-id -> partially assembled HTTP message awaiting more HEADERS/DATA frames.
    private final Map<Integer, FullHttpMessage> messageMap;

    /**
     * Creates a new instance.
     *
     * @param version the protocol version
     * @param maxContentLength the maximum length of the message content.
     *        If the length of the message content exceeds this value,
     *        a {@link TooLongFrameException} will be raised.
     */
    public SpdyHttpDecoder(int version, int maxContentLength) {
        this(version, maxContentLength, new HashMap<Integer, FullHttpMessage>());
    }

    /**
     * Creates a new instance with the specified parameters.
     *
     * @param version the protocol version
     * @param maxContentLength the maximum length of the message content.
     *        If the length of the message content exceeds this value,
     *        a {@link TooLongFrameException} will be raised.
     * @param messageMap the {@link Map} used to hold partially received messages.
     */
    protected SpdyHttpDecoder(int version, int maxContentLength, Map<Integer, FullHttpMessage> messageMap) {
        if (version < SpdyConstants.SPDY_MIN_VERSION || version > SpdyConstants.SPDY_MAX_VERSION) {
            throw new IllegalArgumentException(
                    "unsupported version: " + version);
        }
        if (maxContentLength <= 0) {
            throw new IllegalArgumentException(
                    "maxContentLength must be a positive integer: " + maxContentLength);
        }
        spdyVersion = version;
        this.maxContentLength = maxContentLength;
        this.messageMap = messageMap;
    }

    /**
     * Records a partially received message for the given stream.
     *
     * @return the previous message mapped to that stream id, or {@code null}
     */
    protected FullHttpMessage putMessage(int streamId, FullHttpMessage message) {
        return messageMap.put(streamId, message);
    }

    /** @return the partially received message for the given stream id, or {@code null}. */
    protected FullHttpMessage getMessage(int streamId) {
        return messageMap.get(streamId);
    }

    /** Removes and returns the partially received message for the given stream id, if any. */
    protected FullHttpMessage removeMessage(int streamId) {
        return messageMap.remove(streamId);
    }

    /**
     * Dispatches on the SPDY frame type: SYN_STREAM/SYN_REPLY start a new HTTP message,
     * HEADERS/DATA extend a pending one, and RST_STREAM aborts it.
     */
    @Override
    protected void decode(ChannelHandlerContext ctx, SpdyDataOrControlFrame msg, MessageBuf<Object> out)
            throws Exception {
        if (msg instanceof SpdySynStreamFrame) {

            // HTTP requests/responses are mapped one-to-one to SPDY streams.
            SpdySynStreamFrame spdySynStreamFrame = (SpdySynStreamFrame) msg;
            int streamId = spdySynStreamFrame.getStreamId();

            if (SpdyCodecUtil.isServerId(streamId)) {
                // SYN_STREAM frames initiated by the server are pushed resources
                int associatedToStreamId = spdySynStreamFrame.getAssociatedToStreamId();

                // If a client receives a SYN_STREAM with an Associated-To-Stream-ID of 0
                // it must reply with a RST_STREAM with error code INVALID_STREAM
                // NOTE(review): after writing the RST_STREAM the method continues and still
                // builds a response for this invalid frame — looks like a missing early
                // return/skip; confirm against the SPDY spec and upstream fixes.
                if (associatedToStreamId == 0) {
                    SpdyRstStreamFrame spdyRstStreamFrame =
                            new DefaultSpdyRstStreamFrame(streamId, SpdyStreamStatus.INVALID_STREAM);
                    ctx.write(spdyRstStreamFrame);
                }

                String URL = SpdyHeaders.getUrl(spdyVersion, spdySynStreamFrame);

                // If a client receives a SYN_STREAM without a 'url' header
                // it must reply with a RST_STREAM with error code PROTOCOL_ERROR
                // NOTE(review): same concern as above — processing continues after the RST,
                // and createHttpResponse() below will throw into the catch block anyway.
                if (URL == null) {
                    SpdyRstStreamFrame spdyRstStreamFrame =
                            new DefaultSpdyRstStreamFrame(streamId, SpdyStreamStatus.PROTOCOL_ERROR);
                    ctx.write(spdyRstStreamFrame);
                }

                try {
                    FullHttpResponse httpResponseWithEntity = createHttpResponse(spdyVersion, spdySynStreamFrame);

                    // Set the Stream-ID, Associated-To-Stream-ID, Priority, and URL as headers
                    SpdyHttpHeaders.setStreamId(httpResponseWithEntity, streamId);
                    SpdyHttpHeaders.setAssociatedToStreamId(httpResponseWithEntity, associatedToStreamId);
                    SpdyHttpHeaders.setPriority(httpResponseWithEntity, spdySynStreamFrame.getPriority());
                    SpdyHttpHeaders.setUrl(httpResponseWithEntity, URL);

                    if (spdySynStreamFrame.isLast()) {
                        HttpHeaders.setContentLength(httpResponseWithEntity, 0);
                        out.add(httpResponseWithEntity);
                    } else {
                        // Response body will follow in a series of Data Frames
                        putMessage(streamId, httpResponseWithEntity);
                    }
                } catch (Exception e) {
                    // Malformed pushed-resource headers: abort the stream.
                    SpdyRstStreamFrame spdyRstStreamFrame =
                            new DefaultSpdyRstStreamFrame(streamId, SpdyStreamStatus.PROTOCOL_ERROR);
                    ctx.write(spdyRstStreamFrame);
                }
            } else {
                // SYN_STREAM frames initiated by the client are HTTP requests
                try {
                    FullHttpRequest httpRequestWithEntity = createHttpRequest(spdyVersion, spdySynStreamFrame);

                    // Set the Stream-ID as a header
                    SpdyHttpHeaders.setStreamId(httpRequestWithEntity, streamId);

                    if (spdySynStreamFrame.isLast()) {
                        out.add(httpRequestWithEntity);
                    } else {
                        // Request body will follow in a series of Data Frames
                        putMessage(streamId, httpRequestWithEntity);
                    }
                } catch (Exception e) {
                    // If a client sends a SYN_STREAM without all of the getMethod, url (host and path),
                    // scheme, and version headers the server must reply with a HTTP 400 BAD REQUEST reply.
                    // Also sends HTTP 400 BAD REQUEST reply if header name/value pairs are invalid
                    SpdySynReplyFrame spdySynReplyFrame = new DefaultSpdySynReplyFrame(streamId);
                    spdySynReplyFrame.setLast(true);
                    SpdyHeaders.setStatus(spdyVersion, spdySynReplyFrame, HttpResponseStatus.BAD_REQUEST);
                    SpdyHeaders.setVersion(spdyVersion, spdySynReplyFrame, HttpVersion.HTTP_1_0);
                    ctx.write(spdySynReplyFrame);
                }
            }
        } else if (msg instanceof SpdySynReplyFrame) {

            SpdySynReplyFrame spdySynReplyFrame = (SpdySynReplyFrame) msg;
            int streamId = spdySynReplyFrame.getStreamId();

            try {
                FullHttpResponse httpResponseWithEntity = createHttpResponse(spdyVersion, spdySynReplyFrame);

                // Set the Stream-ID as a header
                SpdyHttpHeaders.setStreamId(httpResponseWithEntity, streamId);

                if (spdySynReplyFrame.isLast()) {
                    HttpHeaders.setContentLength(httpResponseWithEntity, 0);
                    out.add(httpResponseWithEntity);
                } else {
                    // Response body will follow in a series of Data Frames
                    putMessage(streamId, httpResponseWithEntity);
                }
            } catch (Exception e) {
                // If a client receives a SYN_REPLY without valid getStatus and version headers
                // the client must reply with a RST_STREAM frame indicating a PROTOCOL_ERROR
                SpdyRstStreamFrame spdyRstStreamFrame =
                        new DefaultSpdyRstStreamFrame(streamId, SpdyStreamStatus.PROTOCOL_ERROR);
                ctx.write(spdyRstStreamFrame);
            }
        } else if (msg instanceof SpdyHeadersFrame) {

            SpdyHeadersFrame spdyHeadersFrame = (SpdyHeadersFrame) msg;
            int streamId = spdyHeadersFrame.getStreamId();
            FullHttpMessage fullHttpMessage = getMessage(streamId);

            // If message is not in map discard HEADERS frame.
            if (fullHttpMessage == null) {
                return;
            }

            // Fold the trailing headers into the pending HTTP message.
            for (Map.Entry<String, String> e: spdyHeadersFrame.headers().entries()) {
                fullHttpMessage.headers().add(e.getKey(), e.getValue());
            }

            if (spdyHeadersFrame.isLast()) {
                HttpHeaders.setContentLength(fullHttpMessage, fullHttpMessage.content().readableBytes());
                removeMessage(streamId);
                out.add(fullHttpMessage);
            }
        } else if (msg instanceof SpdyDataFrame) {

            SpdyDataFrame spdyDataFrame = (SpdyDataFrame) msg;
            int streamId = spdyDataFrame.getStreamId();
            FullHttpMessage fullHttpMessage = getMessage(streamId);

            // If message is not in map discard Data Frame.
            if (fullHttpMessage == null) {
                return;
            }

            ByteBuf content = fullHttpMessage.content();
            // Enforce maxContentLength BEFORE appending, to avoid over-accumulating; the
            // pending message is dropped so later frames of this stream are discarded too.
            if (content.readableBytes() > maxContentLength - spdyDataFrame.content().readableBytes()) {
                removeMessage(streamId);
                throw new TooLongFrameException(
                        "HTTP content length exceeded " + maxContentLength + " bytes.");
            }

            ByteBuf spdyDataFrameData = spdyDataFrame.content();
            int spdyDataFrameDataLen = spdyDataFrameData.readableBytes();
            content.writeBytes(spdyDataFrameData, spdyDataFrameData.readerIndex(), spdyDataFrameDataLen);

            if (spdyDataFrame.isLast()) {
                HttpHeaders.setContentLength(fullHttpMessage, content.readableBytes());
                removeMessage(streamId);
                out.add(fullHttpMessage);
            }
        } else if (msg instanceof SpdyRstStreamFrame) {

            // Stream aborted: drop any partially assembled message for it.
            SpdyRstStreamFrame spdyRstStreamFrame = (SpdyRstStreamFrame) msg;
            int streamId = spdyRstStreamFrame.getStreamId();
            removeMessage(streamId);
        }
    }

    /**
     * Builds a {@link FullHttpRequest} from a SYN_STREAM header block, consuming the SPDY
     * pseudo-headers (method, url, version, scheme, host) and copying the remainder verbatim.
     *
     * @throws Exception if any required pseudo-header is missing or invalid
     */
    private static FullHttpRequest createHttpRequest(int spdyVersion, SpdyHeaderBlock requestFrame)
            throws Exception {
        // Create the first line of the request from the name/value pairs
        HttpMethod method = SpdyHeaders.getMethod(spdyVersion, requestFrame);
        String url = SpdyHeaders.getUrl(spdyVersion, requestFrame);
        HttpVersion httpVersion = SpdyHeaders.getVersion(spdyVersion, requestFrame);
        SpdyHeaders.removeMethod(spdyVersion, requestFrame);
        SpdyHeaders.removeUrl(spdyVersion, requestFrame);
        SpdyHeaders.removeVersion(spdyVersion, requestFrame);

        FullHttpRequest req = new DefaultFullHttpRequest(httpVersion, method, url);

        // Remove the scheme header
        SpdyHeaders.removeScheme(spdyVersion, requestFrame);

        if (spdyVersion >= 3) {
            // Replace the SPDY host header with the HTTP host header
            String host = SpdyHeaders.getHost(requestFrame);
            SpdyHeaders.removeHost(requestFrame);
            HttpHeaders.setHost(req, host);
        }

        for (Map.Entry<String, String> e: requestFrame.headers().entries()) {
            req.headers().add(e.getKey(), e.getValue());
        }

        // The Connection and Keep-Alive headers are no longer valid
        HttpHeaders.setKeepAlive(req, true);

        // Transfer-Encoding header is not valid
        req.headers().remove(HttpHeaders.Names.TRANSFER_ENCODING);

        return req;
    }

    /**
     * Builds a {@link FullHttpResponse} from a SYN_REPLY/SYN_STREAM header block, consuming
     * the SPDY pseudo-headers (status, version) and copying the remainder verbatim.
     *
     * @throws Exception if the status or version pseudo-header is missing or invalid
     */
    private static FullHttpResponse createHttpResponse(int spdyVersion, SpdyHeaderBlock responseFrame)
            throws Exception {
        // Create the first line of the response from the name/value pairs
        HttpResponseStatus status = SpdyHeaders.getStatus(spdyVersion, responseFrame);
        HttpVersion version = SpdyHeaders.getVersion(spdyVersion, responseFrame);
        SpdyHeaders.removeStatus(spdyVersion, responseFrame);
        SpdyHeaders.removeVersion(spdyVersion, responseFrame);

        FullHttpResponse res = new DefaultFullHttpResponse(version, status);

        for (Map.Entry<String, String> e: responseFrame.headers().entries()) {
            res.headers().add(e.getKey(), e.getValue());
        }

        // The Connection and Keep-Alive headers are no longer valid
        HttpHeaders.setKeepAlive(res, true);

        // Transfer-Encoding header is not valid
        res.headers().remove(HttpHeaders.Names.TRANSFER_ENCODING);
        res.headers().remove(HttpHeaders.Names.TRAILER);

        return res;
    }
}
/***************************************************************************
 * Copyright 2017 Kieker Project (http://kieker-monitoring.net)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ***************************************************************************/
package kieker.common.record.flow.trace.concurrency.monitor;

import java.nio.BufferOverflowException;

import com.google.gson.Gson;

import kieker.common.record.flow.trace.concurrency.monitor.AbstractMonitorEvent;
import kieker.common.record.io.IValueDeserializer;
import kieker.common.record.io.IValueSerializer;
import kieker.common.util.registry.IRegistry;

/**
 * Monitoring record for a {@code notifyAll()} call on a monitor, carrying the event
 * timestamp, trace id, order index, and the id of the lock being notified.
 *
 * @author Jan Waller
 * API compatibility: Kieker 1.13.0
 *
 * @since 1.8
 */
public class MonitorNotifyAllEvent extends AbstractMonitorEvent {
	private static final long serialVersionUID = -1304400445515878549L;

	/** Descriptive definition of the serialization size of the record. */
	public static final int SIZE = TYPE_SIZE_LONG // IEventRecord.timestamp
			+ TYPE_SIZE_LONG // ITraceRecord.traceId
			+ TYPE_SIZE_INT // ITraceRecord.orderIndex
			+ TYPE_SIZE_INT // AbstractMonitorEvent.lockId
	;

	public static final Class<?>[] TYPES = {
		long.class, // IEventRecord.timestamp
		long.class, // ITraceRecord.traceId
		int.class, // ITraceRecord.orderIndex
		int.class, // AbstractMonitorEvent.lockId
	};

	/** property name array. */
	private static final String[] PROPERTY_NAMES = {
		"timestamp",
		"traceId",
		"orderIndex",
		"lockId",
	};

	// Gson instances are thread-safe and expensive to build; share one for all
	// toJson() calls instead of constructing a new instance per invocation.
	private static final Gson GSON = new Gson();

	/**
	 * Creates a new instance of this class using the given parameters.
	 *
	 * @param timestamp
	 *            timestamp
	 * @param traceId
	 *            traceId
	 * @param orderIndex
	 *            orderIndex
	 * @param lockId
	 *            lockId
	 */
	public MonitorNotifyAllEvent(final long timestamp, final long traceId, final int orderIndex, final int lockId) {
		super(timestamp, traceId, orderIndex, lockId);
	}

	/**
	 * This constructor converts the given array into a record.
	 * It is recommended to use the array which is the result of a call to {@link #toArray()}.
	 *
	 * @param values
	 *            The values for the record.
	 *
	 * @deprecated since 1.13. Use {@link #MonitorNotifyAllEvent(IValueDeserializer)} instead.
	 */
	@Deprecated
	public MonitorNotifyAllEvent(final Object[] values) { // NOPMD (direct store of values)
		super(values, TYPES);
	}

	/**
	 * This constructor uses the given array to initialize the fields of this record.
	 *
	 * @param values
	 *            The values for the record.
	 * @param valueTypes
	 *            The types of the elements in the first array.
	 *
	 * @deprecated since 1.13. Use {@link #MonitorNotifyAllEvent(IValueDeserializer)} instead.
	 */
	@Deprecated
	protected MonitorNotifyAllEvent(final Object[] values, final Class<?>[] valueTypes) { // NOPMD (values stored directly)
		super(values, valueTypes);
	}

	/**
	 * @param deserializer
	 *            The deserializer to use
	 */
	public MonitorNotifyAllEvent(final IValueDeserializer deserializer) {
		super(deserializer);
	}

	/**
	 * {@inheritDoc}
	 *
	 * @deprecated since 1.13. Use {@link #serialize(IValueSerializer)} with an array serializer instead.
	 */
	@Override
	@Deprecated
	public Object[] toArray() {
		return new Object[] {
			this.getTimestamp(),
			this.getTraceId(),
			this.getOrderIndex(),
			this.getLockId()
		};
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void registerStrings(final IRegistry<String> stringRegistry) { // NOPMD (generated code)
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void serialize(final IValueSerializer serializer) throws BufferOverflowException {
		// Serializes all fields inline rather than delegating to the superclass serializer.
		serializer.putLong(this.getTimestamp());
		serializer.putLong(this.getTraceId());
		serializer.putInt(this.getOrderIndex());
		serializer.putInt(this.getLockId());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Class<?>[] getValueTypes() {
		return TYPES; // NOPMD
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public String[] getValueNames() {
		return PROPERTY_NAMES; // NOPMD
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public int getSize() {
		return SIZE;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @deprecated This record uses the {@link kieker.common.record.IMonitoringRecord.Factory} mechanism. Hence, this method is not implemented.
	 */
	@Override
	@Deprecated
	public void initFromArray(final Object[] values) {
		throw new UnsupportedOperationException();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean equals(final Object obj) {
		if (obj == null) {
			return false;
		}
		if (obj == this) {
			return true;
		}
		if (obj.getClass() != this.getClass()) {
			return false;
		}

		final MonitorNotifyAllEvent castedRecord = (MonitorNotifyAllEvent) obj;
		if (this.getLoggingTimestamp() != castedRecord.getLoggingTimestamp()) {
			return false;
		}
		if (this.getTimestamp() != castedRecord.getTimestamp()) {
			return false;
		}
		if (this.getTraceId() != castedRecord.getTraceId()) {
			return false;
		}
		if (this.getOrderIndex() != castedRecord.getOrderIndex()) {
			return false;
		}
		if (this.getLockId() != castedRecord.getLockId()) {
			return false;
		}
		return true;
	}

	/**
	 * Hash code consistent with {@link #equals(Object)}: combines exactly the fields
	 * that equals() compares. (equals() was previously overridden without hashCode(),
	 * violating the Object contract — equal records could land in different hash buckets.)
	 */
	@Override
	public int hashCode() {
		int result = (int) (this.getLoggingTimestamp() ^ (this.getLoggingTimestamp() >>> 32));
		result = (31 * result) + (int) (this.getTimestamp() ^ (this.getTimestamp() >>> 32));
		result = (31 * result) + (int) (this.getTraceId() ^ (this.getTraceId() >>> 32));
		result = (31 * result) + this.getOrderIndex();
		result = (31 * result) + this.getLockId();
		return result;
	}

	/**
	 * @return a JSON representation of this record (via the shared Gson instance).
	 */
	@Override
	public String toJson() {
		return GSON.toJson(this);
	}
}
/*
 * Copyright (C) 2020 Cirrus Logic, Inc. and
 *                    Cirrus Logic International Semiconductor Ltd.
 *                    All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cirrus.tinyhal.test.thcm;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.Math;
import java.lang.String;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runner.RunWith;

import com.cirrus.tinyhal.test.thcm.CAlsaMock;
import com.cirrus.tinyhal.test.thcm.CConfigMgr;
import com.cirrus.tinyhal.test.thcm.ThcmPlatform;

/**
 * Tests that the set of available input and output devices defined by
 * &lt;device&gt; elements are correctly reported.
 */
@RunWith(Parameterized.class)
public class ThcmSupportedDevicesTest
{
    // Shared scratch directory where the mock ALSA controls CSV and the
    // generated tinyhal config XML are written for each test run.
    private static final File sWorkFilesPath = ThcmPlatform.workFilesPath();
    private static final File sControlsFile = new File(sWorkFilesPath, "thcm_supported_devices.csv");
    private static final File sXmlFile = new File(sWorkFilesPath, "thcm_supported_devices.xml");

    private CAlsaMock mAlsaMock = new CAlsaMock();
    private CConfigMgr mConfigMgr = new CConfigMgr();

    // Device-name subsets under test for this parameterized pass.
    private String[] mOutputDevices;
    private String[] mInputDevices;
    // Expected bitmasks: OR of the device bits for each subset.
    private long mAllOutputBits;
    private long mAllInputBits;

    /**
     * Parameter source: pass N tests the first N devices of each list.
     * NOTE(review): the loop runs i in [1, maxNumDevices), so a pass covering
     * ALL devices is never generated — looks like an off-by-one
     * (i &lt;= maxNumDevices intended?); confirm. Also raw {@code Collection}
     * return is required by older JUnit conventions but could be
     * {@code Collection<Integer[]>}.
     */
    @Parameterized.Parameters
    public static Collection parameters() {
        // No need to test every combination. Just add one more device to the
        // test set on each pass.
        List<Integer[]> list = new ArrayList<Integer[]>();
        int maxNumDevices = Math.max(CConfigMgr.OUTPUT_DEVICES.length,
                                     CConfigMgr.INPUT_DEVICES.length);
        for (int i = 1; i < maxNumDevices; ++i) {
            list.add(new Integer[] {i});
        }

        return list;
    }

    /**
     * @param numDevices how many devices (from the head of each list) this pass uses.
     *        NOTE(review): if OUTPUT_DEVICES and INPUT_DEVICES differ in length,
     *        Arrays.copyOfRange pads the shorter one with nulls once numDevices
     *        exceeds its length — verify both arrays are the same size, or that
     *        deviceFromName/XML generation tolerate null names.
     */
    public ThcmSupportedDevicesTest(int numDevices)
    {
        mOutputDevices = Arrays.copyOfRange(CConfigMgr.OUTPUT_DEVICES, 0, numDevices);
        mInputDevices = Arrays.copyOfRange(CConfigMgr.INPUT_DEVICES, 0, numDevices);

        mAllOutputBits = 0;
        for (String name : mOutputDevices) {
            mAllOutputBits |= CConfigMgr.deviceFromName(name);
        }

        mAllInputBits = 0;
        for (String name : mInputDevices) {
            mAllInputBits |= CConfigMgr.deviceFromName(name);
        }
    }

    /** Writes the mock ALSA controls file once for the whole parameterized suite. */
    @BeforeClass
    public static void setUpClass() throws IOException
    {
        createAlsaControlsFile();
    }

    /** Removes the shared controls file after all passes have run. */
    @AfterClass
    public static void tearDownClass()
    {
        if (sControlsFile.exists()) {
            sControlsFile.delete();
        }
    }

    /** Opens the mock mixer before each test. */
    @Before
    public void setUp()
    {
        assertEquals("Failed to create CAlsaMock",
                     0,
                     mAlsaMock.createMixer(sControlsFile.toPath().toString()));
    }

    /**
     * Frees the config manager (asserting no native allocations leaked),
     * closes the mock mixer, and deletes the per-test XML file.
     */
    @After
    public void tearDown()
    {
        if (mConfigMgr != null) {
            mConfigMgr.free_audio_config();
            mConfigMgr = null;
            assertFalse("Configmgr leaked memory", CConfigMgr.are_allocs_leaked());
        }

        if (mAlsaMock != null) {
            mAlsaMock.closeMixer();
            mAlsaMock = null;
        }

        if (sXmlFile.exists()) {
            sXmlFile.delete();
        }
    }

    /** Writes a minimal controls CSV containing a single boolean control. */
    private static void createAlsaControlsFile() throws IOException
    {
        FileWriter writer = new FileWriter(sControlsFile);
        writer.write("dummy,bool,1,0,0:1\n");
        writer.close();
    }

    /** Convenience wrapper: XML with only the output-device subset. */
    private void createXmlFileOutputsOnly() throws IOException
    {
        createXmlFile(true, false);
    }

    /** Convenience wrapper: XML with only the input-device subset. */
    private void createXmlFileInputsOnly() throws IOException
    {
        createXmlFile(false, true);
    }

    /** Convenience wrapper: XML with both device subsets. */
    private void createXmlFileOutputsAndInputs() throws IOException
    {
        createXmlFile(true, true);
    }

    /**
     * Generates the tinyhal config XML, emitting one &lt;device&gt; element
     * per selected device name.
     *
     * @param outputs whether to include the output-device subset
     * @param inputs  whether to include the input-device subset
     * @throws IOException if the XML file cannot be written
     */
    private void createXmlFile(boolean outputs, boolean inputs) throws IOException
    {
        FileWriter writer = new FileWriter(sXmlFile);

        // Header elements
        writer.write("<audiohal>\n<mixer card=\"0\" />\n");

        if (outputs) {
            for (String name : mOutputDevices) {
                writer.write("<device name=\"" + name + "\" />\n");
            }
        }

        if (inputs) {
            for (String name : mInputDevices) {
                writer.write("<device name=\"" + name + "\" />\n");
            }
        }

        // Footer elements
        writer.write("\n</audiohal>\n");
        writer.close();
    }

    /**
     * Test a configuration containing only output devices.
     * @throws java.io.IOException if unable to write the XML file.
     */
    @Test
    public void testOutputsOnly() throws IOException
    {
        createXmlFileOutputsOnly();

        assertEquals("Failed to open CConfigMgr",
                     0,
                     mConfigMgr.init_audio_config(sXmlFile.toPath().toString()));

        assertEquals("get_supported_output_devices not correct",
                     mAllOutputBits,
                     mConfigMgr.get_supported_output_devices());
        assertEquals("get_supported_input_devices was not zero",
                     0,
                     mConfigMgr.get_supported_input_devices());
    }

    /**
     * Test a configuration containing only input devices.
     * @throws java.io.IOException if unable to write the XML file.
     */
    @Test
    public void testReportedInputsOnly() throws IOException
    {
        createXmlFileInputsOnly();

        assertEquals("Failed to open CConfigMgr",
                     0,
                     mConfigMgr.init_audio_config(sXmlFile.toPath().toString()));

        assertEquals("get_supported_output_devices was not zero",
                     0,
                     mConfigMgr.get_supported_output_devices());
        assertEquals("get_supported_input_devices not correct",
                     mAllInputBits,
                     mConfigMgr.get_supported_input_devices());
    }

    /**
     * Test a configuration containing output and input devices.
     * @throws java.io.IOException if unable to write the XML file.
     */
    @Test
    public void testReportedOutputsAndInputs() throws IOException
    {
        createXmlFileOutputsAndInputs();

        assertEquals("Failed to open CConfigMgr",
                     0,
                     mConfigMgr.init_audio_config(sXmlFile.toPath().toString()));

        assertEquals("get_supported_output_devices not correct",
                     mAllOutputBits,
                     mConfigMgr.get_supported_output_devices());
        assertEquals("get_supported_input_devices not correct",
                     mAllInputBits,
                     mConfigMgr.get_supported_input_devices());
    }
};
package org.bouncycastle.asn1.dvcs;

import java.math.BigInteger;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1GeneralizedTime;
import org.bouncycastle.asn1.ASN1Integer;
import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ASN1TaggedObject;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERTaggedObject;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.asn1.x509.PolicyInformation;

/**
 * The information portion of a DVCS request.
 * <pre>
 *     DVCSRequestInformation ::= SEQUENCE  {
 *         version                      INTEGER DEFAULT 1 ,
 *         service                      ServiceType,
 *         nonce                        Nonce OPTIONAL,
 *         requestTime                  DVCSTime OPTIONAL,
 *         requester                    [0] GeneralNames OPTIONAL,
 *         requestPolicy                [1] PolicyInformation OPTIONAL,
 *         dvcs                         [2] GeneralNames OPTIONAL,
 *         dataLocations                [3] GeneralNames OPTIONAL,
 *         extensions                   [4] IMPLICIT Extensions OPTIONAL
 *     }
 * </pre>
 */
public class DVCSRequestInformation
    extends ASN1Object
{
    // Parsed fields; all but version/service are OPTIONAL and may stay null.
    private int version = DEFAULT_VERSION;
    private ServiceType service;
    private BigInteger nonce;
    private DVCSTime requestTime;
    private GeneralNames requester;
    private PolicyInformation requestPolicy;
    private GeneralNames dvcs;
    private GeneralNames dataLocations;
    private Extensions extensions;

    // Value used when the DEFAULT version component is absent from the encoding.
    private static final int DEFAULT_VERSION = 1;
    // Context-specific tag numbers from the ASN.1 definition above.
    private static final int TAG_REQUESTER = 0;
    private static final int TAG_REQUEST_POLICY = 1;
    private static final int TAG_DVCS = 2;
    private static final int TAG_DATA_LOCATIONS = 3;
    private static final int TAG_EXTENSIONS = 4;

    /**
     * Parses the SEQUENCE form of the structure.
     * OPTIONAL components are distinguished by their concrete ASN.1 type or
     * context tag; components with unknown tag numbers are silently skipped.
     */
    private DVCSRequestInformation(ASN1Sequence seq)
    {
        int i = 0;

        // version is DEFAULT 1 and therefore optional in the encoding;
        // it is present only when the first element is an INTEGER.
        if (seq.getObjectAt(0) instanceof ASN1Integer)
        {
            ASN1Integer encVersion = ASN1Integer.getInstance(seq.getObjectAt(i++));
            this.version = encVersion.getValue().intValue();
        }
        else
        {
            this.version = 1;
        }
        // service is the only mandatory component after the optional version.
        this.service = ServiceType.getInstance(seq.getObjectAt(i++));
        // Remaining components are all OPTIONAL; dispatch on concrete type.
        while (i < seq.size())
        {
            ASN1Encodable x = seq.getObjectAt(i);
            if (x instanceof ASN1Integer)
            {
                // Nonce ::= INTEGER
                this.nonce = ASN1Integer.getInstance(x).getValue();
            }
            else if (x instanceof ASN1GeneralizedTime)
            {
                // DVCSTime in its GeneralizedTime form.
                this.requestTime = DVCSTime.getInstance(x);
            }
            else if (x instanceof ASN1TaggedObject)
            {
                // Context-tagged OPTIONAL components; all are implicitly tagged,
                // hence the "false" arguments below.
                ASN1TaggedObject t = ASN1TaggedObject.getInstance(x);
                int tagNo = t.getTagNo();

                switch (tagNo)
                {
                case TAG_REQUESTER:
                    this.requester = GeneralNames.getInstance(t, false);
                    break;
                case TAG_REQUEST_POLICY:
                    // PolicyInformation has no implicit-tag getInstance overload,
                    // so unwrap the implicitly-tagged SEQUENCE first.
                    this.requestPolicy = PolicyInformation.getInstance(ASN1Sequence.getInstance(t, false));
                    break;
                case TAG_DVCS:
                    this.dvcs = GeneralNames.getInstance(t, false);
                    break;
                case TAG_DATA_LOCATIONS:
                    this.dataLocations = GeneralNames.getInstance(t, false);
                    break;
                case TAG_EXTENSIONS:
                    this.extensions = Extensions.getInstance(t, false);
                    break;
                // NOTE(review): unknown tag numbers are intentionally ignored
                // (no default case) rather than rejected.
                }
            }
            else
            {
                // Any other element type is handed to DVCSTime, which is
                // presumably its alternate (non-GeneralizedTime) encoding.
                this.requestTime = DVCSTime.getInstance(x);
            }
            i++;
        }
    }

    /**
     * Returns a DVCSRequestInformation from the given object.
     *
     * @param obj a DVCSRequestInformation, an ASN1Sequence (or sequence-convertible
     *        object), or null.
     * @return the parsed instance, or null if {@code obj} is null.
     */
    public static DVCSRequestInformation getInstance(Object obj)
    {
        if (obj instanceof DVCSRequestInformation)
        {
            return (DVCSRequestInformation)obj;
        }
        else if (obj != null)
        {
            return new DVCSRequestInformation(ASN1Sequence.getInstance(obj));
        }

        return null;
    }

    /**
     * Returns a DVCSRequestInformation from a tagged object.
     *
     * @param obj the tagged object holding the structure.
     * @param explicit true if the object is explicitly tagged, false if implicit.
     */
    public static DVCSRequestInformation getInstance(
        ASN1TaggedObject obj,
        boolean explicit)
    {
        return getInstance(ASN1Sequence.getInstance(obj, explicit));
    }

    /**
     * Produces the DER SEQUENCE encoding. The version is omitted when it equals
     * the DEFAULT value, and every tagged component is emitted with implicit
     * tagging, mirroring the parsing constructor.
     */
    public ASN1Primitive toASN1Primitive()
    {
        ASN1EncodableVector v = new ASN1EncodableVector();

        // DEFAULT components must not be encoded when holding the default value.
        if (version != DEFAULT_VERSION)
        {
            v.add(new ASN1Integer(version));
        }
        v.add(service);
        if (nonce != null)
        {
            v.add(new ASN1Integer(nonce));
        }
        if (requestTime != null)
        {
            v.add(requestTime);
        }

        // Emit the optional tagged components in ascending tag order.
        int[] tags = new int[]{
            TAG_REQUESTER,
            TAG_REQUEST_POLICY,
            TAG_DVCS,
            TAG_DATA_LOCATIONS,
            TAG_EXTENSIONS
        };
        ASN1Encodable[] taggedObjects = new ASN1Encodable[]{
            requester,
            requestPolicy,
            dvcs,
            dataLocations,
            extensions
        };
        for (int i = 0; i < tags.length; i++)
        {
            int tag = tags[i];
            ASN1Encodable taggedObject = taggedObjects[i];
            if (taggedObject != null)
            {
                // false => implicit tagging, matching the ASN.1 module.
                v.add(new DERTaggedObject(false, tag, taggedObject));
            }
        }

        return new DERSequence(v);
    }

    /** Human-readable dump of all populated fields; for debugging only. */
    public String toString()
    {
        StringBuffer s = new StringBuffer();

        s.append("DVCSRequestInformation {\n");

        if (version != DEFAULT_VERSION)
        {
            s.append("version: " + version + "\n");
        }
        s.append("service: " + service + "\n");
        if (nonce != null)
        {
            s.append("nonce: " + nonce + "\n");
        }
        if (requestTime != null)
        {
            s.append("requestTime: " + requestTime + "\n");
        }
        if (requester != null)
        {
            s.append("requester: " + requester + "\n");
        }
        if (requestPolicy != null)
        {
            s.append("requestPolicy: " + requestPolicy + "\n");
        }
        if (dvcs != null)
        {
            s.append("dvcs: " + dvcs + "\n");
        }
        if (dataLocations != null)
        {
            s.append("dataLocations: " + dataLocations + "\n");
        }
        if (extensions != null)
        {
            s.append("extensions: " + extensions + "\n");
        }

        s.append("}\n");
        return s.toString();
    }

    /** @return the version, or 1 if it was absent from the encoding. */
    public int getVersion()
    {
        return version;
    }

    /** @return the mandatory service type. */
    public ServiceType getService()
    {
        return service;
    }

    /** @return the nonce, or null if absent. */
    public BigInteger getNonce()
    {
        return nonce;
    }

    /** @return the request time, or null if absent. */
    public DVCSTime getRequestTime()
    {
        return requestTime;
    }

    /** @return the requester names, or null if absent. */
    public GeneralNames getRequester()
    {
        return requester;
    }

    /** @return the request policy, or null if absent. */
    public PolicyInformation getRequestPolicy()
    {
        return requestPolicy;
    }

    /** @return the DVCS names, or null if absent. */
    public GeneralNames getDVCS()
    {
        return dvcs;
    }

    /** @return the data locations, or null if absent. */
    public GeneralNames getDataLocations()
    {
        return dataLocations;
    }

    /** @return the extensions, or null if absent. */
    public Extensions getExtensions()
    {
        return extensions;
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xdebugger.impl.ui.tree;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.ExpandableItemsHandler;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.xdebugger.XDebuggerBundle;
import com.intellij.xdebugger.frame.ImmediateFullValueEvaluator;
import com.intellij.xdebugger.frame.XDebuggerTreeNodeHyperlink;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import com.intellij.xdebugger.impl.ui.tree.nodes.XDebuggerTreeNode;
import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.plaf.basic.BasicTreeUI;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.lang.reflect.Method;

/**
 * Cell renderer for the debugger variables tree.
 *
 * Fragments whose tag is an "always on screen" {@link XDebuggerTreeNodeHyperlink}
 * are diverted from this renderer into a secondary renderer ({@link #myLink})
 * and painted pinned to the right edge of the tree's visible rectangle, while
 * the regular node text is clipped to the remaining width.  When a value's
 * text does not fit the screen at all, a synthetic "show full value" link
 * ({@link MyLongTextHyperlink}) is appended instead.
 *
 * @author nik
 */
class XDebuggerTreeRenderer extends ColoredTreeCellRenderer {
  private static final Logger LOG = Logger.getInstance(XDebuggerTreeRenderer.class);

  // Secondary renderer that holds only the link fragments for the current row.
  private final MyColoredTreeCellRenderer myLink = new MyColoredTreeCellRenderer();
  // True while the current row contains an always-on-screen link fragment.
  private boolean myHaveLink;
  // X offset (within this renderer) at which the link starts being painted.
  private int myLinkOffset;
  // Width reserved for the link when computing the preferred size.
  private int myLinkWidth;
  // Reusable synthetic link shown when a value's text is too long for the screen.
  private final MyLongTextHyperlink myLongTextLink = new MyLongTextHyperlink();

  public XDebuggerTreeRenderer() {
    getIpad().right = 0;
    myLink.getIpad().left = 0;
  }

  public void customizeCellRenderer(@NotNull final JTree tree,
                                    final Object value,
                                    final boolean selected,
                                    final boolean expanded,
                                    final boolean leaf,
                                    final int row,
                                    final boolean hasFocus) {
    myHaveLink = false;
    // Reset/prepare the link renderer for this row before appending fragments.
    myLink.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
    XDebuggerTreeNode node = (XDebuggerTreeNode)value;
    // appendToComponent re-enters our append() override below, which may set
    // myHaveLink and route link fragments into myLink.
    node.appendToComponent(this);
    setIcon(node.getIcon());

    // Visible portion of the tree (via the enclosing viewport when present).
    Rectangle treeVisibleRect = tree.getParent() instanceof JViewport
                                ? ((JViewport)tree.getParent()).getViewRect()
                                : tree.getVisibleRect();
    TreePath path = tree.getPathForRow(row);
    // X position where this row's content starts (indentation included).
    int rowX = path != null ? getRowX((BasicTreeUI)tree.getUI(), row, path.getPathCount() - 1) : 0;
    if (myHaveLink) {
      setupLinkDimensions(treeVisibleRect, rowX);
    }
    else {
      int visibleRectRightX = treeVisibleRect.x + treeVisibleRect.width;
      int notFittingWidth = rowX + super.getPreferredSize().width - visibleRectRightX;
      if (node instanceof XValueNodeImpl && notFittingWidth > 0) {
        // text does not fit visible area - show link
        String rawValue = DebuggerUIUtil.getNodeRawValue((XValueNodeImpl)node);
        if (!StringUtil.isEmpty(rawValue) && tree.isShowing()) {
          Point treeRightSideOnScreen = new Point(visibleRectRightX, 0);
          SwingUtilities.convertPointToScreen(treeRightSideOnScreen, tree);
          Rectangle screen = ScreenUtil.getScreenRectangle(treeRightSideOnScreen);
          // text may fit the screen in ExpandableItemsHandler
          if (screen.x + screen.width < treeRightSideOnScreen.x + notFittingWidth) {
            myLongTextLink.setupComponent(rawValue, ((XDebuggerTree)tree).getProject());
            // This append goes through our override and lands in myLink.
            append(myLongTextLink.getLinkText(), myLongTextLink.getTextAttributes(), myLongTextLink);
            setupLinkDimensions(treeVisibleRect, rowX);
            // NOTE(review): width zeroed after setupLinkDimensions — presumably
            // so the synthetic link does not widen the preferred size; confirm.
            myLinkWidth = 0;
          }
        }
      }
    }
    // Rows with a pinned link must not be expanded by ExpandableItemsHandler.
    putClientProperty(ExpandableItemsHandler.RENDERER_DISABLED, myHaveLink);
  }

  // Cached reflective handle to BasicTreeUI.getRowX (protected in the JDK).
  private static Method ourGetRowXMethod = null;

  /**
   * Reflectively calls the protected {@code BasicTreeUI.getRowX(row, depth)}
   * to obtain the row's starting x coordinate; returns 0 on any failure.
   */
  private static int getRowX(BasicTreeUI ui, int row, int depth) {
    if (ourGetRowXMethod == null) {
      try {
        ourGetRowXMethod = BasicTreeUI.class.getDeclaredMethod("getRowX", int.class, int.class);
        ourGetRowXMethod.setAccessible(true);
      }
      catch (NoSuchMethodException e) {
        LOG.error(e);
      }
    }
    if (ourGetRowXMethod != null) {
      try {
        return (Integer)ourGetRowXMethod.invoke(ui, row, depth);
      }
      catch (Exception e) {
        LOG.error(e);
      }
    }
    return 0;
  }

  /**
   * Computes where the link starts: either right after the main text, or
   * clamped so the link stays inside the tree's visible rectangle.
   */
  private void setupLinkDimensions(Rectangle treeVisibleRect, int rowX) {
    Dimension linkSize = myLink.getPreferredSize();
    myLinkWidth = linkSize.width;
    myLinkOffset = Math.min(super.getPreferredSize().width, treeVisibleRect.x + treeVisibleRect.width - myLinkWidth - rowX);
  }

  /**
   * Routes always-on-screen hyperlink fragments into the link renderer;
   * everything else is appended normally.  Called re-entrantly from
   * {@code node.appendToComponent(this)} in customizeCellRenderer.
   */
  @Override
  public void append(@NotNull String fragment, @NotNull SimpleTextAttributes attributes, Object tag) {
    if (tag instanceof XDebuggerTreeNodeHyperlink && ((XDebuggerTreeNodeHyperlink)tag).alwaysOnScreen()) {
      myHaveLink = true;
      myLink.append(fragment, attributes, tag);
    }
    else {
      super.append(fragment, attributes, tag);
    }
  }

  @Override
  protected void doPaint(Graphics2D g) {
    if (myHaveLink) {
      // Paint the main text clipped to the space left of the link...
      Graphics2D textGraphics = (Graphics2D)g.create(0, 0, myLinkOffset, g.getClipBounds().height);
      try {
        super.doPaint(textGraphics);
      }
      finally {
        textGraphics.dispose();
      }
      // ...then paint the link translated to its pinned position.
      g.translate(myLinkOffset, 0);
      myLink.setHeight(getHeight());
      myLink.doPaint(g);
      g.translate(-myLinkOffset, 0);
    }
    else {
      super.doPaint(g);
    }
  }

  @NotNull
  @Override
  public Dimension getPreferredSize() {
    Dimension size = super.getPreferredSize();
    // Reserve horizontal room for the link fragments, if any.
    if (myHaveLink) {
      size.width += myLinkWidth;
    }
    return size;
  }

  @Nullable
  @Override
  public Object getFragmentTagAt(int x) {
    // Hit-testing must account for the link living in a separate renderer
    // shifted by myLinkOffset.
    if (myHaveLink) {
      return myLink.getFragmentTagAt(x - myLinkOffset);
    }
    return super.getFragmentTagAt(x);
  }

  /**
   * Renderer used solely to hold and paint the link fragments of a row.
   * Its height is set externally so that painting uses the host row's height.
   */
  private static class MyColoredTreeCellRenderer extends ColoredTreeCellRenderer {
    private int myHeight;

    @Override
    public void customizeCellRenderer(@NotNull JTree tree,
                                      Object value,
                                      boolean selected,
                                      boolean expanded,
                                      boolean leaf,
                                      int row,
                                      boolean hasFocus) {
      // Intentionally empty: fragments are appended by the outer renderer.
    }

    @Override
    protected void doPaint(Graphics2D g) {
      // Widened to package-visible access so the outer renderer can delegate.
      super.doPaint(g);
    }

    public void setHeight(int height) {
      myHeight = height;
    }

    @Override
    public int getHeight() {
      // Report the externally supplied row height during painting.
      return myHeight;
    }
  }

  /**
   * Synthetic hyperlink shown when a value's text cannot fit the screen;
   * clicking it opens the full value in a popup.
   */
  private static class MyLongTextHyperlink extends XDebuggerTreeNodeHyperlink {
    private String myText;
    private Project myProject;

    public MyLongTextHyperlink() {
      super(XDebuggerBundle.message("node.test.show.full.value"));
    }

    /** Binds the link to the text and project of the current row. */
    public void setupComponent(String text, Project project) {
      myText = text;
      myProject = project;
    }

    @Override
    public boolean alwaysOnScreen() {
      return true;
    }

    @Override
    public void onClick(MouseEvent event) {
      DebuggerUIUtil.showValuePopup(new ImmediateFullValueEvaluator(myText), event, myProject, null);
      event.consume();
    }
  }
}
/*
 * Copyright 2017 Axway Software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.axway.ats.uiengine.elements.html.realbrowser;

import com.axway.ats.uiengine.UiDriver;
import com.axway.ats.uiengine.elements.AbstractElementsFactory;
import com.axway.ats.uiengine.elements.UiElementProperties;

/**
 * A factory for HTML elements driven through a real browser.
 *
 * Each element type is available via two overloads: one that resolves the
 * element's properties from the shared elements map by its map id, and one
 * that accepts already-built {@link UiElementProperties}.
 */
public class RealHtmlElementsFactory extends AbstractElementsFactory {

    // Lazily created singleton; guarded by the class lock in getInstance().
    private static RealHtmlElementsFactory instance;

    private RealHtmlElementsFactory() {
        super();
    }

    /**
     * @return the singleton factory instance, creating it on first use.
     */
    // Fixed modifier order: JLS/Checkstyle recommend "public static synchronized".
    public static synchronized RealHtmlElementsFactory getInstance() {
        if (instance == null) {
            instance = new RealHtmlElementsFactory();
        }
        return instance;
    }

    /** @return a button element resolved from the elements map by {@code mapId} */
    public RealHtmlButton getHtmlButton( String mapId, UiDriver uiDriver ) {
        return getHtmlButton(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a button element built from the given {@code properties} */
    public RealHtmlButton getHtmlButton( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlButton(uiDriver, properties);
    }

    /** @return a text box element resolved from the elements map by {@code mapId} */
    public RealHtmlTextBox getHtmlTextBox( String mapId, UiDriver uiDriver ) {
        return getHtmlTextBox(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a text box element built from the given {@code properties} */
    public RealHtmlTextBox getHtmlTextBox( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlTextBox(uiDriver, properties);
    }

    /** @return a text area element resolved from the elements map by {@code mapId} */
    public RealHtmlTextArea getHtmlTextArea( String mapId, UiDriver uiDriver ) {
        return getHtmlTextArea(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a text area element built from the given {@code properties} */
    public RealHtmlTextArea getHtmlTextArea( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlTextArea(uiDriver, properties);
    }

    /** @return a check box element resolved from the elements map by {@code mapId} */
    public RealHtmlCheckBox getHtmlCheckBox( String mapId, UiDriver uiDriver ) {
        return getHtmlCheckBox(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a check box element built from the given {@code properties} */
    public RealHtmlCheckBox getHtmlCheckBox( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlCheckBox(uiDriver, properties);
    }

    /** @return a link element resolved from the elements map by {@code mapId} */
    public RealHtmlLink getHtmlLink( String mapId, UiDriver uiDriver ) {
        return getHtmlLink(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a link element built from the given {@code properties} */
    public RealHtmlLink getHtmlLink( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlLink(uiDriver, properties);
    }

    /** @return a single-select list resolved from the elements map by {@code mapId} */
    public RealHtmlSingleSelectList getHtmlSingleSelectList( String mapId, UiDriver uiDriver ) {
        return getHtmlSingleSelectList(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a single-select list built from the given {@code properties} */
    public RealHtmlSingleSelectList getHtmlSingleSelectList( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlSingleSelectList(uiDriver, properties);
    }

    /** @return a multi-select list resolved from the elements map by {@code mapId} */
    public RealHtmlMultiSelectList getHtmlMultiSelectList( String mapId, UiDriver uiDriver ) {
        return getHtmlMultiSelectList(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a multi-select list built from the given {@code properties} */
    public RealHtmlMultiSelectList getHtmlMultiSelectList( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlMultiSelectList(uiDriver, properties);
    }

    /** @return a radio button list resolved from the elements map by {@code mapId} */
    public RealHtmlRadioList getHtmlRadioList( String mapId, UiDriver uiDriver ) {
        return getHtmlRadioList(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a radio button list built from the given {@code properties} */
    public RealHtmlRadioList getHtmlRadioList( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlRadioList(uiDriver, properties);
    }

    /** @return a file browse element resolved from the elements map by {@code mapId} */
    public RealHtmlFileBrowse getHtmlFileBrowse( String mapId, UiDriver uiDriver ) {
        return getHtmlFileBrowse(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a file browse element built from the given {@code properties} */
    public RealHtmlFileBrowse getHtmlFileBrowse( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlFileBrowse(uiDriver, properties);
    }

    /** @return a table element resolved from the elements map by {@code mapId} */
    public RealHtmlTable getHtmlTable( String mapId, UiDriver uiDriver ) {
        return getHtmlTable(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a table element built from the given {@code properties} */
    public RealHtmlTable getHtmlTable( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlTable(uiDriver, properties);
    }

    /** @return a handle to the browser's alert dialog */
    public RealHtmlAlert getHtmlAlert( UiDriver uiDriver ) {
        return new RealHtmlAlert(uiDriver);
    }

    /** @return a handle to the browser's prompt dialog */
    public RealHtmlPrompt getHtmlPrompt( UiDriver uiDriver ) {
        return new RealHtmlPrompt(uiDriver);
    }

    /** @return a handle to the browser's confirm dialog */
    public RealHtmlConfirm getHtmlConfirm( UiDriver uiDriver ) {
        return new RealHtmlConfirm(uiDriver);
    }

    /** @return a generic element resolved from the elements map by {@code mapId} */
    public RealHtmlElement getHtmlElement( String mapId, UiDriver uiDriver ) {
        return getHtmlElement(elementsMap.getElementProperties(mapId), uiDriver);
    }

    /** @return a generic element built from the given {@code properties} */
    public RealHtmlElement getHtmlElement( UiElementProperties properties, UiDriver uiDriver ) {
        return new RealHtmlElement(uiDriver, properties);
    }
}
/** * Copyright (c) 2012-2014 Netflix, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.msl.keyx; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.math.BigInteger; import java.security.InvalidAlgorithmParameterException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Random; import javax.crypto.interfaces.DHPrivateKey; import javax.crypto.interfaces.DHPublicKey; import javax.crypto.spec.DHParameterSpec; import org.json.JSONException; import org.json.JSONObject; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; import com.netflix.msl.MslCryptoException; import com.netflix.msl.MslEncodingException; import com.netflix.msl.MslEntityAuthException; import com.netflix.msl.MslError; import com.netflix.msl.MslException; import com.netflix.msl.MslInternalException; import com.netflix.msl.MslKeyExchangeException; import com.netflix.msl.MslMasterTokenException; import 
com.netflix.msl.crypto.ICryptoContext; import com.netflix.msl.entityauth.EntityAuthenticationData; import com.netflix.msl.entityauth.EntityAuthenticationScheme; import com.netflix.msl.entityauth.MockPresharedAuthenticationFactory; import com.netflix.msl.entityauth.PresharedAuthenticationData; import com.netflix.msl.keyx.DiffieHellmanExchange.RequestData; import com.netflix.msl.keyx.DiffieHellmanExchange.ResponseData; import com.netflix.msl.keyx.KeyExchangeFactory.KeyExchangeData; import com.netflix.msl.test.ExpectedMslException; import com.netflix.msl.tokens.MasterToken; import com.netflix.msl.util.Base64; import com.netflix.msl.util.JsonUtils; import com.netflix.msl.util.MockAuthenticationUtils; import com.netflix.msl.util.MockMslContext; import com.netflix.msl.util.MslContext; import com.netflix.msl.util.MslTestUtils; /** * Diffie-Hellman key exchange unit tests. * * @author Wesley Miaw <wmiaw@netflix.com> */ @RunWith(Suite.class) @SuiteClasses({DiffieHellmanExchangeSuite.KeyExchangeFactoryTest.class, DiffieHellmanExchangeSuite.RequestDataTest.class, DiffieHellmanExchangeSuite.ResponseDataTest.class}) public class DiffieHellmanExchangeSuite { /** JSON key key exchange scheme. */ private static final String KEY_SCHEME = "scheme"; /** JSON key key request data. */ private static final String KEY_KEYDATA = "keydata"; /** JSON key Diffie-Hellman parameters ID. */ private static final String KEY_PARAMETERS_ID = "parametersid"; /** JSON key Diffie-Hellman public key. */ private static final String KEY_PUBLIC_KEY = "publickey"; /** * If the provided byte array begins with a null byte this function simply * returns the original array. Otherwise a new array is created that is a * copy of the original array with a null byte prepended, and this new array * is returned. * * @param b the original array. * @return the resulting byte array. 
*/ private static byte[] prependNullByte(final byte[] b) { if (b[0] == 0x00) return b; final byte[] result = new byte[b.length + 1]; result[0] = 0x00; System.arraycopy(b, 0, result, 1, b.length); return result; } private static MasterToken MASTER_TOKEN; private static final String PARAMETERS_ID = MockDiffieHellmanParameters.DEFAULT_ID; private static BigInteger REQUEST_PUBLIC_KEY; private static DHPrivateKey REQUEST_PRIVATE_KEY; private static BigInteger RESPONSE_PUBLIC_KEY; private static DHPrivateKey RESPONSE_PRIVATE_KEY; /** Random. */ private static Random random; /** MSL context. */ private static MslContext ctx; @BeforeClass public static synchronized void setup() throws NoSuchAlgorithmException, InvalidAlgorithmParameterException, MslEncodingException, MslCryptoException, MslKeyExchangeException { if (ctx == null) { random = new Random(); ctx = new MockMslContext(EntityAuthenticationScheme.PSK, false); MASTER_TOKEN = MslTestUtils.getMasterToken(ctx, 1, 1); final DiffieHellmanParameters params = MockDiffieHellmanParameters.getDefaultParameters(); final DHParameterSpec paramSpec = params.getParameterSpec(PARAMETERS_ID); final KeyPairGenerator generator = KeyPairGenerator.getInstance("DH"); generator.initialize(paramSpec); final KeyPair requestKeyPair = generator.generateKeyPair(); REQUEST_PUBLIC_KEY = ((DHPublicKey)requestKeyPair.getPublic()).getY(); REQUEST_PRIVATE_KEY = (DHPrivateKey)requestKeyPair.getPrivate(); generator.initialize(paramSpec); final KeyPair responseKeyPair = generator.generateKeyPair(); RESPONSE_PUBLIC_KEY = ((DHPublicKey)responseKeyPair.getPublic()).getY(); RESPONSE_PRIVATE_KEY = (DHPrivateKey)responseKeyPair.getPrivate(); } } @AfterClass public static synchronized void teardown() { // Teardown causes problems because the data is shared by the inner // classes, so don't do any cleanup. } /** Request data unit tests. 
*/ public static class RequestDataTest { @Rule public ExpectedMslException thrown = ExpectedMslException.none(); @Test public void ctors() throws JSONException, MslEncodingException, MslKeyExchangeException { final RequestData req = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN, req.getKeyExchangeScheme()); assertEquals(PARAMETERS_ID, req.getParametersId()); assertArrayEquals(REQUEST_PRIVATE_KEY.getEncoded(), req.getPrivateKey().getEncoded()); assertEquals(REQUEST_PUBLIC_KEY, req.getPublicKey()); final JSONObject keydata = req.getKeydata(); assertNotNull(keydata); final RequestData joReq = new RequestData(keydata); assertEquals(req.getKeyExchangeScheme(), joReq.getKeyExchangeScheme()); assertEquals(req.getParametersId(), joReq.getParametersId()); assertNull(joReq.getPrivateKey()); assertEquals(req.getPublicKey(), joReq.getPublicKey()); final JSONObject joKeydata = joReq.getKeydata(); assertNotNull(joKeydata); assertTrue(JsonUtils.equals(keydata, joKeydata)); } @Test public void jsonString() throws JSONException { final RequestData req = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final JSONObject jo = new JSONObject(req.toJSONString()); assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN.toString(), jo.getString(KEY_SCHEME)); final JSONObject keydata = jo.getJSONObject(KEY_KEYDATA); assertEquals(PARAMETERS_ID, keydata.getString(KEY_PARAMETERS_ID)); assertArrayEquals(prependNullByte(REQUEST_PUBLIC_KEY.toByteArray()), Base64.decode(keydata.getString(KEY_PUBLIC_KEY))); } @Test public void create() throws JSONException, MslEncodingException, MslEntityAuthException, MslCryptoException, MslKeyExchangeException { final RequestData data = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final String jsonString = data.toJSONString(); final JSONObject jo = new JSONObject(jsonString); final KeyRequestData keyRequestData = KeyRequestData.create(ctx, jo); 
assertNotNull(keyRequestData); assertTrue(keyRequestData instanceof RequestData); final RequestData joData = (RequestData)keyRequestData; assertEquals(data.getKeyExchangeScheme(), joData.getKeyExchangeScheme()); assertEquals(data.getParametersId(), joData.getParametersId()); assertNull(joData.getPrivateKey()); assertEquals(data.getPublicKey(), joData.getPublicKey()); } @Test public void missingParametersId() throws JSONException, MslEncodingException, MslKeyExchangeException { thrown.expect(MslEncodingException.class); thrown.expectMslError(MslError.JSON_PARSE_ERROR); final RequestData req = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final JSONObject keydata = req.getKeydata(); assertNotNull(keydata.remove(KEY_PARAMETERS_ID)); new RequestData(keydata); } @Test public void missingPublicKey() throws JSONException, MslEncodingException, MslKeyExchangeException { thrown.expect(MslEncodingException.class); thrown.expectMslError(MslError.JSON_PARSE_ERROR); final RequestData req = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final JSONObject keydata = req.getKeydata(); assertNotNull(keydata.remove(KEY_PUBLIC_KEY)); new RequestData(keydata); } // This test will not fail because Base64.decode() // does not error when given invalid Base64-encoded data. 
@Ignore @Test public void invalidPublicKey() throws JSONException, MslEncodingException, MslKeyExchangeException { thrown.expect(MslKeyExchangeException.class); thrown.expectMslError(MslError.KEYX_INVALID_PUBLIC_KEY); final RequestData req = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final JSONObject keydata = req.getKeydata(); keydata.put(KEY_PUBLIC_KEY, "x"); new RequestData(keydata); } @Test public void equalsParametersId() throws MslEncodingException, MslKeyExchangeException, JSONException { final RequestData dataA = new RequestData(PARAMETERS_ID + "A", REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final RequestData dataB = new RequestData(PARAMETERS_ID + "B", REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final RequestData dataA2 = new RequestData(dataA.getKeydata()); assertTrue(dataA.equals(dataA)); assertEquals(dataA.hashCode(), dataA.hashCode()); assertFalse(dataA.equals(dataB)); assertFalse(dataB.equals(dataA)); assertTrue(dataA.hashCode() != dataB.hashCode()); // The private keys don't transfer via the JSON constructor. assertFalse(dataA.equals(dataA2)); assertFalse(dataA2.equals(dataA)); assertTrue(dataA.hashCode() != dataA2.hashCode()); } @Test public void equalsPublicKey() throws MslEncodingException, MslKeyExchangeException, JSONException { final RequestData dataA = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final RequestData dataB = new RequestData(PARAMETERS_ID, RESPONSE_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final RequestData dataA2 = new RequestData(dataA.getKeydata()); assertTrue(dataA.equals(dataA)); assertEquals(dataA.hashCode(), dataA.hashCode()); assertFalse(dataA.equals(dataB)); assertFalse(dataB.equals(dataA)); assertTrue(dataA.hashCode() != dataB.hashCode()); // The private keys don't transfer via the JSON constructor. 
assertFalse(dataA.equals(dataA2)); assertFalse(dataA2.equals(dataA)); assertTrue(dataA.hashCode() != dataA2.hashCode()); } @Test public void equalsPrivateKey() throws MslEncodingException, MslKeyExchangeException, JSONException { final RequestData dataA = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final RequestData dataB = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, RESPONSE_PRIVATE_KEY); final RequestData dataA2 = new RequestData(dataA.getKeydata()); assertTrue(dataA.equals(dataA)); assertEquals(dataA.hashCode(), dataA.hashCode()); assertFalse(dataA.equals(dataB)); assertFalse(dataB.equals(dataA)); assertTrue(dataA.hashCode() != dataB.hashCode()); // The private keys don't transfer via the JSON constructor. assertFalse(dataA.equals(dataA2)); assertFalse(dataA2.equals(dataA)); assertTrue(dataA.hashCode() != dataA2.hashCode()); } @Test public void equalsObject() { final RequestData data = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); assertFalse(data.equals(null)); assertFalse(data.equals(PARAMETERS_ID)); assertTrue(data.hashCode() != PARAMETERS_ID.hashCode()); } } /** Response data unit tests. */ public static class ResponseDataTest { /** JSON key master token. 
*/ private static final String KEY_MASTER_TOKEN = "mastertoken"; @Rule public ExpectedMslException thrown = ExpectedMslException.none(); @Test public void ctors() throws JSONException, MslEncodingException, MslKeyExchangeException { final ResponseData resp = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN, resp.getKeyExchangeScheme()); assertEquals(PARAMETERS_ID, resp.getParametersId()); assertEquals(RESPONSE_PUBLIC_KEY, resp.getPublicKey()); final JSONObject keydata = resp.getKeydata(); assertNotNull(keydata); final ResponseData joResp = new ResponseData(MASTER_TOKEN, keydata); assertEquals(resp.getKeyExchangeScheme(), joResp.getKeyExchangeScheme()); assertEquals(resp.getMasterToken(), joResp.getMasterToken()); assertEquals(resp.getParametersId(), joResp.getParametersId()); assertEquals(resp.getPublicKey(), joResp.getPublicKey()); final JSONObject joKeydata = joResp.getKeydata(); assertNotNull(joKeydata); assertTrue(JsonUtils.equals(keydata, joKeydata)); } @Test public void jsonString() throws JSONException, MslEncodingException, MslCryptoException, MslException { final ResponseData resp = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final JSONObject jo = new JSONObject(resp.toJSONString()); assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN.toString(), jo.getString(KEY_SCHEME)); final MasterToken masterToken = new MasterToken(ctx, jo.getJSONObject(KEY_MASTER_TOKEN)); assertEquals(MASTER_TOKEN, masterToken); final JSONObject keydata = jo.getJSONObject(KEY_KEYDATA); assertEquals(PARAMETERS_ID, keydata.getString(KEY_PARAMETERS_ID)); assertArrayEquals(prependNullByte(RESPONSE_PUBLIC_KEY.toByteArray()), Base64.decode(keydata.getString(KEY_PUBLIC_KEY))); } @Test public void create() throws JSONException, MslException { final ResponseData data = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final String jsonString = data.toJSONString(); final JSONObject jo = new 
JSONObject(jsonString); final KeyResponseData keyResponseData = KeyResponseData.create(ctx, jo); assertNotNull(keyResponseData); assertTrue(keyResponseData instanceof ResponseData); final ResponseData joData = (ResponseData)keyResponseData; assertEquals(data.getKeyExchangeScheme(), joData.getKeyExchangeScheme()); assertEquals(data.getMasterToken(), joData.getMasterToken()); assertEquals(data.getParametersId(), joData.getParametersId()); assertEquals(data.getPublicKey(), joData.getPublicKey()); } @Test public void missingParametersId() throws JSONException, MslEncodingException, MslKeyExchangeException { thrown.expect(MslEncodingException.class); thrown.expectMslError(MslError.JSON_PARSE_ERROR); final ResponseData resp = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final JSONObject keydata = resp.getKeydata(); assertNotNull(keydata.remove(KEY_PARAMETERS_ID)); new ResponseData(MASTER_TOKEN, keydata); } @Test public void missingPublicKey() throws JSONException, MslEncodingException, MslKeyExchangeException { thrown.expect(MslEncodingException.class); thrown.expectMslError(MslError.JSON_PARSE_ERROR); final ResponseData resp = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final JSONObject keydata = resp.getKeydata(); assertNotNull(keydata.remove(KEY_PUBLIC_KEY)); new ResponseData(MASTER_TOKEN, keydata); } // This test will not fail because Base64.decode() // does not error when given invalid Base64-encoded data. 
@Ignore @Test public void invalidPublicKey() throws JSONException, MslEncodingException, MslKeyExchangeException { thrown.expect(MslKeyExchangeException.class); thrown.expectMslError(MslError.KEYX_INVALID_PUBLIC_KEY); final ResponseData resp = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final JSONObject keydata = resp.getKeydata(); keydata.put(KEY_PUBLIC_KEY, "x"); new ResponseData(MASTER_TOKEN, keydata); } @Test public void equalsMasterToken() throws MslEncodingException, MslKeyExchangeException, JSONException, MslCryptoException { final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2); final ResponseData dataA = new ResponseData(masterTokenA, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final ResponseData dataB = new ResponseData(masterTokenB, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final ResponseData dataA2 = new ResponseData(masterTokenA, dataA.getKeydata()); assertTrue(dataA.equals(dataA)); assertEquals(dataA.hashCode(), dataA.hashCode()); assertFalse(dataA.equals(dataB)); assertFalse(dataB.equals(dataA)); assertTrue(dataA.hashCode() != dataB.hashCode()); assertTrue(dataA.equals(dataA2)); assertTrue(dataA2.equals(dataA)); assertEquals(dataA.hashCode(), dataA2.hashCode()); } @Test public void equalsParametersId() throws MslEncodingException, MslKeyExchangeException, JSONException { final ResponseData dataA = new ResponseData(MASTER_TOKEN, PARAMETERS_ID + "A", RESPONSE_PUBLIC_KEY); final ResponseData dataB = new ResponseData(MASTER_TOKEN, PARAMETERS_ID + "B", RESPONSE_PUBLIC_KEY); final ResponseData dataA2 = new ResponseData(MASTER_TOKEN, dataA.getKeydata()); assertTrue(dataA.equals(dataA)); assertEquals(dataA.hashCode(), dataA.hashCode()); assertFalse(dataA.equals(dataB)); assertFalse(dataB.equals(dataA)); assertTrue(dataA.hashCode() != dataB.hashCode()); assertTrue(dataA.equals(dataA2)); assertTrue(dataA2.equals(dataA)); assertEquals(dataA.hashCode(), 
dataA2.hashCode()); } @Test public void equalsPublicKey() throws MslEncodingException, MslKeyExchangeException, JSONException { final ResponseData dataA = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); final ResponseData dataB = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, REQUEST_PUBLIC_KEY); final ResponseData dataA2 = new ResponseData(MASTER_TOKEN, dataA.getKeydata()); assertTrue(dataA.equals(dataA)); assertEquals(dataA.hashCode(), dataA.hashCode()); assertFalse(dataA.equals(dataB)); assertFalse(dataB.equals(dataA)); assertTrue(dataA.hashCode() != dataB.hashCode()); assertTrue(dataA.equals(dataA2)); assertTrue(dataA2.equals(dataA)); assertEquals(dataA.hashCode(), dataA2.hashCode()); } @Test public void equalsObject() { final ResponseData data = new ResponseData(MASTER_TOKEN, PARAMETERS_ID, RESPONSE_PUBLIC_KEY); assertFalse(data.equals(null)); assertFalse(data.equals(PARAMETERS_ID)); assertTrue(data.hashCode() != PARAMETERS_ID.hashCode()); } } /** Key exchange factory unit tests. */ public static class KeyExchangeFactoryTest { /** * Fake key request data for the Diffie-Hellman key exchange scheme. */ private static class FakeKeyRequestData extends KeyRequestData { /** Create a new fake key request data. */ protected FakeKeyRequestData() { super(KeyExchangeScheme.DIFFIE_HELLMAN); } /* (non-Javadoc) * @see com.netflix.msl.keyx.KeyRequestData#getKeydata() */ @Override protected JSONObject getKeydata() throws JSONException { return null; } } /** * Fake key response data for the Diffie-Hellman key exchange scheme. */ private static class FakeKeyResponseData extends KeyResponseData { /** Create a new fake key response data. 
*/ protected FakeKeyResponseData() { super(MASTER_TOKEN, KeyExchangeScheme.DIFFIE_HELLMAN); } /* (non-Javadoc) * @see com.netflix.msl.keyx.KeyResponseData#getKeydata() */ @Override protected JSONObject getKeydata() { return null; } } @Rule public ExpectedMslException thrown = ExpectedMslException.none(); @BeforeClass public static void setup() { authutils = new MockAuthenticationUtils(); final DiffieHellmanParameters params = MockDiffieHellmanParameters.getDefaultParameters(); factory = new DiffieHellmanExchange(params, authutils); entityAuthData = new PresharedAuthenticationData(MockPresharedAuthenticationFactory.PSK_ESN); } @AfterClass public static void teardown() { entityAuthData = null; factory = null; authutils = null; } @Before public void reset() { authutils.reset(); ctx.getMslStore().clearCryptoContexts(); ctx.getMslStore().clearServiceTokens(); } @Test public void factory() { assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN, factory.getScheme()); } @Test public void generateInitialResponse() throws MslException { final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final KeyExchangeData keyxData = factory.generateResponse(ctx, keyRequestData, entityAuthData); assertNotNull(keyxData); assertNotNull(keyxData.cryptoContext); assertNotNull(keyxData.keyResponseData); final KeyResponseData keyResponseData = keyxData.keyResponseData; assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN, keyResponseData.getKeyExchangeScheme()); final MasterToken masterToken = keyResponseData.getMasterToken(); assertNotNull(masterToken); assertEquals(MockPresharedAuthenticationFactory.PSK_ESN, masterToken.getIdentity()); } @Test(expected = MslInternalException.class) public void wrongRequestInitialResponse() throws MslException { final KeyRequestData keyRequestData = new FakeKeyRequestData(); factory.generateResponse(ctx, keyRequestData, entityAuthData); } @Test public void invalidParametersIdInitialResponse() throws MslException { 
thrown.expect(MslKeyExchangeException.class); thrown.expectMslError(MslError.UNKNOWN_KEYX_PARAMETERS_ID); final KeyRequestData keyRequestData = new RequestData("x", REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); factory.generateResponse(ctx, keyRequestData, entityAuthData); } @Test public void unknownParametersIdInitialResponse() throws MslException { thrown.expect(MslKeyExchangeException.class); thrown.expectMslError(MslError.UNKNOWN_KEYX_PARAMETERS_ID); final KeyRequestData keyRequestData = new RequestData(Integer.toString(98765), REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); factory.generateResponse(ctx, keyRequestData, entityAuthData); } @Test public void generateSubsequentResponse() throws MslException { final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final KeyExchangeData keyxData = factory.generateResponse(ctx, keyRequestData, MASTER_TOKEN); assertNotNull(keyxData); assertNotNull(keyxData.cryptoContext); assertNotNull(keyxData.keyResponseData); final KeyResponseData keyResponseData = keyxData.keyResponseData; assertEquals(KeyExchangeScheme.DIFFIE_HELLMAN, keyResponseData.getKeyExchangeScheme()); final MasterToken masterToken = keyResponseData.getMasterToken(); assertNotNull(masterToken); assertEquals(MASTER_TOKEN.getIdentity(), masterToken.getIdentity()); assertEquals(MASTER_TOKEN.getSerialNumber(), masterToken.getSerialNumber()); assertEquals(MASTER_TOKEN.getSequenceNumber() + 1, masterToken.getSequenceNumber()); } @Test public void untrustedMasterTokenSubsequentResponse() throws MslEncodingException, MslCryptoException, JSONException, MslException { thrown.expect(MslMasterTokenException.class); thrown.expectMslError(MslError.MASTERTOKEN_UNTRUSTED); final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY); final MasterToken masterToken = MslTestUtils.getUntrustedMasterToken(ctx); factory.generateResponse(ctx, keyRequestData, masterToken); } 
// NOTE(review): these are the remaining Diffie-Hellman KeyExchangeFactory tests plus the
// suite's static fixtures; the enclosing class declarations open earlier in the file.

/** A request data object of the wrong subclass must be rejected when renewing a master token. */
@Test(expected = MslInternalException.class)
public void wrongRequestSubsequentResponse() throws MslException {
    final KeyRequestData keyRequestData = new FakeKeyRequestData();
    factory.generateResponse(ctx, keyRequestData, MASTER_TOKEN);
}

/** A syntactically invalid parameters ID must fail with UNKNOWN_KEYX_PARAMETERS_ID. */
@Test
public void invalidParametersIdSubsequentResponse() throws MslException {
    thrown.expect(MslKeyExchangeException.class);
    thrown.expectMslError(MslError.UNKNOWN_KEYX_PARAMETERS_ID);

    final KeyRequestData keyRequestData = new RequestData("x", REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY);
    factory.generateResponse(ctx, keyRequestData, MASTER_TOKEN);
}

/** A well-formed but unregistered parameters ID must fail with UNKNOWN_KEYX_PARAMETERS_ID. */
@Test
public void unknownParametersIdSubsequentResponse() throws MslException {
    thrown.expect(MslKeyExchangeException.class);
    thrown.expectMslError(MslError.UNKNOWN_KEYX_PARAMETERS_ID);

    final KeyRequestData keyRequestData = new RequestData(Integer.toString(98765), REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY);
    factory.generateResponse(ctx, keyRequestData, MASTER_TOKEN);
}

/**
 * The requester-side crypto context (from getCryptoContext) and the responder-side
 * crypto context (from generateResponse) must be interoperable: signatures match,
 * each side can decrypt the other's ciphertext, and cross-verification succeeds.
 */
@Test
public void getCryptoContext() throws MslException {
    final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY);
    final KeyExchangeData keyxData = factory.generateResponse(ctx, keyRequestData, entityAuthData);
    final ICryptoContext requestCryptoContext = keyxData.cryptoContext;
    final KeyResponseData keyResponseData = keyxData.keyResponseData;
    final ICryptoContext responseCryptoContext = factory.getCryptoContext(ctx, keyRequestData, keyResponseData, null);
    assertNotNull(responseCryptoContext);

    final byte[] data = new byte[32];
    random.nextBytes(data);

    // Ciphertext won't always be equal depending on how it was
    // enveloped. So we cannot check for equality or inequality.
    final byte[] requestCiphertext = requestCryptoContext.encrypt(data);
    final byte[] responseCiphertext = responseCryptoContext.encrypt(data);
    assertFalse(Arrays.equals(data, requestCiphertext));
    assertFalse(Arrays.equals(data, responseCiphertext));

    // Signatures should always be equal.
    final byte[] requestSignature = requestCryptoContext.sign(data);
    final byte[] responseSignature = responseCryptoContext.sign(data);
    assertFalse(Arrays.equals(data, requestSignature));
    assertFalse(Arrays.equals(data, responseSignature));
    assertArrayEquals(requestSignature, responseSignature);

    // Plaintext should always be equal to the original message.
    final byte[] requestPlaintext = requestCryptoContext.decrypt(responseCiphertext);
    final byte[] responsePlaintext = responseCryptoContext.decrypt(requestCiphertext);
    assertNotNull(requestPlaintext);
    assertArrayEquals(data, requestPlaintext);
    assertArrayEquals(requestPlaintext, responsePlaintext);

    // Verification should always succeed.
    assertTrue(requestCryptoContext.verify(data, responseSignature));
    assertTrue(responseCryptoContext.verify(data, requestSignature));
}

/** A request data object of the wrong subclass must be rejected when building a crypto context. */
@Test(expected = MslInternalException.class)
public void wrongRequestCryptoContext() throws MslException {
    final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY);
    final KeyExchangeData keyxData = factory.generateResponse(ctx, keyRequestData, entityAuthData);
    final KeyResponseData keyResponseData = keyxData.keyResponseData;

    final KeyRequestData fakeKeyRequestData = new FakeKeyRequestData();
    factory.getCryptoContext(ctx, fakeKeyRequestData, keyResponseData, null);
}

/** A response data object of the wrong subclass must be rejected when building a crypto context. */
@Test(expected = MslInternalException.class)
public void wrongResponseCryptoContext() throws MslKeyExchangeException, MslCryptoException, MslEncodingException, MslMasterTokenException, MslEntityAuthException {
    final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY);
    final KeyResponseData fakeKeyResponseData = new FakeKeyResponseData();
    factory.getCryptoContext(ctx, keyRequestData, fakeKeyResponseData, null);
}

/** Mismatched parameters IDs between request and response must fail with KEYX_RESPONSE_REQUEST_MISMATCH. */
@Test
public void parametersIdMismatchCryptoContext() throws MslException {
    thrown.expect(MslKeyExchangeException.class);
    thrown.expectMslError(MslError.KEYX_RESPONSE_REQUEST_MISMATCH);

    final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, REQUEST_PRIVATE_KEY);
    final KeyExchangeData keyxData = factory.generateResponse(ctx, keyRequestData, entityAuthData);
    final KeyResponseData keyResponseData = keyxData.keyResponseData;
    final MasterToken masterToken = keyResponseData.getMasterToken();
    // Rebuild the response with a deliberately different parameters ID.
    final KeyResponseData mismatchedKeyResponseData = new ResponseData(masterToken, PARAMETERS_ID + "x", RESPONSE_PUBLIC_KEY);

    factory.getCryptoContext(ctx, keyRequestData, mismatchedKeyResponseData, null);
}

/** Request data lacking the private key must fail with KEYX_PRIVATE_KEY_MISSING. */
@Test
public void privateKeyMissingCryptoContext() throws MslException {
    thrown.expect(MslKeyExchangeException.class);
    thrown.expectMslError(MslError.KEYX_PRIVATE_KEY_MISSING);

    final KeyRequestData keyRequestData = new RequestData(PARAMETERS_ID, REQUEST_PUBLIC_KEY, null);
    final KeyExchangeData keyxData = factory.generateResponse(ctx, keyRequestData, entityAuthData);
    final KeyResponseData keyResponseData = keyxData.keyResponseData;
    factory.getCryptoContext(ctx, keyRequestData, keyResponseData, null);
}

/** Authentication utilities. */
private static MockAuthenticationUtils authutils;
/** Key exchange factory. */
private static KeyExchangeFactory factory;
/** Entity authentication data. */
private static EntityAuthenticationData entityAuthData;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.jlibaio; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; /** * This class is used as an aggregator for the {@link LibaioFile}. * <br> * It holds native data, and it will share a libaio queue that can be used by multiple files. * <br> * You need to use the poll methods to read the result of write and read submissions. * <br> * You also need to use the special buffer created by {@link LibaioFile} as you need special alignments * when dealing with O_DIRECT files. * <br> * A Single controller can server multiple files. There's no need to create one controller per file. 
* <br> * <a href="https://ext4.wiki.kernel.org/index.php/Clarifying_Direct_IO's_Semantics">Interesting reading for this.</a> */ public class LibaioContext<Callback extends SubmitInfo> implements Closeable { private static final AtomicLong totalMaxIO = new AtomicLong(0); /** * This definition needs to match Version.h on the native sources. * <br> * Or else the native module won't be loaded because of version mismatches */ private static final int EXPECTED_NATIVE_VERSION = 7; private static boolean loaded = false; private static final AtomicBoolean shuttingDown = new AtomicBoolean(false); private static final AtomicInteger contexts = new AtomicInteger(0); public static boolean isLoaded() { return loaded; } private static boolean loadLibrary(final String name) { try { System.loadLibrary(name); if (getNativeVersion() != EXPECTED_NATIVE_VERSION) { NativeLogger.LOGGER.incompatibleNativeLibrary(); return false; } else { return true; } } catch (Throwable e) { NativeLogger.LOGGER.debug(name + " -> error loading the native library", e); return false; } } static { String[] libraries = new String[]{"artemis-native-64", "artemis-native-32"}; for (String library : libraries) { if (loadLibrary(library)) { loaded = true; Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { shuttingDown.set(true); checkShutdown(); } }); break; } else { NativeLogger.LOGGER.debug("Library " + library + " not found!"); } } if (!loaded) { NativeLogger.LOGGER.debug("Couldn't locate LibAIO Wrapper"); } } private static void checkShutdown() { if (contexts.get() == 0 && shuttingDown.get()) { shutdownHook(); } } private static native void shutdownHook(); /** * This is used to validate leaks on tests. * * @return the number of allocated aio, to be used on test checks. */ public static long getTotalMaxIO() { return totalMaxIO.get(); } /** * It will reset all the positions on the buffer to 0, using memset. * * @param buffer a native buffer. 
* s */ public void memsetBuffer(ByteBuffer buffer) { memsetBuffer(buffer, buffer.limit()); } /** * This is used on tests validating for leaks. */ public static void resetMaxAIO() { totalMaxIO.set(0); } /** * the native ioContext including the structure created. */ private final ByteBuffer ioContext; private final AtomicBoolean closed = new AtomicBoolean(false); final Semaphore ioSpace; final int queueSize; final boolean useFdatasync; /** * The queue size here will use resources defined on the kernel parameter * <a href="https://www.kernel.org/doc/Documentation/sysctl/fs.txt">fs.aio-max-nr</a> . * * @param queueSize the size to be initialize on libaio * io_queue_init which can't be higher than /proc/sys/fs/aio-max-nr. * @param useSemaphore should block on a semaphore avoiding using more submits than what's available. * @param useFdatasync should use fdatasync before calling callbacks. */ public LibaioContext(int queueSize, boolean useSemaphore, boolean useFdatasync) { try { contexts.incrementAndGet(); this.ioContext = newContext(queueSize); this.useFdatasync = useFdatasync; } catch (Exception e) { throw e; } this.queueSize = queueSize; totalMaxIO.addAndGet(queueSize); if (useSemaphore) { this.ioSpace = new Semaphore(queueSize); } else { this.ioSpace = null; } } /** * Documented at {@link LibaioFile#write(long, int, java.nio.ByteBuffer, SubmitInfo)} * * @param fd the file descriptor * @param position the write position * @param size number of bytes to use * @param bufferWrite the native buffer * @param callback a callback * @throws IOException in case of error */ public void submitWrite(int fd, long position, int size, ByteBuffer bufferWrite, Callback callback) throws IOException { if (closed.get()) { throw new IOException("Libaio Context is closed!"); } try { if (ioSpace != null) { ioSpace.acquire(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IOException(e.getMessage(), e); } submitWrite(fd, this.ioContext, position, size, 
bufferWrite, callback); } public void submitRead(int fd, long position, int size, ByteBuffer bufferWrite, Callback callback) throws IOException { if (closed.get()) { throw new IOException("Libaio Context is closed!"); } try { if (ioSpace != null) { ioSpace.acquire(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IOException(e.getMessage(), e); } submitRead(fd, this.ioContext, position, size, bufferWrite, callback); } /** * This is used to close the libaio queues and cleanup the native data used. * <br> * It is unsafe to close the controller while you have pending writes or files open as * this could cause core dumps or VM crashes. */ @Override public void close() { if (!closed.getAndSet(true)) { if (ioSpace != null) { try { ioSpace.tryAcquire(queueSize, 10, TimeUnit.SECONDS); } catch (Exception e) { NativeLogger.LOGGER.error(e); } } totalMaxIO.addAndGet(-queueSize); if (ioContext != null) { deleteContext(ioContext); } contexts.decrementAndGet(); checkShutdown(); } } @Override protected void finalize() throws Throwable { super.finalize(); close(); } /** * It will open a file. If you set the direct flag = false then you won't need to use the special buffer. * Notice: This will create an empty file if the file doesn't already exist. * * @param file the file to be open. * @param direct will set ODIRECT. * @return It will return a LibaioFile instance. * @throws IOException in case of error. */ public LibaioFile<Callback> openFile(File file, boolean direct) throws IOException { return openFile(file.getPath(), direct); } /** * It will open a file. If you set the direct flag = false then you won't need to use the special buffer. * Notice: This will create an empty file if the file doesn't already exist. * * @param file the file to be open. * @param direct should use O_DIRECT when opening the file. * @return a new open file. * @throws IOException in case of error. 
*/ public LibaioFile<Callback> openFile(String file, boolean direct) throws IOException { checkNotNull(file, "path"); checkNotNull(ioContext, "IOContext"); // note: the native layer will throw an IOException in case of errors int res = LibaioContext.open(file, direct); return new LibaioFile<>(res, this); } /** * It will open a file disassociated with any sort of factory. * This is useful when you won't use reading / writing through libaio like locking files. * * @param file a file name * @param direct will use O_DIRECT * @return a new file * @throws IOException in case of error. */ public static LibaioFile openControlFile(String file, boolean direct) throws IOException { checkNotNull(file, "path"); // note: the native layer will throw an IOException in case of errors int res = LibaioContext.open(file, direct); return new LibaioFile<>(res, null); } /** * Checks that the given argument is not null. If it is, throws {@link NullPointerException}. * Otherwise, returns the argument. */ private static <T> T checkNotNull(T arg, String text) { if (arg == null) { throw new NullPointerException(text); } return arg; } /** * It will poll the libaio queue for results. It should block until min is reached * Results are placed on the callback. * <br> * This shouldn't be called concurrently. You should provide your own synchronization if you need more than one * Thread polling for any reason. * <br> * Notice that the native layer will invoke {@link SubmitInfo#onError(int, String)} in case of failures, * but it won't call done method for you. * * @param callbacks area to receive the callbacks passed on submission.The size of this callback has to * be greater than the parameter max. * @param min the minimum number of elements to receive. It will block until this is achieved. * @param max The maximum number of elements to receive. * @return Number of callbacks returned. 
* @see LibaioFile#write(long, int, java.nio.ByteBuffer, SubmitInfo) * @see LibaioFile#read(long, int, java.nio.ByteBuffer, SubmitInfo) */ public int poll(Callback[] callbacks, int min, int max) { int released = poll(ioContext, callbacks, min, max); if (ioSpace != null) { if (released > 0) { ioSpace.release(released); } } return released; } /** * It will start polling and will keep doing until the context is closed. * This will call callbacks on {@link SubmitInfo#onError(int, String)} and * {@link SubmitInfo#done()}. * In case of error, both {@link SubmitInfo#onError(int, String)} and * {@link SubmitInfo#done()} are called. */ public void poll() { if (!closed.get()) { blockedPoll(ioContext, useFdatasync); } } /** * Called from the native layer */ private void done(SubmitInfo info) { info.done(); if (ioSpace != null) { ioSpace.release(); } } /** * This is the queue for libaio, initialized with queueSize. */ private native ByteBuffer newContext(int queueSize); /** * Internal method to be used when closing the controller. */ private native void deleteContext(ByteBuffer buffer); /** * it will return a file descriptor. * * @param path the file name. * @param direct translates as O_DIRECT On open * @return a fd from open C call. */ public static native int open(String path, boolean direct); public static native void close(int fd); /** */ /** * Buffers for O_DIRECT need to use posix_memalign. * <br> * Documented at {@link LibaioFile#newBuffer(int)}. * * @param size needs to be % alignment * @param alignment the alignment used at the dispositive * @return a new native buffer used with posix_memalign */ public static native ByteBuffer newAlignedBuffer(int size, int alignment); /** * This will call posix free to release the inner buffer allocated at {@link #newAlignedBuffer(int, int)}. * * @param buffer a native buffer allocated with {@link #newAlignedBuffer(int, int)}. 
*/ public static native void freeBuffer(ByteBuffer buffer); /** * Documented at {@link LibaioFile#write(long, int, java.nio.ByteBuffer, SubmitInfo)}. */ native void submitWrite(int fd, ByteBuffer libaioContext, long position, int size, ByteBuffer bufferWrite, Callback callback) throws IOException; /** * Documented at {@link LibaioFile#read(long, int, java.nio.ByteBuffer, SubmitInfo)}. */ native void submitRead(int fd, ByteBuffer libaioContext, long position, int size, ByteBuffer bufferWrite, Callback callback) throws IOException; /** * Note: this shouldn't be done concurrently. * This method will block until the min condition is satisfied on the poll. * <p/> * The callbacks will include the original callback sent at submit (read or write). */ native int poll(ByteBuffer libaioContext, Callback[] callbacks, int min, int max); /** * This method will block as long as the context is open. */ native void blockedPoll(ByteBuffer libaioContext, boolean useFdatasync); static native int getNativeVersion(); public static native boolean lock(int fd); public static native void memsetBuffer(ByteBuffer buffer, int size); static native long getSize(int fd); static native int getBlockSizeFD(int fd); public static int getBlockSize(File path) { return getBlockSize(path.getAbsolutePath()); } public static native int getBlockSize(String path); static native void fallocate(int fd, long size); static native void fill(int fd, long size); static native void writeInternal(int fd, long position, long size, ByteBuffer bufferWrite) throws IOException; }
/* * Copyright 2016 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.profiler.instrument; import com.navercorp.pinpoint.bootstrap.context.MethodDescriptor; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentContext; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod; import com.navercorp.pinpoint.bootstrap.interceptor.Interceptor; import com.navercorp.pinpoint.bootstrap.interceptor.registry.InterceptorRegistry; import com.navercorp.pinpoint.bootstrap.interceptor.scope.ExecutionPolicy; import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScope; import com.navercorp.pinpoint.common.util.Assert; import com.navercorp.pinpoint.profiler.context.DefaultMethodDescriptor; import com.navercorp.pinpoint.profiler.instrument.interceptor.CaptureType; import com.navercorp.pinpoint.profiler.instrument.interceptor.InterceptorDefinition; import com.navercorp.pinpoint.profiler.instrument.interceptor.InterceptorType; import com.navercorp.pinpoint.profiler.interceptor.factory.AnnotatedInterceptorFactory; import com.navercorp.pinpoint.profiler.objectfactory.ObjectBinderFactory; import com.navercorp.pinpoint.profiler.util.JavaAssistUtils; import org.objectweb.asm.tree.MethodNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author jaehong.kim */ public class ASMMethod implements InstrumentMethod { private final Logger logger = 
LoggerFactory.getLogger(this.getClass()); private final boolean isDebug = logger.isDebugEnabled(); private final EngineComponent engineComponent; private final InstrumentContext pluginContext; private final ASMClass declaringClass; private final ASMMethodNodeAdapter methodNode; private final MethodDescriptor descriptor; public ASMMethod(EngineComponent engineComponent, InstrumentContext pluginContext, ASMClass declaringClass, MethodNode methodNode) { this(engineComponent, pluginContext, declaringClass, new ASMMethodNodeAdapter(JavaAssistUtils.javaNameToJvmName(declaringClass.getName()), methodNode)); } public ASMMethod(EngineComponent engineComponent, InstrumentContext pluginContext, ASMClass declaringClass, ASMMethodNodeAdapter methodNode) { this.engineComponent = Assert.requireNonNull(engineComponent, "engineComponent"); this.pluginContext = Assert.requireNonNull(pluginContext, "pluginContext"); this.declaringClass = declaringClass; this.methodNode = methodNode; final String[] parameterVariableNames = this.methodNode.getParameterNames(); final int lineNumber = this.methodNode.getLineNumber(); final DefaultMethodDescriptor descriptor = new DefaultMethodDescriptor(declaringClass.getName(), methodNode.getName(), getParameterTypes(), parameterVariableNames); descriptor.setLineNumber(lineNumber); this.descriptor = descriptor; } @Override public String getName() { return this.methodNode.getName(); } @Override public String[] getParameterTypes() { return this.methodNode.getParameterTypes(); } @Override public String getReturnType() { return this.methodNode.getReturnType(); } @Override public int getModifiers() { return this.methodNode.getAccess(); } @Override public boolean isConstructor() { return this.methodNode.isConstructor(); } @Override public MethodDescriptor getDescriptor() { return this.descriptor; } public Class<? 
// NOTE(review): this chunk begins mid-declaration; the tokens "private Class<? " that open
// this method's signature lie before the visible region and are intentionally not reproduced.
extends Interceptor> loadInterceptorClass(String interceptorClassName) throws InstrumentException {
    try {
        // Resolve against the class loader of the class being instrumented so the
        // interceptor is visible from the target's loading context.
        ClassLoader classLoader = this.declaringClass.getClassLoader();
        return pluginContext.injectClass(classLoader, interceptorClassName);
    } catch (Exception ex) {
        throw new InstrumentException(interceptorClassName + " not found Caused by:" + ex.getMessage(), ex);
    }
}

/**
 * Attaches an already-registered interceptor (looked up by id from the global
 * {@code InterceptorRegistry}) to this method.
 *
 * @param interceptorId id previously assigned by the registry
 * @throws InstrumentException if wiring the interceptor into the method fails
 */
@Override
public void addInterceptor(int interceptorId) throws InstrumentException {
    final Interceptor interceptor = InterceptorRegistry.getInterceptor(interceptorId);
    try {
        addInterceptor0(interceptor, interceptorId);
    } catch (Exception e) {
        throw new InstrumentException("Failed to add interceptor " + interceptor.getClass().getName() + " to " + this.methodNode.getLongName(), e);
    }
}

// for internal api
// Creates a new interceptor instance (with optional scope/policy) and registers it.
int addInterceptorInternal(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, InterceptorScope interceptorScope, ExecutionPolicy executionPolicy) throws InstrumentException {
    if (interceptorClass == null) {
        throw new NullPointerException("interceptorClass");
    }
    final Interceptor interceptor = newInterceptor(interceptorClass, constructorArgs, interceptorScope, executionPolicy);
    return addInterceptor0(interceptor);
}

// Registers the instance with the engine (which assigns the id), then wires it
// into this method via the two-arg overload. Returns the assigned id.
private int addInterceptor0(Interceptor interceptor) {
    final int interceptorId = this.engineComponent.addInterceptor(interceptor);
    addInterceptor0(interceptor, interceptorId);
    return interceptorId;
}

// Builds the scope metadata for the interceptor class and instantiates it.
private Interceptor newInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, InterceptorScope interceptorScope, ExecutionPolicy executionPolicy) {
    final ScopeFactory scopeFactory = this.engineComponent.getScopeFactory();
    final ScopeInfo scopeInfo = scopeFactory.newScopeInfo(pluginContext, interceptorClass, interceptorScope, executionPolicy);
    return createInterceptor(interceptorClass, constructorArgs, scopeInfo);
}

// Instantiates the interceptor through the annotation-aware factory, binding it
// to the declaring class and this method.
private Interceptor createInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, ScopeInfo scopeInfo) {
    // exception handling.
    ObjectBinderFactory objectBinderFactory = this.engineComponent.getObjectBinderFactory();
    final AnnotatedInterceptorFactory factory = objectBinderFactory.newAnnotatedInterceptorFactory(this.pluginContext);
    final Interceptor interceptor = factory.newInterceptor(interceptorClass, constructorArgs, scopeInfo, this.declaringClass, this);
    return interceptor;
}

// Core wiring: inspects the interceptor definition and attaches before/after
// hooks to the method node. Silently skips (with a warning) methods that are
// already intercepted, abstract, or native.
private void addInterceptor0(Interceptor interceptor, int interceptorId) {
    if (interceptor == null) {
        throw new NullPointerException("interceptor");
    }
    final InterceptorDefinition interceptorDefinition = this.engineComponent.createInterceptorDefinition(interceptor.getClass());
    final Class<?> interceptorClass = interceptorDefinition.getInterceptorClass();
    final CaptureType captureType = interceptorDefinition.getCaptureType();
    if (this.methodNode.hasInterceptor()) {
        logger.warn("Skip adding interceptor. 'already intercepted method' class={}, interceptor={}", this.declaringClass.getName(), interceptorClass.getName());
        return;
    }
    if (this.methodNode.isAbstract() || this.methodNode.isNative()) {
        logger.warn("Skip adding interceptor. 'abstract or native method' class={}, interceptor={}", this.declaringClass.getName(), interceptorClass.getName());
        return;
    }
    int apiId = -1; // -1 = no cached API descriptor needed
    if (interceptorDefinition.getInterceptorType() == InterceptorType.API_ID_AWARE) {
        apiId = this.engineComponent.cacheApi(this.descriptor);
    }
    // add before interceptor.
    if (isBeforeInterceptor(captureType) && interceptorDefinition.getBeforeMethod() != null) {
        this.methodNode.addBeforeInterceptor(interceptorId, interceptorDefinition, apiId);
        this.declaringClass.setModified(true);
    } else {
        if (isDebug) {
            logger.debug("Skip adding before interceptorDefinition because the interceptorDefinition doesn't have before method: {}", interceptorClass.getName());
        }
    }
    // add after interface.
    if (isAfterInterceptor(captureType) && interceptorDefinition.getAfterMethod() != null) {
        this.methodNode.addAfterInterceptor(interceptorId, interceptorDefinition, apiId);
        this.declaringClass.setModified(true);
    } else {
        if (isDebug) {
            logger.debug("Skip adding after interceptor because the interceptor doesn't have after method: {}", interceptorClass.getName());
        }
    }
}

// AROUND implies both a before and an after hook.
private boolean isBeforeInterceptor(CaptureType captureType) {
    return CaptureType.BEFORE == captureType || CaptureType.AROUND == captureType;
}

private boolean isAfterInterceptor(CaptureType captureType) {
    return CaptureType.AFTER == captureType || CaptureType.AROUND == captureType;
}

// ---- Public overloads: all delegate to newInterceptor(...) + addInterceptor0(...). ----
// The only differences are which of {constructorArgs, scope/scopeName, executionPolicy}
// are supplied; absent values are passed as null.

@Override
public int addInterceptor(Class<? extends Interceptor> interceptorClass) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    final Interceptor interceptor = newInterceptor(interceptorClass, null, null, null);
    return addInterceptor0(interceptor);
}

@Override
public int addInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(constructorArgs, "constructorArgs");
    final Interceptor interceptor = newInterceptor(interceptorClass, constructorArgs, null, null);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, String scopeName) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(scopeName, "scopeName");
    final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
    final Interceptor interceptor = newInterceptor(interceptorClass, null, interceptorScope, null);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, InterceptorScope interceptorScope) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(interceptorScope, "interceptorScope");
    final Interceptor interceptor = newInterceptor(interceptorClass, null, interceptorScope, null);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(scopeName, "scopeName");
    Assert.requireNonNull(executionPolicy, "executionPolicy");
    final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
    final Interceptor interceptor = newInterceptor(interceptorClass, null, interceptorScope, executionPolicy);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, InterceptorScope interceptorScope, ExecutionPolicy executionPolicy) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(interceptorScope, "interceptorScope");
    Assert.requireNonNull(executionPolicy, "executionPolicy");
    final Interceptor interceptor = newInterceptor(interceptorClass, null, interceptorScope, executionPolicy);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, String scopeName) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(constructorArgs, "constructorArgs");
    Assert.requireNonNull(scopeName, "scopeName");
    final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
    final Interceptor interceptor = newInterceptor(interceptorClass, constructorArgs, interceptorScope, null);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, InterceptorScope interceptorScope) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(constructorArgs, "constructorArgs");
    Assert.requireNonNull(interceptorScope, "interceptorScope");
    final Interceptor interceptor = newInterceptor(interceptorClass, constructorArgs, interceptorScope, null);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(constructorArgs, "constructorArgs");
    Assert.requireNonNull(scopeName, "scopeName");
    Assert.requireNonNull(executionPolicy, "executionPolicy");
    final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName);
    final Interceptor interceptor = newInterceptor(interceptorClass, constructorArgs, interceptorScope, executionPolicy);
    return addInterceptor0(interceptor);
}

@Override
public int addScopedInterceptor(Class<? extends Interceptor> interceptorClass, Object[] constructorArgs, InterceptorScope interceptorScope, ExecutionPolicy executionPolicy) throws InstrumentException {
    Assert.requireNonNull(interceptorClass, "interceptorClass");
    Assert.requireNonNull(constructorArgs, "constructorArgs");
    Assert.requireNonNull(interceptorScope, "interceptorScope");
    Assert.requireNonNull(executionPolicy, "executionPolicy");
    final Interceptor interceptor = newInterceptor(interceptorClass, constructorArgs, interceptorScope, executionPolicy);
    return addInterceptor0(interceptor);
}
}
/***************************************************************************
 * Copyright 2014 greenbird Integration Technology, http://www.greenbird.com/
 *
 * This file is part of the 'xml-formatter' project available at
 * http://greenbird.github.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.greenbird.xml.prettyprinter.plain;

/**
 *
 * XML Pretty Printer - produces reformatted XML.
 * <br>
 * Reformat of XML within comments and CDATA is supported.
 * <br>
 * XML within text nodes is also reformatted - if the text starts with '&lt;' and ends with '&gt;' and size is &gt;= 4.
 * <br>
 * Max text and CDATA node size is supported.
 *
 * @author Thomas Rorvik Skjolberg
 *
 */
public class PlainPrettyPrinterForTextNodesWithXMLAndMaxNodeLength extends AbstractPrettyPrinter {

    /** pretty-print cdata */
    private final boolean cdata;
    /** pretty-print comments */
    private final boolean comment;
    /** if true, text nodes that merely look like XML but fail to parse are emitted
     *  as plain text instead of aborting the whole pretty-print */
    private final boolean ignoreInvalidTextNodeXML;

    /**
     * Constructor.
     *
     * @param cdata whether CDATA content should itself be pretty-printed
     * @param comment whether comment content should itself be pretty-printed
     * @param ignoreInvalidTextNodeXML fall back to plain text for invalid embedded XML
     * @param declaration whether to keep the XML declaration
     * @param maxTextNodeLength max text node size before truncation
     * @param maxCDATANodeLength max CDATA node size before truncation
     * @param indentationCharacter character used for indentation
     * @param indentationMultiplier indentation characters per level
     */
    public PlainPrettyPrinterForTextNodesWithXMLAndMaxNodeLength(boolean cdata, boolean comment, boolean ignoreInvalidTextNodeXML, boolean declaration, int maxTextNodeLength, int maxCDATANodeLength, char indentationCharacter, int indentationMultiplier) {
        super(declaration, maxTextNodeLength, maxCDATANodeLength, indentationCharacter, indentationMultiplier);
        this.cdata = cdata;
        this.comment = comment;
        this.ignoreInvalidTextNodeXML = ignoreInvalidTextNodeXML;
    }

    // Convenience entry point: converts (offset, count) to the (offset, endExclusive)
    // form used by the worker overload, starting at nesting level 0.
    public boolean process(final char[] chars, int offset, int length, final StringBuilder buffer) {
        return process(chars, offset, offset + length, buffer, 0);
    }

    /**
     * Pretty-prints {@code chars[offset..length)} into {@code buffer}.
     * On any failure the buffer is rolled back to its length on entry and
     * {@code false} is returned. NOTE(review): despite its name, the second
     * {@code length} parameter is an exclusive END index, not a count.
     *
     * @param levelOffset starting nesting level (used by the recursive call for
     *                    XML escaped inside text nodes)
     * @return true on success, false if the input was not well-formed
     */
    public boolean process(final char[] chars, int offset, final int length, final StringBuilder buffer, final int levelOffset) {
        /**
         *
         * Implementation note: cdata + comments characters handled on end element, not in uniform way,
         * so keep track of character type.
         * Implementation note: Use stricter bounds checking
         *
         */
        int bufferLength = buffer.length(); // rollback point on failure
        final int maxTextNodeLength = this.maxTextNodeLength; // watch overflow when using arithmetic
        final int maxCDATANodeLength = this.maxCDATANodeLength; // watch overflow when using arithmetic
        CharactersType characterType = CharactersType.NONE;
        char[][] indentations = this.indentations;
        int sourceStart = offset; // start of the not-yet-flushed span of chars
        int level = levelOffset;
        Type type = Type.NEITHER; // whether the previous tag opened (INCREMENT) or closed (DECREMENT) a level
        try {
            while(offset < length - 3) {
                if(chars[offset] == '<') {
                    switch(chars[offset + 1]) {
                        case '/' : {
                            // end element
                            level--;
                            if(type != Type.INCREMENT) {
                                // 2 or more endish elements
                                // flush bytes
                                if(sourceStart < offset) {
                                    buffer.append(chars, sourceStart, offset - sourceStart);
                                    sourceStart = offset;
                                }
                                buffer.append(indentations[level]);
                            } else {
                                // characters: text, or cdata, or comment node
                                if(characterType == CharactersType.NONE) {
                                    // text node; may contain escaped XML to be unescaped + recursively formatted
                                    xml:
                                    if(isEscapedXML(chars, offset, sourceStart)) {
                                        int markLength = buffer.length(); // rollback point if embedded XML is invalid
                                        int markSourceStart = sourceStart;
                                        // initially detected entity
                                        buffer.append('<');
                                        sourceStart += 4;
                                        // unescape remaining entities in the text node
                                        for(int k = sourceStart; k < offset; k++) {
                                            if(chars[k] == '&') {
                                                k++;
                                                int l = k;
                                                while(l < offset) {
                                                    if(chars[l] == ';') {
                                                        if(chars[k] == '#') {
                                                            // numeric entity - passed through unchanged
                                                            buffer.append(chars, sourceStart, l - sourceStart + 1);
                                                            sourceStart = l + 1;
                                                        } else {
                                                            buffer.append(chars, sourceStart, k - sourceStart - 1);
                                                            if(appendEntity(l - k, chars, k, buffer)) {
                                                                sourceStart = l + 1;
                                                            } else {
                                                                // unknown entity?
                                                                if(ignoreInvalidTextNodeXML) {
                                                                    // reset
                                                                    buffer.setLength(markLength);
                                                                    sourceStart = markSourceStart;
                                                                    break xml;
                                                                } else {
                                                                    // report back negative result
                                                                    buffer.setLength(bufferLength);
                                                                    return false;
                                                                }
                                                            }
                                                        }
                                                        k = l; // increments +1 in for loop
                                                        break;
                                                    }
                                                    l++;
                                                }
                                            }
                                        }
                                        if(sourceStart < offset) {
                                            buffer.append(chars, sourceStart, offset - sourceStart);
                                            sourceStart = offset;
                                        }
                                        // recursive call - expensive
                                        // rip chars from buffer
                                        char[] innerChars = new char[buffer.length() - markLength];
                                        buffer.getChars(markLength, buffer.length(), innerChars, 0);
                                        // set buffer back to the length we had before unescaping
                                        buffer.setLength(markLength);
                                        if(!process(innerChars, 0, innerChars.length, buffer, level + 1)) {
                                            if(ignoreInvalidTextNodeXML) {
                                                // reset
                                                buffer.setLength(markLength);
                                                sourceStart = markSourceStart;
                                            } else {
                                                // report back negative result
                                                buffer.setLength(bufferLength);
                                                return false;
                                            }
                                        } else {
                                            // so we have now written the payload as at least one element
                                            buffer.append(indentations[level]);
                                        }
                                    } else {
                                        // count as plain text, not XML
                                    }
                                    // count as textual data, not XML
                                    if(offset - sourceStart > maxTextNodeLength) {
                                        buffer.append(chars, sourceStart, maxTextNodeLength);
                                        buffer.append("...[TRUNCATED BY ");
                                        buffer.append(offset - sourceStart - maxTextNodeLength);
                                        buffer.append("]");
                                        sourceStart = offset; // skip to <
                                    } else {
                                        // flush below
                                    }
                                } else if(characterType == CharactersType.CDATA) {
                                    if(offset - sourceStart - 3 > maxCDATANodeLength) { // 3 - watch overflow
                                        // already have flushed <![CDATA[
                                        buffer.append(chars, sourceStart, maxCDATANodeLength);
                                        buffer.append("...[TRUNCATED BY ");
                                        buffer.append(offset - sourceStart - 3 - maxCDATANodeLength);
                                        buffer.append("]");
                                        sourceStart = offset - 3; // i.e. first ] in ]]>
                                    } else {
                                        // not long enough to truncate
                                    }
                                } else {
                                    // do not truncate
                                }
                                type = Type.DECREMENT;
                            }
                            offset = scanBeyondEndElement(chars, offset, length);
                            // complete tag
                            buffer.append(chars, sourceStart, offset - sourceStart);
                            sourceStart = offset;
                            characterType = CharactersType.NONE;
                            continue;
                        }
                        case '!' : {
                            // skip cdata and comments so that we maintain correct level count
                            if(chars[offset + 2] == '-') {
                                // look for -->
                                if(sourceStart < offset) {
                                    buffer.append(chars, sourceStart, offset - sourceStart);
                                    sourceStart = offset;
                                }
                                buffer.append(indentations[level]);
                                if(!comment) {
                                    // comment contents passed through verbatim
                                    offset = scanBeyondComment(chars, offset, length);
                                    // complete comment
                                    buffer.append(chars, sourceStart, offset - sourceStart);
                                    sourceStart = offset;
                                    type = Type.DECREMENT;
                                    characterType = CharactersType.NONE;
                                    continue;
                                } else {
                                    offset += 4; // skip <!--
                                }
                                // complete comment
                                buffer.append(chars, sourceStart, offset - sourceStart);
                                sourceStart = offset;
                                type = Type.DECREMENT;
                                characterType = CharactersType.COMMENT;
                                continue;
                            } else if(chars[offset + 2] == '[') {
                                // <![CDATA[ ... ]]>
                                if(offset + 12 >= length) {
                                    return false;
                                }
                                offset += 9;
                                if(!cdata) {
                                    // flush previous data + <![CDATA[
                                    buffer.append(chars, sourceStart, offset - sourceStart);
                                    sourceStart = offset;
                                    offset = scanBeyondCDataEnd(chars, offset + 3, length);
                                    // skip ]]>
                                    if(offset - 3 - sourceStart > maxCDATANodeLength) {
                                        buffer.append(chars, sourceStart, maxCDATANodeLength);
                                        buffer.append("...[TRUNCATED BY ");
                                        buffer.append(offset - 3 - sourceStart - maxCDATANodeLength);
                                        buffer.append("]");
                                        sourceStart = offset - 3; // keep ]]>
                                    }
                                    characterType = CharactersType.NONE;
                                } else {
                                    // counts as characters
                                    characterType = CharactersType.CDATA;
                                }
                                // complete cdata
                                buffer.append(chars, sourceStart, offset - sourceStart);
                                sourceStart = offset;
                                continue;
                            } else {
                                // assume entity declaration
                                // look for >
                                offset = scanBeyondDTDEnd(chars, offset, length);
                                type = Type.DECREMENT;
                                // complete entity declaration
                                buffer.append(chars, sourceStart, offset - sourceStart);
                                sourceStart = offset;
                                continue;
                            }
                        }
                        case '?' : {
                            // processing instruction
                            // indentate as start elements
                            if(sourceStart < offset) {
                                buffer.append(chars, sourceStart, offset - sourceStart);
                                sourceStart = offset;
                            }
                            offset = scanBeyondProcessingInstruction(chars, offset, length);
                            // <?xml version="1.0"?>
                            if(level == 0 && !declaration && isXMLDeclaration(chars, sourceStart, length)) {
                                // skip the whole XML declaration
                                sourceStart = offset;
                            } else {
                                buffer.append(indentations[level]);
                                // complete processing instruction
                                buffer.append(chars, sourceStart, offset - sourceStart);
                                sourceStart = offset;
                                type = Type.DECREMENT;
                            }
                            characterType = CharactersType.NONE;
                            continue;
                        }
                        default : {
                            // start element
                            // flush bytes
                            if(sourceStart < offset) {
                                buffer.append(chars, sourceStart, offset - sourceStart);
                                sourceStart = offset;
                            }
                            if(level >= indentations.length) {
                                indentations = ensureCharCapacity(level + 8);
                            }
                            buffer.append(indentations[level]);
                            // scan to end of start element
                            offset = scanBeyondStartElement(chars, offset, length);
                            // see if empty start element
                            if(chars[offset - 2] == '/') {
                                // empty element
                                type = Type.DECREMENT;
                                // do not increment level
                            } else {
                                type = Type.INCREMENT;
                                level++;
                            }
                            // complete start tag
                            buffer.append(chars, sourceStart, offset - sourceStart);
                            sourceStart = offset;
                            characterType = CharactersType.NONE;
                            continue;
                        }
                    }
                }
                offset++;
            }
            // unbalanced start/end tags -> not well-formed
            if(level != levelOffset) {
                buffer.setLength(bufferLength);
                return false;
            }
            // flush any trailing characters
            if(sourceStart < length) {
                buffer.append(chars, sourceStart, length - sourceStart);
            }
        } catch(Exception e) {
            // deliberate catch-all: malformed input may index out of bounds; roll back and report failure
            buffer.setLength(bufferLength);
            return false;
        }
        return true;
    }

    public boolean isPrettyPrintCData() {
        return cdata;
    }

    public boolean isPrettyPrintComments() {
        return comment;
    }

    public boolean isPrettyPrintTextNodes() {
        return true;
    }

    @Override
    public String toString() {
        return "PlainPrettyPrinterForTextNodesWithXMLAndMaxNodeLength [cdata=" + cdata + ", comment=" + comment + ", ignoreInvalidTextNodeXML=" + ignoreInvalidTextNodeXML + ", declaration=" + declaration + ", maxTextNodeLength=" + getMaxTextNodeLength() + ", maxCDATANodeLength=" + getMaxCDATANodeLength() + "]";
    }
}
package org.rril.bungeelogin.listeners;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Level;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.rril.bungeelogin.bungeelogin;
import org.rril.bungeelogin.MD5;

/**
 * Command manager for bungeelogin plugin.
 *
 * Handles the admin command (/bungeelogin register|unregister|changepw|reload)
 * and the player commands /register, /login, /logout, /changepw.
 *
 * Fixes over the previous revision:
 * - admin "changepw" read args[3] while requiring args.length == 3
 *   (guaranteed ArrayIndexOutOfBoundsException) and targeted the wrong player index;
 * - player "changepw" password-check SQL was missing quotes around the MD5 hash,
 *   so the query could never succeed;
 * - Bukkit.getPlayer(name) returns null for offline/unknown players; the admin
 *   commands dereferenced it unconditionally (NullPointerException);
 * - password confirmation used equalsIgnoreCase, silently accepting a
 *   case-mismatched confirmation.
 *
 * NOTE(review): queries are still built by concatenation; the interpolated values
 * are UUID strings and MD5 hex digests (no quote characters), but a parameterized
 * API on databaseConnection would be preferable. MD5 is also a weak password hash;
 * kept for compatibility with the existing users table.
 *
 * @author Stakzz
 * @version 0.9.0
 */
public class CommandManager implements CommandExecutor {

    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String arg, String[] args) {
        // Dispatch to one helper per command; each helper replicates the original
        // permission handling (no permission -> consume the command silently).
        if (cmd.getName().equalsIgnoreCase("bungeelogin")) {
            return handleAdmin(sender, args);
        }
        if (cmd.getName().equalsIgnoreCase("register") && (sender instanceof Player)) {
            return handleRegister((Player) sender, args);
        }
        if (cmd.getName().equalsIgnoreCase("login") && (sender instanceof Player)) {
            return handleLogin((Player) sender, args);
        }
        if (cmd.getName().equalsIgnoreCase("logout") && (sender instanceof Player)) {
            return handleLogout((Player) sender);
        }
        if (cmd.getName().equalsIgnoreCase("changepw") && (sender instanceof Player)) {
            return handleChangePassword((Player) sender, args);
        }
        return false;
    }

    /** Admin command dispatcher (/bungeelogin ...). */
    private boolean handleAdmin(CommandSender sender, String[] args) {
        if (!sender.hasPermission("bungeelogin.admin")) {
            return true;
        }
        if (args.length == 0) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "This command require more arguments");
            return false;
        }
        if (args[0].equalsIgnoreCase("register")) {
            adminRegister(sender, args);
            return true;
        }
        if (args[0].equalsIgnoreCase("unregister")) {
            adminUnregister(sender, args);
            return true;
        }
        if (args[0].equalsIgnoreCase("changepw")) {
            adminChangePassword(sender, args);
            return true;
        }
        if (args[0].equalsIgnoreCase("reload")) {
            adminReload(sender);
            return true;
        }
        return true;
    }

    /** Looks up an online player by name, messaging the sender when absent. */
    private Player requireOnlinePlayer(CommandSender sender, String name) {
        Player target = Bukkit.getPlayer(name);
        if (target == null) {
            // Fix: the old code dereferenced the null return unconditionally.
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + name + " is not online");
        }
        return target;
    }

    /** /bungeelogin register &lt;player&gt; &lt;password&gt; */
    private void adminRegister(CommandSender sender, String[] args) {
        if (args.length != 3) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Syntax error.");
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.GOLD + "/bungeelogin register <player> <password>");
            return;
        }
        Player target = requireOnlinePlayer(sender, args[1]);
        if (target == null) {
            return;
        }
        if (bungeelogin.isRegistered(target.getUniqueId().toString())) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + args[1] + " is already registered");
            return;
        }
        if (bungeelogin.vAuth) {
            try {
                bungeelogin.vAuthDatabaseConnection.register(target, args[2], args[2]);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return;
            }
        } else {
            // Values are a UUID string and an MD5 hex digest - neither can contain quotes.
            String query = "INSERT INTO users(username, password) VALUES('" + target.getUniqueId().toString() + "','" + MD5.crypt(args[2]) + "');";
            try {
                bungeelogin.databaseConnection.executeUpdate(query);
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register " + args[1]);
                return;
            }
        }
        sender.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + args[1] + " is successfully registered");
    }

    /** /bungeelogin unregister &lt;player&gt; */
    private void adminUnregister(CommandSender sender, String[] args) {
        if (args.length != 2) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Syntax error.");
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.GOLD + "/bungeelogin unregister <player>");
            return;
        }
        if (!bungeelogin.isRegistered(args[1])) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + args[1] + " is not registered");
            return;
        }
        Player target = requireOnlinePlayer(sender, args[1]);
        if (target == null) {
            return;
        }
        if (bungeelogin.vAuth) {
            try {
                bungeelogin.vAuthDatabaseConnection.remove(target);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return;
            }
        } else {
            String query = "DELETE FROM users WHERE username = '" + target.getUniqueId().toString() + "';";
            try {
                bungeelogin.databaseConnection.executeUpdate(query);
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to unregister " + args[1]);
                return;
            }
        }
        sender.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + args[1] + " is successfully unregistered");
    }

    /** /bungeelogin changepw &lt;player&gt; &lt;newpassword&gt; */
    private void adminChangePassword(CommandSender sender, String[] args) {
        if (args.length != 3) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Syntax error.");
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.GOLD + "/bungeelogin changepw <player> <newpassword>");
            return;
        }
        if (!bungeelogin.isRegistered(args[1])) {
            sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + args[1] + " is not registered");
            return;
        }
        Player target = requireOnlinePlayer(sender, args[1]);
        if (target == null) {
            return;
        }
        if (bungeelogin.vAuth) {
            try {
                // Fix: the old code passed Bukkit.getPlayer(args[2]) and args[3]
                // (out of bounds with args.length == 3). The target is args[1],
                // the new password args[2].
                bungeelogin.vAuthDatabaseConnection.register(target, args[2], args[2]);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return;
            }
        } else {
            String query = "UPDATE users SET password = '" + MD5.crypt(args[2]) + "' WHERE username = '" + target.getUniqueId().toString() + "';";
            try {
                bungeelogin.databaseConnection.executeUpdate(query);
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                sender.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to change " + args[1] + "'s password.");
                return;
            }
        }
        sender.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "You are successfully change " + args[1] + "'s password");
    }

    /** /bungeelogin reload - bounce the plugin and ask everyone to log back in. */
    private void adminReload(CommandSender sender) {
        bungeelogin.pluginManager.disablePlugin(bungeelogin.plugin);
        bungeelogin.pluginManager.enablePlugin(bungeelogin.plugin);
        bungeelogin.plugin.getServer().broadcastMessage(bungeelogin.PROMPT + ChatColor.GOLD + "bungeelogin has been reloaded, please log back in");
        bungeelogin.plugin.getServer().broadcastMessage(bungeelogin.PROMPT + ChatColor.GOLD + "/login <password>");
        sender.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "Plugin reloaded!");
    }

    /** /register &lt;password&gt; &lt;password&gt; */
    private boolean handleRegister(Player player, String[] args) {
        if (!player.hasPermission("bungeelogin.register")) {
            return true;
        }
        if (bungeelogin.isRegistered(player)) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "You are already registered");
            return true;
        }
        if (args.length != 2) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Syntax error");
            return false;
        }
        // Fix: case-sensitive comparison; equalsIgnoreCase accepted "Secret"/"secret"
        // as matching and silently stored the first spelling.
        if (!args[0].equals(args[1])) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Passwords are not equal");
            return true;
        }
        if (bungeelogin.vAuth) {
            try {
                bungeelogin.vAuthDatabaseConnection.register(player, args[0], args[1]);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        } else {
            String query = "INSERT INTO users(username, password) VALUES('" + player.getUniqueId().toString() + "','" + MD5.crypt(args[0]) + "');";
            try {
                bungeelogin.databaseConnection.executeUpdate(query);
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        }
        // Registering also opens a session bound to the player's current address.
        bungeelogin.sessions.put(player.getUniqueId().toString(), player.getAddress().getHostString());
        player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "You are successfully registered");
        player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "Have fun !");
        return true;
    }

    /** /login &lt;password&gt; */
    private boolean handleLogin(Player player, String[] args) {
        if (!player.hasPermission("bungeelogin.login")) {
            return true;
        }
        if (bungeelogin.isLogged(player)) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "You are already logged in");
            return true;
        }
        if (args.length != 1) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Syntax error");
            return false;
        }
        boolean passwdCorrect = false;
        if (bungeelogin.vAuth) {
            try {
                passwdCorrect = bungeelogin.vAuthDatabaseConnection.loginCheck(player, args[0]);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        } else {
            // A row is only matched when the stored hash equals the hash of the
            // supplied password; "block" = '0' means the account is not blocked.
            String query = "SELECT `block` FROM `users` WHERE `username` = '" + player.getUniqueId().toString() + "' AND `password` = '" + MD5.crypt(args[0]) + "';";
            try {
                ResultSet result = bungeelogin.databaseConnection.executeQuery(query);
                result.first();
                if (result.getString("block").equalsIgnoreCase("0")) {
                    passwdCorrect = true;
                }
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to login.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        }
        if (passwdCorrect) {
            bungeelogin.sessions.put(player.getUniqueId().toString(), player.getAddress().getHostString());
            player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "You are successfully logged in.");
            player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "Have fun !");
        } else {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Incorrect password. Try again.");
        }
        return true;
    }

    /** /logout */
    private boolean handleLogout(Player player) {
        if (!player.hasPermission("bungeelogin.logout")) {
            return true;
        }
        if (!bungeelogin.isLogged(player)) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "You are not logged in");
            return true;
        }
        bungeelogin.sessions.remove(player.getUniqueId().toString());
        player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "You are successfully logged out.");
        player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "See you.");
        return true;
    }

    /** /changepw &lt;oldpassword&gt; &lt;newpassword&gt; &lt;newpassword&gt; */
    private boolean handleChangePassword(Player player, String[] args) {
        if (!player.hasPermission("bungeelogin.changepw")) {
            return true;
        }
        if (!bungeelogin.isLogged(player)) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "You aren't logged in");
            return true;
        }
        if (args.length != 3) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Syntax error");
            return false;
        }
        boolean passwdCorrect = false;
        if (bungeelogin.vAuth) {
            try {
                passwdCorrect = bungeelogin.vAuthDatabaseConnection.loginCheck(player, args[0]);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        } else {
            // Fix: the old query was missing the quotes around the hashed password
            // ("password` = <hash>;"), which made the statement invalid SQL.
            String query = "SELECT `block` FROM `users` WHERE `username` = '" + player.getUniqueId().toString() + "' AND `password` = '" + MD5.crypt(args[0]) + "';";
            try {
                ResultSet result = bungeelogin.databaseConnection.executeQuery(query);
                result.first();
                if (result.getString("block").equalsIgnoreCase("0")) {
                    passwdCorrect = true;
                }
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to change your password.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        }
        if (!passwdCorrect) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Incorrect password. Try again.");
            return true;
        }
        // Fix: case-sensitive confirmation (was equalsIgnoreCase).
        if (!args[1].equals(args[2])) {
            player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "New passwords are not equal");
            return true;
        }
        if (bungeelogin.vAuth) {
            try {
                bungeelogin.vAuthDatabaseConnection.register(player, args[1], args[2]);
            } catch (Exception e) {
                bungeelogin.logger.log(Level.SEVERE, "vAuth Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to register.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        } else {
            String query1 = "UPDATE users SET password = '" + MD5.crypt(args[1]) + "' WHERE username = '" + player.getUniqueId().toString() + "';";
            try {
                bungeelogin.databaseConnection.executeUpdate(query1);
            } catch (SQLException e) {
                bungeelogin.logger.log(Level.SEVERE, "SQL Exception - " + e.toString());
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "An error occured when you try to change your password.");
                player.sendMessage(bungeelogin.PROMPT + ChatColor.RED + "Try again or contact an admin");
                return true;
            }
        }
        player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "You are successfully change your password");
        player.sendMessage(bungeelogin.PROMPT + ChatColor.GREEN + "Have fun !");
        return true;
    }
}
package net.happybrackets.core.scheduling;

import de.sciss.net.OSCMessage;
import net.happybrackets.core.Device;
import net.happybrackets.core.OSCVocabulary;
import net.happybrackets.device.sensors.DataSmoother;

import java.util.*;

/**
 * Singleton class for containing inter-device scheduling.
 * <p>
 * Devices advertise their scheduler state over the network; the device that sorts
 * first in {@link #deviceSchedulerValues} is treated as the lead device, and all
 * other devices adjust their global {@link HBScheduler} towards it.
 */
public class DeviceSchedules {

    static final boolean SHOW_DEBUG = false;

    // Number of milliseconds over which a schedule adjustment is spread
    static final int SCHEDULE_ADJUST_TIME = 500;

    // Stratum assigned to the current lead device
    static final int LEAD_STRATUM = 10;

    // Stratum a device starts with before it has been promoted or demoted
    static final int STARTUP_STRATUM = LEAD_STRATUM + 1;

    // Maximum jitter (ms) we will accept between two JVM-difference readings
    static final double MAX_JIT_THRESHOLD_MS = 20;

    // We will decrement our failure (discard) count by this amount on each successful
    // JVM match. NOTE(review): this declaration had been commented out while still being
    // referenced in processCurrentMessage(), which cannot compile — restored here.
    static final float SUCCESS_CUDOS = 0.1f;

    private static final Object creationLock = new Object();
    private static final Object listLock = new Object();

    // create a group of listeners for global controls over network
    private List<OSCVocabulary.OSCAdvertiseListener> globalScheduleListenerList = new ArrayList<>();

    // Sorted view (lead device first) and name-keyed view of the same value objects.
    // The two structures share their DeviceSchedulerValue instances; updates to one
    // are visible through the other. Guarded by listLock.
    SortedSet<DeviceSchedulerValue> deviceSchedulerValues = new TreeSet<>();
    HashMap<String, DeviceSchedulerValue> deviceSchedulerValueHashMap = new HashMap<>();

    private int stratum = STARTUP_STRATUM;

    /**
     * Add a listener that will be notified when schedule messages are advertised.
     * @param listener the listener to add
     */
    public void addGlobalScheduleAdvertiseListener(OSCVocabulary.OSCAdvertiseListener listener) {
        globalScheduleListenerList.add(listener);
    }

    /**
     * Send OSC Message across to global senders - this message is for Global Scope controls
     * @param msg the Control Message to send
     * @param targetAddresses collection of address to send message to. Can be null, in which case will be broadcast
     */
    synchronized void sendGlobalScheduleAdvertiseMessage(OSCMessage msg, Collection<String> targetAddresses) {
        for (OSCVocabulary.OSCAdvertiseListener listener : globalScheduleListenerList) {
            listener.OSCAdvertiseEvent(msg, targetAddresses);
        }
    }

    /**
     * See if we are the current leading device on synchroniser network
     * @return true if this is the device that others are synchronising to
     */
    public boolean isLeadDevice() {
        boolean ret = false;
        if (numberDevices() > 0) {
            synchronized (listLock) {
                DeviceSchedulerValue first = deviceSchedulerValues.first();
                ret = first.getDeviceName().equalsIgnoreCase(Device.getDeviceName());
            }
        }
        return ret;
    }

    /**
     * Return the number of devices synchronising on network
     * @return number devices
     */
    public int numberDevices() {
        return deviceSchedulerValues.size();
    }

    /**
     * Get the JVM Uptime
     * @return the JVM uptime in milliseconds
     */
    private double getUptime() {
        return HBScheduler.getUptime();
    }

    // Console diagnostics; active only while SHOW_DEBUG is true
    private static void showDebug(String text) {
        if (SHOW_DEBUG) {
            System.out.println(text);
        }
    }

    /**
     * Get the Scheduled time of global {@link HBScheduler}
     * @return the scheduler time of this device
     */
    double getSchedulerTime() {
        return HBScheduler.getGlobalScheduler().getSchedulerTime();
    }

    /**
     * Build our Local Time as we see it by refreshing the shared local value object.
     * @return the Local Device Time we are at
     */
    private DeviceSchedulerValue buildLocalDeviceSchedule() {
        localDeviceSchedule.schedulerTime = getSchedulerTime();
        localDeviceSchedule.upTime = getUptime();
        localDeviceSchedule.stratum = stratum;
        return localDeviceSchedule;
    }

    // Our own schedule entry; refreshed by buildLocalDeviceSchedule() before each send
    volatile DeviceSchedulerValue localDeviceSchedule = new DeviceSchedulerValue(Device.getDeviceName(), getUptime(), getSchedulerTime(), stratum);

    private static DeviceSchedules ourInstance = null;

    /**
     * Obtain the singleton instance, creating it on first use.
     * @return the shared {@link DeviceSchedules} instance
     */
    public static DeviceSchedules getInstance() {
        synchronized (creationLock) {
            if (ourInstance == null) {
                ourInstance = new DeviceSchedules();
            }
        }
        return ourInstance;
    }

    /**
     * Private constructor to ensure singleton
     */
    private DeviceSchedules() {
    }

    /**
     * Send the current Scheduler information about this device
     * @return true if able to send
     */
    public boolean sendCurrentTime() {
        boolean ret = false;
        // encode our message
        localDeviceSchedule = buildLocalDeviceSchedule();
        OSCMessage message = HBScheduler.buildNetworkSendMessage(OSCVocabulary.SchedulerMessage.CURRENT, localDeviceSchedule);
        try {
            sendGlobalScheduleAdvertiseMessage(message, null);
            ret = true;
        } catch (Exception ex) {
            // best-effort send: a network failure is reported via the return value
            ret = false;
        }
        return ret;
        //return NetworkCommunication.sendNetworkOSCMessages(message, null, false);
    }

    /**
     * Check that two values are within our Maximum Jit Threshold
     * @param val_1 value 1
     * @param val_2 value 2
     * @return true if less than defined limit
     */
    private boolean valuesInThreshold(double val_1, double val_2) {
        return Math.abs(val_1 - val_2) < MAX_JIT_THRESHOLD_MS;
    }

    /**
     * Get the total discards for all devices
     * @return to total of all discards
     */
    public float totalDiscardCount() {
        float ret = 0;
        List<DeviceSchedulerValue> devices = getDeviceSchedulerValues();
        for (DeviceSchedulerValue device : devices) {
            ret += device.discardCount;
            // a reset represents a whole buffer of discarded samples
            ret += device.resetCount * device.jvmDifferences.getBuffSize();
        }
        return ret;
    }

    /**
     * Process a stratum message to advise us we should change our stratum
     * @param message the message containing stratum
     * @return true if we changed our stratum
     */
    public boolean processStratumMessage(DeviceStratumMessage message) {
        boolean ret = false;
        // only act if the message targets us and actually changes our stratum
        if (message.deviceName.equalsIgnoreCase(localDeviceSchedule.deviceName) && message.stratum != stratum) {
            stratum = message.stratum;
            sendCurrentTime();
            showDebug("Stratum Changed");
            ret = true;
        }
        return ret;
    }

    /**
     * Process reception of a DeviceSchedule message.
     * Updates the stored entry for the sending device (filtering out readings that
     * look like JVM jitter), and, when the message came from the lead device,
     * adjusts our own scheduler towards it.
     * @param source the source device that has sent this message
     * @param value the DeviceSchedulerValue message received
     */
    public void processCurrentMessage(String source, DeviceSchedulerValue value) {
        String device_name = value.deviceName;
        synchronized (listLock) {
            // we will store our absolute time
            value.lastUpdateJVMTime = getUptime();
            double jvm_diff = value.getUpTime() - value.lastUpdateJVMTime;
            if (deviceSchedulerValueHashMap.containsKey(device_name)) {
                // by just copying the values, we will be updating both the sorted set and the hashtable
                DeviceSchedulerValue stored = deviceSchedulerValueHashMap.get(device_name);
                DataSmoother stored_jvm_diffs = stored.jvmDifferences;
                boolean store_value = true;
                if (!stored_jvm_diffs.dataPrimed()) {
                    if (stored_jvm_diffs.isEmpty()) {
                        stored.jvmDifferences.addValue(jvm_diff);
                        store_value = true;
                    } else {
                        if (valuesInThreshold(stored_jvm_diffs.getAverage(), jvm_diff)) {
                            stored.jvmDifferences.addValue(jvm_diff);
                            store_value = true;
                        } else {
                            store_value = false;
                            // discard as it is outside jitter
                            stored.jvmDifferences.reset();
                            showDebug("Discard Not Primed " + stored.getDeviceName());
                        }
                    }
                } // !primed
                else // we are fully primed need to see if this has been affected by JIT
                {
                    if (valuesInThreshold(stored_jvm_diffs.getAverage(), jvm_diff)) {
                        stored.jvmDifferences.addValue(jvm_diff);
                        store_value = true;
                    } else {
                        showDebug("Discard Primed " + stored.getDeviceName());
                        stored.discardCount++;
                        store_value = false;
                    }
                }

                if (store_value) {
                    if (stored.copyValues(value)) {
                        // The sort value may have changed
                        // we need to remove and then re-add otherwise it will not get sorted
                        deviceSchedulerValues.remove(stored);
                        deviceSchedulerValues.add(stored);
                    }

                    // we will allow our discard count to gradually reduce on each success
                    if (stored.discardCount > 0) {
                        stored.discardCount -= SUCCESS_CUDOS;
                    }

                    // check that this message has come from the lead device that is not us
                    // (was non-short-circuit '&'; '&&' is equivalent for these side-effect-free operands)
                    DeviceSchedulerValue first = deviceSchedulerValues.first();
                    if (first.equals(stored) && !first.getDeviceName().equalsIgnoreCase(localDeviceSchedule.deviceName)) {
                        // we have a fully primed and stored other value
                        // if it is not us, it has a startup stratum and it has earlier JVM time,
                        // we will want to inform it to better its stratum
                        if (stored.stratum == STARTUP_STRATUM && stored_jvm_diffs.dataPrimed() && deviceSchedulerValues.size() > 1) {
                            // if the best JVM time is another device, inform it that we want it to have a better
                            // stratum to prevent a new device entering and taking over
                            sendDeviceStratum(first.getDeviceName(), LEAD_STRATUM);
                        } // end send stratum

                        // let us adjust our scheduler
                        adjustScheduler(first);
                    } // send stored == first
                } else {
                    if (stored.discardCount > stored_jvm_diffs.getBuffSize()) {
                        stored.jvmDifferences.reset();
                        stored.discardCount = 0;
                        stored.resetCount++;
                        showDebug("Reset Smoother " + stored.getDeviceName());
                        // if we are having trouble with JVM Jitter, move this device to a worse stratum
                        if (stratum == STARTUP_STRATUM || stratum == LEAD_STRATUM) {
                            if (stored.getDeviceName().equalsIgnoreCase(localDeviceSchedule.deviceName)) {
                                stratum = STARTUP_STRATUM + 1;
                                showDebug("Lower Stratum " + stored.getDeviceName());
                            } else if (totalDiscardCount() > stored.jvmDifferences.getBuffSize() * 2) {
                                stratum = STARTUP_STRATUM + 1;
                                showDebug("Lower Stratum Multiple JVMs");
                            }
                        }
                    }
                }
            } else {
                // first message from this device: register it in both views
                deviceSchedulerValueHashMap.put(device_name, value);
                deviceSchedulerValues.add(value);
            }
        }
    }

    /**
     * Adjust the global {@link HBScheduler} so we synchronise to the lead device
     * @param first the lead device on our list
     */
    private void adjustScheduler(DeviceSchedulerValue first) {
        double estimated = first.estimateSchedulerTime(getUptime());
        double diff = estimated - getSchedulerTime();
        // only correct drifts larger than half the jitter threshold
        if (Math.abs(diff) > MAX_JIT_THRESHOLD_MS / 2) {
            HBScheduler.getGlobalScheduler().adjustScheduleTime(diff, SCHEDULE_ADJUST_TIME);
        }
    }

    /**
     * Send a message to Device that we want it to change it's startup stratum to this new one if
     * it is at the startup stratum
     * @param deviceName the name of the device
     * @param new_stratum new stratum we are setting to
     */
    private void sendDeviceStratum(String deviceName, int new_stratum) {
        // NOTE(review): the deviceName parameter is unused — the message is built with our own
        // Device.getDeviceName(). Receivers only act when the message names them
        // (see processStratumMessage), so this may have been intended to be deviceName — confirm.
        DeviceStratumMessage adjustment = new DeviceStratumMessage(Device.getDeviceName(), new_stratum);
        OSCMessage message = HBScheduler.buildNetworkSendMessage(OSCVocabulary.SchedulerMessage.STRATUM, adjustment);
        sendGlobalScheduleAdvertiseMessage(message, null);
        //NetworkCommunication.sendNetworkOSCMessages(message, null, false);
    }

    /**
     * Get the list of all scheduled devices we have listed
     * @return the list of all {@link DeviceSchedulerValue} we have stored
     */
    public List<DeviceSchedulerValue> getDeviceSchedulerValues() {
        List<DeviceSchedulerValue> ret = new ArrayList<>();
        synchronized (listLock) {
            for (DeviceSchedulerValue value : deviceSchedulerValues) {
                ret.add(value);
            }
        }
        return ret;
    }

    /**
     * Return the range of time in milliseconds between the maximum and minimum scheduled times
     * @return the difference between the minimum estimated schedule and maximum estimated scheduled times
     */
    public double timeRange() {
        double jvmTime = getUptime();
        double ret = 0;
        if (deviceSchedulerValues.size() > 0) {
            boolean started = false;
            double max = 0;
            double min = 0;
            synchronized (listLock) {
                // synchronise our list
                for (DeviceSchedulerValue value : deviceSchedulerValues) {
                    double estimated = value.estimateSchedulerTime(jvmTime);
                    if (!started) {
                        max = estimated;
                        min = estimated;
                        started = true;
                    } else {
                        if (estimated > max) {
                            max = estimated;
                        }
                        if (estimated < min) {
                            min = estimated;
                        }
                    }
                }
            }
            ret = max - min;
        }
        return ret;
    }

    /**
     * If the Device marked as the lead device has not been heard for a certain time, we will remove it from our list
     * @param expiration_millisecond the amount of milliseconds that must expire before we decide to remove it
     * @return true if a device was removed
     */
    public boolean removeExpiredLead(long expiration_millisecond) {
        boolean ret = false;
        try {
            DeviceSchedulerValue front = deviceSchedulerValues.first();
            if (front != null) {
                double elapsed = getUptime() - front.lastUpdateJVMTime;
                ret = elapsed > expiration_millisecond;
                if (ret) {
                    synchronized (listLock) {
                        deviceSchedulerValues.remove(front);
                        deviceSchedulerValueHashMap.remove(front.getDeviceName());
                    }
                }
            }
        } catch (Exception ignored) {
            // deliberately best-effort: first() throws on an empty set
        }
        return ret;
    }

    /**
     * Get the device that is listed as the first device in our list
     * @return the Front device
     */
    public DeviceSchedulerValue getLeadingDevice() {
        DeviceSchedulerValue ret = localDeviceSchedule;
        synchronized (listLock) {
            if (deviceSchedulerValues.size() > 0) {
                ret = deviceSchedulerValues.first();
            }
        }
        return ret;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.metadata; import java.util.ArrayList; import org.apache.asterix.common.functions.FunctionSignature; import org.apache.asterix.common.transactions.TxnId; import org.apache.asterix.external.dataset.adapter.AdapterIdentifier; import org.apache.asterix.metadata.entities.CompactionPolicy; import org.apache.asterix.metadata.entities.Dataset; import org.apache.asterix.metadata.entities.DatasourceAdapter; import org.apache.asterix.metadata.entities.Datatype; import org.apache.asterix.metadata.entities.Dataverse; import org.apache.asterix.metadata.entities.Feed; import org.apache.asterix.metadata.entities.FeedConnection; import org.apache.asterix.metadata.entities.FeedPolicyEntity; import org.apache.asterix.metadata.entities.Function; import org.apache.asterix.metadata.entities.Index; import org.apache.asterix.metadata.entities.Library; import org.apache.asterix.metadata.entities.NodeGroup; import org.apache.asterix.metadata.utils.MetadataUtil; /** * Used to implement serializable transactions against the MetadataCache. * Assumes that the MetadataNode also provides serializable transactions. 
If the * MetadataNode provides weaker guarantees than serializable, then you can * expect update anomalies in the MetadataManager, e.g., because the * MetadataCache could get polluted by a transaction reading an uncommitted * write of another transaction, and putting the uncommitted entity in the * MetadataCache. Logs the logical operations of a single transaction against * the metadata. Once the transaction decides to commit, its log will be forward * rolled against the MetadataCache. If it decides to abort, then its logs are * simply discarded without changing the MetadataCache. Also provides a * transaction-local "view" of its own uncommitted changes to the MetadataCache, * to allow metadata transactions to read their uncommitted writes, without * changing the MetadataCache. For example, think of what would happen if a * transactions read its own uncommitted addDataset(), causing an update to the * MetadataCache (the MetadataCache is immediately updated on reads, following * the assumption that the MetadataNode won't allow reading uncommitted values). * Another transaction might see this uncommitted dataset in the MetadataCache. * This class is not thread safe. We assume that a metadata transaction consists * of only one thread. */ public class MetadataTransactionContext extends MetadataCache { // Keeps track of deleted metadata entities. // An entity can either be in the droppedCache or in the inherited members // of MetadataCache (the "added" entities). // The APIs in this class make sure that these two caches are kept in sync. 
protected MetadataCache droppedCache = new MetadataCache(); protected ArrayList<MetadataLogicalOperation> opLog = new ArrayList<>(); private final TxnId txnId; public MetadataTransactionContext(TxnId txnId) { this.txnId = txnId; } public TxnId getTxnId() { return txnId; } public void addDataverse(Dataverse dataverse) { droppedCache.dropDataverse(dataverse); logAndApply(new MetadataLogicalOperation(dataverse, true)); } public void addDataset(Dataset dataset) { droppedCache.dropDataset(dataset); logAndApply(new MetadataLogicalOperation(dataset, true)); } public void addIndex(Index index) { droppedCache.dropIndex(index); logAndApply(new MetadataLogicalOperation(index, true)); } public void addDatatype(Datatype datatype) { droppedCache.dropDatatype(datatype); logAndApply(new MetadataLogicalOperation(datatype, true)); } public void addNodeGroup(NodeGroup nodeGroup) { droppedCache.dropNodeGroup(nodeGroup); logAndApply(new MetadataLogicalOperation(nodeGroup, true)); } public void addFunction(Function function) { droppedCache.dropFunction(function); logAndApply(new MetadataLogicalOperation(function, true)); } public void addAdapter(DatasourceAdapter adapter) { droppedCache.dropAdapterIfExists(adapter); logAndApply(new MetadataLogicalOperation(adapter, true)); } public void addCompactionPolicy(CompactionPolicy compactionPolicy) { droppedCache.dropCompactionPolicy(compactionPolicy); logAndApply(new MetadataLogicalOperation(compactionPolicy, true)); } public void dropDataset(String dataverseName, String datasetName) { Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null, null, null, null, null, -1, MetadataUtil.PENDING_NO_OP); droppedCache.addDatasetIfNotExists(dataset); logAndApply(new MetadataLogicalOperation(dataset, false)); } public void dropIndex(String dataverseName, String datasetName, String indexName) { Index index = new Index(dataverseName, datasetName, indexName, null, null, null, null, false, false, false, MetadataUtil.PENDING_NO_OP); 
droppedCache.addIndexIfNotExists(index); logAndApply(new MetadataLogicalOperation(index, false)); } public void dropDataverse(String dataverseName) { Dataverse dataverse = new Dataverse(dataverseName, null, MetadataUtil.PENDING_NO_OP); droppedCache.addDataverseIfNotExists(dataverse); logAndApply(new MetadataLogicalOperation(dataverse, false)); } public void addLibrary(Library library) { droppedCache.dropLibrary(library); logAndApply(new MetadataLogicalOperation(library, true)); } public void dropDataDatatype(String dataverseName, String datatypeName) { Datatype datatype = new Datatype(dataverseName, datatypeName, null, false); droppedCache.addDatatypeIfNotExists(datatype); logAndApply(new MetadataLogicalOperation(datatype, false)); } public void dropNodeGroup(String nodeGroupName) { NodeGroup nodeGroup = new NodeGroup(nodeGroupName, null); droppedCache.addOrUpdateNodeGroup(nodeGroup); logAndApply(new MetadataLogicalOperation(nodeGroup, false)); } public void dropFunction(FunctionSignature signature) { Function function = new Function(signature, null, null, null, null, null, null); droppedCache.addFunctionIfNotExists(function); logAndApply(new MetadataLogicalOperation(function, false)); } public void dropAdapter(String dataverseName, String adapterName) { AdapterIdentifier adapterIdentifier = new AdapterIdentifier(dataverseName, adapterName); DatasourceAdapter adapter = new DatasourceAdapter(adapterIdentifier, null, null); droppedCache.addAdapterIfNotExists(adapter); logAndApply(new MetadataLogicalOperation(adapter, false)); } public void dropLibrary(String dataverseName, String libraryName) { Library library = new Library(dataverseName, libraryName); droppedCache.addLibraryIfNotExists(library); logAndApply(new MetadataLogicalOperation(library, false)); } public void logAndApply(MetadataLogicalOperation op) { opLog.add(op); doOperation(op); } public boolean dataverseIsDropped(String dataverseName) { return droppedCache.getDataverse(dataverseName) != null; } public 
boolean datasetIsDropped(String dataverseName, String datasetName) { if (droppedCache.getDataverse(dataverseName) != null) { return true; } return droppedCache.getDataset(dataverseName, datasetName) != null; } public boolean indexIsDropped(String dataverseName, String datasetName, String indexName) { if (droppedCache.getDataverse(dataverseName) != null) { return true; } if (droppedCache.getDataset(dataverseName, datasetName) != null) { return true; } return droppedCache.getIndex(dataverseName, datasetName, indexName) != null; } public boolean datatypeIsDropped(String dataverseName, String datatypeName) { if (droppedCache.getDataverse(dataverseName) != null) { return true; } return droppedCache.getDatatype(dataverseName, datatypeName) != null; } public boolean nodeGroupIsDropped(String nodeGroup) { return droppedCache.getNodeGroup(nodeGroup) != null; } public boolean functionIsDropped(FunctionSignature functionSignature) { return droppedCache.getFunction(functionSignature) != null; } public ArrayList<MetadataLogicalOperation> getOpLog() { return opLog; } public void addFeedPolicy(FeedPolicyEntity feedPolicy) { droppedCache.dropFeedPolicy(feedPolicy); logAndApply(new MetadataLogicalOperation(feedPolicy, true)); } public void addFeed(Feed feed) { droppedCache.dropFeedIfExists(feed); logAndApply(new MetadataLogicalOperation(feed, true)); } public void dropFeed(Feed feed) { droppedCache.addFeedIfNotExists(feed); logAndApply(new MetadataLogicalOperation(feed, false)); } public void addFeedConnection(FeedConnection feedConnection) { droppedCache.dropFeedConnection(feedConnection); logAndApply(new MetadataLogicalOperation(feedConnection, true)); } public void dropFeedConnection(String dataverseName, String feedName, String datasetName) { FeedConnection feedConnection = new FeedConnection(dataverseName, feedName, datasetName, null, null, null, null); droppedCache.addFeedConnectionIfNotExists(feedConnection); logAndApply(new MetadataLogicalOperation(feedConnection, false)); 
} @Override public void clear() { super.clear(); droppedCache.clear(); opLog.clear(); } }
/* * Copyright 1997-2022 Optimatika * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package org.ojalgo.array; import java.util.Arrays; import java.util.Spliterator; import java.util.Spliterators; import org.ojalgo.array.operation.COPY; import org.ojalgo.array.operation.Exchange; import org.ojalgo.array.operation.FillAll; import org.ojalgo.array.operation.FillMatchingSingle; import org.ojalgo.array.operation.OperationBinary; import org.ojalgo.array.operation.OperationParameter; import org.ojalgo.array.operation.OperationUnary; import org.ojalgo.array.operation.OperationVoid; import org.ojalgo.function.BinaryFunction; import org.ojalgo.function.NullaryFunction; import org.ojalgo.function.ParameterFunction; import org.ojalgo.function.UnaryFunction; import org.ojalgo.function.VoidFunction; import org.ojalgo.function.constant.PrimitiveMath; import org.ojalgo.function.special.MissingMath; import org.ojalgo.structure.Access1D; import org.ojalgo.structure.Mutate1D; /** * A one- and/or arbitrary-dimensional array of {@linkplain java.lang.Comparable}. * * @author apete */ public abstract class ReferenceTypeArray<N extends Comparable<N>> extends PlainArray<N> implements Mutate1D.Sortable { public final N[] data; ReferenceTypeArray(final DenseArray.Factory<N> factory, final int length) { super(factory, length); data = factory.scalar().newArrayInstance(length); this.fill(0, length, 1, this.factory().scalar().zero().get()); } ReferenceTypeArray(final DenseArray.Factory<N> factory, final N[] data) { super(factory, data.length); this.data = data; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (!(obj instanceof ReferenceTypeArray)) { return false; } ReferenceTypeArray other = (ReferenceTypeArray) obj; if (!Arrays.equals(data, other.data)) { return false; } return true; } @Override public void fillMatching(final Access1D<?> values) { FillMatchingSingle.fill(data, values, this.factory().scalar()); } @Override public void fillMatching(final Access1D<N> left, final 
BinaryFunction<N> function, final Access1D<N> right) { int limit = MissingMath.toMinIntExact(this.count(), left.count(), right.count()); for (int i = 0; i < limit; i++) { data[i] = function.invoke(left.get(i), right.get(i)); } } @Override public void fillMatching(final UnaryFunction<N> function, final Access1D<N> arguments) { int limit = MissingMath.toMinIntExact(this.count(), arguments.count()); for (int i = 0; i < limit; i++) { data[i] = function.invoke(arguments.get(i)); } } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + Arrays.hashCode(data); return result; } @Override public final void reset() { Arrays.fill(data, this.valueOf(PrimitiveMath.ZERO)); } @Override public final int size() { return data.length; } public final Spliterator<N> spliterator() { return Spliterators.spliterator(data, 0, data.length, PlainArray.CHARACTERISTICS); } protected final N[] copyOfData() { return COPY.copyOf(data); } @Override protected final void exchange(final int firstA, final int firstB, final int step, final int count) { Exchange.exchange(data, firstA, firstB, step, count); } @Override protected final void fill(final int first, final int limit, final Access1D<N> left, final BinaryFunction<N> function, final Access1D<N> right) { OperationBinary.invoke(data, first, limit, 1, left, function, right); } @Override protected final void fill(final int first, final int limit, final Access1D<N> left, final BinaryFunction<N> function, final N right) { OperationBinary.invoke(data, first, limit, 1, left, function, right); } @Override protected final void fill(final int first, final int limit, final int step, final N value) { FillAll.fill(data, first, limit, step, value); } @Override protected final void fill(final int first, final int limit, final int step, final NullaryFunction<?> supplier) { FillAll.fill(data, first, limit, step, supplier, this.factory().scalar()); } @Override protected final void fill(final int first, 
final int limit, final N left, final BinaryFunction<N> function, final Access1D<N> right) { OperationBinary.invoke(data, first, limit, 1, left, function, right); } @Override protected final void fillOne(final int index, final N value) { data[index] = value; } @Override protected final void fillOne(final int index, final NullaryFunction<?> supplier) { data[index] = this.valueOf(supplier.get()); } @Override protected final N get(final int index) { return data[index]; } @Override protected final void modify(final int first, final int limit, final int step, final Access1D<N> left, final BinaryFunction<N> function) { OperationBinary.invoke(data, first, limit, step, left, function, this); } @Override protected final void modify(final int first, final int limit, final int step, final BinaryFunction<N> function, final Access1D<N> right) { OperationBinary.invoke(data, first, limit, step, this, function, right); } @Override protected final void modify(final int first, final int limit, final int step, final BinaryFunction<N> function, final N right) { OperationBinary.invoke(data, first, limit, step, this, function, right); } @Override protected final void modify(final int first, final int limit, final int step, final N left, final BinaryFunction<N> function) { OperationBinary.invoke(data, first, limit, step, left, function, this); } @Override protected final void modify(final int first, final int limit, final int step, final ParameterFunction<N> function, final int parameter) { OperationParameter.invoke(data, first, limit, step, data, function, parameter); } @Override protected final void modify(final int first, final int limit, final int step, final UnaryFunction<N> function) { OperationUnary.invoke(data, first, limit, step, this, function); } @Override protected final void modifyOne(final int index, final UnaryFunction<N> modifier) { data[index] = modifier.invoke(data[index]); } @Override protected final int searchAscending(final N value) { return Arrays.binarySearch(data, 
value); } @Override protected final void set(final int index, final Comparable<?> value) { data[index] = this.valueOf(value); } @Override protected final void set(final int index, final double value) { data[index] = this.valueOf(value); } @Override protected final void set(final int index, final float value) { data[index] = this.valueOf(value); } @Override protected final void visit(final int first, final int limit, final int step, final VoidFunction<N> visitor) { OperationVoid.invoke(data, first, limit, step, visitor); } @Override protected void visitOne(final int index, final VoidFunction<N> visitor) { visitor.invoke(data[index]); } @Override final boolean isPrimitive() { return false; } @Override final void modify(final long extIndex, final int intIndex, final Access1D<N> left, final BinaryFunction<N> function) { data[intIndex] = function.invoke(left.get(extIndex), data[intIndex]); } @Override final void modify(final long extIndex, final int intIndex, final BinaryFunction<N> function, final Access1D<N> right) { data[intIndex] = function.invoke(data[intIndex], right.get(extIndex)); } @Override final void modify(final long extIndex, final int intIndex, final UnaryFunction<N> function) { data[intIndex] = function.invoke(data[intIndex]); } final N valueOf(final Comparable<?> number) { return this.factory().scalar().cast(number); } final N valueOf(final double value) { return this.factory().scalar().cast(value); } final N valueOf(final float value) { return this.factory().scalar().cast(value); } }
/* * Licensed to Crate.io GmbH ("Crate") under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. Crate licenses * this file to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * However, if you have executed another commercial license agreement * with Crate these terms will supersede the license and you may use the * software solely pursuant to the terms of the relevant commercial agreement. */ package io.crate.execution.engine.collect.collectors; import io.crate.breaker.RamAccounting; import io.crate.common.annotations.VisibleForTesting; import io.crate.data.Row; import io.crate.data.RowConsumer; import io.crate.execution.dsl.phases.NodeOperation; import io.crate.execution.dsl.phases.RoutedCollectPhase; import io.crate.execution.engine.distribution.merge.PassThroughPagingIterator; import io.crate.execution.jobs.CumulativePageBucketReceiver; import io.crate.execution.jobs.DistResultRXTask; import io.crate.execution.jobs.PageBucketReceiver; import io.crate.execution.jobs.RootTask; import io.crate.execution.jobs.TasksService; import io.crate.execution.jobs.kill.KillJobsRequest; import io.crate.execution.jobs.kill.TransportKillJobsNodeAction; import io.crate.execution.jobs.transport.JobRequest; import io.crate.execution.jobs.transport.JobResponse; import io.crate.execution.jobs.transport.TransportJobAction; import io.crate.metadata.settings.SessionSettings; import io.crate.types.DataTypes; 
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;

import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Executor;

/**
 * Runs a single collect phase on one remote node and streams the result buckets
 * back into a receiving task created on the local node.
 *
 * <p>Flow (see {@link #doCollect()}):
 * <ol>
 *   <li>{@link #createLocalContext()} registers a {@link DistResultRXTask} (phase id
 *       {@link #RECEIVER_PHASE_ID}) with the local {@link TasksService} to receive pages.</li>
 *   <li>{@link #createRemoteContext()} sends a {@link JobRequest} to {@code remoteNode}
 *       instructing it to execute {@code collectPhase} and downstream to this node.</li>
 * </ol>
 *
 * <p>Kill handling: {@link #kill(Throwable)} only flips {@link #collectorKilled} under
 * {@link #killLock}; the create methods check that flag under the same lock, so a kill
 * is observed at whichever stage the collector has reached (see comment in {@code kill}).
 */
public class RemoteCollector {

    private static final Logger LOGGER = LogManager.getLogger(RemoteCollector.class);

    // Phase id of the local receiving task; the remote NodeOperation downstreams to it.
    private static final int RECEIVER_PHASE_ID = 1;

    private final UUID jobId;
    private final SessionSettings sessionSettings;
    private final String localNode;
    private final String remoteNode;
    private final Executor executor;
    private final TransportJobAction transportJobAction;
    private final TransportKillJobsNodeAction transportKillJobsNodeAction;
    private final TasksService tasksService;
    private final RamAccounting ramAccounting;
    private final RowConsumer consumer;
    private final RoutedCollectPhase collectPhase;

    // Guards collectorKilled and the context lifecycle transitions (see kill()).
    private final Object killLock = new Object();
    private final boolean scrollRequired;
    private final boolean enableProfiling;

    // Local root task; null until createLocalContext() succeeds. Accessed from
    // transport callbacks as well — NOTE(review): those reads rely on the task
    // having been published before the request was sent (both happen under killLock).
    private RootTask context = null;
    private boolean collectorKilled = false;

    /**
     * @param jobId         id of the job this collect belongs to
     * @param localNode     node id of this node (receiver of result pages)
     * @param remoteNode    node id that will execute {@code collectPhase}
     * @param consumer      receives the merged rows (or the failure)
     * @param collectPhase  phase to execute remotely
     */
    public RemoteCollector(UUID jobId,
                           SessionSettings sessionSettings,
                           String localNode,
                           String remoteNode,
                           TransportJobAction transportJobAction,
                           TransportKillJobsNodeAction transportKillJobsNodeAction,
                           Executor executor,
                           TasksService tasksService,
                           RamAccounting ramAccounting,
                           RowConsumer consumer,
                           RoutedCollectPhase collectPhase) {
        this.jobId = jobId;
        this.sessionSettings = sessionSettings;
        this.localNode = localNode;
        this.remoteNode = remoteNode;
        this.executor = executor;
        /*
         * We don't wanna profile the timings of the remote execution context, because the remoteCollect is already
         * part of the subcontext duration of the original Task profiling.
         */
        this.enableProfiling = false;
        this.scrollRequired = consumer.requiresScroll();
        this.transportJobAction = transportJobAction;
        this.transportKillJobsNodeAction = transportKillJobsNodeAction;
        this.tasksService = tasksService;
        this.ramAccounting = ramAccounting;
        this.consumer = consumer;
        this.collectPhase = collectPhase;
    }

    /** Starts the collect: local receiver first, then the remote request. */
    public void doCollect() {
        if (!createLocalContext()) return;
        createRemoteContext();
    }

    /**
     * Creates and starts the local root task that receives the result pages.
     *
     * @return {@code true} if the task was started; {@code false} if the collector
     *         was already killed (consumer gets an InterruptedException) or startup failed.
     */
    @VisibleForTesting
    boolean createLocalContext() {
        RootTask.Builder builder = createPageDownstreamContext();
        try {
            synchronized (killLock) {
                // Kill window 1: killed before the local task exists — abort, notify consumer.
                if (collectorKilled) {
                    consumer.accept(null, new InterruptedException());
                    return false;
                }
                context = tasksService.createTask(builder);
                context.start();
                return true;
            }
        } catch (Throwable t) {
            // If the task never got created the consumer must still be completed;
            // otherwise the failure is propagated by killing the task.
            if (context == null) {
                consumer.accept(null, t);
            } else {
                context.kill(t.getMessage());
            }
            return false;
        }
    }

    /**
     * Sends the {@link JobRequest} that makes {@code remoteNode} execute the collect
     * phase and push result buckets to this node's receiver phase.
     */
    @VisibleForTesting
    void createRemoteContext() {
        NodeOperation nodeOperation = new NodeOperation(
            collectPhase, Collections.singletonList(localNode), RECEIVER_PHASE_ID, (byte) 0);

        synchronized (killLock) {
            // Kill window 2: local task exists but no request sent yet — kill it and abort.
            if (collectorKilled) {
                context.kill(null);
                return;
            }
            transportJobAction.execute(
                remoteNode,
                new JobRequest(
                    jobId,
                    sessionSettings,
                    localNode,
                    Collections.singletonList(nodeOperation),
                    enableProfiling
                ),
                new ActionListener<>() {
                    @Override
                    public void onResponse(JobResponse jobResponse) {
                        LOGGER.trace("RemoteCollector jobAction=onResponse");
                        // Kill window 3: request was already sent — clean up the remote job.
                        if (collectorKilled) {
                            killRemoteContext();
                        }
                    }

                    @Override
                    public void onFailure(Exception e) {
                        LOGGER.error("RemoteCollector jobAction=onFailure", e);
                        context.kill(e.getMessage());
                    }
                }
            );
        }
    }

    /**
     * Builds the local root-task builder containing the {@link DistResultRXTask} that
     * receives the single remote bucket (numBuckets = 1). A repeatable paging iterator
     * is used when the consumer requires scroll support.
     */
    private RootTask.Builder createPageDownstreamContext() {
        RootTask.Builder builder = tasksService.newBuilder(
            jobId,
            sessionSettings.userName(),
            localNode,
            Collections.emptySet()
        );
        PassThroughPagingIterator<Integer, Row> pagingIterator;
        if (scrollRequired) {
            pagingIterator = PassThroughPagingIterator.repeatable();
        } else {
            pagingIterator = PassThroughPagingIterator.oneShot();
        }
        PageBucketReceiver pageBucketReceiver = new CumulativePageBucketReceiver(
            localNode,
            RECEIVER_PHASE_ID,
            executor,
            DataTypes.getStreamers(collectPhase.outputTypes()),
            consumer,
            pagingIterator,
            1);

        builder.addTask(new DistResultRXTask(
            RECEIVER_PHASE_ID,
            "RemoteCollectPhase",
            pageBucketReceiver,
            ramAccounting,
            1
        ));
        return builder;
    }

    /**
     * Broadcasts a kill for {@code jobId}; once the broadcast completes (either way)
     * the local task is killed as well.
     */
    private void killRemoteContext() {
        KillJobsRequest killRequest = new KillJobsRequest(
            List.of(jobId),
            sessionSettings.userName(),
            null
        );
        transportKillJobsNodeAction.broadcast(killRequest,
            new ActionListener<>() {

                @Override
                public void onResponse(Long numKilled) {
                    context.kill(null);
                }

                @Override
                public void onFailure(Exception e) {
                    context.kill(e.getMessage());
                }
            });
    }

    /**
     * Marks this collector as killed. No direct cleanup happens here; the flag is
     * observed under {@link #killLock} by the collect stages.
     *
     * @param throwable cause of the kill; may be {@code null} (currently unused)
     */
    public void kill(@Nullable Throwable throwable) {
        synchronized (killLock) {
            collectorKilled = true;
            /*
             * due to the lock there are 3 kill windows:
             *
             *  1. localContext not even created - doCollect aborts
             *  2. localContext created, no requests sent - doCollect aborts
             *  3. localContext created, requests sent - clean-up happens once response from remote is received
             */
        }
    }
}
package database.dao;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import model.Edition;
import model.Manga;
import model.Manga.MangaBuilder;
import model.Type;

import org.apache.commons.io.FileUtils;

import utils.DateUtils;
import database.DatabaseMethods;
import database.ImageDatabase;

/**
 * DAO for {@link Manga} records backed by the {@code mangas} table.
 *
 * <p>All JDBC statements/result sets are managed with try-with-resources so they are
 * released even when a query fails (the original leaked them on exception paths).
 * The poster image is stored on disk next to the database (see
 * {@link #getImageFileLocation()}), keyed by the manga id.
 *
 * <p>Not thread-safe: it shares a single {@link Connection}. {@link #close()} commits
 * and closes that connection, so the instance is single-use per unit of work.
 */
public class MangaDAO implements DatabaseMethods<Manga>, AutoCloseable {

    private Connection connection;

    // Column order (parameters 1..12 in fillCommonColumns): national name, original name,
    // type, serialization, start date, finish date, authors, edition, stamp, genders,
    // rating, observations. id_manga is auto-generated on insert.
    private static final String SQL_INSERT = "insert into mangas values (null,?,?,?,?,?,?,?,?,?,?,?,?);";
    private static final String SQL_UPDATE = "update mangas set national_name_manga=?, original_name_manga=?, type_manga=?, serialization_manga=?, start_date_manga=?, finish_date_manga=?, authors_manga=?, edition_manga=?, stamp_manga=?, genders_manga=?, rating_manga=?, observations_manga=? where id_manga=?;";
    private static final String SQL_REMOVE = "delete from mangas where id_manga=?;";
    private static final String SQL_SELECT_ALL = "select * from mangas order by national_name_manga asc;";
    private static final String SQL_SELECT_BY_ID = "select * from mangas where id_manga=?;";

    /**
     * @param connection open JDBC connection; ownership is taken over — it is
     *                   committed and closed by {@link #close()}
     */
    public MangaDAO(Connection connection) {
        super();
        this.connection = connection;
    }

    /**
     * Inserts {@code object}, assigns the generated id back onto it and stores its
     * poster image.
     *
     * @return {@code true} on success
     * @throws SQLException if the insert fails or no generated key is returned
     * @throws IOException  if copying the poster image fails
     */
    @Override
    public boolean insert(Manga object) throws SQLException, IOException {
        try (PreparedStatement statement =
                 connection.prepareStatement(SQL_INSERT, Statement.RETURN_GENERATED_KEYS)) {
            fillCommonColumns(statement, object);
            statement.executeUpdate();
            try (ResultSet generatedKeys = statement.getGeneratedKeys()) {
                if (generatedKeys.next()) {
                    object.setId(generatedKeys.getInt(1));
                    insertImage(object);
                    return true;
                }
                throw new SQLException("Creating manga failed, no ID obtained.");
            }
        }
    }

    /**
     * Updates the row matching {@code object}'s id and refreshes its poster image.
     *
     * @return {@code true} if at least one row was updated
     */
    @Override
    public boolean update(Manga object) throws SQLException, IOException {
        try (PreparedStatement statement = connection.prepareStatement(SQL_UPDATE)) {
            fillCommonColumns(statement, object);
            statement.setInt(13, object.getId());
            int affected = statement.executeUpdate();
            if (affected > 0)
                insertImage(object);
            return affected > 0;
        }
    }

    /**
     * Deletes the row matching {@code object}'s id and its stored image.
     *
     * @return {@code true} if a row was deleted
     */
    @Override
    public boolean remove(Manga object) throws SQLException {
        try (PreparedStatement statement = connection.prepareStatement(SQL_REMOVE)) {
            statement.setInt(1, object.getId());
            int affected = statement.executeUpdate();
            if (affected > 0)
                // NOTE: uses the static ImageDatabase helper (not this DAO's removeImage) —
                // kept as in the original; presumably equivalent, verify against ImageDatabase.
                ImageDatabase.removeImage(object);
            return affected > 0;
        }
    }

    /**
     * Loads all mangas ordered by national name, including poster and volumes.
     */
    @Override
    public List<Manga> select() throws SQLException {
        List<Manga> result = new ArrayList<>();
        try (Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery(SQL_SELECT_ALL)) {
            // Shares this DAO's connection, so it must not be closed here.
            @SuppressWarnings("resource")
            VolumeDAO volumeDAO = new VolumeDAO(connection);
            while (resultSet.next()) {
                Manga manga = buildManga(resultSet);
                manga.setPoster(selectImage(manga));
                manga.setVolumes(volumeDAO.select(manga));
                result.add(manga);
            }
        }
        return result;
    }

    /**
     * Loads a single manga by id, including poster and volumes.
     *
     * @return the manga, or {@code null} if no row matches
     */
    @Override
    public Manga select(int id) throws SQLException {
        try (PreparedStatement statement = connection.prepareStatement(SQL_SELECT_BY_ID)) {
            statement.setInt(1, id);
            try (ResultSet resultSet = statement.executeQuery()) {
                // Shares this DAO's connection, so it must not be closed here.
                @SuppressWarnings("resource")
                VolumeDAO volumeDAO = new VolumeDAO(connection);
                Manga result = null;
                if (resultSet.next()) {
                    result = buildManga(resultSet);
                    result.setPoster(selectImage(result));
                    result.setVolumes(volumeDAO.select(result));
                }
                return result;
            }
        }
    }

    /** Commits pending work and closes the connection (close even if commit fails). */
    @Override
    public void close() throws SQLException {
        try {
            connection.commit();
        } finally {
            connection.close();
        }
    }

    /**
     * Copies the manga's poster file into the image store (skipped when the manga has
     * no poster, or when the poster already is the stored file).
     */
    @Override
    public void insertImage(Manga object) throws IOException {
        if (object.getPoster() == null)
            return; // nothing to store; previously this threw an NPE
        File f = new File(String.format(getImageFileLocation(), object.getId()));
        if (!f.getParentFile().exists())
            f.getParentFile().mkdirs();
        if (!f.toString().equals(object.getPoster().toString()))
            FileUtils.copyFile(object.getPoster(), f);
    }

    /**
     * @return the stored poster file for {@code object}, or {@code null} if none exists
     */
    @Override
    public File selectImage(Manga object) {
        File result = new File(String.format(getImageFileLocation(), object.getId()));
        return result.exists() ? result : null;
    }

    /** Deletes the stored poster file for {@code object}, if present. */
    @Override
    public void removeImage(Manga object) {
        File result = new File(String.format(getImageFileLocation(), object.getId()));
        if (result.exists())
            result.delete();
    }

    /**
     * @return format string for poster paths, with {@code %d} for the manga id
     */
    @Override
    public String getImageFileLocation() {
        return DEFAULT_FOLDER + File.separator + "mangas" + File.separator + "%d.png";
    }

    /** Binds columns 1..12 (everything except id) — shared by insert and update. */
    private void fillCommonColumns(PreparedStatement statement, Manga object) throws SQLException {
        statement.setString(1, object.getNationalName());
        statement.setString(2, object.getOriginalName());
        statement.setInt(3, object.getType().getValue());
        statement.setString(4, object.getSerialization());
        statement.setString(5, DateUtils.toString(object.getStartDate()));
        statement.setString(6, DateUtils.toString(object.getFinishDate()));
        statement.setString(7, object.getAuthors());
        statement.setInt(8, object.getEdition().getValue());
        statement.setString(9, object.getStamp());
        statement.setString(10, object.getGendersAsString());
        statement.setInt(11, object.getRating());
        statement.setString(12, object.getObservations());
    }

    /** Maps the current row of {@code resultSet} to a {@link Manga} — shared by both selects. */
    private Manga buildManga(ResultSet resultSet) throws SQLException {
        return new MangaBuilder()
            .id(resultSet.getInt("id_manga"))
            .nationalName(resultSet.getString("national_name_manga"))
            .originalName(resultSet.getString("original_name_manga"))
            .type(Type.fromValue(resultSet.getInt("type_manga")))
            .serialization(resultSet.getString("serialization_manga"))
            .startDate(DateUtils.toDate(resultSet.getString("start_date_manga")))
            .finishDate(DateUtils.toDate(resultSet.getString("finish_date_manga")))
            .authors(resultSet.getString("authors_manga"))
            .edition(Edition.fromValue(resultSet.getInt("edition_manga")))
            .stamp(resultSet.getString("stamp_manga"))
            .genders(resultSet.getString("genders_manga"))
            .rating(resultSet.getInt("rating_manga"))
            .observations(resultSet.getString("observations_manga"))
            .build();
    }
}
/** * Copyright 2012 multibit.org * * Licensed under the MIT license (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/mit-license.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.multibit.viewsystem.swing.view.panels; import org.bitcoinj.wallet.Protos.Wallet.EncryptionType; import org.multibit.controller.Controller; import org.multibit.controller.bitcoin.BitcoinController; import org.multibit.model.bitcoin.BitcoinModel; import org.multibit.model.bitcoin.WalletBusyListener; import org.multibit.model.core.CoreModel; import org.multibit.utils.ImageLoader; import org.multibit.viewsystem.DisplayHint; import org.multibit.viewsystem.View; import org.multibit.viewsystem.Viewable; import org.multibit.viewsystem.swing.ColorAndFontConstants; import org.multibit.viewsystem.swing.MultiBitFrame; import org.multibit.viewsystem.swing.action.ExportPrivateKeysSubmitAction; import org.multibit.viewsystem.swing.action.HelpContextAction; import org.multibit.viewsystem.swing.view.PrivateKeyFileFilter; import org.multibit.viewsystem.swing.view.components.*; import javax.swing.*; import java.awt.*; import java.awt.event.*; import java.io.File; import java.util.Arrays; import java.util.Locale; /** * The export private keys panel. 
*/ public class ExportPrivateKeysPanel extends JPanel implements Viewable, WalletBusyListener { private static final long serialVersionUID = 444992298119957705L; private final Controller controller; private final BitcoinController bitcoinController; private MultiBitFrame mainFrame; private MultiBitLabel walletFilenameLabel; private MultiBitLabel walletDescriptionLabel; private String chooseFilenameButtonText; private MultiBitLabel outputFilenameLabel; private MultiBitLabel messageLabel1; private MultiBitLabel messageLabel2; private String outputFilename; private String walletFilenameForChosenOutputFilename; private JRadioButton passwordProtect; private JRadioButton doNotPasswordProtect; private MultiBitLabel doNotPasswordProtectWarningLabel; private JPasswordField exportFilePasswordField; private JPasswordField repeatExportFilePasswordField; private JPasswordField walletPasswordField; private MultiBitLabel walletPasswordPromptLabel; private ExportPrivateKeysSubmitAction exportPrivateKeySubmitAction; private JLabel tickLabel; public static final int STENT_HEIGHT = 12; public static final int STENT_DELTA = 20; private Font adjustedFont; /** * Creates a new {@link ExportPrivateKeysPanel}. 
*/ public ExportPrivateKeysPanel(BitcoinController bitcoinController, MultiBitFrame mainFrame) { this.bitcoinController = bitcoinController; this.controller = this.bitcoinController; this.mainFrame = mainFrame; setBackground(ColorAndFontConstants.VERY_LIGHT_BACKGROUND_COLOR); applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); outputFilename = ""; walletFilenameForChosenOutputFilename = ""; initUI(); this.bitcoinController.registerWalletBusyListener(this); walletBusyChange(this.bitcoinController.getModel().getActivePerWalletModelData().isBusy()); boolean walletPasswordRequired = false; if (this.bitcoinController.getModel().getActiveWallet() != null && this.bitcoinController.getModel().getActiveWallet().getEncryptionType() == EncryptionType.ENCRYPTED_SCRYPT_AES) { walletPasswordRequired = true; } enableWalletPassword(walletPasswordRequired); } @Override public void navigateAwayFromView() { } private void initUI() { setLayout(new BorderLayout()); JPanel mainPanel = new JPanel(); mainPanel.setMinimumSize(new Dimension(550, 160)); mainPanel.setLayout(new GridBagLayout()); mainPanel.setOpaque(false); mainPanel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); String[] keys = new String[] { "resetTransactionsPanel.walletDescriptionLabel", "resetTransactionsPanel.walletFilenameLabel", "showExportPrivateKeysPanel.passwordPrompt", "showExportPrivateKeysPanel.repeatPasswordPrompt", "showImportPrivateKeysPanel.numberOfKeys.text", "showImportPrivateKeysPanel.replayDate.text" }; int stentWidth = MultiBitTitledPanel.calculateStentWidthForKeys(controller.getLocaliser(), keys, this) + STENT_DELTA; GridBagConstraints constraints = new GridBagConstraints(); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 0; constraints.gridy = 0; constraints.gridwidth = 2; constraints.weightx = 1; constraints.weighty = 1; constraints.anchor = GridBagConstraints.LINE_START; 
mainPanel.add(createWalletPanel(stentWidth), constraints); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 0; constraints.gridy = 1; constraints.gridwidth = 2; constraints.weightx = 1; constraints.weighty = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(createFilenamePanel(stentWidth), constraints); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 0; constraints.gridy = 2; constraints.gridwidth = 2; constraints.weightx = 1; constraints.weighty = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(createPasswordPanel(stentWidth), constraints); JLabel filler1 = new JLabel(); filler1.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 0; constraints.gridy = 3; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.weightx = 1; constraints.weighty = 0.1; constraints.anchor = GridBagConstraints.CENTER; mainPanel.add(filler1, constraints); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 0; constraints.gridy = 4; constraints.gridwidth = 1; constraints.weightx = 0.4; constraints.weighty = 0.06; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(createButtonPanel(), constraints); messageLabel1 = new MultiBitLabel(""); messageLabel1.setOpaque(false); messageLabel1.setBorder(BorderFactory.createEmptyBorder(0, 30, 0, 0)); messageLabel1.setHorizontalAlignment(JLabel.LEADING); messageLabel1.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 0; constraints.gridy = 5; constraints.gridwidth = 1; constraints.weightx = 1; constraints.weighty = 0.06; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(messageLabel1, constraints); messageLabel2 = new MultiBitLabel(""); messageLabel2.setOpaque(false); messageLabel2.setBorder(BorderFactory.createEmptyBorder(0, 30, 0, 0)); 
messageLabel2.setHorizontalAlignment(JLabel.LEADING); messageLabel2.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 0; constraints.gridy = 6; constraints.gridwidth = 1; constraints.weightx = 1; constraints.weighty = 0.06; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(messageLabel2, constraints); Action helpAction; if (ComponentOrientation.LEFT_TO_RIGHT == ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())) { helpAction = new HelpContextAction(controller, ImageLoader.HELP_CONTENTS_BIG_ICON_FILE, "multiBitFrame.helpMenuText", "multiBitFrame.helpMenuTooltip", "multiBitFrame.helpMenuText", HelpContentsPanel.HELP_EXPORTING_PRIVATE_KEYS_URL); } else { helpAction = new HelpContextAction(controller, ImageLoader.HELP_CONTENTS_BIG_RTL_ICON_FILE, "multiBitFrame.helpMenuText", "multiBitFrame.helpMenuTooltip", "multiBitFrame.helpMenuText", HelpContentsPanel.HELP_EXPORTING_PRIVATE_KEYS_URL); } HelpButton helpButton = new HelpButton(helpAction, controller); helpButton.setText(""); String tooltipText = HelpContentsPanel.createMultilineTooltipText(new String[] { controller.getLocaliser().getString("multiBitFrame.helpMenuTooltip") }); helpButton.setToolTipText(tooltipText); helpButton.setHorizontalAlignment(SwingConstants.LEADING); helpButton.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); helpButton.setBorder(BorderFactory.createEmptyBorder(0, AbstractTradePanel.HELP_BUTTON_INDENT, AbstractTradePanel.HELP_BUTTON_INDENT, AbstractTradePanel.HELP_BUTTON_INDENT)); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 0; constraints.gridy = 7; constraints.weightx = 1; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.BASELINE_LEADING; mainPanel.add(helpButton, constraints); JLabel 
filler2 = new JLabel(); filler2.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 0; constraints.gridy = 8; constraints.gridwidth = 1; constraints.weightx = 1; constraints.weighty = 100; constraints.anchor = GridBagConstraints.CENTER; mainPanel.add(filler2, constraints); JScrollPane mainScrollPane = new JScrollPane(mainPanel, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); //mainScrollPane.setBorder(BorderFactory.createMatteBorder(1, 1, 1, 1, ColorAndFontConstants.DARK_BACKGROUND_COLOR)); mainScrollPane.setBorder(BorderFactory.createEmptyBorder()); mainScrollPane.getViewport().setBackground(ColorAndFontConstants.VERY_LIGHT_BACKGROUND_COLOR); mainScrollPane.getViewport().setOpaque(true); mainScrollPane.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); mainScrollPane.getHorizontalScrollBar().setUnitIncrement(CoreModel.SCROLL_INCREMENT); mainScrollPane.getVerticalScrollBar().setUnitIncrement(CoreModel.SCROLL_INCREMENT); add(mainScrollPane, BorderLayout.CENTER); } private JPanel createWalletPanel(int stentWidth) { MultiBitTitledPanel inputWalletPanel = new MultiBitTitledPanel(controller.getLocaliser().getString( "showExportPrivateKeysPanel.wallet.title"), ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); GridBagConstraints constraints = new GridBagConstraints(); MultiBitTitledPanel.addLeftJustifiedTextAtIndent( controller.getLocaliser().getString("showExportPrivateKeysPanel.wallet.text"), 3, inputWalletPanel); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 1; constraints.gridy = 4; constraints.weightx = 0.3; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; inputWalletPanel.add(MultiBitTitledPanel.createStent(stentWidth, STENT_HEIGHT), constraints); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 2; constraints.gridy = 5; 
constraints.weightx = 0.05; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.CENTER; inputWalletPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints); JPanel filler0 = new JPanel(); filler0.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx =3; constraints.gridy = 4; constraints.weightx = 100; constraints.weighty = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END; inputWalletPanel.add(filler0, constraints); MultiBitLabel walletDescriptionLabelLabel = new MultiBitLabel(controller.getLocaliser().getString( "resetTransactionsPanel.walletDescriptionLabel")); walletDescriptionLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 1; constraints.gridy = 5; constraints.weightx = 0.5; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END; inputWalletPanel.add(walletDescriptionLabelLabel, constraints); walletDescriptionLabel = new MultiBitLabel(this.bitcoinController.getModel().getActivePerWalletModelData().getWalletDescription()); walletDescriptionLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 3; constraints.gridy = 5; constraints.weightx = 0.5; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; inputWalletPanel.add(walletDescriptionLabel, constraints); MultiBitLabel walletFilenameLabelLabel = new MultiBitLabel(controller.getLocaliser().getString( "resetTransactionsPanel.walletFilenameLabel")); walletFilenameLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.NONE; 
constraints.gridx = 1; constraints.gridy = 6; constraints.weightx = 0.5; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END; inputWalletPanel.add(walletFilenameLabelLabel, constraints); walletFilenameLabel = new MultiBitLabel(this.bitcoinController.getModel().getActiveWalletFilename()); walletFilenameLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 3; constraints.gridy = 6; constraints.weightx = 0.5; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; inputWalletPanel.add(walletFilenameLabel, constraints); JPanel fill1 = new JPanel(); fill1.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 3; constraints.gridy = 7; constraints.weightx = 20; constraints.weighty = 1; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_END; inputWalletPanel.add(fill1, constraints); JPanel filler3 = new JPanel(); filler3.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 1; constraints.gridy = 7; constraints.weightx = 0.3; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; inputWalletPanel.add(filler3, constraints); walletPasswordPromptLabel = new MultiBitLabel(controller.getLocaliser().getString("showExportPrivateKeysPanel.walletPasswordPrompt")); walletPasswordPromptLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 1; constraints.gridy = 8; constraints.weightx = 0.3; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END; inputWalletPanel.add(walletPasswordPromptLabel, constraints); constraints.fill = GridBagConstraints.BOTH; 
constraints.gridx = 2; constraints.gridy = 8; constraints.weightx = 0.05; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.CENTER; inputWalletPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints); walletPasswordField = new JPasswordField(24); walletPasswordField.setMinimumSize(new Dimension(200, 20)); walletPasswordField.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 3; constraints.gridy = 8; constraints.weightx = 0.3; constraints.weighty = 0.6; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; inputWalletPanel.add(walletPasswordField, constraints); JPanel filler4 = new JPanel(); filler4.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 1; constraints.gridy = 9; constraints.weightx = 0.3; constraints.weighty = 0.3; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; inputWalletPanel.add(filler4, constraints); return inputWalletPanel; } private JPanel createFilenamePanel(int stentWidth) { MultiBitTitledPanel outputFilenamePanel = new MultiBitTitledPanel(controller.getLocaliser().getString( "showExportPrivateKeysPanel.filename.title"), ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); GridBagConstraints constraints = new GridBagConstraints(); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 0; constraints.gridy = 3; constraints.weightx = 0.1; constraints.weighty = 0.05; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_START; JPanel indent = MultiBitTitledPanel.getIndentPanel(1); outputFilenamePanel.add(indent, constraints); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 1; constraints.gridy = 3; constraints.weightx = 0.3; constraints.weighty = 0.3; 
// --- continuation of createOutputFilenamePanel(); the method and the
// outputFilenamePanel / constraints locals are declared above this chunk ---
// Row 3: stent fixes the label-column width so this panel lines up with the others.
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
JPanel stent = MultiBitTitledPanel.createStent(stentWidth, STENT_HEIGHT);
outputFilenamePanel.add(stent, constraints);

// Spacer between the name and value columns.
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2; constraints.gridy = 3;
constraints.weightx = 0.05; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.CENTER;
outputFilenamePanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints);

// Transparent filler absorbs the horizontal slack on row 3 (weightx = 100).
JPanel filler0 = new JPanel();
filler0.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx =3; constraints.gridy = 3;
constraints.weightx = 100; constraints.weighty = 1;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(filler0, constraints);

// Pick the localised caption for the "choose output file" button:
// two candidate resource keys are fetched and one is chosen below.
chooseFilenameButtonText = "";
String chooseFilenameButtonText1 = controller.getLocaliser().getString("showExportPrivateKeysPanel.filename.text");
String chooseFilenameButtonText2 = controller.getLocaliser().getString("showExportPrivateKeysPanel.filename.text.2");
// If the second term is localised, use that, otherwise the first.
// NOTE(review): "Export to ..." appears to be the untranslated English default
// for the second key; if the bundle still returns it, the key has not been
// localised and the code falls back to the first key — confirm against the
// resource bundles.
if (controller.getLocaliser().getLocale().equals(Locale.ENGLISH)) {
    chooseFilenameButtonText = chooseFilenameButtonText2;
} else {
    if (!"Export to ...".equals(chooseFilenameButtonText2)) {
        chooseFilenameButtonText = chooseFilenameButtonText2;
    } else {
        chooseFilenameButtonText = chooseFilenameButtonText1;
    }
}

// Button that opens the file chooser (chooseFile()).
MultiBitButton chooseOutputFilenameButton = new MultiBitButton(chooseFilenameButtonText);
chooseOutputFilenameButton.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
chooseOutputFilenameButton.addActionListener(new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent arg0) {
        chooseFile();
    }
});
chooseOutputFilenameButton.setToolTipText(HelpContentsPanel.createTooltipText(controller.getLocaliser()
        .getString("showExportPrivateKeysPanel.filename.tooltip")));

// Row 4: label/value pair showing the chosen export filename.
MultiBitLabel walletFilenameLabelLabel = new MultiBitLabel(controller.getLocaliser().getString(
        "resetTransactionsPanel.walletFilenameLabel"));
walletFilenameLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1; constraints.gridy = 4;
constraints.weightx = 0.5; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(walletFilenameLabelLabel, constraints);

JPanel filler2 = new JPanel();
filler2.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2; constraints.gridy = 4;
constraints.weightx = 0.1; constraints.weighty = 0.1;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler2, constraints);

// Field label updated later by chooseFile() / displayView().
outputFilenameLabel = new MultiBitLabel(outputFilename);
outputFilenameLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 3; constraints.gridy = 4;
constraints.weightx = 0.5; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(outputFilenameLabel, constraints);

JPanel fill1 = new JPanel();
fill1.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 4; constraints.gridy = 4;
constraints.weightx = 20; constraints.weighty = 1;
constraints.gridwidth = 1; constraints.gridheight = 1;
constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(fill1, constraints);

JPanel filler3 = new JPanel();
filler3.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1; constraints.gridy = 5;
constraints.weightx = 0.3; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler3, constraints);

// Row 6: the "choose output filename" button itself.
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3; constraints.gridy = 6;
constraints.weightx = 0.5; constraints.weighty = 0.3;
constraints.gridwidth = 3; constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(chooseOutputFilenameButton, constraints);

JPanel filler4 = new JPanel();
filler4.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1; constraints.gridy = 7;
constraints.weightx = 0.3; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler4, constraints);

return outputFilenamePanel;
}

/**
 * Builds the titled panel with the password-protect / do-not-protect radio
 * buttons and the export-file password fields.
 *
 * @param stentWidth fixed pixel width used to align the label column with
 *                   the other panels on this view
 * @return the populated password panel
 */
private JPanel createPasswordPanel(int stentWidth) {
    // do/do not password protect radios
    MultiBitTitledPanel passwordProtectPanel = new MultiBitTitledPanel(controller.getLocaliser().getString(
            "showExportPrivateKeysPanel.password.title"), ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

    GridBagConstraints constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.BOTH;
    constraints.gridx = 0; constraints.gridy = 3;
    // (statement continues in the next chunk)
    constraints.weightx =
0.1; constraints.weighty = 0.05;
constraints.gridwidth = 1; constraints.gridheight = 1;
constraints.anchor = GridBagConstraints.LINE_START;
// Indent column so the radio buttons sit inside the titled border.
JPanel indent = MultiBitTitledPanel.getIndentPanel(1);
passwordProtectPanel.add(indent, constraints);

// Stent keeps the label column aligned with the other panels.
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1; constraints.gridy = 3;
constraints.weightx = 0.3; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
JPanel stent = MultiBitTitledPanel.createStent(stentWidth, STENT_HEIGHT);
passwordProtectPanel.add(stent, constraints);

constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2; constraints.gridy = 3;
constraints.weightx = 0.05; constraints.weighty = 0.3;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.CENTER;
passwordProtectPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints);

// Filler absorbs horizontal slack at the right edge.
JPanel filler0 = new JPanel();
filler0.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 4; constraints.gridy = 3;
constraints.weightx = 100; constraints.weighty = 1;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END;
passwordProtectPanel.add(filler0, constraints);

// Mutually-exclusive "password protect" / "do not password protect" radios.
ButtonGroup usePasswordGroup = new ButtonGroup();
passwordProtect = new JRadioButton(controller.getLocaliser().getString("showExportPrivateKeysPanel.passwordProtect"));
passwordProtect.setOpaque(false);
passwordProtect.setFont(FontSizer.INSTANCE.getAdjustedDefaultFont());
passwordProtect.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

doNotPasswordProtect = new JRadioButton(controller.getLocaliser().getString(
        "showExportPrivateKeysPanel.doNotPasswordProtect"));
doNotPasswordProtect.setOpaque(false);
doNotPasswordProtect.setFont(FontSizer.INSTANCE.getAdjustedDefaultFont());
doNotPasswordProtect.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

// One shared listener toggles the password fields / warning label.
ItemListener itemListener = new ChangePasswordProtectListener();
passwordProtect.addItemListener(itemListener);
doNotPasswordProtect.addItemListener(itemListener);
usePasswordGroup.add(passwordProtect);
usePasswordGroup.add(doNotPasswordProtect);
// Default to password-protected export.
passwordProtect.setSelected(true);

constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1; constraints.gridy = 4;
constraints.weightx = 0.2; constraints.weighty = 0.3;
constraints.gridwidth = 3; constraints.anchor = GridBagConstraints.LINE_START;
passwordProtectPanel.add(passwordProtect, constraints);

// Row 5: "password" prompt + field.
MultiBitLabel passwordPromptLabel = new MultiBitLabel("");
passwordPromptLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
passwordPromptLabel.setText(controller.getLocaliser().getString("showExportPrivateKeysPanel.passwordPrompt"));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1; constraints.gridy = 5;
constraints.weightx = 0.3; constraints.weighty = 0.1;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END;
passwordProtectPanel.add(passwordPromptLabel, constraints);

exportFilePasswordField = new JPasswordField(24);
exportFilePasswordField.setMinimumSize(new Dimension(200, 20));
// PasswordListener shows the tick when both password fields match.
exportFilePasswordField.addKeyListener(new PasswordListener());
exportFilePasswordField.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3; constraints.gridy = 5;
constraints.weightx = 0.3; constraints.weighty = 0.25;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
passwordProtectPanel.add(exportFilePasswordField, constraints);

// 3x3 px spacer between the two password rows.
JLabel filler3 = new JLabel();
filler3.setMinimumSize(new Dimension(3, 3));
filler3.setMaximumSize(new Dimension(3, 3));
filler3.setPreferredSize(new Dimension(3, 3));
filler3.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1; constraints.gridy = 6;
constraints.weightx = 0.1; constraints.weighty = 0.1;
constraints.gridwidth = 1; constraints.gridheight = 1;
constraints.anchor = GridBagConstraints.CENTER;
passwordProtectPanel.add(filler3, constraints);

// Row 7: "repeat password" prompt + field.
MultiBitLabel repeatPasswordPromptLabel = new MultiBitLabel("");
repeatPasswordPromptLabel.setText(controller.getLocaliser().getString("showExportPrivateKeysPanel.repeatPasswordPrompt"));
repeatPasswordPromptLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1; constraints.gridy = 7;
constraints.weightx = 0.3; constraints.weighty = 0.1;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END;
passwordProtectPanel.add(repeatPasswordPromptLabel, constraints);

repeatExportFilePasswordField = new JPasswordField(24);
repeatExportFilePasswordField.setMinimumSize(new Dimension(200, 20));
repeatExportFilePasswordField.addKeyListener(new PasswordListener());
repeatExportFilePasswordField.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3; constraints.gridy = 7;
constraints.weightx = 0.3; constraints.weighty = 0.25;
constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START;
passwordProtectPanel.add(repeatExportFilePasswordField, constraints);

// Tick icon shown (by PasswordListener) when both passwords match.
ImageIcon tickIcon = ImageLoader.createImageIcon(ImageLoader.TICK_ICON_FILE);
tickLabel = new JLabel(tickIcon);
tickLabel.setToolTipText(HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("showExportPrivateKeysPanel.theTwoPasswordsMatch")));
tickLabel.setVisible(false);

constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 4; constraints.gridy = 5;
constraints.weightx = 0.1; constraints.weighty = 0.1;
constraints.gridwidth = 1; constraints.gridheight = 3;
constraints.anchor = GridBagConstraints.LINE_START;
passwordProtectPanel.add(tickLabel, constraints);

// Row 8: the "do not password protect" radio.
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1; constraints.gridy = 8;
constraints.weightx = 0.2; constraints.weighty = 0.3;
constraints.gridwidth = 3; constraints.gridheight = 1;
constraints.anchor = GridBagConstraints.LINE_START;
passwordProtectPanel.add(doNotPasswordProtect, constraints);

// Red warning shown when the user opts out of password protection.
doNotPasswordProtectWarningLabel = new MultiBitLabel(" ");
doNotPasswordProtectWarningLabel.setForeground(Color.RED);
doNotPasswordProtectWarningLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3; constraints.gridy = 9;
constraints.weightx = 0.2; constraints.weighty = 0.3;
constraints.gridwidth = 3; constraints.anchor = GridBagConstraints.LINE_START;
passwordProtectPanel.add(doNotPasswordProtectWarningLabel, constraints);

return passwordProtectPanel;
}

/**
 * Builds the trailing-aligned button row holding the export/submit button.
 *
 * @return the populated button panel
 */
private JPanel createButtonPanel() {
    JPanel buttonPanel = new JPanel();
    buttonPanel.setOpaque(false);
    FlowLayout flowLayout = new FlowLayout();
    flowLayout.setAlignment(FlowLayout.TRAILING);
    buttonPanel.setLayout(flowLayout);
    buttonPanel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

    /**
     * Create submit action with references to the password fields - this
     * avoids having any public accessors on the panel
     */
    exportPrivateKeySubmitAction = new ExportPrivateKeysSubmitAction(this.bitcoinController, this,
            ImageLoader.createImageIcon(ImageLoader.EXPORT_PRIVATE_KEYS_ICON_FILE), walletPasswordField, exportFilePasswordField, repeatExportFilePasswordField, mainFrame);
    MultiBitButton submitButton = new MultiBitButton(exportPrivateKeySubmitAction, controller);
submitButton.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
buttonPanel.add(submitButton);
return buttonPanel;
}

/**
 * Refreshes the view from the active wallet: filename, description, whether
 * a wallet password is required, busy state and the default export filename.
 */
@Override
public void displayView(DisplayHint displayHint) {
    // If it is a wallet transaction change no need to update.
    if (DisplayHint.WALLET_TRANSACTIONS_HAVE_CHANGED == displayHint) {
        return;
    }
    walletFilenameLabel.setText(this.bitcoinController.getModel().getActiveWalletFilename());
    walletDescriptionLabel.setText(this.bitcoinController.getModel().getActivePerWalletModelData().getWalletDescription());

    // The wallet password is only needed when the wallet itself is encrypted.
    boolean walletPasswordRequired = false;
    if (this.bitcoinController.getModel().getActiveWallet() != null && this.bitcoinController.getModel().getActiveWallet().getEncryptionType() == EncryptionType.ENCRYPTED_SCRYPT_AES) {
        walletPasswordRequired = true;
    }
    enableWalletPassword(walletPasswordRequired);

    walletBusyChange(this.bitcoinController.getModel().getActivePerWalletModelData().isBusy());

    // (Re)derive the default export filename when none is set yet, or when
    // the active wallet changed since the user last chose a filename.
    if (outputFilename == null || "".equals(outputFilename)
            || (walletFilenameForChosenOutputFilename != null && !walletFilenameForChosenOutputFilename.equals(this.bitcoinController.getModel().getActiveWalletFilename()))) {
        outputFilename = createDefaultKeyFilename(this.bitcoinController.getModel().getActiveWalletFilename());
        walletFilenameForChosenOutputFilename = this.bitcoinController.getModel().getActiveWalletFilename();
        outputFilenameLabel.setText(outputFilename);
    }

    clearMessages();
}

/**
 * Enables or disables the wallet-password field and its prompt label.
 *
 * @param enableWalletPassword true to enable both components
 */
private void enableWalletPassword(boolean enableWalletPassword) {
    if (enableWalletPassword) {
        // Enable the wallet password.
        walletPasswordField.setEnabled(true);
        walletPasswordPromptLabel.setEnabled(true);
    } else {
        // Disable the wallet password.
        walletPasswordField.setEnabled(false);
        walletPasswordPromptLabel.setEnabled(false);
    }
}

/**
 * @return true when the "password protect" radio is selected, i.e. the
 *         exported key file should be encrypted
 */
public boolean requiresEncryption() {
    boolean requiresEncryption = false;
    if (passwordProtect != null && passwordProtect.isSelected()) {
        requiresEncryption = true;
    }
    return requiresEncryption;
}

/**
 * Opens a save dialog for the export file; on approval stores the chosen
 * path (appending the private-key extension when missing) and updates the
 * filename label.
 */
private void chooseFile() {
    JFileChooser.setDefaultLocale(controller.getLocaliser().getLocale());
    JFileChooser fileChooser = new JFileChooser();
    fileChooser.setLocale(controller.getLocaliser().getLocale());
    fileChooser.setDialogTitle(chooseFilenameButtonText);
    adjustedFont = FontSizer.INSTANCE.getAdjustedDefaultFont();
    if (adjustedFont != null) {
        setFileChooserFont(new Container[] {fileChooser});
    }
    fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
    fileChooser.setFileFilter(new PrivateKeyFileFilter(controller));
    fileChooser.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));

    if (outputFilename != null && !"".equals(outputFilename)) {
        // Start from the previously chosen export file.
        fileChooser.setCurrentDirectory(new File(outputFilename));
        fileChooser.setSelectedFile(new File(outputFilename));
    } else {
        // No previous choice: start near the active wallet with an "untitled" default.
        if (this.bitcoinController.getModel().getActiveWalletFilename() != null) {
            fileChooser.setCurrentDirectory(new File(this.bitcoinController.getModel().getActiveWalletFilename()));
        }
        String defaultFileName = fileChooser.getCurrentDirectory().getAbsoluteFile() + File.separator
                + controller.getLocaliser().getString("saveWalletAsView.untitled") + "." + BitcoinModel.PRIVATE_KEY_FILE_EXTENSION;
        fileChooser.setSelectedFile(new File(defaultFileName));
    }

    int returnVal = fileChooser.showSaveDialog(mainFrame);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        File file = fileChooser.getSelectedFile();
        if (file != null) {
            outputFilename = file.getAbsolutePath();
            // add a key suffix if not present
            if (!outputFilename.endsWith("." + BitcoinModel.PRIVATE_KEY_FILE_EXTENSION)) {
                outputFilename = outputFilename + "." + BitcoinModel.PRIVATE_KEY_FILE_EXTENSION;
            }
            walletFilenameForChosenOutputFilename = this.bitcoinController.getModel().getActiveWalletFilename();
            outputFilenameLabel.setText(outputFilename);
            clearMessages();
        }
    }
}

// Used in testing.
public void setOutputFilename(String outputFilename) {
    this.outputFilename = outputFilename;
}

public String getOutputFilename() {
    return outputFilename;
}

// Blank (single space) clears both message labels without collapsing their height.
public void clearMessages() {
    setMessage1(" ");
    setMessage2(" ");
}

public void clearPasswords() {
    walletPasswordField.setText("");
    exportFilePasswordField.setText("");
    repeatExportFilePasswordField.setText("");
}

public void setMessage1(String message1) {
    if (messageLabel1 != null) {
        messageLabel1.setText(message1);
    }
}

public void setMessage2(String message2) {
    if (messageLabel2 != null) {
        messageLabel2.setText(message2);
    }
}

/**
 * Derives the default export filename by replacing the wallet file's
 * extension with the private-key extension.
 * NOTE(review): if walletFilename contains no '.', lastIndexOf returns -1,
 * the stem is empty and the result is just the bare extension — confirm
 * whether callers can ever pass an extensionless filename.
 *
 * @param walletFilename the active wallet's filename, may be null
 * @return the derived key filename, or null when walletFilename is null
 */
private String createDefaultKeyFilename(String walletFilename) {
    if (walletFilename == null) {
        return null;
    }
    int suffixSeparator = walletFilename.lastIndexOf('.');
    String stem = walletFilename.substring(0, suffixSeparator + 1);
    return stem + BitcoinModel.PRIVATE_KEY_FILE_EXTENSION;
}

/**
 * Toggles the export-password fields and warning label when the user
 * switches between the protect / do-not-protect radios.
 */
class ChangePasswordProtectListener implements ItemListener {
    public ChangePasswordProtectListener() {
    }

    @Override
    public void itemStateChanged(ItemEvent e) {
        if (doNotPasswordProtectWarningLabel != null) {
            if (e.getSource().equals(passwordProtect)) {
                // Protecting: clear the warning and enable the password fields.
                doNotPasswordProtectWarningLabel.setText(" ");
                exportFilePasswordField.setEnabled(true);
                repeatExportFilePasswordField.setEnabled(true);
                tickLabel.setEnabled(true);
                exportFilePasswordField.requestFocusInWindow();
                clearMessages();
            } else {
                // Not protecting: show the warning and disable the password fields.
                doNotPasswordProtectWarningLabel.setText(controller.getLocaliser().getString(
                        "showExportPrivateKeysPanel.doNotPasswordProtectWarningLabel"));
                exportFilePasswordField.setEnabled(false);
                repeatExportFilePasswordField.setEnabled(false);
                tickLabel.setEnabled(false);
                clearMessages();
            }
        }
    }
}

/**
 * Key listener that compares the two export-password fields after every
 * keystroke and shows the tick when they match.
 */
class PasswordListener implements KeyListener {
    /** Handle the key typed event from the text field.
 */
    @Override
    public void keyTyped(KeyEvent e) {
    }

    /** Handle the key-pressed event from the text field. */
    @Override
    public void keyPressed(KeyEvent e) {
        // do nothing
    }

    /** Handle the key-released event from the text field. */
    @Override
    public void keyReleased(KeyEvent e) {
        char[] password1 = null;
        char[] password2 = null;
        if (exportFilePasswordField != null) {
            password1 = exportFilePasswordField.getPassword();
        }
        if (repeatExportFilePasswordField != null) {
            password2 = repeatExportFilePasswordField.getPassword();
        }
        // Show the tick only when both passwords are present and equal.
        boolean tickLabelVisible = false;
        if (password1 != null && password2 != null) {
            if (Arrays.equals(password1, password2)) {
                tickLabelVisible = true;
            }
        }
        tickLabel.setVisible(tickLabelVisible);
        clearMessages();

        // clear the password arrays (if necessary)
        // Zeroing the copies so plaintext passwords do not linger on the heap.
        if (password1 != null) {
            for (int i = 0; i < password1.length; i++) {
                password1[i] = 0;
            }
        }
        if (password2 != null) {
            for (int i = 0; i < password2.length; i++) {
                password2[i] = 0;
            }
        }
    }
}

@Override
public Icon getViewIcon() {
    return ImageLoader.createImageIcon(ImageLoader.EXPORT_PRIVATE_KEYS_ICON_FILE);
}

@Override
public String getViewTitle() {
    return controller.getLocaliser().getString("showExportPrivateKeysAction.text");
}

@Override
public String getViewTooltip() {
    return controller.getLocaliser().getString("showExportPrivateKeysAction.tooltip");
}

@Override
public View getViewId() {
    return View.SHOW_EXPORT_PRIVATE_KEYS_VIEW;
}

// Used in testing.
public ExportPrivateKeysSubmitAction getExportPrivateKeySubmitAction() {
    return exportPrivateKeySubmitAction;
}

public String getMessageText1() {
    return messageLabel1.getText();
}

public String getMessageText2() {
    return messageLabel2.getText();
}

public void setWalletPassword(CharSequence walletPassword) {
    walletPasswordField.setText(walletPassword.toString());
}

public boolean isWalletPasswordFieldEnabled() {
    return walletPasswordField.isEnabled();
}

public void setExportPassword(CharSequence exportPassword) {
    exportFilePasswordField.setText(exportPassword.toString());
}

public void setRepeatExportPassword(CharSequence exportPassword) {
    repeatExportFilePasswordField.setText(exportPassword.toString());
}

public JRadioButton getDoNotPasswordProtect() {
    return doNotPasswordProtect;
}

/**
 * Enables/disables the export action and updates its tooltip according to
 * the wallet's busy state.
 */
@Override
public void walletBusyChange(boolean newWalletIsBusy) {
    // Update the enable status of the action to match the wallet busy status.
    if (this.bitcoinController.getModel().getActivePerWalletModelData().isBusy()) {
        // Wallet is busy with another operation that may change the private keys - Action is disabled.
        exportPrivateKeySubmitAction.putValue(Action.SHORT_DESCRIPTION, HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("multiBitSubmitAction.walletIsBusy",
                new Object[]{controller.getLocaliser().getString(this.bitcoinController.getModel().getActivePerWalletModelData().getBusyTaskKey())})));
        exportPrivateKeySubmitAction.setEnabled(false);
    } else {
        // Enable unless wallet has been modified by another process.
        if (!this.bitcoinController.getModel().getActivePerWalletModelData().isFilesHaveBeenChangedByAnotherProcess()) {
            exportPrivateKeySubmitAction.putValue(Action.SHORT_DESCRIPTION, HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("showExportPrivateKeysAction.tooltip")));
            exportPrivateKeySubmitAction.setEnabled(true);
        }
    }
}

/**
 * Recursively applies the adjusted font to the file chooser and all of its
 * child components (JFileChooser has no single font property that reaches
 * its internal labels/buttons).
 */
private void setFileChooserFont(Component[] components) {
    for (Component component : components) {
        if (component instanceof Container)
            setFileChooserFont(((Container) component).getComponents());
        try {
            component.setFont(adjustedFont);
        } catch (Exception e) {
        } // TODO Why there is an empty catch block here?
        // NOTE(review): setFont on a Swing component is not documented to
        // throw — this catch looks defensive; confirm before removing.
    }
}
}
/******************************************************************************* * Copyright 2013 CRS4 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.crs4.roodin.bayesian; /** * @author ICT/LBS Team - CRS4 Sardinia, Italy * * Evaluate probabilities of being in a certain cell * */ public class Prob { static int side = 4; // square side for the bayesian algorithm /** * Evaluate probabilities * * @param probs the old cell probabilities * @param barred the barred grid of the sistem * @param heading the compass heading * @return new probabilities */ public static double[][] evalProbs(double[][] probs, int[][] barred, double heading) { double[][] newprobs = new double[probs.length][probs[0].length]; // newprobs = numpy.zeros(probs.shape) int rows = probs.length; int cols = probs[0].length; //rows,cols = probs.shape int[] max_index = {0, 0}; if (heading < 0) heading = 2 + heading; //Cell previous_est = estimatePos(probs); // it is not essential for (int r=0; r<rows; r++){ for (int c=0; c<cols; c++){ newprobs[r][c] = evalCell(r, c, probs, barred, heading); if (newprobs[r][c] >= newprobs[max_index[0]][max_index[1]]){ max_index[0] = r; max_index[1] = c; //max_index=(r,c) } } } double norm_factor = 0.; // norm_factor=sum(sum(newprobs)) for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { norm_factor = norm_factor + newprobs[i][j]; } } for (int r=0; r< rows; r++){ for (int c=0; c<cols; 
c++){ newprobs[r][c] = (newprobs[r][c] / norm_factor) ; } } return newprobs; } /** * Evaluate single cell * * @param row the row index * @param col the col index * @param probs the old cell probabilities * @param barred the barred grid of the sistem * @param heading the compass heading * @return the cell evaluation */ private static double evalCell(int row, int col, double[][] probs, int[][] barred, double heading) { double val = 0; for (int i=0; i < barred.length; i++){ if ((barred[i][0] == row) && (barred[i][1] == col)){ //if (row,col) in barred: return 0 return 0.; } } int rows = probs.length; int cols = probs[0].length; //rows,cols = probs.shape int[] iRange = range(max(0, row-(side-1)), min(rows, row+side)); int[] jRange = range(max(0, col-(side-1)), min(cols, col+side)); //System.out.println("iRange: "+iRange[0]+" "+iRange[1]); //System.out.println("jRange: "+jRange[0]+" "+jRange[1]); for (int i : iRange){ for(int j : jRange){ if (probs[i][j]==0) //if (probs[i][j] < 1./(rows*cols)) continue; double distance = Math.sqrt(Math.pow((row-i),2)+Math.pow((col-j),2)); int dy=(i-row); int dx=(col-j); double angle = Math.atan2(dy, dx)/Math.PI; if (angle < 0) angle = 2+angle; //negative angles are expressed as positive angle = angle % 2; //angles are always in the [0-2*pi] interval double err = Math.abs(heading-angle); val = val + f_conj(distance, err) *probs[i][j]; } } return val; } /** * f_conj is the p distribution of sigma and err * * @param sigma * @param err * @return new sigma */ private static double f_conj(double sigma, double err){ if (sigma < 0.1){ return f_sigma(sigma); }else{ return f_sigma(sigma)*f_err(err); } } /** * err is angle fraction of pi * * @param err * @return new err */ private static double f_err(double err) { err = err % 2; err = Math.abs(err); if (err < .1) return .85; if (err < .4) return .14; if (err < .8) return .01; else return 0; } /** * sigma is a fraction of Average Step Lenght * * @param sigma * @return sigma value */ private static 
double f_sigma(double sigma) { if (Math.abs(sigma) < 0.1) return .05; if (0.80 < Math.abs(sigma) && Math.abs(sigma) < 1.2) return .95; return 0; } /** * range * * @param start * @param stop * @return */ private static int[] range(int start, int stop){ int[] result = new int[stop-start]; for(int i=0;i<stop-start;i++) result[i] = start+i; return result; } /** * max * * @param a * @param b * @return */ private static int max(int a, int b){ if (a >= b) return a; else return b; } /** * min * * @param a * @param b * @return */ private static int min(int a, int b){ if (a <= b) return a; else return b; } /** * estimate probs * * @param probs * @return */ public static Cell estimatePos(double[][] probs) { return max_cell(probs); } /** * max_cell * * @param probs * @return */ private static Cell max_cell(double[][] probs) { double v = max(probs); int rows = probs.length; int cols = probs[0].length; //rows,cols = probs.shape for (int i=0; i<rows; i++){ for (int j=0; j<cols; j++){ if (probs[i][j] == v){ double mean_probs = mean(probs); double stdev = Math.sqrt( Math.pow((v-mean_probs),2)); //math.sqrt(((v-mean_probs)**2)/1) //System.out.println("mean: "+mean_probs); //System.out.println("stdev: "+stdev); return new Cell(i, j, stdev); } } } return new Cell(0,0,0); } /** * Get the mean of the entire matrix * * @param matrix * @return the mean */ private static double mean(double[][] matrix) { int rows = matrix.length; int cols = matrix[0].length; //rows,cols = probs.shape return sum(matrix) / (rows*cols); } /** * Get the sum of the entire matrix * * @param matrix * @return the sum */ private static double sum(double[][] matrix){ double sum = 0.; for (int i = 0; i < matrix.length; i++) { for (int j = 0; j < matrix[0].length; j++) { sum = sum + matrix[i][j]; } } return sum; } /** * The max value of the entire matrix * * @param matrix * @return max value */ private static double max(double[][] matrix) { double maxValue = matrix[0][0]; for (int i = 0; i < matrix.length; i++) { for 
(int j = 0; j < matrix[0].length; j++) { if (matrix[i][j] > maxValue) { maxValue = matrix[i][j]; } } } return maxValue; } }
/*
 * Copyright 2005 Paul Hinds
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.tp23.antinstaller.input;

import java.util.Locale;

import org.tp23.antinstaller.InstallerContext;
import org.tp23.antinstaller.ValidationException;

/**
 *
 * <p>Input type to select targets to install </p>
 * If the osSpecific flag is set the OS of the current system will
 * be appended to the name of the target actually by ant run so that different
 * Targets can be run according to the target platform.
 * This feature goes against the principles of
 * building cross platform installers, but is provided so that common installer
 * tasks such as creating icons and shortcuts can be run on Windows for
 * all those useless users who can't run a command script ;)
 * <br>
 * Currently there are two modes strict and not strict (lax).</p>
 * <p>Strict target will return the target name plus the exact String in the
 * System Property "os.name" this means you will have to provide targets for
 * every possible OS version. See
 * <a href="http://lopica.sourceforge.net/os.html">this page</a> for a list of possible values
 * There are a great many but you may not want to consider some of the options.</p>
 * <p>Lax target will return one of the following strings only
 * <ul>
 * <li>"[target-name]-linux" - Linux </li>
 * <li>"[target-name]-mac" - Mac OS and Mac OS X</li>
 * <li>"[target-name]-sun" - SunOS and Solaris</li>
 * <li>"[target-name]-win" - Windows *</li>
 * <li>"[target-name]-other" - any thing else</li>
 * </ul></p> so you only have to create 5 ant targets to support all the cases.
 * <p>Copyright: Copyright (c) 2004</p>
 * <p>Company: tp23</p>
 * @author Paul Hinds
 * @version $Id: TargetInput.java,v 1.3 2006/12/07 02:42:22 teknopaul Exp $
 */
public class TargetInput extends InputField implements Target {

    // Raw target name as configured; osSpecific/strict decide what getTarget() returns.
    private String target;
    private String force;
    private String osSpecific;
    private String strict;

    // Targets are ordered; each instance takes the next global index.
    private int idx;
    private static int globalIdx = 1;

    public TargetInput() {
        idx = getGlobalIdx();
    }

    /**
     * @return the effective target name: the raw name, or the raw name with an
     *         OS-specific suffix appended when osSpecific is true
     */
    public String getTarget() {
        if (isTrue(osSpecific)) {
            return getOSSpecificTarget();
        } else {
            return target;
        }
    }

    /**
     * Used to fetch the target value that was set in the config file
     * (never decorated with an OS suffix).
     * @return the raw configured target, may be null
     */
    public String getTargetName() {
        return target;
    }

    public void setTarget(String target) {
        this.target = target;
        setProperty(target);
    }

    public String getForce() {
        return force;
    }

    public void setForce(String force) {
        this.force = force;
    }

    public String getStrict() {
        return strict;
    }

    public void setStrict(String strict) {
        this.strict = strict;
    }

    public String getOsSpecific() {
        return osSpecific;
    }

    public void setOsSpecific(String osSpecific) {
        this.osSpecific = osSpecific;
    }

    /**
     * Called to validate the user input; target inputs always validate.
     */
    public boolean validate(InstallerContext cxt) throws ValidationException {
        return true;
    }

    /**
     * Used by checkConfig to validate the configuration file.
     * Not used at runtime.
     * @return boolean true when the configured attributes are consistent
     */
    public boolean validateObject() {
        if (getDisplayText() == null) {
            System.out.println("Target:displayText must be set");
            return false;
        }
        // FIX: validate the raw configured target (getTargetName), not
        // getTarget() — with osSpecific=true, getTarget() concatenates the OS
        // suffix onto a possibly-null target ("null" + suffix) and therefore
        // never returns null, so a missing target went undetected.
        if (getTargetName() == null) {
            System.out.println("Target:target must be set");
            return false;
        }
        if (!InputField.optionalBoolean(getForce())) {
            System.out.println("Target:force must be true or false or null");
            return false;
        }
        if (!InputField.optionalBoolean(getStrict())) {
            System.out.println("Target:strict must be true or false or null");
            return false;
        }
        if (!InputField.optionalBoolean(getOsSpecific())) {
            System.out.println("Target:osSpecific must be true or false or null");
            return false;
        }
        if (!InputField.requiredBoolean(getDefaultValue())) {
            System.out.println("Target:defaultValue must be true or false");
            return false;
        }
        return true;
    }

    public int getIdx() {
        return idx;
    }

    // NOTE(review): not thread-safe (unsynchronised static increment); fine if
    // inputs are only ever created on a single thread — confirm.
    public static int getGlobalIdx() {
        return globalIdx++;
    }

    public String getOSSpecificTarget() {
        if (isTrue(strict)) {
            return getStrictTarget();
        } else {
            return getLaxTarget();
        }
    }

    private String getStrictTarget() {
        return target + getOsSpecificSuffix();
    }

    private String getLaxTarget() {
        return target + getLaxOsSpecificSuffix();
    }

    /**
     * N.B. should have added a "-" but to late to change now and not important
     * @return the exact value of the "os.name" system property
     */
    public static String getOsSpecificSuffix() {
        return System.getProperty("os.name");
    }

    /**
     * @return one of "-linux", "-mac", "-win", "-sun" or "-other" derived from
     *         the "os.name" system property
     */
    public static String getLaxOsSpecificSuffix() {
        // FIX: lower-case with Locale.ROOT so the match is locale-independent.
        // Under a Turkish default locale "WINDOWS" lower-cases with a dotless
        // i ("wındows"), indexOf("windows") fails and every OS mapped to "-other".
        String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
        if (osName.indexOf("linux") != -1) {
            return "-linux";
        }
        if (osName.indexOf("mac") != -1) {
            return "-mac";
        }
        if (osName.indexOf("windows") != -1) {
            return "-win";
        }
        if (osName.indexOf("solaris") != -1 || osName.indexOf("sunos") != -1) {
            return "-sun";
        }
        return "-other";
    }
}
package qub;

/**
 * Tests for {@link QubFolder}.
 */
public interface QubFolderTests
{
    static void test(TestRunner runner)
    {
        PreCondition.assertNotNull(runner, "runner");

        runner.testGroup(QubFolder.class, () ->
        {
            runner.testGroup("get(Folder)", () ->
            {
                runner.test("with null", (Test test) ->
                {
                    test.assertThrows(() -> QubFolder.get(null),
                        new PreConditionFailure("qubFolder cannot be null."));
                });

                runner.test("with folder that doesn't exist", (Test test) ->
                {
                    final Folder folder = QubFolderTests.getFolder(test, "/qub/");
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(folder.getPath(), qubFolder.getPath());
                    test.assertFalse(qubFolder.exists().await());
                });

                runner.test("with folder that exists", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(folder.getPath(), qubFolder.getPath());
                    test.assertTrue(qubFolder.exists().await());
                });
            });

            runner.testGroup("getShortcutFile(String)", () ->
            {
                final Action2<String,Throwable> getShortcutFileErrorTest = (String shortcutName, Throwable expected) ->
                {
                    runner.test("with " + Strings.escapeAndQuote(shortcutName), (Test test) ->
                    {
                        final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                        test.assertThrows(() -> qubFolder.getShortcutFile(shortcutName), expected);
                    });
                };
                getShortcutFileErrorTest.run(null, new PreConditionFailure("shortcutName cannot be null."));
                getShortcutFileErrorTest.run("", new PreConditionFailure("shortcutName cannot be empty."));

                final Action1<String> getShortcutFileTest = (String shortcutName) ->
                {
                    runner.test("with " + Strings.escapeAndQuote(shortcutName), (Test test) ->
                    {
                        final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                        final File shortcutFile = qubFolder.getShortcutFile(shortcutName).await();
                        test.assertNotNull(shortcutFile);
                        test.assertEqual(shortcutName, shortcutFile.getName());
                        test.assertEqual(qubFolder.getPath(), shortcutFile.getParentFolder().await().getPath());
                    });
                };
                getShortcutFileTest.run("apple");
                getShortcutFileTest.run("apple.bat");
                getShortcutFileTest.run("qub-pack");
                getShortcutFileTest.run("qub-pack.cmd");
            });

            runner.testGroup("getPublisherFolders()", () ->
            {
                runner.test("with non-existing QubFolder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertEqual(Iterable.create(), qubFolder.getPublisherFolders().await());
                });

                runner.test("with existing empty QubFolder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.createQubFolder(test, "/qub/");
                    test.assertEqual(Iterable.create(), qubFolder.getPublisherFolders().await());
                });

                runner.test("with existing QubFolder with files", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    folder.createFile("shortcut.cmd").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(Iterable.create(), qubFolder.getPublisherFolders().await());
                });

                runner.test("with existing QubFolder with folders", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    final Folder subFolder = folder.createFolder("publisher").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(
                        Iterable.create(
                            QubPublisherFolder.get(subFolder)),
                        qubFolder.getPublisherFolders().await());
                });
            });

            runner.testGroup("getPublisherFolder(String)", () ->
            {
                runner.test("with null", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getPublisherFolder(null),
                        new PreConditionFailure("publisherName cannot be null."));
                });

                runner.test("with empty", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getPublisherFolder(""),
                        new PreConditionFailure("publisherName cannot be empty."));
                });

                runner.test("with non-existing publisher", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final QubPublisherFolder qubPublisherFolder = qubFolder.getPublisherFolder("spam").await();
                    test.assertNotNull(qubPublisherFolder);
                    test.assertEqual(Path.parse("/qub/spam/"), qubPublisherFolder.getPath());
                });

                runner.test("with existing publisher", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    final QubFolder qubFolder = QubFolder.get(folder);
                    folder.createFolder("spam").await();
                    final QubPublisherFolder qubPublisherFolder = qubFolder.getPublisherFolder("spam").await();
                    test.assertNotNull(qubPublisherFolder);
                    test.assertEqual(Path.parse("/qub/spam/"), qubPublisherFolder.getPath());
                });
            });

            runner.testGroup("getProjectFolders(String)", () ->
            {
                runner.test("with null publisherName", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getProjectFolders(null),
                        new PreConditionFailure("publisherName cannot be null."));
                });

                runner.test("with empty publisherName", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getProjectFolders(""),
                        new PreConditionFailure("publisherName cannot be empty."));
                });

                runner.test("with non-existing QubFolder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertEqual(Iterable.create(), qubFolder.getProjectFolders("me").await());
                });

                runner.test("with existing empty QubFolder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.createQubFolder(test, "/qub/");
                    test.assertEqual(Iterable.create(), qubFolder.getProjectFolders("me").await());
                });

                runner.test("with existing QubFolder with files", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    folder.createFile("shortcut.cmd").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(Iterable.create(), qubFolder.getProjectFolders("me").await());
                });

                runner.test("with existing empty QubPublisherFolder", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    folder.createFolder("me").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(Iterable.create(), qubFolder.getProjectFolders("me").await());
                });

                runner.test("with existing QubProjectFolder", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    final Folder subFolder = folder.createFolder("me/my-project/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(
                        Iterable.create(
                            QubProjectFolder.get(subFolder)),
                        qubFolder.getProjectFolders("me").await());
                });
            });

            runner.testGroup("getProjectFolder(String)", () ->
            {
                runner.test("with null publisherName", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getProjectFolder(null, "my-project"),
                        new PreConditionFailure("publisherName cannot be null."));
                });

                runner.test("with empty publisherName", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getProjectFolder("", "my-project"),
                        new PreConditionFailure("publisherName cannot be empty."));
                });

                runner.test("with null projectName", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getProjectFolder("me", null),
                        new PreConditionFailure("projectName cannot be null."));
                });

                runner.test("with empty projectName", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> qubFolder.getProjectFolder("me", ""),
                        new PreConditionFailure("projectName cannot be empty."));
                });

                runner.test("with non-existing publisher", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final QubProjectFolder projectFolder = qubFolder.getProjectFolder("spam", "my-project").await();
                    test.assertNotNull(projectFolder);
                    test.assertEqual(Path.parse("/qub/spam/my-project/"), projectFolder.getPath());
                });

                runner.test("with non-existing project", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    final QubFolder qubFolder = QubFolder.get(folder);
                    folder.createFolder("spam").await();
                    final QubProjectFolder projectFolder = qubFolder.getProjectFolder("spam", "my-project").await();
                    test.assertNotNull(projectFolder);
                    test.assertEqual(Path.parse("/qub/spam/my-project/"), projectFolder.getPath());
                });

                runner.test("with existing project", (Test test) ->
                {
                    final Folder folder = QubFolderTests.createFolder(test, "/qub/");
                    final QubFolder qubFolder = QubFolder.get(folder);
                    folder.createFolder("spam/my-project/").await();
                    final QubProjectFolder projectFolder = qubFolder.getProjectFolder("spam", "my-project").await();
                    test.assertNotNull(projectFolder);
                    test.assertEqual(Path.parse("/qub/spam/my-project/"), projectFolder.getPath());
                });
            });

            runner.testGroup("getProjectVersionFolders(String,String)", () ->
            {
                final Action3<String,String,Throwable> getProjectVersionFoldersErrorTest = (String publisherName, String projectName, Throwable expected) ->
                {
                    runner.test("with " + English.andList(Iterable.create(publisherName, projectName).map(Strings::escapeAndQuote)), (Test test) ->
                    {
                        final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                        test.assertThrows(() -> qubFolder.getProjectVersionFolders(publisherName, projectName), expected);
                    });
                };
                getProjectVersionFoldersErrorTest.run(null, null, new PreConditionFailure("publisherName cannot be null."));
                getProjectVersionFoldersErrorTest.run("", null, new PreConditionFailure("publisherName cannot be empty."));
                getProjectVersionFoldersErrorTest.run("a", null, new PreConditionFailure("projectName cannot be null."));
                getProjectVersionFoldersErrorTest.run("a", "", new PreConditionFailure("projectName cannot be empty."));

                runner.test("with non-existing Qub folder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final String publisherName = "a";
                    final String projectName = "b";
                    test.assertEqual(Iterable.create(), qubFolder.getProjectVersionFolders(publisherName, projectName).await());
                });

                runner.test("with non-existing publisher folder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    qubFolder.create().await();
                    final String publisherName = "a";
                    final String projectName = "b";
                    test.assertEqual(Iterable.create(), qubFolder.getProjectVersionFolders(publisherName, projectName).await());
                });

                runner.test("with non-existing project folder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final String publisherName = "a";
                    qubFolder.getPublisherFolder(publisherName).await().create().await();
                    final String projectName = "b";
                    test.assertEqual(Iterable.create(), qubFolder.getProjectVersionFolders(publisherName, projectName).await());
                });

                runner.test("with empty project folder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final String publisherName = "a";
                    final String projectName = "b";
                    final QubProjectFolder projectFolder = qubFolder.getProjectFolder(publisherName, projectName).await();
                    projectFolder.create().await();
                    test.assertEqual(Iterable.create(), qubFolder.getProjectVersionFolders(publisherName, projectName).await());
                });

                runner.test("with one version folder", (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final String publisherName = "a";
                    final String projectName = "b";
                    final QubProjectFolder projectFolder = qubFolder.getProjectFolder(publisherName, projectName).await();
                    projectFolder.createFolder("versions/1").await();
                    test.assertEqual(
                        Iterable.create(
                            qubFolder.getProjectVersionFolder(publisherName, projectName, "1").await()),
                        qubFolder.getProjectVersionFolders(publisherName, projectName).await());
                });
            });

            // The four file-resolution methods share the same argument-validation and
            // path-resolution contract; their (previously copy-pasted) test groups are
            // generated by a single shared helper.
            QubFolderTests.projectVersionFileTests(runner, "getProjectJSONFile",
                (String projectName) -> "project.json",
                (QubFolder qubFolder, String publisherName, String projectName, String version) ->
                    qubFolder.getProjectJSONFile(publisherName, projectName, version).await());
            QubFolderTests.projectVersionFileTests(runner, "getCompiledSourcesFile",
                (String projectName) -> projectName + ".jar",
                (QubFolder qubFolder, String publisherName, String projectName, String version) ->
                    qubFolder.getCompiledSourcesFile(publisherName, projectName, version).await());
            QubFolderTests.projectVersionFileTests(runner, "getSourcesFile",
                (String projectName) -> projectName + ".sources.jar",
                (QubFolder qubFolder, String publisherName, String projectName, String version) ->
                    qubFolder.getSourcesFile(publisherName, projectName, version).await());
            QubFolderTests.projectVersionFileTests(runner, "getCompiledTestsFile",
                (String projectName) -> projectName + ".tests.jar",
                (QubFolder qubFolder, String publisherName, String projectName, String version) ->
                    qubFolder.getCompiledTestsFile(publisherName, projectName, version).await());

            runner.testGroup("equals(Object)", () ->
            {
                runner.test("with /qub/ and null", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(false, qubFolder.equals((Object)null));
                });

                runner.test("with /qub/ and \"hello world\"", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(false, qubFolder.equals((Object)"hello world"));
                });

                runner.test("with /qub/ and /other/", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    final Folder folder2 = fileSystem.getFolder("/other/").await();
                    final QubFolder qubFolder2 = QubFolder.get(folder2);
                    test.assertEqual(false, qubFolder.equals((Object)qubFolder2));
                });

                runner.test("with /qub/ and /qub/", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    final Folder folder2 = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder2 = QubFolder.get(folder2);
                    test.assertEqual(true, qubFolder.equals((Object)qubFolder2));
                });
            });

            runner.testGroup("equals(QubFolder)", () ->
            {
                runner.test("with /qub/ and null", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    test.assertEqual(false, qubFolder.equals((QubFolder)null));
                });

                runner.test("with /qub/ and /other/", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    final Folder folder2 = fileSystem.getFolder("/other/").await();
                    final QubFolder qubFolder2 = QubFolder.get(folder2);
                    test.assertEqual(false, qubFolder.equals((QubFolder)qubFolder2));
                });

                runner.test("with /qub/ and /qub/", (Test test) ->
                {
                    final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
                    final Folder folder = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder = QubFolder.get(folder);
                    final Folder folder2 = fileSystem.getFolder("/qub/").await();
                    final QubFolder qubFolder2 = QubFolder.get(folder2);
                    test.assertEqual(true, qubFolder.equals((QubFolder)qubFolder2));
                });
            });

            runner.test("toString()", (Test test) ->
            {
                final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                test.assertEqual("/qub/", qubFolder.toString());
            });
        });
    }

    /**
     * Creates an in-memory file system with a single "/" root for test isolation.
     */
    static InMemoryFileSystem createFileSystem(Test test)
    {
        final InMemoryFileSystem fileSystem = new InMemoryFileSystem(test.getClock());
        fileSystem.createRoot("/").await();
        return fileSystem;
    }

    /**
     * Gets (but does not create) a folder reference in a fresh in-memory file system.
     */
    static Folder getFolder(Test test, String folderPath)
    {
        PreCondition.assertNotNull(test, "test");
        PreCondition.assertNotNullAndNotEmpty(folderPath, "folderPath");

        final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
        return fileSystem.getFolder(folderPath).await();
    }

    /**
     * Creates a folder in a fresh in-memory file system.
     */
    static Folder createFolder(Test test, String folderPath)
    {
        PreCondition.assertNotNull(test, "test");
        PreCondition.assertNotNullAndNotEmpty(folderPath, "folderPath");

        final InMemoryFileSystem fileSystem = QubFolderTests.createFileSystem(test);
        return fileSystem.createFolder(folderPath).await();
    }

    /**
     * Gets a QubFolder reference (folder not created) at the provided path.
     */
    static QubFolder getQubFolder(Test test, String folderPath)
    {
        return QubFolder.get(QubFolderTests.getFolder(test, folderPath));
    }

    /**
     * Creates a QubFolder (folder created) at the provided path.
     */
    static QubFolder createQubFolder(Test test, String folderPath)
    {
        return QubFolder.get(QubFolderTests.createFolder(test, folderPath));
    }

    /**
     * Maps a project name to the expected file name within a project version folder.
     */
    interface ProjectVersionFileName
    {
        String get(String projectName);
    }

    /**
     * Invokes one of the QubFolder file-resolution methods and awaits its result.
     * For the argument-validation tests the method throws before the await is reached.
     */
    interface ProjectVersionFileGetter
    {
        File run(QubFolder qubFolder, String publisherName, String projectName, String version);
    }

    /**
     * Shared tests for the QubFolder methods that resolve a file within a project
     * version folder (project.json, compiled sources, sources, and compiled tests
     * files). All of those methods validate their arguments identically and resolve
     * the file path whether or not the intermediate folders or the file exist.
     * @param runner The test runner.
     * @param methodName The simple name of the QubFolder method under test.
     * @param fileName Maps a project name to the expected file name within the version folder.
     * @param getFile Invokes the method under test.
     */
    static void projectVersionFileTests(TestRunner runner, String methodName, ProjectVersionFileName fileName, ProjectVersionFileGetter getFile)
    {
        runner.testGroup(methodName + "(String,String,String)", () ->
        {
            final Action4<String,String,String,Throwable> errorTest = (String publisherName, String projectName, String version, Throwable expected) ->
            {
                runner.test("with " + English.andList(Iterable.create(publisherName, projectName, version).map(Strings::escapeAndQuote)), (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    test.assertThrows(() -> getFile.run(qubFolder, publisherName, projectName, version), expected);
                });
            };
            errorTest.run(null, null, null, new PreConditionFailure("publisherName cannot be null."));
            errorTest.run("", null, null, new PreConditionFailure("publisherName cannot be empty."));
            errorTest.run("a", null, null, new PreConditionFailure("projectName cannot be null."));
            errorTest.run("a", "", null, new PreConditionFailure("projectName cannot be empty."));
            errorTest.run("a", "b", null, new PreConditionFailure("version cannot be null."));
            errorTest.run("a", "b", "", new PreConditionFailure("version cannot be empty."));

            // Each resolution test arranges a different amount of pre-existing folder
            // structure (via setup) and then verifies that the method still resolves
            // the same file path. Publisher/project/version are fixed to "a"/"b"/"1".
            final Action2<String,Action1<QubFolder>> fileResolutionTest = (String testName, Action1<QubFolder> setup) ->
            {
                runner.test(testName, (Test test) ->
                {
                    final QubFolder qubFolder = QubFolderTests.getQubFolder(test, "/qub/");
                    final String publisherName = "a";
                    final String projectName = "b";
                    final String version = "1";
                    setup.run(qubFolder);
                    test.assertEqual(
                        qubFolder.getFile(publisherName + "/" + projectName + "/versions/" + version + "/" + fileName.get(projectName)).await(),
                        getFile.run(qubFolder, publisherName, projectName, version));
                });
            };
            fileResolutionTest.run("with non-existing Qub folder", (QubFolder qubFolder) -> {});
            fileResolutionTest.run("with non-existing publisher folder", (QubFolder qubFolder) ->
                qubFolder.create().await());
            fileResolutionTest.run("with non-existing project folder", (QubFolder qubFolder) ->
                qubFolder.getPublisherFolder("a").await().create().await());
            fileResolutionTest.run("with non-existing versions folder", (QubFolder qubFolder) ->
                qubFolder.getProjectFolder("a", "b").await().create().await());
            fileResolutionTest.run("with empty versions folder", (QubFolder qubFolder) ->
                qubFolder.getProjectFolder("a", "b").await().createFolder("versions").await());
            // NOTE: the original test groups reused the "project.json file" test names
            // for the jar-file methods as well; the names are kept for continuity.
            fileResolutionTest.run("with non-existing project.json file", (QubFolder qubFolder) ->
                qubFolder.getProjectVersionFolder("a", "b", "1").await().create().await());
            fileResolutionTest.run("with existing project.json file", (QubFolder qubFolder) ->
                qubFolder.getProjectVersionFolder("a", "b", "1").await().createFile("project.json").await());
        });
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapred.ShuffleHandler;
import org.apache.hadoop.mapred.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.Phase;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.util.Records;

import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Factory of mock {@link Job}, {@link Task} and {@link TaskAttempt} object
 * graphs for MapReduce v2 application-master tests.
 *
 * <p>Reports are filled with randomized times/progress and with values drawn
 * from the cycling iterators below, so consecutive calls produce varied but
 * structurally valid records. NOTE(review): values based on
 * {@code Math.random()} / {@code System.currentTimeMillis()} are
 * non-deterministic by design; tests using this class should not assert on
 * exact report values.
 */
public class MockJobs extends MockApps {
  // Cycling sources of "realistic" values; each next() call advances the
  // shared iterator, so generated records differ from one another.
  static final Iterator<JobState> JOB_STATES = Iterators.cycle(JobState
      .values());
  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(TaskState
      .values());
  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators
      .cycle(TaskAttemptState.values());
  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(TaskType
      .values());
  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(JobCounter
      .values());
  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators
      .cycle(FileSystemCounter.values());
  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators
      .cycle(TaskCounter.values());
  static final Iterator<String> FS_SCHEMES = Iterators.cycle("FILE", "HDFS",
      "LAFS", "CEPH");
  static final Iterator<String> USER_COUNTER_GROUPS = Iterators
      .cycle(
          "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
          "PigCounters");
  static final Iterator<String> USER_COUNTERS = Iterators.cycle("counter1",
      "counter2", "counter3");
  static final Iterator<Phase> PHASES = Iterators.cycle(Phase.values());
  static final Iterator<String> DIAGS = Iterators.cycle(
      "Error: java.lang.OutOfMemoryError: Java heap space",
      "Lost task tracker: tasktracker.domain/127.0.0.1:40879");

  // Fixed node-manager coordinates used by every generated attempt.
  public static final String NM_HOST = "localhost";
  public static final int NM_PORT = 1234;
  public static final int NM_HTTP_PORT = 8042;

  // Spread (in ms) used to randomize submit/start/finish timestamps.
  static final int DT = 1000000; // ms

  /** @return a fresh mock job name (delegates to {@link MockApps}). */
  public static String newJobName() {
    return newAppName();
  }

  /**
   * Create numJobs in a map with jobs having appId==jobId
   */
  public static Map<JobId, Job> newJobs(int numJobs, int numTasksPerJob,
      int numAttemptsPerTask) {
    Map<JobId, Job> map = Maps.newHashMap();
    for (int j = 0; j < numJobs; ++j) {
      // one application per job: appId index matches the job index
      ApplicationId appID = MockJobs.newAppID(j);
      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
      map.put(job.getID(), job);
    }
    return map;
  }

  /**
   * Create numJobsPerApp jobs, all under the single given application.
   *
   * @param appID application that owns every generated job
   * @return map keyed by the generated {@link JobId}s
   */
  public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
      int numTasksPerJob, int numAttemptsPerTask) {
    Map<JobId, Job> map = Maps.newHashMap();
    for (int j = 0; j < numJobsPerApp; ++j) {
      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
      map.put(job.getID(), job);
    }
    return map;
  }

  /**
   * Same as {@link #newJobs(ApplicationId, int, int, int)} but optionally
   * marking tasks as failed (failed tasks report {@code null} counters).
   */
  public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
      int numTasksPerJob, int numAttemptsPerTask, boolean hasFailedTasks) {
    Map<JobId, Job> map = Maps.newHashMap();
    for (int j = 0; j < numJobsPerApp; ++j) {
      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask, null,
          hasFailedTasks);
      map.put(job.getID(), job);
    }
    return map;
  }

  /** Build a {@link JobId} record for the given app and job index. */
  public static JobId newJobID(ApplicationId appID, int i) {
    JobId id = Records.newRecord(JobId.class);
    id.setAppId(appID);
    id.setId(i);
    return id;
  }

  /**
   * Build a {@link JobReport} with randomized times and progress.
   * Submit time is DT ms in the past; finish time is always after "now".
   */
  public static JobReport newJobReport(JobId id) {
    JobReport report = Records.newRecord(JobReport.class);
    report.setJobId(id);
    report.setSubmitTime(System.currentTimeMillis()-DT);
    report
        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis()
        + (int) (Math.random() * DT) + 1);
    report.setMapProgress((float) Math.random());
    report.setReduceProgress((float) Math.random());
    report.setJobState(JOB_STATES.next());
    return report;
  }

  /** Build a {@link TaskReport} with randomized times, progress and counters. */
  public static TaskReport newTaskReport(TaskId id) {
    TaskReport report = Records.newRecord(TaskReport.class);
    report.setTaskId(id);
    report
        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis()
        + (int) (Math.random() * DT) + 1);
    report.setProgress((float) Math.random());
    report.setStatus("Moving average: " + Math.random());
    report.setCounters(TypeConverter.toYarn(newCounters()));
    report.setTaskState(TASK_STATES.next());
    return report;
  }

  /**
   * Build a {@link TaskAttemptReport} for the given attempt id.
   * For REDUCE attempts, shuffle/sort finish times are interpolated between
   * start and finish so they are always ordered start < shuffle < sort < finish.
   */
  public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
        id.getTaskId().getJobId().getAppId(), 0);
    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0);
    TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
    report.setTaskAttemptId(id);
    report
        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis()
        + (int) (Math.random() * DT) + 1);

    if (id.getTaskId().getTaskType() == TaskType.REDUCE) {
      // midpoints: shuffle halfway through the run, sort halfway after that
      report.setShuffleFinishTime(
          (report.getFinishTime() + report.getStartTime()) / 2);
      report.setSortFinishTime(
          (report.getFinishTime() + report.getShuffleFinishTime()) / 2);
    }

    report.setPhase(PHASES.next());
    report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
    report.setProgress((float) Math.random());
    report.setCounters(TypeConverter.toYarn(newCounters()));
    report.setContainerId(containerId);
    report.setDiagnosticInfo(DIAGS.next());
    report.setStateString("Moving average " + Math.random());
    return report;
  }

  /**
   * Build a {@link Counters} instance populated with random values for every
   * job counter, every task counter, several filesystem-scheme counter
   * combinations, and a handful of user-defined counters.
   */
  public static Counters newCounters() {
    Counters hc = new Counters();
    for (JobCounter c : JobCounter.values()) {
      hc.findCounter(c).setValue((long) (Math.random() * 1000));
    }
    for (TaskCounter c : TaskCounter.values()) {
      hc.findCounter(c).setValue((long) (Math.random() * 1000));
    }
    // touch each (scheme, counter) pair repeatedly; later iterations simply
    // overwrite earlier values for the same pair
    int nc = FileSystemCounter.values().length * 4;
    for (int i = 0; i < nc; ++i) {
      for (FileSystemCounter c : FileSystemCounter.values()) {
        hc.findCounter(FS_SCHEMES.next(), c).setValue(
            (long) (Math.random() * DT));
      }
    }
    for (int i = 0; i < 2 * 3; ++i) {
      hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next())
          .setValue((long) (Math.random() * 100000));
    }
    return hc;
  }

  /** Create m attempts for the given task, keyed by attempt id. */
  public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid,
      int m) {
    Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
    for (int i = 0; i < m; ++i) {
      TaskAttempt ta = newTaskAttempt(tid, i);
      map.put(ta.getID(), ta);
    }
    return map;
  }

  /**
   * Create a mock {@link TaskAttempt} backed by a randomized
   * {@link TaskAttemptReport}; most getters simply delegate to that report.
   */
  public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
    final TaskAttemptId taid = Records.newRecord(TaskAttemptId.class);
    taid.setTaskId(tid);
    taid.setId(i);
    final TaskAttemptReport report = newTaskAttemptReport(taid);
    return new TaskAttempt() {
      @Override
      public NodeId getNodeId() throws UnsupportedOperationException{
        // node id is not modeled by this mock
        throw new UnsupportedOperationException();
      }

      @Override
      public TaskAttemptId getID() {
        return taid;
      }

      @Override
      public TaskAttemptReport getReport() {
        return report;
      }

      @Override
      public long getLaunchTime() {
        return report.getStartTime();
      }

      @Override
      public long getFinishTime() {
        return report.getFinishTime();
      }

      @Override
      public int getShufflePort() {
        return ShuffleHandler.DEFAULT_SHUFFLE_PORT;
      }

      @Override
      public Counters getCounters() {
        // convert the YARN counters captured in the report back to MR form
        if (report != null && report.getCounters() != null) {
          return new Counters(TypeConverter.fromYarn(report.getCounters()));
        }
        return null;
      }

      @Override
      public float getProgress() {
        return report.getProgress();
      }

      @Override
      public Phase getPhase() {
        return report.getPhase();
      }

      @Override
      public TaskAttemptState getState() {
        return report.getTaskAttemptState();
      }

      @Override
      public boolean isFinished() {
        // terminal attempt states only
        switch (report.getTaskAttemptState()) {
        case SUCCEEDED:
        case FAILED:
        case KILLED:
          return true;
        }
        return false;
      }

      @Override
      public ContainerId getAssignedContainerID() {
        ApplicationAttemptId appAttemptId =
            ApplicationAttemptId.newInstance(taid.getTaskId().getJobId()
                .getAppId(), 0);
        ContainerId id = ContainerId.newContainerId(appAttemptId, 0);
        return id;
      }

      @Override
      public String getNodeHttpAddress() {
        return "localhost:8042";
      }

      @Override
      public List<String> getDiagnostics() {
        return Lists.newArrayList(report.getDiagnosticInfo());
      }

      @Override
      public String getAssignedContainerMgrAddress() {
        return "localhost:9998";
      }

      @Override
      public long getShuffleFinishTime() {
        return report.getShuffleFinishTime();
      }

      @Override
      public long getSortFinishTime() {
        return report.getSortFinishTime();
      }

      @Override
      public String getNodeRackName() {
        return "/default-rack";
      }
    };
  }

  /** Create n tasks (each with m attempts) for the job, keyed by task id. */
  public static Map<TaskId, Task> newTasks(JobId jid, int n, int m,
      boolean hasFailedTasks) {
    Map<TaskId, Task> map = Maps.newHashMap();
    for (int i = 0; i < n; ++i) {
      Task task = newTask(jid, i, m, hasFailedTasks);
      map.put(task.getID(), task);
    }
    return map;
  }

  /**
   * Create a mock {@link Task} with m attempts. When {@code hasFailedTasks}
   * is set, {@link Task#getCounters()} returns {@code null} (simulating a
   * task for which no counters were ever reported).
   */
  public static Task newTask(JobId jid, int i, int m,
      final boolean hasFailedTasks) {
    final TaskId tid = Records.newRecord(TaskId.class);
    tid.setJobId(jid);
    tid.setId(i);
    tid.setTaskType(TASK_TYPES.next());
    final TaskReport report = newTaskReport(tid);
    final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);
    return new Task() {
      @Override
      public TaskId getID() {
        return tid;
      }

      @Override
      public TaskReport getReport() {
        return report;
      }

      @Override
      public Counters getCounters() {
        if (hasFailedTasks) {
          // failed tasks expose no counters
          return null;
        }
        return new Counters(
            TypeConverter.fromYarn(report.getCounters()));
      }

      @Override
      public float getProgress() {
        return report.getProgress();
      }

      @Override
      public TaskType getType() {
        return tid.getTaskType();
      }

      @Override
      public Map<TaskAttemptId, TaskAttempt> getAttempts() {
        return attempts;
      }

      @Override
      public TaskAttempt getAttempt(TaskAttemptId attemptID) {
        return attempts.get(attemptID);
      }

      @Override
      public boolean isFinished() {
        // terminal task states only
        switch (report.getTaskState()) {
        case SUCCEEDED:
        case KILLED:
        case FAILED:
          return true;
        }
        return false;
      }

      @Override
      public boolean canCommit(TaskAttemptId taskAttemptID) {
        return false;
      }

      @Override
      public TaskState getState() {
        return report.getTaskState();
      }
    };
  }

  /**
   * Aggregate counters over all tasks that have counters (tasks whose
   * getCounters() is null, i.e. "failed" mocks, are skipped).
   */
  public static Counters getCounters(
      Collection<Task> tasks) {
    List<Task> completedTasks = new ArrayList<Task>();
    for (Task task : tasks) {
      if (task.getCounters() != null) {
        completedTasks.add(task);
      }
    }
    Counters counters = new Counters();
    return JobImpl.incrTaskCounters(counters, completedTasks);
  }

  /** Tally of total/completed map and reduce tasks. */
  static class TaskCount {
    int maps;
    int reduces;
    int completedMaps;
    int completedReduces;

    // Count one task into the appropriate total/completed buckets.
    void incr(Task task) {
      TaskType type = task.getType();
      boolean finished = task.isFinished();
      if (type == TaskType.MAP) {
        if (finished) {
          ++completedMaps;
        }
        ++maps;
      } else if (type == TaskType.REDUCE) {
        if (finished) {
          ++completedReduces;
        }
        ++reduces;
      }
    }
  }

  /** Tally map/reduce totals across the given tasks. */
  static TaskCount getTaskCount(Collection<Task> tasks) {
    TaskCount tc = new TaskCount();
    for (Task task : tasks) {
      tc.incr(task);
    }
    return tc;
  }

  /** Create a mock job with no conf file and no failed tasks. */
  public static Job newJob(ApplicationId appID, int i, int n, int m) {
    return newJob(appID, i, n, m, null);
  }

  /** Create a mock job with the given conf file and no failed tasks. */
  public static Job newJob(ApplicationId appID, int i, int n, int m,
      Path confFile) {
    return newJob(appID, i, n, m, confFile, false);
  }

  /**
   * Create a fully populated mock {@link Job}.
   *
   * @param appID owning application
   * @param i job index within the application
   * @param n number of tasks
   * @param m number of attempts per task
   * @param confFile path returned by {@link Job#getConfFile()} and read by
   *        {@code loadConfFile()}; may be null if never loaded
   * @param hasFailedTasks whether generated tasks report null counters
   */
  public static Job newJob(ApplicationId appID, int i, int n, int m,
      Path confFile, boolean hasFailedTasks) {
    final JobId id = newJobID(appID, i);
    final String name = newJobName();
    final JobReport report = newJobReport(id);
    final Map<TaskId, Task> tasks = newTasks(id, n, m, hasFailedTasks);
    final TaskCount taskCount = getTaskCount(tasks.values());
    final Counters counters = getCounters(tasks
        .values());
    final Path configFile = confFile;

    Map<JobACL, AccessControlList> tmpJobACLs =
        new HashMap<JobACL, AccessControlList>();
    final Configuration conf = new Configuration();
    // grant view access to "testuser" so ACL-related code paths are exercised
    conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);

    JobACLsManager aclsManager = new JobACLsManager(conf);
    tmpJobACLs = aclsManager.constructJobACLs(conf);
    final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
    return new Job() {
      @Override
      public JobId getID() {
        return id;
      }

      @Override
      public String getName() {
        return name;
      }

      @Override
      public JobState getState() {
        return report.getJobState();
      }

      @Override
      public JobReport getReport() {
        return report;
      }

      @Override
      public float getProgress() {
        return 0;
      }

      @Override
      public Counters getAllCounters() {
        return counters;
      }

      @Override
      public Map<TaskId, Task> getTasks() {
        return tasks;
      }

      @Override
      public Task getTask(TaskId taskID) {
        return tasks.get(taskID);
      }

      @Override
      public int getTotalMaps() {
        return taskCount.maps;
      }

      @Override
      public int getTotalReduces() {
        return taskCount.reduces;
      }

      @Override
      public int getCompletedMaps() {
        return taskCount.completedMaps;
      }

      @Override
      public int getCompletedReduces() {
        return taskCount.completedReduces;
      }

      @Override
      public boolean isUber() {
        return false;
      }

      @Override
      public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
          int fromEventId, int maxEvents) {
        // completion events are not modeled by this mock
        return null;
      }

      @Override
      public TaskCompletionEvent[] getMapAttemptCompletionEvents(
          int startIndex, int maxEvents) {
        // completion events are not modeled by this mock
        return null;
      }

      @Override
      public Map<TaskId, Task> getTasks(TaskType taskType) {
        throw new UnsupportedOperationException("Not supported yet.");
      }

      @Override
      public List<String> getDiagnostics() {
        return Collections.<String> emptyList();
      }

      @Override
      public boolean checkAccess(UserGroupInformation callerUGI,
          JobACL jobOperation) {
        // mock jobs are visible to everyone
        return true;
      }

      @Override
      public String getUserName() {
        return "mock";
      }

      @Override
      public String getQueueName() {
        return "mockqueue";
      }

      @Override
      public Path getConfFile() {
        return configFile;
      }

      @Override
      public Map<JobACL, AccessControlList> getJobACLs() {
        return jobACLs;
      }

      @Override
      public List<AMInfo> getAMInfos() {
        // two fixed AM attempts
        List<AMInfo> amInfoList = new LinkedList<AMInfo>();
        amInfoList.add(createAMInfo(1));
        amInfoList.add(createAMInfo(2));
        return amInfoList;
      }

      @Override
      public Configuration loadConfFile() throws IOException {
        FileContext fc = FileContext.getFileContext(configFile.toUri(), conf);
        Configuration jobConf = new Configuration(false);
        jobConf.addResource(fc.open(configFile), configFile.toString());
        return jobConf;
      }

      @Override
      public void setQueueName(String queueName) {
        // do nothing
      }

      @Override
      public void setJobPriority(Priority priority) {
        // do nothing
      }
    };
  }

  /** Build an {@link AMInfo} for a fixed app (100, 1) and the given attempt. */
  private static AMInfo createAMInfo(int attempt) {
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
        ApplicationId.newInstance(100, 1), attempt);
    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
    return MRBuilderUtils.newAMInfo(appAttemptId, System.currentTimeMillis(),
        containerId, NM_HOST, NM_PORT, NM_HTTP_PORT);
  }
}
/* * Copyright 2018 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.datacollector.usagestats; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.Files; import com.streamsets.datacollector.activation.Activation; import com.streamsets.datacollector.bundles.SupportBundleManager; import com.streamsets.datacollector.json.ObjectMapperFactory; import com.streamsets.datacollector.main.BuildInfo; import com.streamsets.datacollector.main.RuntimeInfo; import com.streamsets.datacollector.usagestats.TestStatsInfo.TestModelStatsExtension; import com.streamsets.datacollector.util.Configuration; import com.streamsets.datacollector.util.SysInfo; import com.streamsets.lib.security.http.RestClient; import com.streamsets.pipeline.lib.executor.SafeScheduledExecutorService; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.junit.Assert; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Matchers; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
javax.servlet.Servlet; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Future; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; public class TestStatsCollectorTask { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final Map<String, Object> DEFAULT_SYS_INFO_MAP = ImmutableMap.of("cloudProvider", SysInfo.UNKNOWN); private static final String POST_TELEMETRY_URL = "https://fake-url.com/post/telemetry/here"; private static final String SDC_313_STATS_JSON_FIXTURE = "/com/streamsets/datacollector/usagestats/sdc3.13.stats.json"; private static final String TRANSFORMER_315_STATS_JSON_FIXTURE = "/com/streamsets/datacollector/usagestats/transformer3.15.stats.json"; private static final String TRANSFORMER_315_STATS_JSON_SDC_ID = "7cac8108-d67c-11ea-80e3-414ee1f55860"; private Runnable runnable; private HttpURLConnection[] uploadConnectionHolder = new HttpURLConnection[1]; private File createTestDir() { File dir = new File("target", UUID.randomUUID().toString()); Assert.assertTrue(dir.mkdir()); return dir.getAbsoluteFile(); } @Test public void testGetters() { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config 
= new Configuration(); config.set(AbstractStatsCollectorTask.ROLL_PERIOD_CONFIG, 1); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true); Assert.assertEquals(buildInfo, task.getBuildInfo()); Assert.assertEquals(runtimeInfo, task.getRuntimeInfo()); Assert.assertEquals(TimeUnit.HOURS.toMillis(1), task.getRollFrequencyMillis()); Assert.assertNull(task.getStatsInfo()); } @Test public void testClusterSlave() { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true); Mockito.when(runtimeInfo.isClusterSlave()).thenReturn(true); task.init(); Assert.assertTrue(task.isOpted()); Assert.assertFalse(task.isActive()); Assert.assertNotNull(task.getStatsInfo()); task.stop(); } @Test public void testFirstRunAndCommonInitializationAndStopLogic() { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); task.init(); Assert.assertFalse(task.isOpted()); Assert.assertFalse(task.isActive()); Assert.assertNotNull(task.getStatsInfo()); 
Mockito.verify(runnable, Mockito.times(1)).run(); Mockito.verify(scheduler, Mockito.times(1)).scheduleAtFixedRate( Mockito.eq(runnable), Mockito.eq(60L), Mockito.eq(60L), Mockito.eq(TimeUnit.SECONDS) ); Future future = Mockito.mock(ScheduledFuture.class); Mockito.doReturn(future).when(task).getFuture(); Assert.assertEquals(1, task.getStatsInfo().getActiveStats().getUpTime().getMultiplier()); task.stop(); Mockito.verify(future, Mockito.times(1)).cancel(Mockito.eq(false)); Mockito.verify(runnable, Mockito.times(2)).run(); Assert.assertEquals(0, task.getStatsInfo().getActiveStats().getUpTime().getMultiplier()); } @Test public void testInitialOptingOut() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); task.init(); Assert.assertFalse(task.isOpted()); Assert.assertFalse(task.isActive()); task.setActive(false); Assert.assertTrue(task.isOpted()); Assert.assertFalse(task.isActive()); task.stop(); } @Test public void testInitialOptingIn() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); task.init(); Assert.assertFalse(task.isOpted()); 
Assert.assertFalse(task.isActive()); task.setActive(false); Assert.assertTrue(task.isOpted()); Assert.assertFalse(task.isActive()); task.stop(); } @Test public void testOptedNo() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); try (OutputStream os = new FileOutputStream(task.getOptFile())) { ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, false)); } task.init(); Assert.assertTrue(task.isOpted()); Assert.assertFalse(task.isActive()); Assert.assertNotNull(task.getStatsInfo()); task.stop(); } @Test public void testOptedYesNoPriorStats() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); try (OutputStream os = new FileOutputStream(task.getOptFile())) { ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true)); } task.init(); Assert.assertTrue(task.isOpted()); Assert.assertTrue(task.isActive()); task.stop(); } @Test public void testOptedInvalid1() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); 
Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); try (OutputStream os = new FileOutputStream(task.getOptFile())) { } task.init(); Assert.assertFalse(task.isOpted()); Assert.assertFalse(task.isActive()); task.stop(); } @Test public void testOptedInvalid2() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); try (OutputStream os = new FileOutputStream(task.getOptFile())) { ObjectMapperFactory.get().writeValue(os, null); } task.init(); Assert.assertFalse(task.isOpted()); Assert.assertFalse(task.isActive()); task.stop(); } @Test public void testOptedInvalid3() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); try (OutputStream os = new FileOutputStream(task.getOptFile())) 
{ ObjectMapperFactory.get().writeValue(os, ImmutableList.of()); } task.init(); Assert.assertFalse(task.isOpted()); Assert.assertFalse(task.isActive()); task.stop(); } @Test public void testOptedInvalid4() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler); try (OutputStream os = new FileOutputStream(task.getOptFile())) { os.write("foo".getBytes()); } task.init(); Assert.assertFalse(task.isOpted()); Assert.assertFalse(task.isActive()); task.stop(); } @Test public void testOptedYesPriorStats() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); // report fails so we can easily check collected stats AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, false); try (OutputStream os = new FileOutputStream(task.getOptFile())) { ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true)); } try (OutputStream os = new FileOutputStream(task.getStatsFile())) { StatsInfo statsInfo = new StatsInfo(task.provideStatsExtensions()); statsInfo.getActiveStats().setDataCollectorVersion("v2"); ObjectMapperFactory.get().writeValue(os, statsInfo); } task.init(); 
// Assertions for testOptedYesPriorStats: on init the task loads prior stats and
// rolls the active period; collected (historical) stats stay untouched.
Assert.assertTrue(task.isOpted());
Assert.assertTrue(task.isActive());
// On load, we corrected version info to be the current "v1" when we rolled
Assert.assertEquals("v1", task.getStatsInfo().getActiveStats().getDataCollectorVersion());
// collected stats should be left alone though
Assert.assertEquals(1, task.getStatsInfo().getCollectedStats().size());
Assert.assertEquals("v2", task.getStatsInfo().getCollectedStats().get(0).getDataCollectorVersion());
Assert.assertEquals(1, task.getStatsInfo().getActiveStats().getExtensions().size());
// this effectively checks if the extension's statsInfo reference was populated when loaded from disk.
Assert.assertNotEquals("somePid", task.getStatsInfo().getActiveStats().getExtensions().get(0).hashPipelineId("somePid"));
task.stop();
}

/** Opted in with prior stats on disk ("v0"): init rolls them to the current version. */
@Test
public void testRunnable() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  SupportBundleManager supportBundleManager = Mockito.mock(SupportBundleManager.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  try (OutputStream os = new FileOutputStream(task.getOptFile())) {
    ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true));
  }
  try (OutputStream os = new FileOutputStream(task.getStatsFile())) {
    StatsInfo statsInfo = new StatsInfo(task.provideStatsExtensions());
    statsInfo.getActiveStats().setSdcId("id");
    statsInfo.getActiveStats().setDataCollectorVersion("v0");
    statsInfo.getActiveStats().setBuildRepoSha("sha1");
    statsInfo.getActiveStats().setExtraInfo(ImmutableMap.of("a", "A"));
    statsInfo.getCollectedStats().add(new StatsBean());
    ObjectMapperFactory.get().writeValue(os, statsInfo);
  }
  task.init();
  Assert.assertTrue(task.isOpted());
  Assert.assertTrue(task.isActive());
  //verifying we rolled the read stats
  Assert.assertEquals("v1", task.getStatsInfo().getActiveStats().getDataCollectorVersion());
  // verifying StatsInfo is populated correctly on activeStats
  Assert.assertNotEquals("pid", task.getStatsInfo().getActiveStats().hashPipelineId("pid"));
  task.stop();
}

/**
 * Exercises retry back-off when reportStats fails: the back-off window doubles on
 * consecutive failures (2, 4, ... minutes as asserted below), rolls happen at most
 * once per hour, and a save happens on every run.
 */
@Test
public void testRunnableReportStatsException() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  SupportBundleManager supportBundleManager = Mockito.mock(SupportBundleManager.class);
  // though we pass true here, we will temporarily cause failures in other layers first, before we let it get here
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  try (OutputStream os = new FileOutputStream(task.getOptFile())) {
    ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true));
  }
  // make it fail on first call
  Mockito.doReturn(false).when(task).reportStats(Mockito.anyList());
  task.init();
  Assert.assertTrue(task.isOpted());
  Assert.assertTrue(task.isActive());
  int expectedRolls = 1;
  int expectedReports = 1;
  int expectedSaves = 1;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  long initCompleteTime = System.currentTimeMillis();
  task.getRunnable(false).run();
  // we just rolled, should not roll again
  // we are in back off, should not report
  // always save
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // simulate 1 minute passing
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(1));
  task.getRunnable(false).run();
  // only roll once per hour
  // first back off period is 2 minutes, so we still skip it
  // always save
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // simulate 2 minutes passing since init
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(2));
  task.getRunnable(false).run();
  // only roll once per hour
  // completed back off, should report
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // simulate minutes 3 - 5
  for (int mins = 3; mins <= 5; mins++) {
    Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(mins));
    task.getRunnable(false).run();
    // only roll once per hour
    // still in back off, skip report
    expectedSaves++;
    verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  }
  // minute 6, second back off completed
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(6));
  task.getRunnable(false).run();
  // only roll once per hour
  // completed back off, should report
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // rather than simulating all minutes, skip to hour mark, which also exceeds back off
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(60));
  task.getRunnable(false).run();
  // hour's up, roll!
// Continuation of testRunnableReportStatsException: the hour mark was reached, so
// the run above both rolled and (back-off elapsed) reported.
  expectedRolls++;
  // completed back off, should report
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // we aren't consistently using the mock time at all layers, so set this field otherwise we will roll again
  task.getStatsInfo().getActiveStats().setStartTime(task.getCurrentTimeMillis());
  // minute 61: still failing, still in back off
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(61));
  task.getRunnable(false).run();
  // only roll once per hour
  // back off period is 16 minutes, so we still skip it
  // always save
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // force a roll and report, which triggers back off again (for 32 mins)
  task.getRunnable(true).run();
  expectedRolls++;
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // we aren't consistently using the mock time at all layers, so set this field otherwise we will roll again
  task.getStatsInfo().getActiveStats().setStartTime(task.getCurrentTimeMillis());
  // track when this period started for later
  long interestingPeriodStart = task.getCurrentTimeMillis();
  // back off is now 61 + 32 = 93 minutes, try 92 to make sure it is still backing off after a forced roll
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(92));
  task.getRunnable(false).run();
  // only roll once per hour
  // in back off
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // let the next report go through
  Mockito.when(task.reportStats(Mockito.anyList())).thenCallRealMethod();
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(initCompleteTime + TimeUnit.MINUTES.toMillis(93));
  task.getRunnable(false).run();
  // only roll once per hour
  // just left back off
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // next report in 24 hours from oldest period start. Try 30 mins short of that.
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(
      interestingPeriodStart + TimeUnit.HOURS.toMillis(24) - TimeUnit.MINUTES.toMillis(30));
  task.getRunnable(false).run();
  // way overdue for a roll
  expectedRolls++;
  // not quite 24 hours since oldest period start
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // we aren't consistently using the mock time at all layers, so set this field otherwise we will roll again
  task.getStatsInfo().getActiveStats().setStartTime(task.getCurrentTimeMillis());
  // report after the full 24 hrs
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(interestingPeriodStart + TimeUnit.HOURS.toMillis(24));
  task.getRunnable(false).run();
  // just rolled 30m ago
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  interestingPeriodStart = task.getCurrentTimeMillis();
  // make it fail again
  Mockito.doReturn(false).when(task).reportStats(Mockito.anyList());
  // pump the failure count up super high to trigger maximum back off duration of 1 day
  for (long backOff = 1; backOff < TimeUnit.DAYS.toMinutes(1) ; backOff = backOff << 1) {
    task.getRunnable(true).run();
    expectedRolls++;
    expectedReports++;
    expectedSaves++;
    verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  }
  // we aren't consistently using the mock time at all layers, so set this field manually
  task.getStatsInfo().getActiveStats().setStartTime(interestingPeriodStart);
  // 23 hours + 59 minutes later, should not report due to backoff (normally at 99% of a day if no back off)
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(
      interestingPeriodStart + TimeUnit.HOURS.toMillis(23) + TimeUnit.MINUTES.toMillis(59));
  task.getRunnable(false).run();
  expectedRolls++;
  // no report, barely too early
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  // we aren't consistently using the mock time at all layers, so set this field otherwise we will roll again
  task.getStatsInfo().getActiveStats().setStartTime(task.getCurrentTimeMillis());
  // 24 hours later, should report
  Mockito.when(task.getCurrentTimeMillis()).thenReturn(interestingPeriodStart + TimeUnit.HOURS.toMillis(24));
  task.getRunnable(false).run();
  // just rolled
  expectedReports++;
  expectedSaves++;
  verifyRollsReportsSaves(task, expectedRolls, expectedReports, expectedSaves);
  task.stop();
}

/**
 * Asserts the cumulative invocation counts on the spied task: rolls
 * (updateAfterRoll), reports (reportStats) and saves (saveStatsInternal).
 */
private void verifyRollsReportsSaves(AbstractStatsCollectorTask task, int expectedRolls, int expectedReports, int expectedSaves) {
  Mockito.verify(task, Mockito.times(expectedRolls)).updateAfterRoll(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyList());
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
}

/** setActive with the current value must be a no-op (no save). Continues on the following lines. */
@Test
public void testSetActiveNoChange() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  SupportBundleManager supportBundleManager = Mockito.mock(SupportBundleManager.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  try (OutputStream os = new FileOutputStream(task.getOptFile())) {
    ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true));
  }
  try (OutputStream os = new FileOutputStream(task.getStatsFile())) {
    StatsInfo statsInfo = new StatsInfo(task.provideStatsExtensions());
    statsInfo.getActiveStats().setSdcId("id");
    statsInfo.getActiveStats().setDataCollectorVersion("v1");
// Continuation of testSetActiveNoChange: finish writing the stats file, then
// verify that setActive(current value) does not trigger a save.
    statsInfo.getActiveStats().setBuildRepoSha("sha1");
    statsInfo.getActiveStats().setExtraInfo(ImmutableMap.of("a", "A"));
    ObjectMapperFactory.get().writeValue(os, statsInfo);
  }
  task.init();
  Mockito.reset(task);
  task.setActive(task.isActive());
  Mockito.verify(task, Mockito.never()).saveStatsInternal();
  task.stop();
}

/** Deactivating rolls the active period, persists active=false, and clears collected stats. */
@Test
public void testSetActiveFromTrueToFalse() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  try (OutputStream os = new FileOutputStream(task.getOptFile())) {
    ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true));
  }
  try (OutputStream os = new FileOutputStream(task.getStatsFile())) {
    StatsInfo statsInfo = new StatsInfo(task.provideStatsExtensions());
    statsInfo.getActiveStats().setSdcId("id");
    statsInfo.getActiveStats().setDataCollectorVersion("v1");
    statsInfo.getActiveStats().setBuildRepoSha("sha1");
    statsInfo.getActiveStats().setExtraInfo(ImmutableMap.of("a", "A"));
    ObjectMapperFactory.get().writeValue(os, statsInfo);
    statsInfo.getCollectedStats().add(new StatsBean());
  }
  task.init();
  Mockito.reset(task);
  long start = task.getStatsInfo().getActiveStats().getStartTime();
  Thread.sleep(1);
  task.setActive(false);
  Assert.assertTrue(task.getStatsInfo().getActiveStats().getStartTime() > start);
  Assert.assertFalse(task.isActive());
  try (InputStream is = new FileInputStream(task.getOptFile())) {
    Map map = ObjectMapperFactory.get().readValue(is, Map.class);
    Assert.assertNotNull(map.get(AbstractStatsCollectorTask.STATS_ACTIVE_KEY));
    Assert.assertFalse((Boolean) map.get(AbstractStatsCollectorTask.STATS_ACTIVE_KEY));
  }
  Mockito.verify(task, Mockito.times(1)).saveStatsInternal();
  Assert.assertTrue(task.getStatsInfo().getCollectedStats().isEmpty());
  task.stop();
}

/** Upgrade path from a real 3.13.0 install: active stats roll forward, collected history is preserved as-is. */
@Test
public void testUpgradeFrom313WithOptedTrue() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("upgradeSdcId", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  // report should fail so we can easily inspect collected stats
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, false);
  try (OutputStream os = new FileOutputStream(task.getOptFile())) {
    // This is copied from a real 3.13.0 install
    IOUtils.write(
        "{\n" +
        "  \"stats.active\" : true,\n" +
        "  \"stats.lastReport\" : 1585760851613\n" +
        "}",
        os);
  }
  FileUtils.copyFile(
      new File(this.getClass().getResource(SDC_313_STATS_JSON_FIXTURE).getPath()),
      task.getStatsFile());
  task.init();
  Assert.assertTrue(task.isOpted());
  Assert.assertTrue(task.isActive());
  ActiveStats activeStats = task.getStatsInfo().getActiveStats();
  long startTimeFromFixture = 1588195985352L;
  // we immediately roll, so start time should be more recent
  Assert.assertTrue(activeStats.getStartTime() > startTimeFromFixture);
  // collected time should match fixture's start time
  Assert.assertEquals(1, task.getStatsInfo().getCollectedStats().size());
  StatsBean collected = task.getStatsInfo().getCollectedStats().get(0);
  Assert.assertEquals(startTimeFromFixture, collected.getStartTime());
  // test newer fields in current stats
  Assert.assertEquals("sha1", activeStats.getBuildRepoSha());
  Assert.assertEquals("upgradeSdcId", activeStats.getSdcId());
  // collected stats should have null for new fields ...
  Assert.assertEquals(null, collected.getBuildRepoSha());
  // ... except sdcId. That is the one field we retroactively populate
  Assert.assertEquals("upgradeSdcId", collected.getSdcId());
  // test upgraded fields
  Assert.assertEquals(ActiveStats.VERSION, activeStats.getVersion());
  Assert.assertEquals("v1", activeStats.getDataCollectorVersion());
  // test new extension
  Assert.assertEquals(1, activeStats.getExtensions().size());
  TestModelStatsExtension ext = (TestModelStatsExtension) activeStats.getExtensions().get(0);
  Assert.assertEquals(0, ext.getRolls());
  // didn't exist at time of roll
  Assert.assertEquals(1, ext.getStartSystems());
  Assert.assertEquals(0, ext.getStopSystems());
  // test collected fields were not upgraded and have no extensions
  Assert.assertEquals("1.0", collected.getVersion());
  Assert.assertEquals("3.13.0", collected.getDataCollectorVersion());
  Assert.assertEquals(12193 + 47797, collected.getPipelineMilliseconds());
  Assert.assertEquals(Long.valueOf(59989), collected.getStageMilliseconds().get(
      "streamsets-datacollector-dev-lib::com_streamsets_pipeline_stage_devtest_RandomDataGeneratorSource"));
  Assert.assertEquals(Long.valueOf(59989), collected.getStageMilliseconds().get(
      "streamsets-datacollector-basic-lib::com_streamsets_pipeline_stage_destination_devnull_NullDTarget"));
  Assert.assertEquals(1, collected.getRecordsOM());
  Assert.assertEquals(ImmutableList.of(), collected.getExtensions());
  task.stop();
}

/**
 * updateAfterRoll must purge active-stats extensions that the task no longer
 * provides, while leaving extensions on collected stats intact. Continues below.
 */
@Test
public void testUpdateAfterRollRemovesInvalidExtensions() {
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo(UUID.randomUUID().toString(), null);
  Configuration config = new Configuration();
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, null, true);
  Mockito.when(task.provideStatsExtensions()).thenReturn(ImmutableList.of()); // no extensions
// Continuation of testUpdateAfterRollRemovesInvalidExtensions: build a StatsInfo
// that still carries a TestModelStatsExtension, roll it, then run updateAfterRoll.
  StatsInfo stats = new StatsInfo(ImmutableList.of(new TestModelStatsExtension()));
  stats.setCurrentSystemInfo(buildInfo, runtimeInfo, task.getSysInfo(), task.getActivation());
  stats.rollIfNeeded(buildInfo, runtimeInfo, task.getSysInfo(), task.getActivation(),1, true, System.currentTimeMillis());
  // collected stats have extensions
  Assert.assertEquals(1, stats.getCollectedStats().size());
  Assert.assertEquals(1, stats.getCollectedStats().get(0).getExtensions().size());
  StatsInfo upgraded = task.updateAfterRoll(stats);
  // Active Stats extensions purged
  Assert.assertEquals(ImmutableList.of(), upgraded.getActiveStats().getExtensions());
  // collected stats should not get purged
  Assert.assertEquals(1, stats.getCollectedStats().size());
  Assert.assertEquals(1, stats.getCollectedStats().get(0).getExtensions().size());
}

/** Activating from inactive starts a fresh period and persists active=true. */
@Test
public void testSetActiveFromFalseToTrue() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  try (OutputStream os = new FileOutputStream(task.getOptFile())) {
    ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(AbstractStatsCollectorTask.STATS_ACTIVE_KEY, false));
  }
  task.init();
  long start = task.getStatsInfo().getActiveStats().getStartTime();
  Assert.assertTrue(task.getStatsInfo().getCollectedStats().isEmpty());
  Thread.sleep(1);
  task.setActive(true);
  Assert.assertTrue(task.getStatsInfo().getActiveStats().getStartTime() > start);
  // we just reported the activation interval, but we keep it as reported (SDC-14937)
  Assert.assertEquals(1, task.getStatsInfo().getCollectedStats().size());
  Assert.assertTrue(task.isActive());
  try (InputStream is = new FileInputStream(task.getOptFile())) {
    Map map = ObjectMapperFactory.get().readValue(is, Map.class);
    Assert.assertNotNull(map.get(AbstractStatsCollectorTask.STATS_ACTIVE_KEY));
    Assert.assertTrue((Boolean) map.get(AbstractStatsCollectorTask.STATS_ACTIVE_KEY));
  }
  Mockito.verify(task, Mockito.times(1)).saveStatsInternal();
  task.stop();
}

/** initTask runs the runnable once with force=true; saveStats runs it once with force=false. */
@Test
public void testSaveStats() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1");
  RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTaskAndRunnable(buildInfo, runtimeInfo, config, scheduler);
  final AtomicLong nonForceRuns = new AtomicLong(0);
  final AtomicLong forceRuns = new AtomicLong(0);
  Mockito.when(task.isActive()).thenReturn(true);
  Mockito.when(task.isOpted()).thenReturn(true);
  // Wrap the real runnable so we can count force vs. non-force invocations.
  Mockito.when(task.getRunnable(Mockito.anyBoolean())).then((Answer<Runnable>) invocation -> {
    final Runnable runnable = (Runnable) invocation.callRealMethod();
    boolean isForce = (boolean)invocation.getArguments()[0];
    return () -> {
      long runs = (isForce)? forceRuns.incrementAndGet() : nonForceRuns.incrementAndGet();
      runnable.run();
    };
  });
  task.initTask();
  Assert.assertEquals(0, nonForceRuns.get());
  Assert.assertEquals(1, forceRuns.get());
  Mockito.verify(task).saveStatsInternal();
  task.saveStats();
  Assert.assertEquals(1, nonForceRuns.get());
  Assert.assertEquals(1, forceRuns.get());
  Mockito.verify(task, Mockito.times(2)).saveStatsInternal();
}

/** reportStats posts the beans to the telemetry upload URL and marks them reported. */
@Test
public void testReportStats() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  String sdcId = "0123456789-0123456789-0123456789";
  RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  List<StatsBean> stats = ImmutableList.of(new StatsBean());
  stats.get(0).setActivePipelines(5);
  // not reported yet
  Assert.assertFalse(stats.get(0).isReported());
  Assert.assertTrue(task.reportStats(stats));
  // reported
  Assert.assertTrue(stats.get(0).isReported());
  Mockito.verify(task).postToGetTelemetryUrl(
      Mockito.any(),
      Mockito.eq(ImmutableMap.of(
          AbstractStatsCollectorTask.GET_TELEMETRY_URL_ARG_CLIENT_ID, sdcId,
          AbstractStatsCollectorTask.GET_TELEMETRY_URL_ARG_EXTENSION,
          AbstractStatsCollectorTask.GET_TELEMETRY_URL_ARG_EXTENSION_JSON)));
  Mockito.verify(task).getHttpURLConnection(new URL(POST_TELEMETRY_URL));
  Mockito.verify(uploadConnectionHolder[0]).getResponseCode();
  List<StatsBean> uploadedStats = OBJECT_MAPPER.readValue(
      ((ByteArrayOutputStream) uploadConnectionHolder[0].getOutputStream()).toByteArray(),
      new TypeReference<List<StatsBean>>(){});
  Assert.assertEquals(1, uploadedStats.size());
  Assert.assertEquals(5, uploadedStats.get(0).getActivePipelines());
}

/** Raw-stats upload: a bare JSON array of StatsBean. */
@Test
public void testReportRawStatsArray() throws Exception {
  // simulates uploading just the array of collected stats
  String expectedSdcId = "expected-sdc-id";
  long testTime = System.currentTimeMillis();
  long periodStartTime = testTime - TimeUnit.HOURS.toMillis(1);
  int netPipelineStarts = 5;
  StatsBean statsBean = createStatsBeanForReportRawStatsTests(
      expectedSdcId, periodStartTime, testTime, netPipelineStarts);
  reportAndVerifyRawStats(OBJECT_MAPPER.writeValueAsString(ImmutableList.of(statsBean)),
      expectedSdcId, periodStartTime, testTime, netPipelineStarts);
}

/** Raw-stats upload: a full serialized StatsInfo (stats.json). */
@Test
public void testReportRawStatsJson() throws Exception {
  // simulates uploading a full stats.json (serialized StatsInfo)
  String expectedSdcId = "expected-sdc-id";
  long testTime = System.currentTimeMillis();
  long periodStartTime = testTime - TimeUnit.HOURS.toMillis(1);
  int netPipelineStarts = 7;
  StatsBean statsBean = createStatsBeanForReportRawStatsTests(
      expectedSdcId, periodStartTime, testTime, netPipelineStarts);
  StatsInfo statsInfo = new StatsInfo(ImmutableList.of(new TestModelStatsExtension()));
  statsInfo.getCollectedStats().add(statsBean);
  reportAndVerifyRawStats(OBJECT_MAPPER.writeValueAsString(statsInfo),
      expectedSdcId, periodStartTime, testTime, netPipelineStarts);
}

/** Raw-stats upload: the wrapper map returned by the system/stats REST endpoint. */
@Test
public void testReportRawStatsFromApiResponse() throws Exception {
  // simulates uploading the response to the system/stats API endpoint
  String expectedSdcId = "expected-sdc-id";
  long testTime = System.currentTimeMillis();
  long periodStartTime = testTime - TimeUnit.HOURS.toMillis(1);
  int netPipelineStarts = 11;
  StatsBean statsBean = createStatsBeanForReportRawStatsTests(
      expectedSdcId, periodStartTime, testTime, netPipelineStarts);
  StatsInfo statsInfo = new StatsInfo(ImmutableList.of(new TestModelStatsExtension()));
  statsInfo.getCollectedStats().add(statsBean);
  Map<String, Object> restResponseMap = ImmutableMap.of(
      "opted", true,
      "active", true,
      "stats", statsInfo
  );
  reportAndVerifyRawStats(OBJECT_MAPPER.writeValueAsString(restResponseMap),
      expectedSdcId, periodStartTime, testTime,
// Continuation of testReportRawStatsFromApiResponse: final argument of the
// reportAndVerifyRawStats call started on the previous line.
      netPipelineStarts);
}

/** Raw-stats upload of Transformer stats, whose extension classes are unknown to DataCollector. */
@Test
public void testReportTransformerRawStats() throws Exception {
  // simulates uploading transformer stats, which has extension types not known to DataCollector
  long testTime = System.currentTimeMillis();
  String rawStats = Files.asCharSource(
      new File(this.getClass().getResource(TRANSFORMER_315_STATS_JSON_FIXTURE).getPath()),
      StandardCharsets.UTF_8)
      .read();
  String reportedRawStats = reportAndVerifyRawStatsBasicCalls(rawStats, TRANSFORMER_315_STATS_JSON_SDC_ID);
  // Spot check that we didn't lose any info from the unknown extension class
  List<Map<String, Object>> reportedRawList = OBJECT_MAPPER.readValue(reportedRawStats,
      new TypeReference<List<Map<String, Object>>>(){});
  Assert.assertEquals(4, reportedRawList.size());
  Map<String, Object> interestingEntry = reportedRawList.get(1);
  Assert.assertTrue(interestingEntry.containsKey("extensions"));
  List<Map<String, Object>> extensions = (List<Map<String, Object>>) interestingEntry.get("extensions");
  Assert.assertEquals(1, extensions.size());
  Map<String, Object> extension = extensions.get(0);
  Assert.assertEquals("com.streamsets.datatransformer.usagestats.TransformerStatsBeanExtension", extension.get("class"));
  List<Map<String, Object>> pipelineRunReports = (List<Map<String, Object>>) extension.get("pipelineRunReports");
  Assert.assertEquals(1, pipelineRunReports.size());
  Assert.assertEquals("LOCAL", pipelineRunReports.get(0).get("clusterType"));
}

/** Uploading raw stats for an opted-out install must be rejected with IllegalArgumentException. */
@Test
public void testReportRawStatsOptedOutError() throws Exception {
  String rawStats = OBJECT_MAPPER.writeValueAsString(ImmutableMap.of(
      "opted", false,
      "active", false
  ));
  try {
    reportAndVerifyRawStatsBasicCalls(rawStats, null);
  } catch (IllegalArgumentException e) {
    Assert.assertTrue(e.getMessage().startsWith("No stats provided"));
  }
}

/** Builds a StatsBean carrying one TestModelStatsBeanExtension for the raw-stats tests. */
private StatsBean createStatsBeanForReportRawStatsTests(String sdcId, long startTime, long endTime, int netPipelineStarts) {
  TestStatsBean.TestModelStatsBeanExtension beanExtension = new TestStatsBean.TestModelStatsBeanExtension();
  beanExtension.setNetPipelineStarts(netPipelineStarts);
  StatsBean statsBean = new StatsBean();
  statsBean.setSdcId(sdcId);
  statsBean.setStartTime(startTime);
  statsBean.setEndTime(endTime);
  statsBean.setExtensions(ImmutableList.of(beanExtension));
  return statsBean;
}

/** Uploads rawStats, then asserts the single reported bean round-tripped with the expected field values. */
private void reportAndVerifyRawStats(String rawStats, String expectedSdcId, long expectedStartTime,
    long expectedEndTime, int expectedNetPipelineStarts) throws Exception {
  String rawOutput = reportAndVerifyRawStatsBasicCalls(rawStats, expectedSdcId);
  List<StatsBean> reportedBeans = OBJECT_MAPPER.readValue(rawOutput, new TypeReference<List<StatsBean>>(){});
  Assert.assertEquals(1, reportedBeans.size());
  StatsBean reportedBean = reportedBeans.get(0);
  Assert.assertEquals(expectedSdcId, reportedBean.getSdcId());
  Assert.assertEquals(expectedStartTime, reportedBean.getStartTime());
  Assert.assertEquals(expectedEndTime, reportedBean.getEndTime());
  Assert.assertEquals(1, reportedBean.getExtensions().size());
  Assert.assertTrue(reportedBean.getExtensions().get(0) instanceof TestStatsBean.TestModelStatsBeanExtension);
  TestStatsBean.TestModelStatsBeanExtension reportedBeanExtension =
      (TestStatsBean.TestModelStatsBeanExtension) reportedBean.getExtensions().get(0);
  Assert.assertEquals(expectedNetPipelineStarts, reportedBeanExtension.getNetPipelineStarts());
}

/**
 * Uploads rawStats via reportStats(null, rawStats), verifies the telemetry URL
 * calls (client id taken from the payload, not this install's sdcId), and returns
 * the raw body that was uploaded.
 */
private String reportAndVerifyRawStatsBasicCalls(String rawStats, String expectedSdcId) throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  String sdcId = "should-not-be-used";
  RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir);
  Configuration config = new Configuration();
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, null, true);
  task.reportStats(null, rawStats);
  ArgumentCaptor<Object> getUrlDataCaptor = ArgumentCaptor.forClass(Object.class);
  Mockito.verify(task).postToGetTelemetryUrl(Mockito.any(), getUrlDataCaptor.capture());
  Map<String, String> getUrlData = (Map<String, String>) getUrlDataCaptor.getValue();
  Assert.assertEquals(
      ImmutableMap.of(
          AbstractStatsCollectorTask.GET_TELEMETRY_URL_ARG_CLIENT_ID, expectedSdcId,
          AbstractStatsCollectorTask.GET_TELEMETRY_URL_ARG_EXTENSION,
          AbstractStatsCollectorTask.GET_TELEMETRY_URL_ARG_EXTENSION_JSON),
      getUrlData);
  ByteArrayOutputStream outputStream = (ByteArrayOutputStream) uploadConnectionHolder[0].getOutputStream();
  return outputStream.toString(StandardCharsets.UTF_8.name());
}

/**
 * Walks the runnable through roll/report/save combinations by stubbing the roll
 * and report periods on a spied task. Continues on the following lines.
 */
@Test
public void testRunnableMultipleRollsAndReport() {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("abc");
  Mockito.when(buildInfo.getBuiltDate()).thenReturn(new Date().toString());
  Mockito.when(buildInfo.getBuiltBy()).thenReturn("System");
  String sdcId = "0123456789-0123456789-0123456789";
  RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  Mockito.when(task.isActive()).thenReturn(true);
  SysInfo sysInfo = task.getSysInfo();
  StatsInfo statsInfo = Mockito.spy(new StatsInfo(task.provideStatsExtensions()));
  Mockito.when(task.getStatsInfo()).thenReturn(statsInfo);
  statsInfo.setCurrentSystemInfo(buildInfo, runtimeInfo, sysInfo, task.getActivation());
  long rollFrequencyMillis = task.getRollFrequencyMillis();
  Assert.assertEquals(TimeUnit.HOURS.toMillis(AbstractStatsCollectorTask.ROLL_PERIOD_CONFIG_MAX), rollFrequencyMillis);
  int expectedRolls = 0;
  int expectedReports = 0;
  int expectedSaves = 0;
  // first run, do initial roll and report with force=true
  task.initTask();
  expectedRolls++;
// Continuation of testRunnableMultipleRollsAndReport: initTask forced one roll,
// one report and one save; now step through the remaining period combinations.
  expectedReports++;
  expectedSaves++;
  Mockito.verify(statsInfo, Mockito.times(expectedRolls)).setActiveStats(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyListOf(StatsBean.class));
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
  // run again, should just save now that we don't force
  Runnable runnable = task.getRunnable(false);
  runnable.run();
  expectedSaves++;
  Mockito.verify(statsInfo, Mockito.times(expectedRolls)).setActiveStats(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyListOf(StatsBean.class));
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
  // run after roll period but before report period
  Mockito.when(task.getRollFrequencyMillis()).thenReturn(0L);
  runnable.run();
  expectedRolls++;
  expectedSaves++;
  Mockito.verify(statsInfo, Mockito.times(expectedRolls)).setActiveStats(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyListOf(StatsBean.class));
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
  // run after report period
  Mockito.when(task.getReportPeriodSeconds()).thenReturn(0L);
  runnable.run();
  expectedRolls++;
  expectedReports++;
  expectedSaves++;
  Mockito.verify(statsInfo, Mockito.times(expectedRolls)).setActiveStats(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyListOf(StatsBean.class));
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
  // reset periods and make sure we only save
  Mockito.when(task.getRollFrequencyMillis()).thenReturn(
      TimeUnit.HOURS.toMillis(AbstractStatsCollectorTask.ROLL_PERIOD_CONFIG_MAX));
  Mockito.when(task.getReportPeriodSeconds()).thenReturn(
      (long) AbstractStatsCollectorTask.TELEMETRY_REPORT_PERIOD_SECONDS_DEFAULT);
  runnable.run();
  expectedSaves++;
  Mockito.verify(statsInfo, Mockito.times(expectedRolls)).setActiveStats(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyListOf(StatsBean.class));
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
  // stop task and make sure we do one full set of activity
  // we didn't properly start statsInfo, so fake the stop call else it will throw
  Mockito.doNothing().when(statsInfo).stopSystem();
  task.stopTask();
  expectedRolls++;
  expectedReports++;
  expectedSaves++;
  Mockito.verify(statsInfo, Mockito.times(expectedRolls)).setActiveStats(Mockito.any());
  Mockito.verify(task, Mockito.times(expectedReports)).reportStats(Mockito.anyListOf(StatsBean.class));
  Mockito.verify(task, Mockito.times(expectedSaves)).saveStatsInternal();
  Mockito.verify(statsInfo).stopSystem();
}

/** A "-SNAPSHOT" version must skip the telemetry network calls (reportStats still returns true). */
@Test
public void testSkipSnapshotTelemetry() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1-SNAPSHOT");
  String sdcId = "0123456789-0123456789-0123456789";
  RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir);
  Configuration config = new Configuration();
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  List<StatsBean> stats = ImmutableList.of(new StatsBean());
  stats.get(0).setActivePipelines(5);
  Assert.assertTrue(task.reportStats(stats));
  // no URL fetch and no upload connection for a SNAPSHOT build
  Mockito.verify(task, Mockito.never()).postToGetTelemetryUrl(
      Mockito.any(),
      Mockito.any());
  Mockito.verify(task, Mockito.never()).getHttpURLConnection(Mockito.any());
}

/** TELEMETRY_FOR_SNAPSHOT_BUILDS=true re-enables the telemetry calls for SNAPSHOT builds. */
@Test
public void testEnableSnapshotTelemetry() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1-SNAPSHOT");
  String sdcId = "0123456789-0123456789-0123456789";
  RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir);
  Configuration config = new Configuration();
  config.set(AbstractStatsCollectorTask.TELEMETRY_FOR_SNAPSHOT_BUILDS, true);
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  List<StatsBean> stats = ImmutableList.of(new StatsBean());
  Assert.assertTrue(task.reportStats(stats));
  Mockito.verify(task).postToGetTelemetryUrl(
      Mockito.any(),
      Mockito.any());
  Mockito.verify(task).getHttpURLConnection(Mockito.any());
}

/**
 * The configured report period is honored while the scheduler ticks at a fixed
 * 60s rate; per the inline note below, values above 24h are capped.
 * Continues past the end of this chunk.
 */
@Test
public void testReportFrequencySecondConfiguration() throws Exception {
  File testDir = createTestDir();
  BuildInfo buildInfo = Mockito.mock(BuildInfo.class);
  Mockito.when(buildInfo.getVersion()).thenReturn("v1");
  Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("abc");
  Mockito.when(buildInfo.getBuiltDate()).thenReturn(new Date().toString());
  Mockito.when(buildInfo.getBuiltBy()).thenReturn("System");
  String sdcId = "0123456789-0123456789-0123456789";
  RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir);
  Configuration config = new Configuration();
  config.set(AbstractStatsCollectorTask.TELEMETRY_REPORT_PERIOD_SECONDS, 120);
  SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  task.initTask();
  Mockito.verify(scheduler).scheduleAtFixedRate(
      Matchers.any(Runnable.class),
      Matchers.eq(60L),
      Matchers.eq(60L),
      Mockito.eq(TimeUnit.SECONDS)
  );
  Assert.assertEquals(120, task.getReportPeriodSeconds());
  scheduler = Mockito.mock(SafeScheduledExecutorService.class);
  //Set it to 48 hours - max at 24 hours
  config.set(AbstractStatsCollectorTask.TELEMETRY_REPORT_PERIOD_SECONDS, TimeUnit.DAYS.toSeconds(2));
  task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true);
  task.initTask();
  Mockito.verify(scheduler).scheduleAtFixedRate(
      Matchers.any(Runnable.class),
      Matchers.eq(60L),
Matchers.eq(60L), Mockito.eq(TimeUnit.SECONDS) ); Assert.assertEquals( Long.valueOf(AbstractStatsCollectorTask.TELEMETRY_REPORT_PERIOD_SECONDS_DEFAULT).longValue(), task.getReportPeriodSeconds()); } @Test public void testRollFrequencyTestMinutesConfiguration() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("abc"); Mockito.when(buildInfo.getBuiltDate()).thenReturn(new Date().toString()); Mockito.when(buildInfo.getBuiltBy()).thenReturn("System"); String sdcId = "0123456789-0123456789-0123456789"; RuntimeInfo runtimeInfo = mockRuntimeInfo(sdcId, testDir); Configuration config = new Configuration(); config.set(AbstractStatsCollectorTask.TEST_ROLL_PERIOD_CONFIG, 20); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true); task.initTask(); Assert.assertEquals(TimeUnit.MINUTES.toMillis(20), task.getRollFrequencyMillis()); //120 mins - will max at 60 mins config.set(AbstractStatsCollectorTask.TEST_ROLL_PERIOD_CONFIG, 120); scheduler = Mockito.mock(SafeScheduledExecutorService.class); task = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, true); task.initTask(); Assert.assertEquals(TimeUnit.HOURS.toMillis(AbstractStatsCollectorTask.ROLL_PERIOD_CONFIG_MAX), task.getRollFrequencyMillis()); } private static final Logger LOG = LoggerFactory.getLogger(TestStatsCollectorTask.class); public static final class UsageServlet extends HttpServlet { @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { boolean ok; String str = req.getContentType(); if (str == null) { LOG.error("Missing content-type header"); ok = false; } else { if (str.toLowerCase().startsWith("application/json")) { str = 
req.getHeader("x-requested-by"); if (str == null) { LOG.error("Missing x-requested-by header"); ok = false; } else { try { UUID.fromString(str); try { List<StatsBean> list = ObjectMapperFactory.get().readValue( req.getReader(), new TypeReference<List<StatsBean>>() { } ); if (list == null) { LOG.error("Missing payload"); ok = false; } else { if (list.isEmpty()) { LOG.error("No stats in list"); ok = false; } else { ok = true; } } } catch (IOException ex) { LOG.error("Invalid payload: " + ex); ok = false; } } catch (Exception ex) { LOG.error("Invalid x-requested-by header, should be SDC ID (a UUID): {}", ex, ex); ok = false; } } } else { LOG.error("Invalid content-type: {}", str); ok = false; } } resp.setStatus((ok) ? HttpServletResponse.SC_OK : HttpServletResponse.SC_BAD_REQUEST); } } @Test public void testHttp() throws Exception { Server server = new Server(0); ServletContextHandler context = new ServletContextHandler(); Servlet servlet = new UsageServlet(); context.addServlet(new ServletHolder(servlet), AbstractStatsCollectorTask.USAGE_PATH_DEFAULT); context.setContextPath("/"); server.setHandler(context); try { server.start(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); RuntimeInfo runtimeInfo = mockRuntimeInfo(UUID.randomUUID().toString(), null); Configuration config = new Configuration(); AbstractStatsCollectorTask collector = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, null, true); List<StatsBean> list = Arrays.asList(new StatsBean()); Assert.assertTrue(collector.reportStats(list)); } finally { server.stop(); } } private AbstractStatsCollectorTask mockStatsCollectorTask( BuildInfo buildInfo, RuntimeInfo runtimeInfo, Activation activation, Configuration config, SafeScheduledExecutorService executorService, boolean postTelemetrySuccess ) { SysInfo sysInfo = Mockito.mock(SysInfo.class); Mockito.when(sysInfo.toMap()).thenReturn(DEFAULT_SYS_INFO_MAP); AbstractStatsCollectorTask spy = 
Mockito.spy(new TestModelStatsCollectorTask( buildInfo, runtimeInfo, config, executorService, sysInfo, activation)); // to test real interactions, comment out starting from here, change StatsCollectorTask.TELEMETRY_USE_TEST_BUCKET_DEFAULT to true, and run testReportStats // This will put a real file into the S3 bucket customer-support-bundles-test that you can verify. try { RestClient.Response getUrlResponse = Mockito.mock(RestClient.Response.class); Mockito.doReturn(getUrlResponse).when(spy).postToGetTelemetryUrl(Mockito.any(), Mockito.any()); Mockito.when(getUrlResponse.successful()).thenReturn(postTelemetrySuccess); Mockito.when(getUrlResponse.getData(Mockito.any(TypeReference.class))).thenReturn(ImmutableMap.of( AbstractStatsCollectorTask.TELEMETRY_URL_KEY, POST_TELEMETRY_URL )); Mockito.doAnswer(new Answer() { @Override public HttpURLConnection answer(InvocationOnMock invocation) throws Throwable { uploadConnectionHolder[0] = Mockito.spy((HttpURLConnection) invocation.callRealMethod()); // avoid real external calls ByteArrayOutputStream output = new ByteArrayOutputStream(); Mockito.doReturn(output).when(uploadConnectionHolder[0]).getOutputStream(); Mockito.doReturn(200).when(uploadConnectionHolder[0]).getResponseCode(); return uploadConnectionHolder[0]; } }).when(spy).getHttpURLConnection(Mockito.any()); } catch (IOException e) { throw new RuntimeException(e); } // end section to comment out to perform real interactions return spy; } @Test public void testRunnableRollingNotPublishing() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = mockStatsCollectorTask(buildInfo, 
runtimeInfo, null, config, scheduler, false); try (OutputStream os = new FileOutputStream(task.getOptFile())) { ObjectMapperFactory.get().writeValue(os, ImmutableMap.of(task.STATS_ACTIVE_KEY, true)); } try (OutputStream os = new FileOutputStream(task.getStatsFile())) { StatsInfo statsInfo = new StatsInfo(task.provideStatsExtensions()); statsInfo.getActiveStats().setSdcId("id"); statsInfo.getActiveStats().setDataCollectorVersion("v1"); statsInfo.getActiveStats().setBuildRepoSha("sha1"); statsInfo.getActiveStats().setExtraInfo(ImmutableMap.of("a", "A")); ObjectMapperFactory.get().writeValue(os, statsInfo); } task.init(); Assert.assertTrue(task.isOpted()); Assert.assertTrue(task.isActive()); //verifying we rolled the read stats Assert.assertEquals("v1", task.getStatsInfo().getActiveStats().getDataCollectorVersion()); try (InputStream is = new FileInputStream(task.getStatsFile())) { StatsInfo statsInfo = ObjectMapperFactory.get().readValue(is, StatsInfo.class); Assert.assertEquals(DEFAULT_SYS_INFO_MAP, statsInfo.getExtraInfo(task.getSysInfo())); Assert.assertEquals(1, statsInfo.getCollectedStats().size()); Assert.assertEquals("id", statsInfo.getCollectedStats().get(0).getSdcId()); Assert.assertEquals("v1", statsInfo.getCollectedStats().get(0).getDataCollectorVersion()); Assert.assertEquals("sha1", statsInfo.getCollectedStats().get(0).getBuildRepoSha()); Assert.assertEquals(ImmutableMap.of("a", "A"), statsInfo.getCollectedStats().get(0).getExtraInfo()); } task.stop(); } @Test public void testSetActiveRunnableCalled() throws Exception { File testDir = createTestDir(); BuildInfo buildInfo = Mockito.mock(BuildInfo.class); Mockito.when(buildInfo.getVersion()).thenReturn("v1"); Mockito.when(buildInfo.getBuiltRepoSha()).thenReturn("sha1"); RuntimeInfo runtimeInfo = mockRuntimeInfo("id", testDir); Configuration config = new Configuration(); SafeScheduledExecutorService scheduler = Mockito.mock(SafeScheduledExecutorService.class); AbstractStatsCollectorTask task = 
mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, scheduler, false); Map<Boolean, AtomicLong> runnableForceParamToTimes = new HashMap<>(ImmutableMap.of(Boolean.TRUE, new AtomicLong(0), Boolean.FALSE, new AtomicLong(0))); Mockito.when(task.getRunnable(Matchers.anyBoolean())).thenAnswer((Answer<Runnable>) invocation -> { final Runnable r = (Runnable) invocation.callRealMethod(); return (Runnable) () -> { runnableForceParamToTimes.get(invocation.getArgumentAt(0, Boolean.class)).incrementAndGet(); r.run(); }; }); task.initTask(); Assert.assertEquals(1, runnableForceParamToTimes.get(Boolean.TRUE).get()); //Set InActive should not trigger roll and report task.setActive(false); // 1 from start Assert.assertEquals(1, runnableForceParamToTimes.get(Boolean.TRUE).get()); Assert.assertEquals(0, runnableForceParamToTimes.get(Boolean.FALSE).get()); //Set Active should trigger roll and report task.setActive(true); Assert.assertEquals(2, runnableForceParamToTimes.get(Boolean.TRUE).get()); Assert.assertEquals(0, runnableForceParamToTimes.get(Boolean.FALSE).get()); // try it again, but should not roll and report again task.setActive(true); Assert.assertEquals(2, runnableForceParamToTimes.get(Boolean.TRUE).get()); Assert.assertEquals(0, runnableForceParamToTimes.get(Boolean.FALSE).get()); //Stop should trigger roll and report task.stopTask(); Assert.assertEquals(3, runnableForceParamToTimes.get(Boolean.TRUE).get()); Assert.assertEquals(0, runnableForceParamToTimes.get(Boolean.FALSE).get()); } private RuntimeInfo mockRuntimeInfo(String sdcId, File dataDir) { RuntimeInfo ret = Mockito.mock(RuntimeInfo.class); Mockito.when(ret.getId()).thenReturn(sdcId); Mockito.when(ret.getProductName()).thenReturn(RuntimeInfo.SDC_PRODUCT); if (dataDir != null) { Mockito.when(ret.getDataDir()).thenReturn(dataDir.getAbsolutePath()); } Mockito.when(ret.getLibexecDir()).thenReturn( System.getenv("PWD").replace("container/src/main/.*","") + "/dist/src/main/libexec"); return ret; } private 
AbstractStatsCollectorTask mockStatsCollectorTaskAndRunnable( BuildInfo buildInfo, RuntimeInfo runtimeInfo, Configuration config, SafeScheduledExecutorService executorService) { AbstractStatsCollectorTask spy = mockStatsCollectorTask(buildInfo, runtimeInfo, null, config, executorService, true); runnable = Mockito.mock(Runnable.class); Mockito.doReturn(runnable).when(spy).getRunnable(Mockito.anyBoolean()); return spy; } static class TestModelStatsCollectorTask extends AbstractStatsCollectorTask { public TestModelStatsCollectorTask( BuildInfo buildInfo, RuntimeInfo runtimeInfo, Configuration config, SafeScheduledExecutorService executorService, SysInfo sysInfo, Activation activation) { super(buildInfo, runtimeInfo, config, executorService, sysInfo, activation); } @Override protected List<AbstractStatsExtension> provideStatsExtensions() { return ImmutableList.of( new TestModelStatsExtension() ); } } }
/* * Copyright 2016 The gRPC Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.grpc.examples.routeguide; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import com.google.protobuf.Message; import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.examples.routeguide.RouteGuideClient.TestHelper; import io.grpc.examples.routeguide.RouteGuideGrpc.RouteGuideImplBase; import io.grpc.inprocess.InProcessChannelBuilder; import io.grpc.inprocess.InProcessServerBuilder; import io.grpc.stub.StreamObserver; import io.grpc.testing.GrpcCleanupRule; import io.grpc.util.MutableHandlerRegistry; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.ArgumentCaptor; /** * Unit tests for {@link RouteGuideClient}. * For demonstrating how to write gRPC unit test only. * Not intended to provide a high code coverage or to test every major usecase. 
* * directExecutor() makes it easier to have deterministic tests. * However, if your implementation uses another thread and uses streaming it is better to use * the default executor, to avoid hitting bug #3084. * * <p>For basic unit test examples see {@link io.grpc.examples.helloworld.HelloWorldClientTest} and * {@link io.grpc.examples.helloworld.HelloWorldServerTest}. */ @RunWith(JUnit4.class) public class RouteGuideClientTest { /** * This rule manages automatic graceful shutdown for the registered server at the end of test. */ @Rule public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry(); private final TestHelper testHelper = mock(TestHelper.class); private final Random noRandomness = new Random() { int index; boolean isForSleep; /** * Returns a number deterministically. If the random number is for sleep time, then return * -500 so that {@code Thread.sleep(random.nextInt(1000) + 500)} sleeps 0 ms. Otherwise, it * is for list index, then return incrementally (and cyclically). */ @Override public int nextInt(int bound) { int retVal = isForSleep ? -500 : (index++ % bound); isForSleep = ! isForSleep; return retVal; } }; private RouteGuideClient client; @Before public void setUp() throws Exception { // Generate a unique in-process server name. String serverName = InProcessServerBuilder.generateName(); // Use a mutable service registry for later registering the service impl for each test case. grpcCleanup.register(InProcessServerBuilder.forName(serverName) .fallbackHandlerRegistry(serviceRegistry).directExecutor().build().start()); client = new RouteGuideClient(InProcessChannelBuilder.forName(serverName).directExecutor()); client.setTestHelper(testHelper); } @After public void tearDown() throws Exception { client.shutdown(); } /** * Example for testing blocking unary call. 
*/ @Test public void getFeature() { Point requestPoint = Point.newBuilder().setLatitude(-1).setLongitude(-1).build(); Point responsePoint = Point.newBuilder().setLatitude(-123).setLongitude(-123).build(); final AtomicReference<Point> pointDelivered = new AtomicReference<Point>(); final Feature responseFeature = Feature.newBuilder().setName("dummyFeature").setLocation(responsePoint).build(); // implement the fake service RouteGuideImplBase getFeatureImpl = new RouteGuideImplBase() { @Override public void getFeature(Point point, StreamObserver<Feature> responseObserver) { pointDelivered.set(point); responseObserver.onNext(responseFeature); responseObserver.onCompleted(); } }; serviceRegistry.addService(getFeatureImpl); client.getFeature(-1, -1); assertEquals(requestPoint, pointDelivered.get()); verify(testHelper).onMessage(responseFeature); verify(testHelper, never()).onRpcError(any(Throwable.class)); } /** * Example for testing blocking unary call. */ @Test public void getFeature_error() { Point requestPoint = Point.newBuilder().setLatitude(-1).setLongitude(-1).build(); final AtomicReference<Point> pointDelivered = new AtomicReference<Point>(); final StatusRuntimeException fakeError = new StatusRuntimeException(Status.DATA_LOSS); // implement the fake service RouteGuideImplBase getFeatureImpl = new RouteGuideImplBase() { @Override public void getFeature(Point point, StreamObserver<Feature> responseObserver) { pointDelivered.set(point); responseObserver.onError(fakeError); } }; serviceRegistry.addService(getFeatureImpl); client.getFeature(-1, -1); assertEquals(requestPoint, pointDelivered.get()); ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class); verify(testHelper).onRpcError(errorCaptor.capture()); assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue())); } /** * Example for testing blocking server-streaming. 
*/ @Test public void listFeatures() { final Feature responseFeature1 = Feature.newBuilder().setName("feature 1").build(); final Feature responseFeature2 = Feature.newBuilder().setName("feature 2").build(); final AtomicReference<Rectangle> rectangleDelivered = new AtomicReference<Rectangle>(); // implement the fake service RouteGuideImplBase listFeaturesImpl = new RouteGuideImplBase() { @Override public void listFeatures(Rectangle rectangle, StreamObserver<Feature> responseObserver) { rectangleDelivered.set(rectangle); // send two response messages responseObserver.onNext(responseFeature1); responseObserver.onNext(responseFeature2); // complete the response responseObserver.onCompleted(); } }; serviceRegistry.addService(listFeaturesImpl); client.listFeatures(1, 2, 3, 4); assertEquals(Rectangle.newBuilder() .setLo(Point.newBuilder().setLatitude(1).setLongitude(2).build()) .setHi(Point.newBuilder().setLatitude(3).setLongitude(4).build()) .build(), rectangleDelivered.get()); verify(testHelper).onMessage(responseFeature1); verify(testHelper).onMessage(responseFeature2); verify(testHelper, never()).onRpcError(any(Throwable.class)); } /** * Example for testing blocking server-streaming. 
*/ @Test public void listFeatures_error() { final Feature responseFeature1 = Feature.newBuilder().setName("feature 1").build(); final AtomicReference<Rectangle> rectangleDelivered = new AtomicReference<Rectangle>(); final StatusRuntimeException fakeError = new StatusRuntimeException(Status.INVALID_ARGUMENT); // implement the fake service RouteGuideImplBase listFeaturesImpl = new RouteGuideImplBase() { @Override public void listFeatures(Rectangle rectangle, StreamObserver<Feature> responseObserver) { rectangleDelivered.set(rectangle); // send one response message responseObserver.onNext(responseFeature1); // let the rpc fail responseObserver.onError(fakeError); } }; serviceRegistry.addService(listFeaturesImpl); client.listFeatures(1, 2, 3, 4); assertEquals(Rectangle.newBuilder() .setLo(Point.newBuilder().setLatitude(1).setLongitude(2).build()) .setHi(Point.newBuilder().setLatitude(3).setLongitude(4).build()) .build(), rectangleDelivered.get()); ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class); verify(testHelper).onMessage(responseFeature1); verify(testHelper).onRpcError(errorCaptor.capture()); assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue())); } /** * Example for testing async client-streaming. 
*/ @Test public void recordRoute() throws Exception { client.setRandom(noRandomness); Point point1 = Point.newBuilder().setLatitude(1).setLongitude(1).build(); Point point2 = Point.newBuilder().setLatitude(2).setLongitude(2).build(); Point point3 = Point.newBuilder().setLatitude(3).setLongitude(3).build(); Feature requestFeature1 = Feature.newBuilder().setLocation(point1).build(); Feature requestFeature2 = Feature.newBuilder().setLocation(point2).build(); Feature requestFeature3 = Feature.newBuilder().setLocation(point3).build(); final List<Feature> features = Arrays.asList( requestFeature1, requestFeature2, requestFeature3); final List<Point> pointsDelivered = new ArrayList<>(); final RouteSummary fakeResponse = RouteSummary .newBuilder() .setPointCount(7) .setFeatureCount(8) .setDistance(9) .setElapsedTime(10) .build(); // implement the fake service RouteGuideImplBase recordRouteImpl = new RouteGuideImplBase() { @Override public StreamObserver<Point> recordRoute( final StreamObserver<RouteSummary> responseObserver) { StreamObserver<Point> requestObserver = new StreamObserver<Point>() { @Override public void onNext(Point value) { pointsDelivered.add(value); } @Override public void onError(Throwable t) { } @Override public void onCompleted() { responseObserver.onNext(fakeResponse); responseObserver.onCompleted(); } }; return requestObserver; } }; serviceRegistry.addService(recordRouteImpl); // send requestFeature1, requestFeature2, requestFeature3, and then requestFeature1 again client.recordRoute(features, 4); assertEquals( Arrays.asList( requestFeature1.getLocation(), requestFeature2.getLocation(), requestFeature3.getLocation(), requestFeature1.getLocation()), pointsDelivered); verify(testHelper).onMessage(fakeResponse); verify(testHelper, never()).onRpcError(any(Throwable.class)); } /** * Example for testing async client-streaming. 
*/ @Test public void recordRoute_serverError() throws Exception { client.setRandom(noRandomness); Point point1 = Point.newBuilder().setLatitude(1).setLongitude(1).build(); final Feature requestFeature1 = Feature.newBuilder().setLocation(point1).build(); final List<Feature> features = Arrays.asList(requestFeature1); final StatusRuntimeException fakeError = new StatusRuntimeException(Status.INVALID_ARGUMENT); // implement the fake service RouteGuideImplBase recordRouteImpl = new RouteGuideImplBase() { @Override public StreamObserver<Point> recordRoute(StreamObserver<RouteSummary> responseObserver) { // send an error immediately responseObserver.onError(fakeError); StreamObserver<Point> requestObserver = new StreamObserver<Point>() { @Override public void onNext(Point value) { } @Override public void onError(Throwable t) { } @Override public void onCompleted() { } }; return requestObserver; } }; serviceRegistry.addService(recordRouteImpl); client.recordRoute(features, 4); ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class); verify(testHelper).onRpcError(errorCaptor.capture()); assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue())); } /** * Example for testing bi-directional call. 
 */
  @Test
  public void routeChat_simpleResponse() throws Exception {
    RouteNote fakeResponse1 = RouteNote.newBuilder().setMessage("dummy msg1").build();
    RouteNote fakeResponse2 = RouteNote.newBuilder().setMessage("dummy msg2").build();
    // Collected on the server side so we can assert exactly what the client sent.
    final List<String> messagesDelivered = new ArrayList<>();
    final List<Point> locationsDelivered = new ArrayList<>();
    // Captures the server's response observer so the test can push responses later.
    final AtomicReference<StreamObserver<RouteNote>> responseObserverRef =
        new AtomicReference<StreamObserver<RouteNote>>();
    // Released once the client has finished sending all request notes.
    final CountDownLatch allRequestsDelivered = new CountDownLatch(1);

    // implement the fake service
    RouteGuideImplBase routeChatImpl = new RouteGuideImplBase() {
      @Override
      public StreamObserver<RouteNote> routeChat(StreamObserver<RouteNote> responseObserver) {
        responseObserverRef.set(responseObserver);
        StreamObserver<RouteNote> requestObserver = new StreamObserver<RouteNote>() {
          @Override
          public void onNext(RouteNote value) {
            messagesDelivered.add(value.getMessage());
            locationsDelivered.add(value.getLocation());
          }

          @Override
          public void onError(Throwable t) {
          }

          @Override
          public void onCompleted() {
            allRequestsDelivered.countDown();
          }
        };
        return requestObserver;
      }
    };
    serviceRegistry.addService(routeChatImpl);

    // start routeChat
    CountDownLatch latch = client.routeChat();

    // request message sent and delivered for four times
    assertTrue(allRequestsDelivered.await(1, TimeUnit.SECONDS));
    assertEquals(
        Arrays.asList("First message", "Second message", "Third message", "Fourth message"),
        messagesDelivered);
    assertEquals(
        Arrays.asList(
            Point.newBuilder().setLatitude(0).setLongitude(0).build(),
            Point.newBuilder().setLatitude(0).setLongitude(1).build(),
            Point.newBuilder().setLatitude(1).setLongitude(0).build(),
            Point.newBuilder().setLatitude(1).setLongitude(1).build()
        ),
        locationsDelivered);

    // Let the server send out two simple response messages
    // and verify that the client receives them.
    // Allow some timeout for verify() if not using directExecutor
    responseObserverRef.get().onNext(fakeResponse1);
    verify(testHelper).onMessage(fakeResponse1);
    responseObserverRef.get().onNext(fakeResponse2);
    verify(testHelper).onMessage(fakeResponse2);

    // let server complete.
    responseObserverRef.get().onCompleted();

    assertTrue(latch.await(1, TimeUnit.SECONDS));
    verify(testHelper, never()).onRpcError(any(Throwable.class));
  }

  /**
   * Example for testing bi-directional call.
   */
  @Test
  public void routeChat_echoResponse() throws Exception {
    final List<RouteNote> notesDelivered = new ArrayList<>();

    // implement the fake service: echoes every request note straight back to the client
    RouteGuideImplBase routeChatImpl = new RouteGuideImplBase() {
      @Override
      public StreamObserver<RouteNote> routeChat(
          final StreamObserver<RouteNote> responseObserver) {
        StreamObserver<RouteNote> requestObserver = new StreamObserver<RouteNote>() {
          @Override
          public void onNext(RouteNote value) {
            notesDelivered.add(value);
            responseObserver.onNext(value);
          }

          @Override
          public void onError(Throwable t) {
            responseObserver.onError(t);
          }

          @Override
          public void onCompleted() {
            responseObserver.onCompleted();
          }
        };
        return requestObserver;
      }
    };
    serviceRegistry.addService(routeChatImpl);

    client.routeChat().await(1, TimeUnit.SECONDS);

    // Each of the four client notes must have been echoed back verbatim, in order.
    String[] messages =
        {"First message", "Second message", "Third message", "Fourth message"};
    for (int i = 0; i < 4; i++) {
      verify(testHelper).onMessage(notesDelivered.get(i));
      assertEquals(messages[i], notesDelivered.get(i).getMessage());
    }

    verify(testHelper, never()).onRpcError(any(Throwable.class));
  }

  /**
   * Example for testing bi-directional call.
*/ @Test public void routeChat_errorResponse() throws Exception { final List<RouteNote> notesDelivered = new ArrayList<>(); final StatusRuntimeException fakeError = new StatusRuntimeException(Status.PERMISSION_DENIED); // implement the fake service RouteGuideImplBase routeChatImpl = new RouteGuideImplBase() { @Override public StreamObserver<RouteNote> routeChat( final StreamObserver<RouteNote> responseObserver) { StreamObserver<RouteNote> requestObserver = new StreamObserver<RouteNote>() { @Override public void onNext(RouteNote value) { notesDelivered.add(value); responseObserver.onError(fakeError); } @Override public void onError(Throwable t) { } @Override public void onCompleted() { responseObserver.onCompleted(); } }; return requestObserver; } }; serviceRegistry.addService(routeChatImpl); client.routeChat().await(1, TimeUnit.SECONDS); assertEquals("First message", notesDelivered.get(0).getMessage()); verify(testHelper, never()).onMessage(any(Message.class)); ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class); verify(testHelper).onRpcError(errorCaptor.capture()); assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue())); } }
/*
 * Copyright 2010 The Miyamoto Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.nnsoft.commons.miyamoto;

import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Builds a dynamic-proxy implementation of an annotation type whose member values can be
 * set programmatically. The proxy honors the {@link Annotation} contract for
 * {@code equals}, {@code hashCode} and {@code toString}.
 *
 * @param <A> The annotation type has to be proxed.
 * @version $Id$
 */
public final class AnnotationProxyBuilder<A extends Annotation> implements Annotation, InvocationHandler {

    /**
     * The multiplicator required in the hash code calculation.
     * See the {@link Annotation#hashCode()} contract: each member contributes
     * {@code (127 * name.hashCode()) ^ valueHashCode}.
     */
    private static final int MEMBER_NAME_MULTIPLICATOR = 127;

    /**
     * Creates a new annotation proxy.
     *
     * @param <A> the annotation type has to be proxed.
     * @param annotationType the annotation type class has to be proxed.
     * @return a new annotation proxy.
     * @throws IllegalArgumentException if {@code annotationType} is {@code null}.
     */
    public static <A extends Annotation> AnnotationProxyBuilder<A> newBuilder(Class<A> annotationType) {
        if (annotationType == null) {
            throw new IllegalArgumentException("Parameter 'annotationType' must be not null");
        }
        return new AnnotationProxyBuilder<A>(annotationType);
    }

    /**
     * Retrieves the annotation proxy, if any, given the annotation.
     *
     * @param obj the annotation.
     * @return the backing {@link AnnotationProxyBuilder}, or {@code null} if {@code obj}
     *         is not one of our proxies.
     */
    private static AnnotationProxyBuilder<?> getAnnotationProxy(Object obj) {
        if (Proxy.isProxyClass(obj.getClass())) {
            InvocationHandler handler = Proxy.getInvocationHandler(obj);
            if (handler instanceof AnnotationProxyBuilder) {
                return (AnnotationProxyBuilder<?>) handler;
            }
        }
        return null;
    }

    /**
     * Access to the declared methods of an annotation, given the type.
     *
     * Runs in a privileged block because {@code setAccessible} may otherwise be
     * rejected by a security manager.
     *
     * @param <A> the annotation type.
     * @param annotationType the annotation type class.
     * @return the declared methods of an annotation, given the type.
     */
    private static <A extends Annotation> Method[] getDeclaredMethods(final Class<A> annotationType) {
        return AccessController.doPrivileged(
                new PrivilegedAction<Method[]>() {
                    public Method[] run() {
                        final Method[] declaredMethods = annotationType.getDeclaredMethods();
                        AccessibleObject.setAccessible(declaredMethods, true);
                        return declaredMethods;
                    }
                });
    }

    /**
     * The annotation type class has to be proxed.
     */
    private final Class<A> annotationType;

    /**
     * The annotation properties registry, keyed by member name; insertion order is the
     * declaration order, which keeps {@link #toString()} stable.
     */
    private final Map<String, AnnotationProperty> properties = new LinkedHashMap<String, AnnotationProperty>();

    /**
     * The proxed annotation.
     */
    private final A proxedAnnotation;

    /**
     * Build a new proxy annotation given the annotation type.
     *
     * Every member starts out with its declared default value (possibly {@code null}
     * when the member has no default).
     *
     * @param annotationType the annotation type class has to be proxed.
     */
    private AnnotationProxyBuilder(Class<A> annotationType) {
        this.annotationType = annotationType;

        for (Method method : getDeclaredMethods(annotationType)) {
            // Declare per-iteration locals instead of hoisting them out of the loop.
            String propertyName = method.getName();
            Class<?> returnType = method.getReturnType();
            Object defaultValue = method.getDefaultValue();

            AnnotationProperty property = new AnnotationProperty(propertyName, returnType);
            property.setValue(defaultValue);
            this.properties.put(propertyName, property);
        }

        this.proxedAnnotation = annotationType.cast(Proxy.newProxyInstance(annotationType.getClassLoader(),
                new Class<?>[]{ annotationType },
                this));
    }

    /**
     * Set a property value.
     *
     * @param name the property name.
     * @param value the property value.
     * @throws IllegalArgumentException if {@code name} or {@code value} is {@code null},
     *         or if the annotation has no member with that name.
     */
    public void setProperty(String name, Object value) {
        if (name == null) {
            throw new IllegalArgumentException("Parameter 'name' must be not null");
        }
        if (value == null) {
            throw new IllegalArgumentException("Parameter 'value' must be not null");
        }
        if (!this.properties.containsKey(name)) {
            throw new IllegalArgumentException("Annotation '"
                    + this.annotationType.getName()
                    + "' does not contain a property named '"
                    + name
                    + "'");
        }

        this.properties.get(name).setValue(value);
    }

    /**
     * Returns the property value, given the name, if present.
     *
     * @param name the property name.
     * @return the property value, given the name, if present.
     * @throws IllegalArgumentException if {@code name} is {@code null} or the annotation
     *         has no member with that name.
     */
    public Object getProperty(String name) {
        if (name == null) {
            throw new IllegalArgumentException("Parameter 'name' must be not null");
        }
        // Consistent with setProperty: fail fast with a descriptive message rather than
        // letting an unknown name surface as a NullPointerException.
        AnnotationProperty property = this.properties.get(name);
        if (property == null) {
            throw new IllegalArgumentException("Annotation '"
                    + this.annotationType.getName()
                    + "' does not contain a property named '"
                    + name
                    + "'");
        }
        return property.getValue();
    }

    /**
     * {@inheritDoc}
     *
     * Member-method calls on the proxy return the stored property value; everything
     * else (equals/hashCode/toString/annotationType) is delegated to this handler.
     */
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        String name = method.getName();
        if (this.properties.containsKey(name)) {
            return this.getProperty(name);
        }
        return method.invoke(this, args);
    }

    /**
     * {@inheritDoc}
     */
    public Class<? extends Annotation> annotationType() {
        return this.annotationType;
    }

    /**
     * Returns the proxed annotation.
     *
     * @return the proxed annotation.
     */
    public A getProxedAnnotation() {
        return this.proxedAnnotation;
    }

    /**
     * {@inheritDoc}
     *
     * Implements the member-wise equality required by the {@link Annotation#equals(Object)}
     * contract; works against both our own proxies and real annotation instances.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!this.annotationType.isInstance(obj)) {
            return false;
        }

        for (Method method : getDeclaredMethods(this.annotationType())) {
            String propertyName = method.getName();
            if (!this.properties.containsKey(propertyName)) {
                return false;
            }
            AnnotationProperty expected = this.properties.get(propertyName);
            AnnotationProperty actual = new AnnotationProperty(propertyName, method.getReturnType());

            AnnotationProxyBuilder<?> proxy = getAnnotationProxy(obj);
            if (proxy != null) {
                // Read the other proxy's stored value directly.
                actual.setValue(proxy.getProperty(propertyName));
            } else {
                // Fall back to reflective invocation on a real annotation instance.
                try {
                    actual.setValue(method.invoke(obj));
                } catch (IllegalArgumentException e) {
                    return false;
                } catch (IllegalAccessException e) {
                    // Methods were made accessible in getDeclaredMethods; this cannot happen.
                    throw new AssertionError(e);
                } catch (InvocationTargetException e) {
                    return false;
                }
            }

            if (!expected.equals(actual)) {
                return false;
            }
        }

        return true;
    }

    /**
     * {@inheritDoc}
     *
     * Follows the {@link Annotation#hashCode()} contract: sum over members of
     * {@code (127 * name.hashCode()) ^ valueHashCode}.
     */
    @Override
    public int hashCode() {
        int hashCode = 0;
        for (Entry<String, AnnotationProperty> property : this.properties.entrySet()) {
            hashCode += (MEMBER_NAME_MULTIPLICATOR * property.getKey().hashCode()
                    ^ property.getValue().getValueHashCode());
        }
        return hashCode;
    }

    /**
     * {@inheritDoc}
     *
     * Renders the annotation in source-like form, e.g. {@code @com.acme.Foo(bar=1, baz=x)}.
     */
    @Override
    public String toString() {
        StringBuilder stringBuilder = new StringBuilder("@")
                .append(this.annotationType.getName())
                .append('(');
        int counter = 0;
        for (Entry<String, AnnotationProperty> property : this.properties.entrySet()) {
            if (counter > 0) {
                stringBuilder.append(", ");
            }
            stringBuilder.append(property.getKey())
                    .append('=')
                    .append(property.getValue().valueToString());
            counter++;
        }
        return stringBuilder.append(')').toString();
    }
}
package prg2.connectfour.ui;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;

import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingConstants;
import javax.swing.border.LineBorder;

import prg2.connectfour.comlayer.NetworkEnv;
import prg2.connectfour.comlayer.NetworkEnv.MoveHandler;
import prg2.connectfour.comlayer.NetworkPlayer;
import prg2.connectfour.logic.Cell;
import prg2.connectfour.logic.Color;
import prg2.connectfour.logic.Game;
import prg2.connectfour.logic.GameFactory;
import prg2.connectfour.logic.Grid;
import prg2.connectfour.logic.Player;
import prg2.connectfour.logic.bot.GameTheory;
import prg2.connectfour.ui.HomeScreen.GameMode;

/**
 * Swing panel that renders a Connect-Four board and drives a game either against
 * a bot ({@link #singleInit}) or a remote opponent ({@link #networkInit}).
 * Implements {@link MoveHandler} to receive moves from the network layer.
 */
public class PlayGround extends JPanel implements MoveHandler {

    // Listeners to notify once the game has finished.
    private ArrayList<EndGameHandler> endGameListeners = new ArrayList<>();
    private GameMode gameMode;
    private Grid grid;
    private Game game;
    // players[0] is the player who moves first; players[1] is the opponent.
    private Player[] players = new Player[2];
    private JLabel activePlayerLabel;
    private JPanel buttonPanel;
    private JPanel gridPanel;
    // slots[row][column] mirrors the logical grid as colored labels.
    private JLabel slots[][];
    // One drop button per column.
    private JButton buttons[];
    private NetworkEnv networkEnv;
    private NetworkPlayer networkPlayer;
    private String gameToken;
    private Font font;

    /**
     * Creates the playground for a board of the given dimensions.
     * The local player always plays Red.
     *
     * @param x board width in columns.
     * @param y board height in rows.
     * @param myName display name of the local player.
     */
    public PlayGround(int x, int y, String myName) {
        this.font = new Font("Arial", Font.PLAIN, 36);
        this.players[0] = new Player(myName, Color.Red);
        this.grid = new Grid(x, y);
    }

    /**
     * Wires up the game model and builds the UI. A non-null {@code game} resumes an
     * existing game (its grid replaces ours); {@code null} starts a fresh one.
     */
    private void initGame(Game game) {
        if (game != null) {
            this.game = game;
            this.grid = game.grid;
            this.game.setPlayers(this.players);
        } else {
            this.game = GameFactory.create().withGrid(this.grid).withPlayers(this.players).finish();
        }
        initComponents();
        revalidate();
    }

    /**
     * Starts a network game against {@code player}.
     *
     * @param env network environment used to send/receive moves.
     * @param gameToken token identifying this game session on the network.
     * @param player the remote opponent.
     * @param canIStart true if the local player makes the first move; otherwise the
     *        two entries of {@code players} are swapped so the opponent moves first.
     */
    public void networkInit(NetworkEnv env, String gameToken, NetworkPlayer player, boolean canIStart) {
        this.gameMode = GameMode.NETWORK;
        this.networkEnv = env;
        this.gameToken = gameToken;
        this.networkPlayer = player;
        this.players[1] = player;
        if (!canIStart) {
            // Opponent starts: put the remote player into slot 0.
            this.players[1] = this.players[0];
            this.players[0] = player;
        }
        initGame(null);
        processNext();
    }

    /**
     * Starts a single-player game against the given bot; a non-null {@code game}
     * resumes a saved game.
     */
    public void singleInit(Game game, GameTheory bot) {
        this.gameMode = GameMode.SINGLE;
        this.players[1] = bot;
        initGame(game);
        processNext();
    }

    /**
     * Advances the UI to the next turn: repaints the board, then dispatches on the
     * active player's kind (remote, bot, or local human).
     *
     * NOTE(review): bot turns recurse into processNext(); for very long bot-vs-bot
     * sequences this would deepen the stack — presumably bounded by the board size.
     */
    private void processNext() {
        drawGrid();
        if (!this.game.isFinished()) {
            Player activePlayer = this.game.getActivePlayer();
            if (activePlayer instanceof NetworkPlayer) {
                // Remote turn: lock the UI and wait for movePerformed() via the network.
                disableButtons();
                this.activePlayerLabel.setText("Opponent's turn");
                this.networkEnv.addMoveListener(this);
            } else if (activePlayer instanceof GameTheory) {
                // Bot turn: compute and apply the move immediately.
                disableButtons();
                int nextMove = ((GameTheory) activePlayer).getNextMove(this.grid);
                this.game.dropOnColumn(nextMove);
                processNext();
            } else if (activePlayer instanceof Player) {
                // Local human turn.
                this.activePlayerLabel.setText("Your turn");
                enableButtons();
            }
        } else {
            revalidate();
            showFinish();
            onEndGame();
        }
        revalidate();
    }

    /** Enables the drop button of every column whose top cell is still free. */
    private void enableButtons() {
        for (int x = 0; x < this.grid.width; x++) {
            Cell cell = this.grid.getCellAt(x, this.grid.height - 1);
            if (cell.getOwner() == null) {
                this.buttons[x].setEnabled(true);
            }
        }
    }

    /** Disables all column buttons (opponent's or bot's turn). */
    private void disableButtons() {
        for (JButton button : this.buttons) {
            button.setEnabled(false);
        }
    }

    /** Builds the static component tree: button row on top, board grid in the center. */
    private void initComponents() {
        this.setLayout(new BorderLayout(10, 10));
        this.activePlayerLabel = new JLabel();
        //this.add(this.activePlayerLabel);
        drawButtons();
        this.add(this.buttonPanel, BorderLayout.NORTH);
        drawGrid();
        this.add(this.gridPanel, BorderLayout.CENTER);
    }

    /**
     * Creates one drop button per column. The button's action command carries the
     * zero-based column index; clicking applies the move locally and, in a network
     * game, also sends it to the opponent.
     */
    private void drawButtons() {
        this.buttonPanel = new JPanel();
        this.buttonPanel.setLayout(new GridLayout(1, this.grid.width));
        this.buttons = new JButton[this.grid.width];
        for (int i = 0; i < this.grid.width; i++) {
            buttons[i] = new JButton("" + (i + 1));
            buttons[i].setFont(this.font);
            Dimension d = buttons[i].getPreferredSize();
            d.height = 100;
            buttons[i].setPreferredSize(d);
            buttons[i].setActionCommand("" + i);
            buttons[i].addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    int a = Integer.parseInt(e.getActionCommand());
                    System.out.println("Droped to column: " + a);
                    game.dropOnColumn(a);
                    if (networkEnv != null)
                        networkEnv.sendMove(networkPlayer, a);
                    processNext();
                }
            });
            this.buttonPanel.add(buttons[i]);
        }
    }

    /**
     * (Re)builds the board labels from the logical grid. Rows are added top-down
     * (highest row index first) so that row 0 appears at the bottom of the panel.
     */
    private void drawGrid() {
        if (this.gridPanel != null)
            this.gridPanel.removeAll();
        else
            this.gridPanel = new JPanel();
        this.gridPanel.setLayout(new GridLayout(this.grid.height, this.grid.width));
        this.slots = new JLabel[this.grid.height][this.grid.width];
        for (int row = this.grid.height - 1; row >= 0; row--) {
            for (int column = 0; column < this.grid.width; column++) {
                slots[row][column] = new JLabel();
                slots[row][column].setHorizontalAlignment(SwingConstants.CENTER);
                slots[row][column].setBorder(new LineBorder(java.awt.Color.black));
                slots[row][column].setOpaque(true);
                Cell cell = this.grid.getCellAt(column, row);
                if (cell.getOwner() != null) {
                    if (cell.getOwner().color == Color.Red) {
                        slots[row][column].setBackground(java.awt.Color.red);
                    } else if (cell.getOwner().color == Color.Yellow) {
                        slots[row][column].setBackground(java.awt.Color.yellow);
                    } else
                        throw new IllegalArgumentException("Player color unknown");
                }
                this.gridPanel.add(slots[row][column]);
            }
        }
    }

    /**
     * Network callback: the opponent dropped into column {@code x}. Applies the move
     * and hands the turn back to the local side.
     */
    @Override
    public void movePerformed(int x) {
        this.networkEnv.removeAllMoveListeners();
        this.game.dropOnColumn(x);
        this.processNext();
    }

    /** Shows a dialog describing the game's outcome (draw, loss, or win). */
    public void showFinish() {
        String msg, title;
        if (this.game.getWinner() == null) {
            msg = "No more fields.";
            title = "Draw";
        } else if (this.game.getWinner() instanceof NetworkPlayer) {
            msg = this.game.getWinner().name + " is much better than you. Go home and cry.";
            title = "Loser";
        } else if (this.game.getWinner() instanceof GameTheory) {
            msg = "Really, you're worse than a computer.";
            title = "Loser";
        } else {
            msg = "Well done!";
            title = "Winner";
        }
        JOptionPane.showMessageDialog(this, msg, title, JOptionPane.INFORMATION_MESSAGE);
    }

    /** @return the underlying game model. */
    public Game getGame() {
        return this.game;
    }

    /** @return the mode (SINGLE or NETWORK) this playground was started in. */
    public GameMode getGameMode() {
        return this.gameMode;
    }

    /** Registers a listener to be notified when the game ends. */
    public void addEndGameListener(EndGameHandler handler) {
        this.endGameListeners.add(handler);
    }

    /** Notifies all registered end-game listeners. */
    private void onEndGame() {
        for (EndGameHandler listener : this.endGameListeners) {
            listener.endGame();
        }
    }

    /** Callback interface for game-over notification. */
    public interface EndGameHandler {
        void endGame();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.parquet.io; import java.util.ArrayList; import java.util.List; import org.apache.parquet.io.api.RecordConsumer; import org.junit.Test; import org.apache.parquet.column.ParquetProperties.WriterVersion; import org.apache.parquet.column.impl.ColumnWriteStoreV1; import org.apache.parquet.column.page.mem.MemPageStore; import org.apache.parquet.example.data.Group; import org.apache.parquet.example.data.GroupWriter; import org.apache.parquet.example.data.simple.convert.GroupRecordConverter; import org.apache.parquet.filter.ColumnPredicates.LongPredicateFunction; import org.apache.parquet.filter.ColumnPredicates.PredicateFunction; import org.apache.parquet.filter2.compat.FilterCompat; import org.apache.parquet.io.api.RecordMaterializer; import static org.junit.Assert.assertEquals; import static org.apache.parquet.example.Paper.r1; import static org.apache.parquet.example.Paper.r2; import static org.apache.parquet.example.Paper.schema; import static org.apache.parquet.filter.AndRecordFilter.and; import static org.apache.parquet.filter.ColumnPredicates.applyFunctionToLong; import static org.apache.parquet.filter.ColumnPredicates.applyFunctionToString; import static 
org.apache.parquet.filter.ColumnPredicates.equalTo; import static org.apache.parquet.filter.ColumnRecordFilter.column; import static org.apache.parquet.filter.NotRecordFilter.not; import static org.apache.parquet.filter.OrRecordFilter.or; import static org.apache.parquet.filter.PagedRecordFilter.page; public class TestFiltered { /* Class that implements applyFunction filter for long. Checks for long greater than 15. */ public class LongGreaterThan15Predicate implements LongPredicateFunction { @Override public boolean functionToApply(long input) { return input > 15; } }; /* Class that implements applyFunction filter for string. Checks for string ending in 'A'. */ public class StringEndsWithAPredicate implements PredicateFunction<String> { @Override public boolean functionToApply(String input) { return input.endsWith("A"); } }; private List<Group> readAll(RecordReader<Group> reader) { List<Group> result = new ArrayList<Group>(); Group g; while ((g = reader.read()) != null) { result.add(g); } return result; } private void readOne(RecordReader<Group> reader, String message, Group expected) { List<Group> result = readAll(reader); assertEquals(message + ": " + result, 1, result.size()); assertEquals("filtering did not return the correct record", expected.toString(), result.get(0).toString()); } @Test public void testFilterOnInteger() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 1); // Get first record RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("DocId", equalTo(10l)))); readOne(recordReader, "r2 filtered out", r1); // Get second record recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("DocId", equalTo(20l)))); 
readOne(recordReader, "r1 filtered out", r2); } @Test public void testApplyFunctionFilterOnLong() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 1); // Get first record RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("DocId", equalTo(10l)))); readOne(recordReader, "r2 filtered out", r1); // Get second record recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("DocId", applyFunctionToLong(new LongGreaterThan15Predicate())))); readOne(recordReader, "r1 filtered out", r2); } @Test public void testFilterOnString() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 1); // First try matching against the A url in record 1 RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("Name.Url", equalTo("http://A")))); readOne(recordReader, "r2 filtered out", r1); // Second try matching against the B url in record 1 - it should fail as we only match // against the first instance of a recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("Name.Url", equalTo("http://B")))); List<Group> all = readAll(recordReader); assertEquals("There should be no matching records: " + all , 0, all.size()); // Finally try matching against the C url in record 2 recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("Name.Url", 
equalTo("http://C")))); readOne(recordReader, "r1 filtered out", r2); } @Test public void testApplyFunctionFilterOnString() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 1); // First try matching against the A url in record 1 RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("Name.Url", applyFunctionToString(new StringEndsWithAPredicate())))); readOne(recordReader, "r2 filtered out", r1); // Second try matching against the B url in record 1 - it should fail as we only match // against the first instance of a recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("Name.Url", equalTo("http://B")))); List<Group> all = readAll(recordReader); assertEquals("There should be no matching records: " + all , 0, all.size()); // Finally try matching against the C url in record 2 recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(column("Name.Url", equalTo("http://C")))); readOne(recordReader, "r1 filtered out", r2); } @Test public void testPaged() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 6); RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(page(4, 4))); List<Group> all = readAll(recordReader); assertEquals("expecting records " + all, 4, all.size()); for (int i = 0; i < all.size(); i++) { assertEquals("expecting record", (i%2 == 0 ? 
r2 : r1).toString(), all.get(i).toString()); } } @Test public void testFilteredAndPaged() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 8); RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(and(column("DocId", equalTo(10l)), page(2, 4)))); List<Group> all = readAll(recordReader); assertEquals("expecting 4 records " + all, 4, all.size()); for (int i = 0; i < all.size(); i++) { assertEquals("expecting record1", r1.toString(), all.get(i).toString()); } } @Test public void testFilteredOrPaged() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 8); RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(or(column("DocId", equalTo(10l)), column("DocId", equalTo(20l))))); List<Group> all = readAll(recordReader); assertEquals("expecting 8 records " + all, 16, all.size()); for (int i = 0; i < all.size () / 2; i++) { assertEquals("expecting record1", r1.toString(), all.get(2 * i).toString()); assertEquals("expecting record2", r2.toString(), all.get(2 * i + 1).toString()); } } @Test public void testFilteredNotPaged() { MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema); MemPageStore memPageStore = writeTestRecords(columnIO, 8); RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema); RecordReaderImplementation<Group> recordReader = (RecordReaderImplementation<Group>) columnIO.getRecordReader(memPageStore, recordConverter, FilterCompat.get(not(column("DocId", equalTo(10l))))); List<Group> all = 
readAll(recordReader); assertEquals("expecting 8 records " + all, 8, all.size()); for (int i = 0; i < all.size(); i++) { assertEquals("expecting record2", r2.toString(), all.get(i).toString()); } } private MemPageStore writeTestRecords(MessageColumnIO columnIO, int number) { MemPageStore memPageStore = new MemPageStore(number * 2); ColumnWriteStoreV1 columns = new ColumnWriteStoreV1(memPageStore, 800, 800, false, WriterVersion.PARQUET_1_0); RecordConsumer recordWriter = columnIO.getRecordWriter(columns); GroupWriter groupWriter = new GroupWriter(recordWriter, schema); for ( int i = 0; i < number; i++ ) { groupWriter.write(r1); groupWriter.write(r2); } recordWriter.flush(); columns.flush(); return memPageStore; } }
package utilities;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.LinkedList;

import messages.MessageType;
import server.MusicTrack;
import server.MusicTrack.TrackType;
import server.YTJBServer;

/**
 * Static class that provides functions for uniform input and output:
 * debug logging, gap-list persistence (".jb" files with lines of the form
 * "TYPE;TITLE;SHORTURL") and raw file transfer over sockets.
 *
 * @author Mellich
 */
public class IO {

	/** When true, printlnDebug writes to stdout (and to connected clients, if a server is set). */
	public static boolean debugMode = true;

	/** Optional server used to broadcast debug output to connected clients. */
	private static YTJBServer server = null;

	public static void setServer(YTJBServer s) {
		server = s;
	}

	/**
	 * Prints a time-stamped, thread-tagged debug line for the given speaker object.
	 * If a server has been registered via {@link #setServer(YTJBServer)}, the same
	 * line is also pushed to all clients as a DEBUGOUTPUTNOTIFY message.
	 *
	 * @param speaker the object emitting the message; {@code null} is logged as "STATIC"
	 * @param input   the message text
	 */
	public static void printlnDebug(Object speaker, String input) {
		if (debugMode) {
			String name = (speaker != null) ? speaker.getClass().getName() : "STATIC";
			long n = Thread.currentThread().getId();
			Timestamp t = new Timestamp(System.currentTimeMillis());
			String line = t.toString() + " Thread-" + n + "=" + name + ": " + input;
			System.out.println(line);
			if (server != null) {
				ArrayList<String> s = new ArrayList<String>();
				s.add(line);
				server.notifyClients(MessageType.DEBUGOUTPUTNOTIFY, s);
			}
		}
	}

	/**
	 * Opens a reader on the given file, or returns {@code null} if it does not exist.
	 * NOTE(review): despite the name, this opens the file for *reading*; the name is
	 * kept for backward compatibility with existing callers.
	 */
	public static BufferedReader getFileOutput(String filename) {
		try {
			return new BufferedReader(new FileReader(filename));
		} catch (FileNotFoundException e) {
			IO.printlnDebug(null, "File could not be loaded: " + filename);
		}
		return null;
	}

	/** Deletes the gap-list file; returns true if the file was removed. */
	public static boolean deleteGapList(String filename) {
		return new File(filename).delete();
	}

	/**
	 * Opens a writer on the given file (truncating it), or returns {@code null}
	 * on failure. NOTE(review): name/direction mismatch kept for compatibility.
	 */
	public static BufferedWriter getFileInput(String filename) {
		try {
			return new BufferedWriter(new FileWriter(filename));
		} catch (IOException e) {
			// was: e.printStackTrace() with a TODO — route through the uniform debug channel
			IO.printlnDebug(null, "File could not be opened for writing: " + filename);
		}
		return null;
	}

	/**
	 * Lists the names (without the ".jb" extension) of all gap-list files in a directory.
	 *
	 * @param directory directory to scan
	 * @return list of gap-list names; empty if the directory cannot be read
	 */
	public static ArrayList<String> getGapLists(String directory) {
		IO.printlnDebug(null, "getting gap lists...");
		ArrayList<String> result = new ArrayList<String>();
		File dir = new File(directory);
		// endsWith() is safe for names shorter than the extension, unlike the former
		// substring(s.length() - 3) check, which threw StringIndexOutOfBoundsException.
		File[] gaplists = dir.listFiles((File f, String s) -> s.endsWith(".jb"));
		if (gaplists != null) {
			for (File gaplist : gaplists) {
				String name = gaplist.getName();
				result.add(name.substring(0, name.length() - 3));
			}
		}
		return result;
	}

	/**
	 * Reads a gap-list file and returns a flat array of [title, shortURL, title, shortURL, ...]
	 * (fields 1 and 2 of each ";"-separated line).
	 *
	 * @param filename gap-list file to read
	 * @return the flat title array, or an empty array on I/O error
	 */
	public static String[] readOutGapList(String filename) {
		try {
			long max;
			try (BufferedReader counter = getFileOutput(filename)) {
				max = counter.lines().count();
			}
			String[] title = new String[(int) max * 2];
			IO.printlnDebug(null, "Titles to output: " + max);
			try (BufferedReader reader = getFileOutput(filename)) {
				int current = 0;
				String url = reader.readLine();
				// was: while (url != null || url == "") — identity comparison of strings;
				// the loop simply runs until end of file
				while (url != null) {
					String[] splitted = url.split(";");
					title[current++] = splitted[1];
					title[current++] = splitted[2];
					url = reader.readLine();
				}
			}
			IO.printlnDebug(null, "Finished sucessfully");
			return title;
		} catch (IOException e) {
			IO.printlnDebug(null, "ERROR: Could not read out title of the gaplist " + filename);
		}
		return new String[0];
	}

	/**
	 * Loads a gap list from disk into the server's track list.
	 * If the file does not exist, a fresh empty gap list is created instead.
	 *
	 * @param filename gap-list file to load
	 * @param server   server receiving the tracks
	 * @return true on success, false on error or if a new list had to be created
	 */
	public static boolean loadGapListFromFile(String filename, YTJBServer server) {
		try {
			try (BufferedReader counter = getFileOutput(filename)) {
				server.setMaxGapListTrackCount((int) counter.lines().count());
			}
			IO.printlnDebug(null, "Start to load gap list " + filename);
			try (BufferedReader reader = getFileOutput(filename)) {
				String url = reader.readLine();
				while (url != null) {
					String[] splitted = url.split(";");
					MusicTrack yURL = new MusicTrack(TrackType.valueOf(splitted[0]), splitted[1], splitted[2], true);
					if (Thread.interrupted())
						break;
					server.addToList(yURL, false, false);
					url = reader.readLine();
				}
			}
		} catch (IOException e) {
			IO.printlnDebug(null, "ERROR while opening file: " + filename);
			return false;
		} catch (NullPointerException e) {
			// getFileOutput returned null (missing file): create a fresh, empty gap list
			IO.printlnDebug(null, "Creating new Gap list: " + filename);
			IO.saveGapListToFile(new LinkedList<MusicTrack>(), filename, server);
			server.setMaxGapListTrackCount(0);
			return false;
		}
		IO.printlnDebug(null, "finished loading gap list!");
		return true;
	}

	/**
	 * Writes the given tracks to a gap-list file, one "TYPE;TITLE;SHORTURL" line per track.
	 *
	 * @return true on success, false on I/O error
	 */
	public static boolean saveGapListToFile(LinkedList<MusicTrack> urls, String filename, YTJBServer server) {
		try (BufferedWriter writer = new BufferedWriter(new FileWriter(filename))) {
			writer.write("");
			server.setMaxGapListTrackCount(urls.size());
			for (MusicTrack url : urls) {
				writer.write(url.getMusicType() + ";" + url.getTitle() + ";" + url.getShortURL());
				writer.newLine();
			}
			return true;
		} catch (IOException e) {
			IO.printlnDebug(null, "Error while saving the gaplist: " + e.getMessage());
		}
		return false;
	}

	/**
	 * Reads the socket's input stream to end-of-stream and stores the bytes in a file.
	 * The socket stream itself is intentionally left open (closing it would close the socket).
	 *
	 * @return true on success, false on I/O error
	 */
	public static boolean receiveAndSaveFile(Socket socket, String filename) {
		IO.printlnDebug(null, "Receiving file...");
		try {
			InputStream in = socket.getInputStream();
			try (FileOutputStream fileOut = new FileOutputStream(filename)) {
				byte[] buffer = new byte[1024];
				int bytesRead;
				while ((bytesRead = in.read(buffer)) > 0) {
					fileOut.write(buffer, 0, bytesRead);
				}
			}
			IO.printlnDebug(null, "File received!");
			return true;
		} catch (IOException e) {
			IO.printlnDebug(null, "ERROR: could not receive or save the file");
		}
		return false;
	}

	/**
	 * Streams the given file over the socket's output stream, then closes that
	 * stream so the receiver sees end-of-stream.
	 *
	 * @return true on success, false on I/O error
	 */
	public static boolean sendFile(Socket socket, String filename) {
		IO.printlnDebug(null, "Sending file...");
		try {
			OutputStream out = socket.getOutputStream();
			try (InputStream fileIn = new FileInputStream(filename)) {
				byte[] buffer = new byte[1024];
				int bytesRead;
				// read() governs the loop; the former available()-based loop passed
				// fileIn.read(buffer) directly as the write length and could write
				// stale buffer contents when read() returned -1 or 0.
				while ((bytesRead = fileIn.read(buffer)) > 0) {
					out.write(buffer, 0, bytesRead);
				}
			}
			out.close(); // signals end-of-stream to the receiving side
			IO.printlnDebug(null, "File sent!");
			return true;
		} catch (IOException e) {
			IO.printlnDebug(null, "ERROR: could not send the file " + filename);
		}
		return false;
	}
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util;

import com.intellij.openapi.util.Comparing;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.CharArrayCharSequence;
import gnu.trove.Equality;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;

/**
 * Static helpers for primitive and object arrays: reallocation, merging,
 * appending/removing elements, searching, comparing and reversing.
 *
 * Author: msk
 */
public class ArrayUtil {
  public static final short[] EMPTY_SHORT_ARRAY = new short[0];
  public static final char[] EMPTY_CHAR_ARRAY = new char[0];
  public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
  public static final int[] EMPTY_INT_ARRAY = new int[0];
  public static final boolean[] EMPTY_BOOLEAN_ARRAY = new boolean[0];
  @SuppressWarnings({"SSBasedInspection"})
  public static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];
  @SuppressWarnings({"SSBasedInspection"})
  public static final String[] EMPTY_STRING_ARRAY = new String[0];
  @SuppressWarnings({"SSBasedInspection"})
  public static final Class[] EMPTY_CLASS_ARRAY = new Class[0];
  public static final long[] EMPTY_LONG_ARRAY = new long[0];
  public static final Collection[] EMPTY_COLLECTION_ARRAY = new Collection[0];
  public static final CharSequence EMPTY_CHAR_SEQUENCE = new CharArrayCharSequence(EMPTY_CHAR_ARRAY);
  public static final File[] EMPTY_FILE_ARRAY = new File[0];

  public static final ArrayFactory<String> STRING_ARRAY_FACTORY = new ArrayFactory<String>() {
    @Override
    public String[] create(int count) {
      return newStringArray(count);
    }
  };
  public static final Runnable[] EMPTY_RUNNABLE_ARRAY = new Runnable[0];

  /** Returns a byte array of {@code newSize}, copying as many elements of {@code array} as fit. */
  @NotNull
  public static byte[] realloc(@NotNull byte[] array, final int newSize) {
    if (newSize == 0) {
      return EMPTY_BYTE_ARRAY;
    }
    final int oldSize = array.length;
    if (oldSize == newSize) {
      return array;
    }
    final byte[] result = new byte[newSize];
    System.arraycopy(array, 0, result, 0, Math.min(oldSize, newSize));
    return result;
  }

  /** Returns an int array of {@code newSize}, copying as many elements of {@code array} as fit. */
  @NotNull
  public static int[] realloc(@NotNull int[] array, final int newSize) {
    if (newSize == 0) {
      return EMPTY_INT_ARRAY;
    }
    final int oldSize = array.length;
    if (oldSize == newSize) {
      return array;
    }
    final int[] result = new int[newSize];
    System.arraycopy(array, 0, result, 0, Math.min(oldSize, newSize));
    return result;
  }

  /** Returns a new array consisting of {@code array} followed by {@code value}. */
  @NotNull
  public static int[] append(@NotNull int[] array, int value) {
    array = realloc(array, array.length + 1);
    array[array.length - 1] = value;
    return array;
  }

  /** Returns a char array of {@code newSize}, copying as many elements of {@code array} as fit. */
  @NotNull
  public static char[] realloc(@NotNull char[] array, final int newSize) {
    if (newSize == 0) {
      return EMPTY_CHAR_ARRAY;
    }
    final int oldSize = array.length;
    if (oldSize == newSize) {
      return array;
    }
    final char[] result = new char[newSize];
    System.arraycopy(array, 0, result, 0, Math.min(oldSize, newSize));
    return result;
  }

  /** Copies {@code collection} into a new array of component type {@code aClass}. */
  @SuppressWarnings("unchecked")
  @NotNull
  public static <T> T[] toObjectArray(@NotNull Collection<T> collection, @NotNull Class<T> aClass) {
    T[] array = (T[])Array.newInstance(aClass, collection.size());
    return collection.toArray(array);
  }

  /** Copies {@code source} into a new array of component type {@code aClass}. */
  @SuppressWarnings("unchecked")
  @NotNull
  public static <T> T[] toObjectArray(@NotNull Class<T> aClass, Object... source) {
    T[] array = (T[])Array.newInstance(aClass, source.length);
    System.arraycopy(source, 0, array, 0, array.length);
    return array;
  }

  @NotNull
  public static Object[] toObjectArray(@NotNull Collection<?> collection) {
    if (collection.isEmpty()) return EMPTY_OBJECT_ARRAY;
    //noinspection SSBasedInspection
    return collection.toArray(new Object[collection.size()]);
  }

  @NotNull
  public static String[] toStringArray(@NotNull Collection<String> collection) {
    if (collection.isEmpty()) return EMPTY_STRING_ARRAY;
    return ContainerUtil.toArray(collection, new String[collection.size()]);
  }

  /** Unboxes {@code list} into a primitive int array (throws NPE on null elements). */
  @NotNull
  public static int[] toIntArray(@NotNull List<Integer> list) {
    int[] ret = new int[list.size()];
    int i = 0;
    for (Integer e : list) {
      ret[i++] = e.intValue();
    }
    return ret;
  }

  /** Concatenates two arrays; returns one of the inputs if the other is empty. */
  @SuppressWarnings("unchecked")
  @NotNull
  public static <T> T[] mergeArrays(@NotNull T[] a1, @NotNull T[] a2, @NotNull Class<T> aClass) {
    if (a1.length == 0) {
      return a2;
    }
    if (a2.length == 0) {
      return a1;
    }
    T[] highlights = (T[])Array.newInstance(aClass, a1.length + a2.length);
    System.arraycopy(a1, 0, highlights, 0, a1.length);
    System.arraycopy(a2, 0, highlights, a1.length, a2.length);
    return highlights;
  }

  /** Concatenates two arrays using {@code factory} to allocate the result. */
  @NotNull
  public static <T> T[] mergeArrays(@NotNull T[] a1, @NotNull T[] a2, @NotNull ArrayFactory<T> factory) {
    if (a1.length == 0) {
      return a2;
    }
    if (a2.length == 0) {
      return a1;
    }
    T[] highlights = factory.create(a1.length + a2.length);
    System.arraycopy(a1, 0, highlights, 0, a1.length);
    System.arraycopy(a2, 0, highlights, a1.length, a2.length);
    return highlights;
  }

  /** Concatenates two int arrays; returns one of the inputs if the other is empty. */
  @NotNull
  public static int[] mergeArrays(@NotNull int[] a1, @NotNull int[] a2) {
    if (a1.length == 0) {
      return a2;
    }
    if (a2.length == 0) {
      return a1;
    }
    int[] a = new int[a1.length + a2.length];
    int idx = 0;
    for (int i : a1) {
      a[idx++] = i;
    }
    for (int i : a2) {
      a[idx++] = i;
    }
    return a;
  }

  /**
   * Allocates new array of size <code>array.length + collection.size()</code> and copies elements of <code>array</code> and
   * <code>collection</code> to it.
   *
   * @param array      source array
   * @param collection source collection
   * @param factory    array factory used to create destination array of type <code>T</code>
   * @return destination array
   */
  @NotNull
  public static <T> T[] mergeArrayAndCollection(@NotNull T[] array,
                                                @NotNull Collection<T> collection,
                                                @NotNull final ArrayFactory<T> factory) {
    if (collection.isEmpty()) {
      return array;
    }
    final T[] array2 = collection.toArray(factory.create(collection.size()));
    if (array.length == 0) {
      return array2;
    }
    final T[] result = factory.create(array.length + collection.size());
    System.arraycopy(array, 0, result, 0, array.length);
    System.arraycopy(array2, 0, result, array.length, array2.length);
    return result;
  }

  /**
   * Appends <code>element</code> to the <code>src</code> array. As you can
   * imagine the appended element will be the last one in the returned result.
   *
   * @param src     array to which the <code>element</code> should be appended.
   * @param element object to be appended to the end of <code>src</code> array.
   * @return new array
   */
  @SuppressWarnings("unchecked")
  @NotNull
  public static <T> T[] append(@NotNull final T[] src, final T element) {
    return append(src, element, (Class<T>)src.getClass().getComponentType());
  }

  public static <T> T[] append(@NotNull final T[] src, final T element, ArrayFactory<T> factory) {
    int length = src.length;
    T[] result = factory.create(length + 1);
    System.arraycopy(src, 0, result, 0, length);
    result[length] = element;
    return result;
  }

  @SuppressWarnings("unchecked")
  @NotNull
  public static <T> T[] append(@NotNull T[] src, final T element, @NotNull Class<T> componentType) {
    int length = src.length;
    T[] result = (T[])Array.newInstance(componentType, length + 1);
    System.arraycopy(src, 0, result, 0, length);
    result[length] = element;
    return result;
  }

  /**
   * Removes element with index <code>idx</code> from array <code>src</code>.
   *
   * @param src array.
   * @param idx index of element to be removed.
   * @return modified array.
   */
  @SuppressWarnings("unchecked")
  @NotNull
  public static <T> T[] remove(@NotNull final T[] src, int idx) {
    int length = src.length;
    if (idx < 0 || idx >= length) {
      throw new IllegalArgumentException("invalid index: " + idx);
    }
    T[] result = (T[])Array.newInstance(src.getClass().getComponentType(), length - 1);
    System.arraycopy(src, 0, result, 0, idx);
    System.arraycopy(src, idx + 1, result, idx, length - idx - 1);
    return result;
  }

  @NotNull
  public static <T> T[] remove(@NotNull final T[] src, int idx, ArrayFactory<T> factory) {
    int length = src.length;
    if (idx < 0 || idx >= length) {
      throw new IllegalArgumentException("invalid index: " + idx);
    }
    T[] result = factory.create(length - 1);
    System.arraycopy(src, 0, result, 0, idx);
    System.arraycopy(src, idx + 1, result, idx, length - idx - 1);
    return result;
  }

  /** Removes the first occurrence of {@code element}; returns {@code src} unchanged if absent. */
  @NotNull
  public static <T> T[] remove(@NotNull final T[] src, T element) {
    final int idx = find(src, element);
    if (idx == -1) return src;
    return remove(src, idx);
  }

  @NotNull
  public static <T> T[] remove(@NotNull final T[] src, T element, ArrayFactory<T> factory) {
    final int idx = find(src, element);
    if (idx == -1) return src;
    return remove(src, idx, factory);
  }

  @NotNull
  public static int[] remove(@NotNull final int[] src, int idx) {
    int length = src.length;
    if (idx < 0 || idx >= length) {
      throw new IllegalArgumentException("invalid index: " + idx);
    }
    int[] result = new int[src.length - 1];
    System.arraycopy(src, 0, result, 0, idx);
    System.arraycopy(src, idx + 1, result, idx, length - idx - 1);
    return result;
  }

  /**
   * @param src source array.
   * @param obj object to be found.
   * @return index of <code>obj</code> in the <code>src</code> array.
   *         Returns <code>-1</code> if passed object isn't found. This method uses
   *         <code>equals</code> of arrays elements to compare <code>obj</code> with
   *         these elements.
   */
  public static <T> int find(@NotNull final T[] src, final T obj) {
    for (int i = 0; i < src.length; i++) {
      final T o = src[i];
      if (o == null) {
        if (obj == null) {
          return i;
        }
      }
      else {
        if (o.equals(obj)) {
          return i;
        }
      }
    }
    return -1;
  }

  /** Index of the last occurrence of {@code obj} (by equals, null-safe), or -1. */
  public static <T> int lastIndexOf(@NotNull final T[] src, final T obj) {
    for (int i = src.length - 1; i >= 0; i--) {
      final T o = src[i];
      if (o == null) {
        if (obj == null) {
          return i;
        }
      }
      else {
        if (o.equals(obj)) {
          return i;
        }
      }
    }
    return -1;
  }

  public static int find(@NotNull int[] src, int obj) {
    return indexOf(src, obj);
  }

  /** True if {@code array} begins with the elements of {@code subArray} (null-tolerant). */
  public static boolean startsWith(byte[] array, byte[] subArray) {
    if (array == subArray) {
      return true;
    }
    if (array == null || subArray == null) {
      return false;
    }
    int length = subArray.length;
    if (array.length < length) {
      return false;
    }
    for (int i = 0; i < length; i++) {
      if (array[i] != subArray[i]) {
        return false;
      }
    }
    return true;
  }

  public static <E> boolean startsWith(E[] array, E[] subArray) {
    if (array == subArray) {
      return true;
    }
    if (array == null || subArray == null) {
      return false;
    }
    int length = subArray.length;
    if (array.length < length) {
      return false;
    }
    for (int i = 0; i < length; i++) {
      if (!Comparing.equal(array[i], subArray[i])) {
        return false;
      }
    }
    return true;
  }

  /** True if {@code array}, beginning at offset {@code start}, contains {@code subArray}. */
  public static boolean startsWith(@NotNull byte[] array, int start, @NotNull byte[] subArray) {
    int length = subArray.length;
    if (array.length - start < length) {
      return false;
    }
    for (int i = 0; i < length; i++) {
      if (array[start + i] != subArray[i]) {
        return false;
      }
    }
    return true;
  }

  /** Element-wise equality of two arrays under {@code comparator} (null-tolerant arrays). */
  public static <T> boolean equals(T[] a1, T[] a2, Equality<? super T> comparator) {
    if (a1 == a2) {
      return true;
    }
    if (a1 == null || a2 == null) {
      return false;
    }
    int length = a2.length;
    if (a1.length != length) {
      return false;
    }
    for (int i = 0; i < length; i++) {
      if (!comparator.equals(a1[i], a2[i])) {
        return false;
      }
    }
    return true;
  }

  public static <T> boolean equals(T[] a1, T[] a2, Comparator<? super T> comparator) {
    if (a1 == a2) {
      return true;
    }
    if (a1 == null || a2 == null) {
      return false;
    }
    int length = a2.length;
    if (a1.length != length) {
      return false;
    }
    for (int i = 0; i < length; i++) {
      if (comparator.compare(a1[i], a2[i]) != 0) {
        return false;
      }
    }
    return true;
  }

  /** Returns a new array with the elements of {@code array} in reverse order. */
  @NotNull
  public static <T> T[] reverseArray(@NotNull T[] array) {
    T[] newArray = array.clone();
    for (int i = 0; i < array.length; i++) {
      newArray[array.length - i - 1] = array[i];
    }
    return newArray;
  }

  @NotNull
  public static int[] reverseArray(@NotNull int[] array) {
    int[] newArray = array.clone();
    for (int i = 0; i < array.length; i++) {
      newArray[array.length - i - 1] = array[i];
    }
    return newArray;
  }

  /**
   * Reverses {@code array} in place.
   * <p>
   * Fix: the previous implementation iterated over the whole array swapping
   * {@code (i, length - i - 1)}, so every pair was swapped twice and the array
   * ended up in its original order (a no-op). Iterating over only the first
   * half performs each swap exactly once.
   */
  public static void reverse(@NotNull char[] array) {
    for (int i = 0; i < array.length / 2; i++) {
      swap(array, array.length - i - 1, i);
    }
  }

  /** Case-insensitive lexicographic comparison; a missing element sorts first. */
  public static int lexicographicCompare(@NotNull String[] obj1, @NotNull String[] obj2) {
    for (int i = 0; i < Math.max(obj1.length, obj2.length); i++) {
      String o1 = i < obj1.length ? obj1[i] : null;
      String o2 = i < obj2.length ? obj2[i] : null;
      if (o1 == null) return -1;
      if (o2 == null) return 1;
      int res = o1.compareToIgnoreCase(o2);
      if (res != 0) return res;
    }
    return 0;
  }

  //must be Comparables
  public static <T> int lexicographicCompare(@NotNull T[] obj1, @NotNull T[] obj2) {
    for (int i = 0; i < Math.max(obj1.length, obj2.length); i++) {
      T o1 = i < obj1.length ? obj1[i] : null;
      T o2 = i < obj2.length ? obj2[i] : null;
      if (o1 == null) return -1;
      if (o2 == null) return 1;
      int res = ((Comparable)o1).compareTo(o2);
      if (res != 0) return res;
    }
    return 0;
  }

  public static <T> void swap(@NotNull T[] array, int i1, int i2) {
    final T t = array[i1];
    array[i1] = array[i2];
    array[i2] = t;
  }

  public static void swap(@NotNull int[] array, int i1, int i2) {
    final int t = array[i1];
    array[i1] = array[i2];
    array[i2] = t;
  }

  public static void swap(@NotNull boolean[] array, int i1, int i2) {
    final boolean t = array[i1];
    array[i1] = array[i2];
    array[i2] = t;
  }

  public static void swap(@NotNull char[] array, int i1, int i2) {
    final char t = array[i1];
    array[i1] = array[i2];
    array[i2] = t;
  }

  /** Moves element {@code i1} to position {@code i2}, shifting the range left by one. */
  public static <T> void rotateLeft(@NotNull T[] array, int i1, int i2) {
    final T t = array[i1];
    System.arraycopy(array, i1 + 1, array, i1, i2 - i1);
    array[i2] = t;
  }

  /** Moves element {@code i2} to position {@code i1}, shifting the range right by one. */
  public static <T> void rotateRight(@NotNull T[] array, int i1, int i2) {
    final T t = array[i2];
    System.arraycopy(array, i1, array, i1 + 1, i2 - i1);
    array[i1] = t;
  }

  public static int indexOf(@NotNull Object[] objects, Object object) {
    for (int i = 0; i < objects.length; i++) {
      if (Comparing.equal(objects[i], object)) return i;
    }
    return -1;
  }

  public static <T> int indexOf(@NotNull List<T> objects, T object, @NotNull Equality<T> comparator) {
    for (int i = 0; i < objects.size(); i++) {
      if (comparator.equals(objects.get(i), object)) return i;
    }
    return -1;
  }

  public static <T> int indexOf(@NotNull List<T> objects, T object, @NotNull Comparator<T> comparator) {
    for (int i = 0; i < objects.size(); i++) {
      if (comparator.compare(objects.get(i), object) == 0) return i;
    }
    return -1;
  }

  public static <T> int indexOf(@NotNull T[] objects, T object, @NotNull Equality<T> comparator) {
    for (int i = 0; i < objects.length; i++) {
      if (comparator.equals(objects[i], object)) return i;
    }
    return -1;
  }

  public static int indexOf(@NotNull int[] ints, int value) {
    for (int i = 0; i < ints.length; i++) {
      if (ints[i] == value) return i;
    }
    return -1;
  }

  public static boolean contains(final Object o, final Object... objects) {
    return indexOf(objects, o) >= 0;
  }

  public static int[] newIntArray(int count) {
    return count == 0 ? EMPTY_INT_ARRAY : new int[count];
  }

  public static String[] newStringArray(int count) {
    return count == 0 ? EMPTY_STRING_ARRAY : new String[count];
  }

  /** Returns {@code sample} if it already has {@code count} elements, otherwise a fresh array of that size. */
  @SuppressWarnings("unchecked")
  @NotNull
  public static <E> E[] ensureExactSize(int count, @NotNull E[] sample) {
    if (count == sample.length) return sample;
    return (E[])Array.newInstance(sample.getClass().getComponentType(), count);
  }

  @Nullable
  public static <T> T getLastElement(T[] array) {
    return array.length > 0 ? array[array.length - 1] : null;
  }

  /** Concatenates two arrays, using {@code array1}'s component type for the result. */
  @SuppressWarnings("unchecked")
  public static <T> T[] join(T[] array1, T[] array2) {
    final T[] newArray = (T[])Array.newInstance(array1.getClass().getComponentType(), array1.length + array2.length);
    System.arraycopy(array1, 0, newArray, 0, array1.length);
    System.arraycopy(array2, 0, newArray, array1.length, array2.length);
    return newArray;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.runtime.tasks.mailbox;

import org.apache.flink.annotation.VisibleForTesting;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

import static org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailbox.State.CLOSED;
import static org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailbox.State.OPEN;
import static org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailbox.State.QUIESCED;

/**
 * Implementation of {@link TaskMailbox} in a {@link java.util.concurrent.BlockingQueue} fashion and tailored towards
 * our use case with multiple writers and single reader.
 */
@ThreadSafe
public class TaskMailboxImpl implements TaskMailbox {
	/**
	 * Lock for all concurrent ops.
	 */
	private final ReentrantLock lock = new ReentrantLock();

	/**
	 * Internal queue of mails.
	 */
	@GuardedBy("lock")
	private final Deque<Mail> queue = new ArrayDeque<>();

	/**
	 * Condition that is triggered when the mailbox is no longer empty.
	 */
	@GuardedBy("lock")
	private final Condition notEmpty = lock.newCondition();

	/**
	 * The state of the mailbox in the lifecycle of open, quiesced, and closed.
	 */
	@GuardedBy("lock")
	private State state = OPEN;

	/**
	 * Reference to the thread that executes the mailbox mails.
	 */
	@Nonnull
	private final Thread taskMailboxThread;

	/**
	 * The current batch of mails. A new batch can be created with {@link #createBatch()} and consumed with {@link
	 * #tryTakeFromBatch()}.
	 */
	// NOTE: batch is only ever touched from taskMailboxThread (guarded by checkIsMailboxThread
	// in every accessor), so it needs no lock.
	private final Deque<Mail> batch = new ArrayDeque<>();

	/**
	 * Performance optimization where hasNewMail == !queue.isEmpty(). Will not reflect the state of {@link #batch}.
	 */
	private volatile boolean hasNewMail = false;

	public TaskMailboxImpl(@Nonnull final Thread taskMailboxThread) {
		this.taskMailboxThread = taskMailboxThread;
	}

	@VisibleForTesting
	public TaskMailboxImpl() {
		this(Thread.currentThread());
	}

	@Override
	public boolean isMailboxThread() {
		return Thread.currentThread() == taskMailboxThread;
	}

	@Override
	public boolean hasMail() {
		checkIsMailboxThread();
		// volatile read of hasNewMail avoids taking the lock on the hot path
		return !batch.isEmpty() || hasNewMail;
	}

	@Override
	public Optional<Mail> tryTake(int priority) {
		checkIsMailboxThread();
		checkTakeStateConditions();
		// fast path: serve from the thread-local batch without locking
		Mail head = takeOrNull(batch, priority);
		if (head != null) {
			return Optional.of(head);
		}
		if (!hasNewMail) {
			return Optional.empty();
		}
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			final Mail value = takeOrNull(queue, priority);
			if (value == null) {
				return Optional.empty();
			}
			// keep the hasNewMail == !queue.isEmpty() invariant while holding the lock
			hasNewMail = !queue.isEmpty();
			return Optional.ofNullable(value);
		} finally {
			lock.unlock();
		}
	}

	@Override
	public @Nonnull Mail take(int priority) throws InterruptedException, IllegalStateException {
		checkIsMailboxThread();
		checkTakeStateConditions();
		// fast path: serve from the thread-local batch without locking
		Mail head = takeOrNull(batch, priority);
		if (head != null) {
			return head;
		}
		final ReentrantLock lock = this.lock;
		lock.lockInterruptibly();
		try {
			Mail headMail;
			// block until a mail of sufficient priority is available
			while ((headMail = takeOrNull(queue, priority)) == null) {
				notEmpty.await();
			}
			hasNewMail = !queue.isEmpty();
			return headMail;
		} finally {
			lock.unlock();
		}
	}

	//------------------------------------------------------------------------------------------------------------------

	@Override
	public boolean createBatch() {
		checkIsMailboxThread();
		if (!hasNewMail) {
			// batch is usually depleted by previous MailboxProcessor#runMainLoop
			// however, putFirst may add a message directly to the batch if called from mailbox thread
			return !batch.isEmpty();
		}
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			// drain the shared queue into the (unsynchronized) thread-local batch
			Mail mail;
			while ((mail = queue.pollFirst()) != null) {
				batch.addLast(mail);
			}
			hasNewMail = false;
			return !batch.isEmpty();
		} finally {
			lock.unlock();
		}
	}

	@Override
	public Optional<Mail> tryTakeFromBatch() {
		checkIsMailboxThread();
		checkTakeStateConditions();
		return Optional.ofNullable(batch.pollFirst());
	}

	//------------------------------------------------------------------------------------------------------------------

	@Override
	public void put(@Nonnull Mail mail) {
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			checkPutStateConditions();
			queue.addLast(mail);
			hasNewMail = true;
			// wake a reader blocked in take()
			notEmpty.signal();
		} finally {
			lock.unlock();
		}
	}

	@Override
	public void putFirst(@Nonnull Mail mail) {
		if (isMailboxThread()) {
			// on the mailbox thread the mail can go straight into the local batch, lock-free
			checkPutStateConditions();
			batch.addFirst(mail);
		} else {
			final ReentrantLock lock = this.lock;
			lock.lock();
			try {
				checkPutStateConditions();
				queue.addFirst(mail);
				hasNewMail = true;
				notEmpty.signal();
			} finally {
				lock.unlock();
			}
		}
	}

	//------------------------------------------------------------------------------------------------------------------

	/**
	 * Removes and returns the first mail in {@code queue} whose priority is at least
	 * {@code priority}, or null if none qualifies. Caller must hold the appropriate
	 * guard for the given deque (the lock for {@link #queue}, mailbox thread for {@link #batch}).
	 */
	@Nullable
	private Mail takeOrNull(Deque<Mail> queue, int priority) {
		if (queue.isEmpty()) {
			return null;
		}
		Iterator<Mail> iterator = queue.iterator();
		while (iterator.hasNext()) {
			Mail mail = iterator.next();
			if (mail.getPriority() >= priority) {
				iterator.remove();
				return mail;
			}
		}
		return null;
	}

	@Override
	public List<Mail> drain() {
		// batch first (mailbox-thread local), then the shared queue under the lock
		List<Mail> drainedMails = new ArrayList<>(batch);
		batch.clear();
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			drainedMails.addAll(queue);
			queue.clear();
			hasNewMail = false;
			return drainedMails;
		} finally {
			lock.unlock();
		}
	}

	private void checkIsMailboxThread() {
		if (!isMailboxThread()) {
			throw new IllegalStateException(
					"Illegal thread detected. This method must be called from inside the mailbox thread!");
		}
	}

	private void checkPutStateConditions() {
		if (state != OPEN) {
			throw new MailboxClosedException("Mailbox is in state " + state + ", but is required to be in state " +
					OPEN + " for put operations.");
		}
	}

	private void checkTakeStateConditions() {
		if (state == CLOSED) {
			throw new MailboxClosedException("Mailbox is in state " + state + ", but is required to be in state " +
					OPEN + " or " + QUIESCED + " for take operations.");
		}
	}

	@Override
	public void quiesce() {
		checkIsMailboxThread();
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			if (state == OPEN) {
				state = QUIESCED;
			}
		} finally {
			this.lock.unlock();
		}
	}

	@Nonnull
	@Override
	public List<Mail> close() {
		checkIsMailboxThread();
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			if (state == CLOSED) {
				return Collections.emptyList();
			}
			List<Mail> droppedMails = drain();
			state = CLOSED;
			// to unblock all
			notEmpty.signalAll();
			return droppedMails;
		} finally {
			lock.unlock();
		}
	}

	@Nonnull
	@Override
	public State getState() {
		// the mailbox thread can read state without the lock: only quiesce()/close(),
		// which run on this same thread, ever change it
		if (isMailboxThread()) {
			return state;
		}
		final ReentrantLock lock = this.lock;
		lock.lock();
		try {
			return state;
		} finally {
			lock.unlock();
		}
	}

	@Override
	public void runExclusively(Runnable runnable) {
		lock.lock();
		try {
			runnable.run();
		} finally {
			lock.unlock();
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.smpp;

import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TimeZone;

import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.support.DefaultExchange;
import org.jsmpp.bean.Address;
import org.jsmpp.bean.Alphabet;
import org.jsmpp.bean.DataCodings;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.NumberingPlanIndicator;
import org.jsmpp.bean.OptionalParameter;
import org.jsmpp.bean.OptionalParameter.Tag;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.ReplaceIfPresentFlag;
import org.jsmpp.bean.SMSCDeliveryReceipt;
import org.jsmpp.bean.SubmitMultiResult;
import org.jsmpp.bean.TypeOfNumber;
import org.jsmpp.bean.UnsuccessDelivery;
import org.jsmpp.session.SMPPSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code SmppSubmitMultiCommand}: verifies that message
 * headers on the Camel {@link Exchange} are translated into the expected
 * {@link SMPPSession#submitMultiple} arguments (addresses, delivery
 * receipt flags, data codings, schedule/validity times, optional TLV
 * parameters), using a Mockito-stubbed {@link SMPPSession}.
 *
 * <p>The session stub only returns a {@link SubmitMultiResult} when the
 * command invokes it with EXACTLY the stubbed argument list, so each
 * {@code eq(...)} position below is part of the assertion.
 */
public class SmppSubmitMultiCommandTest {

    // Saved so the GMT override applied for the whole class can be undone.
    private static TimeZone defaultTimeZone;

    // Mocked SMPP session the command under test talks to.
    private SMPPSession session;
    private SmppConfiguration config;
    private SmppSubmitMultiCommand command;

    /**
     * Force GMT for the duration of the class: the expected absolute
     * time strings (e.g. "-300101001831100+") are only stable in GMT.
     */
    @BeforeAll
    public static void setUpBeforeClass() {
        defaultTimeZone = TimeZone.getDefault();

        TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
    }

    /** Restore the JVM default time zone captured in setUpBeforeClass. */
    @AfterAll
    public static void tearDownAfterClass() {
        if (defaultTimeZone != null) {
            TimeZone.setDefault(defaultTimeZone);
        }
    }

    /** Fresh mock session, configuration and command for every test. */
    @BeforeEach
    public void setUp() {
        session = mock(SMPPSession.class);
        config = new SmppConfiguration();
        config.setServiceType("CMT");

        command = new SmppSubmitMultiCommand(session, config);
    }

    /**
     * With no addressing headers, the command falls back to the
     * configuration defaults (source "1616", destination "1717").
     * The stubbed result carries an UnsuccessDelivery, so the ERROR
     * header must be populated on the out message.
     */
    @Test
    public void executeWithConfigurationData() throws Exception {
        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ID, "1");
        exchange.getIn().setBody("short message body");
        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(new Address[] { new Address(TypeOfNumber.UNKNOWN, NumberingPlanIndicator.UNKNOWN, "1717") }),
                eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes())))
                .thenReturn(new SubmitMultiResult("1", new UnsuccessDelivery(
                        new Address(TypeOfNumber.UNKNOWN, NumberingPlanIndicator.UNKNOWN, "1717"), 0)));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
        assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
        assertNotNull(exchange.getMessage().getHeader(SmppConstants.ERROR));
    }

    /**
     * All addressing and submission headers set explicitly; Date-typed
     * schedule/validity headers must be rendered as SMPP absolute time
     * strings (GMT). A fully successful submit leaves ERROR null.
     */
    @Test
    public void execute() throws Exception {
        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ID, "1");
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818");
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR, Arrays.asList("1919"));
        exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111));
        exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, new Date(2222222));
        exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1);
        exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2);
        exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY, new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value());
        exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value());
        exchange.getIn().setBody("short message body");
        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"),
                eq(new Address[] { new Address(TypeOfNumber.INTERNATIONAL, NumberingPlanIndicator.INTERNET, "1919") }),
                eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("-300101003702200+"),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)), eq(ReplaceIfPresentFlag.REPLACE),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes())))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
        assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
        assertNull(exchange.getMessage().getHeader(SmppConstants.ERROR));
    }

    /**
     * A String validity period (relative format, here three days) must be
     * passed through to submitMultiple verbatim, not converted to a Date.
     */
    @Test
    public void executeWithValidityPeriodAsString() throws Exception {
        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ID, "1");
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818");
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR, Collections.singletonList("1919"));
        exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111));
        exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, "000003000000000R"); // three days
        exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1);
        exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2);
        exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY, new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value());
        exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value());
        exchange.getIn().setBody("short message body");
        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"),
                eq(new Address[] { new Address(TypeOfNumber.INTERNATIONAL, NumberingPlanIndicator.INTERNET, "1919") }),
                eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("000003000000000R"),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)), eq(ReplaceIfPresentFlag.REPLACE),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes())))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
        assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
        assertNull(exchange.getMessage().getHeader(SmppConstants.ERROR));
    }

    /**
     * SMSC-default data coding (0x00) is a text alphabet: bytes outside
     * the charset must be narrowed to '?' before submission.
     */
    @Test
    public void bodyWithSmscDefaultDataCodingNarrowedToCharset() throws Exception {
        final byte dataCoding = (byte) 0x00; /* SMSC-default */
        byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };
        byte[] bodyNarrowed = { '?', 'A', 'B', '\0', '?', (byte) 0x7F, 'C', '?' };

        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.DATA_CODING, dataCoding);
        exchange.getIn().setBody(body);
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance(dataCoding)), eq((byte) 0), eq(bodyNarrowed)))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    }

    /**
     * Latin1 data coding (0x03) is also a text alphabet: same narrowing
     * of out-of-charset bytes to '?' as the SMSC default.
     */
    @Test
    public void bodyWithLatin1DataCodingNarrowedToCharset() throws Exception {
        final byte dataCoding = (byte) 0x03; /* ISO-8859-1 (Latin1) */
        byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };
        byte[] bodyNarrowed = { '?', 'A', 'B', '\0', '?', (byte) 0x7F, 'C', '?' };

        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.DATA_CODING, dataCoding);
        exchange.getIn().setBody(body);
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance(dataCoding)), eq((byte) 0), eq(bodyNarrowed)))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    }

    /**
     * SMPP 8-bit data coding (0x04) is binary: the payload must be passed
     * through unmodified, no charset narrowing.
     */
    @Test
    public void bodyWithSMPP8bitDataCodingNotModified() throws Exception {
        final byte dataCoding = (byte) 0x04; /* SMPP 8-bit */
        byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };

        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.DATA_CODING, dataCoding);
        exchange.getIn().setBody(body);
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance(dataCoding)), eq((byte) 0), eq(body)))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    }

    /**
     * GSM 8-bit class-3 data coding (0xF7) is also binary: payload passes
     * through unmodified.
     */
    @Test
    public void bodyWithGSM8bitDataCodingNotModified() throws Exception {
        final byte dataCoding = (byte) 0xF7; /* GSM 8-bit class 3 */
        byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };

        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.DATA_CODING, dataCoding);
        exchange.getIn().setBody(body);
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance(dataCoding)), eq((byte) 0), eq(body)))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    }

    /**
     * When both ALPHABET and DATA_CODING headers are present, the binary
     * DATA_CODING (0x04) wins over the default alphabet: no narrowing.
     */
    @Test
    public void eightBitDataCodingOverridesDefaultAlphabet() throws Exception {
        final byte binDataCoding = (byte) 0x04; /* SMPP 8-bit */
        byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };

        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ALPHABET, Alphabet.ALPHA_DEFAULT.value());
        exchange.getIn().setHeader(SmppConstants.DATA_CODING, binDataCoding);
        exchange.getIn().setBody(body);
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance(binDataCoding)), eq((byte) 0), eq(body)))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    }

    /**
     * Converse of the previous test: a textual DATA_CODING (Latin1, 0x03)
     * wins over an 8-bit ALPHABET header, so narrowing IS applied.
     */
    @Test
    public void latin1DataCodingOverridesEightBitAlphabet() throws Exception {
        final byte latin1DataCoding = (byte) 0x03; /* ISO-8859-1 (Latin1) */
        byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };
        byte[] bodyNarrowed = { '?', 'A', 'B', '\0', '?', (byte) 0x7F, 'C', '?' };

        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ALPHABET, Alphabet.ALPHA_8_BIT.value());
        exchange.getIn().setHeader(SmppConstants.DATA_CODING, latin1DataCoding);
        exchange.getIn().setBody(body);
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass()), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance(latin1DataCoding)), eq((byte) 0), eq(bodyNarrowed)))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    }

    /**
     * Legacy optional-parameter style: a Map keyed by TLV NAME (String).
     * Each entry must be converted to the matching typed
     * {@link OptionalParameter} subclass and appended to the submit call.
     */
    @Test
    public void executeWithOptionalParameter() throws Exception {
        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ID, "1");
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818");
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR, Arrays.asList("1919"));
        exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111));
        exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, new Date(2222222));
        exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1);
        exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2);
        exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY, new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value());
        exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value());
        Map<String, String> optionalParameters = new LinkedHashMap<>();
        optionalParameters.put("SOURCE_SUBADDRESS", "1292");
        optionalParameters.put("ADDITIONAL_STATUS_INFO_TEXT", "urgent");
        optionalParameters.put("DEST_ADDR_SUBUNIT", "4");
        optionalParameters.put("DEST_TELEMATICS_ID", "2");
        optionalParameters.put("QOS_TIME_TO_LIVE", "3600000");
        optionalParameters.put("ALERT_ON_MESSAGE_DELIVERY", null);
        exchange.getIn().setHeader(SmppConstants.OPTIONAL_PARAMETERS, optionalParameters);
        exchange.getIn().setBody("short message body");
        // NOTE(review): the expectation maps the null-valued
        // ALERT_ON_MESSAGE_DELIVERY entry to a payload of "O" — presumably
        // an artifact of how the command encodes valueless TLVs; confirm
        // against SmppSubmitMultiCommand before relying on it.
        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"),
                eq(new Address[] { new Address(TypeOfNumber.INTERNATIONAL, NumberingPlanIndicator.INTERNET, "1919") }),
                eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("-300101003702200+"),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)), eq(ReplaceIfPresentFlag.REPLACE),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes()),
                eq(new OptionalParameter.Source_subaddress("1292".getBytes())),
                eq(new OptionalParameter.Additional_status_info_text("urgent".getBytes())),
                eq(new OptionalParameter.Dest_addr_subunit((byte) 4)),
                eq(new OptionalParameter.Dest_telematics_id((short) 2)),
                eq(new OptionalParameter.Qos_time_to_live(3600000)),
                eq(new OptionalParameter.Alert_on_message_delivery("O".getBytes()))))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
        assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
        assertNull(exchange.getMessage().getHeader(SmppConstants.ERROR));
    }

    /**
     * New optional-parameter style: a Map keyed by TLV TAG (Short),
     * covering both standard tags and vendor-specific ones (0x21xx).
     * Value type selects the OptionalParameter subclass (byte[] ->
     * OctetString, String -> COctetString, Byte/Short/Integer -> the
     * corresponding fixed-width type, null -> Null).
     */
    @Test
    public void executeWithOptionalParameterNewStyle() throws Exception {
        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.ID, "1");
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818");
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value());
        exchange.getIn().setHeader(SmppConstants.DEST_ADDR, Arrays.asList("1919"));
        exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111));
        exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, new Date(2222222));
        exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1);
        exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2);
        exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY, new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value());
        exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value());
        Map<Short, Object> optionalParameters = new LinkedHashMap<>();
        // standard optional parameter
        optionalParameters.put((short) 0x0202, "1292".getBytes("UTF-8"));
        optionalParameters.put((short) 0x001D, "urgent");
        optionalParameters.put((short) 0x0005, Byte.valueOf("4"));
        optionalParameters.put((short) 0x0008, (short) 2);
        optionalParameters.put((short) 0x0017, 3600000);
        optionalParameters.put((short) 0x130C, null);
        // vendor specific optional parameter
        optionalParameters.put((short) 0x2150, "0815".getBytes("UTF-8"));
        optionalParameters.put((short) 0x2151, "0816");
        optionalParameters.put((short) 0x2152, Byte.valueOf("6"));
        optionalParameters.put((short) 0x2153, (short) 9);
        optionalParameters.put((short) 0x2154, 7400000);
        optionalParameters.put((short) 0x2155, null);
        exchange.getIn().setHeader(SmppConstants.OPTIONAL_PARAMETER, optionalParameters);
        exchange.getIn().setBody("short message body");
        // NOTE(review): tag 0x2150 is populated with "0815" above but the
        // stub expects OctetString payload "1292" — verify whether this is
        // intentional (e.g. OctetString equality semantics) or a latent
        // copy/paste error in the expectation.
        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"),
                eq(new Address[] { new Address(TypeOfNumber.INTERNATIONAL, NumberingPlanIndicator.INTERNET, "1919") }),
                eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("-300101003702200+"),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)), eq(ReplaceIfPresentFlag.REPLACE),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes()),
                eq(new OptionalParameter.OctetString(Tag.SOURCE_SUBADDRESS, "1292")),
                eq(new OptionalParameter.COctetString(Tag.ADDITIONAL_STATUS_INFO_TEXT.code(), "urgent")),
                eq(new OptionalParameter.Byte(Tag.DEST_ADDR_SUBUNIT, (byte) 4)),
                eq(new OptionalParameter.Short(Tag.DEST_TELEMATICS_ID.code(), (short) 2)),
                eq(new OptionalParameter.Int(Tag.QOS_TIME_TO_LIVE, 3600000)),
                eq(new OptionalParameter.Null(Tag.ALERT_ON_MESSAGE_DELIVERY)),
                eq(new OptionalParameter.OctetString((short) 0x2150, "1292", "UTF-8")),
                eq(new OptionalParameter.COctetString((short) 0x2151, "0816")),
                eq(new OptionalParameter.Byte((short) 0x2152, (byte) 6)),
                eq(new OptionalParameter.Short((short) 0x2153, (short) 9)),
                eq(new OptionalParameter.Int((short) 0x2154, 7400000)),
                eq(new OptionalParameter.Null((short) 0x2155))))
                .thenReturn(new SubmitMultiResult("1"));

        command.execute(exchange);

        assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
        assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
        assertNull(exchange.getMessage().getHeader(SmppConstants.ERROR));
    }

    /**
     * A body longer than one SM forces splitting (ESMClass UDHI bit, 64).
     * With SINGLE_DLR=true only the LAST segment requests a delivery
     * receipt (SUCCESS_FAILURE); earlier segments use DEFAULT. Both
     * stubbed message IDs must be collected, in order.
     */
    @Test
    public void singleDlrRequestOverridesDeliveryReceiptFlag() throws Exception {
        String longSms = "123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890"
                + "12345678901234567890123456789012345678901234567890123456789012345678901";
        Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
        exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
        exchange.getIn().setHeader(SmppConstants.SINGLE_DLR, "true");
        exchange.getIn().setBody(longSms.getBytes());
        Address[] destAddrs = new Address[] { new Address(
                TypeOfNumber.UNKNOWN,
                NumberingPlanIndicator.UNKNOWN,
                "1717") };

        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass((byte) 64)), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.DEFAULT)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), any(byte[].class)))
                .thenReturn(new SubmitMultiResult("1"));
        when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.UNKNOWN), eq(NumberingPlanIndicator.UNKNOWN), eq("1616"),
                eq(destAddrs), eq(new ESMClass((byte) 64)), eq((byte) 0), eq((byte) 1), (String) isNull(), (String) isNull(),
                eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)), eq(ReplaceIfPresentFlag.DEFAULT),
                eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), any(byte[].class)))
                .thenReturn(new SubmitMultiResult("2"));

        command.execute(exchange);

        assertEquals(Arrays.asList("1", "2"), exchange.getMessage().getHeader(SmppConstants.ID));
        assertEquals(2, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
    }
}
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Stephen Connolly * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.slaves; import hudson.AbortException; import hudson.FilePath; import hudson.Functions; import hudson.Util; import hudson.console.ConsoleLogFilter; import hudson.model.Computer; import hudson.model.Executor; import hudson.model.ExecutorListener; import hudson.model.Node; import hudson.model.Queue; import hudson.model.Slave; import hudson.model.TaskListener; import hudson.model.User; import hudson.remoting.Channel; import hudson.remoting.ChannelBuilder; import hudson.remoting.ChannelClosedException; import hudson.remoting.CommandTransport; import hudson.remoting.Launcher; import hudson.remoting.VirtualChannel; import hudson.security.ACL; import hudson.slaves.OfflineCause.ChannelTermination; import hudson.util.Futures; import hudson.util.NullStream; import hudson.util.RingBufferLogHandler; import hudson.util.StreamTaskListener; import hudson.util.VersionNumber; import hudson.util.io.RewindableFileOutputStream; import hudson.util.io.RewindableRotatingFileOutputStream; import jenkins.model.Jenkins; import jenkins.security.ChannelConfigurator; import jenkins.security.MasterToSlaveCallable; import jenkins.slaves.EncryptedSlaveAgentJnlpFile; import jenkins.slaves.JnlpSlaveAgentProtocol; import jenkins.slaves.RemotingVersionInfo; import jenkins.slaves.systemInfo.SlaveSystemInfo; import jenkins.util.SystemProperties; import org.acegisecurity.context.SecurityContext; import org.acegisecurity.context.SecurityContextHolder; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.Beta; import org.kohsuke.accmod.restrictions.DoNotUse; import org.kohsuke.stapler.HttpRedirect; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.WebMethod; import org.kohsuke.stapler.export.Exported; import org.kohsuke.stapler.interceptor.RequirePOST; import javax.annotation.CheckForNull; import 
javax.annotation.Nonnull; import javax.annotation.OverridingMethodsMustInvokeSuper; import javax.servlet.ServletException; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintStream; import java.nio.charset.Charset; import java.security.Security; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.Future; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import static hudson.slaves.SlaveComputer.LogHolder.SLAVE_LOG_HANDLER; import org.jenkinsci.remoting.util.LoggingChannelListener; /** * {@link Computer} for {@link Slave}s. * * @author Kohsuke Kawaguchi */ public class SlaveComputer extends Computer { private volatile Channel channel; private volatile transient boolean acceptingTasks = true; private Charset defaultCharset; private Boolean isUnix; /** * Effective {@link ComputerLauncher} that hides the details of * how we launch a agent agent on this computer. * * <p> * This is normally the same as {@link Slave#getLauncher()} but * can be different. See {@link #grabLauncher(Node)}. */ private ComputerLauncher launcher; /** * Perpetually writable log file. */ private final RewindableFileOutputStream log; /** * {@link StreamTaskListener} that wraps {@link #log}, hence perpetually writable. */ private final TaskListener taskListener; /** * Number of failed attempts to reconnect to this node * (so that if we keep failing to reconnect, we can stop * trying.) */ private transient int numRetryAttempt; /** * Tracks the status of the last launch operation, which is always asynchronous. * This can be used to wait for the completion, or cancel the launch activity. 
*/ private volatile Future<?> lastConnectActivity = null; private Object constructed = new Object(); private transient volatile String absoluteRemoteFs; public SlaveComputer(Slave slave) { super(slave); this.log = new RewindableRotatingFileOutputStream(getLogFile(), 10); this.taskListener = new StreamTaskListener(decorate(this.log)); assert slave.getNumExecutors()!=0 : "Computer created with 0 executors"; } /** * Uses {@link ConsoleLogFilter} to decorate logger. */ private OutputStream decorate(OutputStream os) { for (ConsoleLogFilter f : ConsoleLogFilter.all()) { try { os = f.decorateLogger(this,os); } catch (IOException|InterruptedException e) { LOGGER.log(Level.WARNING, "Failed to filter log with "+f, e); } } return os; } /** * {@inheritDoc} */ @Override @OverridingMethodsMustInvokeSuper public boolean isAcceptingTasks() { // our boolean flag is an override on any additional programmatic reasons why this agent might not be // accepting tasks. return acceptingTasks && super.isAcceptingTasks(); } /** * @since 1.498 */ public String getJnlpMac() { return JnlpSlaveAgentProtocol.SLAVE_SECRET.mac(getName()); } /** * Allows suspension of tasks being accepted by the agent computer. While this could be called by a * {@linkplain hudson.slaves.ComputerLauncher} or a {@linkplain hudson.slaves.RetentionStrategy}, such usage * can result in fights between multiple actors calling setting differential values. A better approach * is to override {@link hudson.slaves.RetentionStrategy#isAcceptingTasks(hudson.model.Computer)} if the * {@link hudson.slaves.RetentionStrategy} needs to control availability. * * @param acceptingTasks {@code true} if the agent can accept tasks. 
*/ public void setAcceptingTasks(boolean acceptingTasks) { this.acceptingTasks = acceptingTasks; } @Override public Boolean isUnix() { return isUnix; } @CheckForNull @Override public Slave getNode() { Node node = super.getNode(); if (node == null || node instanceof Slave) { return (Slave)node; } else { logger.log(Level.WARNING, "found an unexpected kind of node {0} from {1} with nodeName={2}", new Object[] {node, this, nodeName}); return null; } } /** * Return the {@code TaskListener} for this SlaveComputer. Never null * @since 2.9 */ public TaskListener getListener() { return taskListener; } @Override public String getIcon() { Future<?> l = lastConnectActivity; if(l!=null && !l.isDone()) return "computer-flash.gif"; return super.getIcon(); } /** * @deprecated since 2008-05-20. */ @Deprecated @Override public boolean isJnlpAgent() { return launcher instanceof JNLPLauncher; } @Override public boolean isLaunchSupported() { return launcher.isLaunchSupported(); } /** * Return the {@code ComputerLauncher} for this SlaveComputer. * @since 1.312 */ public ComputerLauncher getLauncher() { return launcher; } /** * Return the {@code ComputerLauncher} for this SlaveComputer, strips off * any {@code DelegatingComputerLauncher}s or {@code ComputerLauncherFilter}s. 
* Follows the delegation chain of {@link DelegatingComputerLauncher} and
* {@link ComputerLauncherFilter} to find the innermost launcher.
* @since 2.83
*/
public ComputerLauncher getDelegatedLauncher() {
    ComputerLauncher l = launcher;
    while (true) {
        if (l instanceof DelegatingComputerLauncher) {
            l = ((DelegatingComputerLauncher) l).getLauncher();
        } else if (l instanceof ComputerLauncherFilter) {
            l = ((ComputerLauncherFilter) l).getCore();
        } else {
            break;
        }
    }
    return l;
}

/**
 * Launches (or re-launches) this agent asynchronously via {@link #launcher}.
 * Returns immediately with a {@link Future} tracking the launch attempt;
 * an already-established channel short-circuits to a precomputed future.
 */
protected Future<?> _connect(boolean forceReconnect) {
    if(channel!=null)   return Futures.precomputed(null);
    if(!forceReconnect && isConnecting())
        return lastConnectActivity;
    if(forceReconnect && isConnecting())
        logger.fine("Forcing a reconnect on "+getName());

    closeChannel();
    return lastConnectActivity = Computer.threadPoolForRemoting.submit(new java.util.concurrent.Callable<Object>() {
        public Object call() throws Exception {
            // do this on another thread so that the lengthy launch operation
            // (which is typical) won't block UI thread.

            ACL.impersonate(ACL.SYSTEM);    // background activity should run like a super user

            try {
                log.rewind();
                try {
                    for (ComputerListener cl : ComputerListener.all())
                        cl.preLaunch(SlaveComputer.this, taskListener);
                    offlineCause = null;
                    launcher.launch(SlaveComputer.this, taskListener);
                } catch (AbortException e) {
                    // AbortException carries a user-presentable message; no stack trace needed.
                    taskListener.error(e.getMessage());
                    throw e;
                } catch (IOException e) {
                    Util.displayIOException(e,taskListener);
                    Functions.printStackTrace(e, taskListener.error(Messages.ComputerLauncher_unexpectedError()));
                    throw e;
                } catch (InterruptedException e) {
                    Functions.printStackTrace(e, taskListener.error(Messages.ComputerLauncher_abortedLaunch()));
                    throw e;
                } catch (Exception e) {
                    Functions.printStackTrace(e, taskListener.error(Messages.ComputerLauncher_unexpectedError()));
                    throw e;
                }
            } finally {
                // If the launcher returned without establishing a channel and without
                // recording a cause, record a generic launch failure and notify listeners.
                if (channel==null && offlineCause == null) {
                    offlineCause = new OfflineCause.LaunchFailed();
                    for (ComputerListener cl : ComputerListener.all())
                        cl.onLaunchFailure(SlaveComputer.this, taskListener);
                }
            }

            if (channel==null)
                throw new IOException("Agent failed to connect, even though the launcher didn't report it. See the log output for details.");
            return null;
        }
    });
}

/**
 * {@inheritDoc}
 */
@Override
public void taskAccepted(Executor executor, Queue.Task task) {
    super.taskAccepted(executor, task);
    if (launcher instanceof ExecutorListener) {
        ((ExecutorListener)launcher).taskAccepted(executor, task);
    }

    //getNode() can return null at indeterminate times when nodes go offline
    Slave node = getNode();
    if (node != null && node.getRetentionStrategy() instanceof ExecutorListener) {
        ((ExecutorListener)node.getRetentionStrategy()).taskAccepted(executor, task);
    }
}

/**
 * {@inheritDoc}
 */
@Override
public void taskCompleted(Executor executor, Queue.Task task, long durationMS) {
    super.taskCompleted(executor, task, durationMS);
    if (launcher instanceof ExecutorListener) {
        ((ExecutorListener)launcher).taskCompleted(executor, task, durationMS);
    }
    RetentionStrategy r = getRetentionStrategy();
    if (r instanceof ExecutorListener) {
        ((ExecutorListener) r).taskCompleted(executor, task, durationMS);
    }
}

/**
 * {@inheritDoc}
 */
@Override
public void taskCompletedWithProblems(Executor executor, Queue.Task task, long durationMS, Throwable problems) {
    super.taskCompletedWithProblems(executor, task, durationMS, problems);
    if (launcher instanceof ExecutorListener) {
        ((ExecutorListener)launcher).taskCompletedWithProblems(executor, task, durationMS, problems);
    }
    RetentionStrategy r = getRetentionStrategy();
    if (r instanceof ExecutorListener) {
        ((ExecutorListener) r).taskCompletedWithProblems(executor, task, durationMS, problems);
    }
}

@Override
public boolean isConnecting() {
    // Snapshot the field once; it can be swapped by a concurrent _connect().
    Future<?> l = lastConnectActivity;
    return isOffline() && l!=null && !l.isDone();
}

/**
 * Rewinds the launch log and returns it as a writable stream.
 * Falls back to a {@link NullStream} if the log cannot be reset.
 */
public OutputStream openLogFile() {
    try {
        log.rewind();
        return log;
    } catch (IOException e) {
        logger.log(Level.SEVERE, "Failed to create log file "+getLogFile(),e);
        return new NullStream();
    }
}

// Guards the channel handoff in setChannel()/closeChannel(); see comments there.
private final Object channelLock = new Object();

/**
 * Creates a {@link Channel} from the given stream and sets that to this agent.
 *
 * Same as {@link #setChannel(InputStream, OutputStream, OutputStream, Channel.Listener)}, but for
 * {@link TaskListener}.
 */
public void setChannel(@Nonnull InputStream in, @Nonnull OutputStream out, @Nonnull TaskListener taskListener, @CheckForNull Channel.Listener listener) throws IOException, InterruptedException {
    setChannel(in,out,taskListener.getLogger(),listener);
}

/**
 * Creates a {@link Channel} from the given stream and sets that to this agent.
 *
 * @param in
 *      Stream connected to the remote agent. It's the caller's responsibility to do
 *      buffering on this stream, if that's necessary.
 * @param out
 *      Stream connected to the remote peer. It's the caller's responsibility to do
 *      buffering on this stream, if that's necessary.
 * @param launchLog
 *      If non-null, receive the portion of data in {@code is} before
 *      the data goes into the "binary mode". This is useful
 *      when the established communication channel might include some data that might
 *      be useful for debugging/trouble-shooting.
 * @param listener
 *      Gets a notification when the channel closes, to perform clean up. Can be null.
 *      By the time this method is called, the cause of the termination is reported to the user,
 *      so the implementation of the listener doesn't need to do that again.
 */
public void setChannel(@Nonnull InputStream in, @Nonnull OutputStream out, @CheckForNull OutputStream launchLog, @CheckForNull Channel.Listener listener) throws IOException, InterruptedException {
    ChannelBuilder cb = new ChannelBuilder(nodeName,threadPoolForRemoting)
        .withMode(Channel.Mode.NEGOTIATE)
        .withHeaderStream(launchLog);

    // Give extensions a chance to customize the channel before it is built.
    for (ChannelConfigurator cc : ChannelConfigurator.all()) {
        cc.onChannelBuilding(cb,this);
    }

    Channel channel = cb.build(in,out);
    setChannel(channel,launchLog,listener);
}

/**
 * Creates a {@link Channel} from the given Channel Builder and Command Transport.
 * This method can be used to allow {@link ComputerLauncher}s to create channels not based on I/O streams.
 *
 * @param cb
 *      Channel Builder.
 *      To print launch logs this channel builder should have a Header Stream defined
 *      (see {@link ChannelBuilder#getHeaderStream()}) in this argument or by one of {@link ChannelConfigurator}s.
 * @param commandTransport
 *      Command Transport
 * @param listener
 *      Gets a notification when the channel closes, to perform clean up. Can be {@code null}.
 *      By the time this method is called, the cause of the termination is reported to the user,
 *      so the implementation of the listener doesn't need to do that again.
 * @since 2.127
 */
@Restricted(Beta.class)
public void setChannel(@Nonnull ChannelBuilder cb, @Nonnull CommandTransport commandTransport, @CheckForNull Channel.Listener listener) throws IOException, InterruptedException {
    for (ChannelConfigurator cc : ChannelConfigurator.all()) {
        cc.onChannelBuilding(cb,this);
    }

    OutputStream headerStream = cb.getHeaderStream();
    if (headerStream == null) {
        LOGGER.log(Level.WARNING, "No header stream defined when setting channel for computer {0}. " +
                "Launch log won't be printed", this);
    }
    Channel channel = cb.build(commandTransport);
    setChannel(channel, headerStream, listener);
}

/**
 * Shows {@link Channel#classLoadingCount}.
 * @since 1.495
 */
public int getClassLoadingCount() throws IOException, InterruptedException {
    return channel.call(new LoadingCount(false));
}

/**
 * Shows {@link Channel#classLoadingPrefetchCacheCount}.
 * @return -1 in case that capability is not supported
 * @since 1.519
 */
public int getClassLoadingPrefetchCacheCount() throws IOException, InterruptedException {
    if (!channel.remoteCapability.supportsPrefetch()) {
        return -1;
    }
    return channel.call(new LoadingPrefetchCacheCount());
}

/**
 * Shows {@link Channel#resourceLoadingCount}.
 * @since 1.495
 */
public int getResourceLoadingCount() throws IOException, InterruptedException {
    return channel.call(new LoadingCount(true));
}

/**
 * Shows {@link Channel#classLoadingTime}.
 * @since 1.495
 */
public long getClassLoadingTime() throws IOException, InterruptedException {
    return channel.call(new LoadingTime(false));
}

/**
 * Shows {@link Channel#resourceLoadingTime}.
 * @since 1.495
 */
public long getResourceLoadingTime() throws IOException, InterruptedException {
    return channel.call(new LoadingTime(true));
}

/**
 * Returns the remote FS root absolute path or {@code null} if the agent is off-line. The absolute path may change
 * between connections if the connection method does not provide a consistent working directory and the node's
 * remote FS is specified as a relative path.
 *
 * @return the remote FS root absolute path or {@code null} if the agent is off-line.
 * @since 1.606
 */
@CheckForNull
public String getAbsoluteRemoteFs() {
    return channel == null ? null : absoluteRemoteFs;
}

/**
 * Just for restFul api.
 * Returns the remote FS root absolute path or {@code null} if the agent is off-line. The absolute path may change
 * between connections if the connection method does not provide a consistent working directory and the node's
 * remote FS is specified as a relative path.
 * @see #getAbsoluteRemoteFs()
 * @return the remote FS root absolute path or {@code null} if the agent is off-line or don't have connect permission.
 * @since 2.125
 */
@Exported
@Restricted(DoNotUse.class)
@CheckForNull
public String getAbsoluteRemotePath() {
    if(hasPermission(CONNECT)) {
        return getAbsoluteRemoteFs();
    } else {
        return null;
    }
}

// Remote callable reading the channel's class/resource loading counter on the agent side.
static class LoadingCount extends MasterToSlaveCallable<Integer,RuntimeException> {
    private final boolean resource;
    LoadingCount(boolean resource) {
        this.resource = resource;
    }
    @Override
    public Integer call() {
        Channel c = Channel.current();
        if (c == null) {
            // Not running inside a channel request-handling thread.
            return -1;
        }
        return resource ? c.resourceLoadingCount.get() : c.classLoadingCount.get();
    }
}

// Remote callable reading the prefetch cache counter on the agent side.
static class LoadingPrefetchCacheCount extends MasterToSlaveCallable<Integer,RuntimeException> {
    @Override
    public Integer call() {
        return Channel.current().classLoadingPrefetchCacheCount.get();
    }
}

// Remote callable reading the cumulative class/resource loading time on the agent side.
static class LoadingTime extends MasterToSlaveCallable<Long,RuntimeException> {
    private final boolean resource;
    LoadingTime(boolean resource) {
        this.resource = resource;
    }
    @Override
    public Long call() {
        Channel c = Channel.current();
        if (c == null) {
            return Long.valueOf(-1);
        }
        return resource ? c.resourceLoadingTime.get() : c.classLoadingTime.get();
    }
}

/**
 * Sets up the connection through an existing channel.
 * @param channel the channel to use; <strong>warning:</strong> callers are expected to have called {@link ChannelConfigurator} already.
 * @param launchLog Launch log. If not {@code null}, will receive launch log messages
 * @param listener Channel event listener to be attached (if not {@code null})
 * @since 1.444
 */
public void setChannel(@Nonnull Channel channel, @CheckForNull OutputStream launchLog, @CheckForNull Channel.Listener listener) throws IOException, InterruptedException {
    if(this.channel!=null)
        throw new IllegalStateException("Already connected");

    final TaskListener taskListener = launchLog != null ? new StreamTaskListener(launchLog) : TaskListener.NULL;
    PrintStream log = taskListener.getLogger();

    channel.setProperty(SlaveComputer.class, this);

    channel.addListener(new LoggingChannelListener(logger, Level.FINEST) {
        @Override
        public void onClosed(Channel c, IOException cause) {
            // Orderly shutdown will have null exception
            if (cause!=null) {
                offlineCause = new ChannelTermination(cause);
                Functions.printStackTrace(cause, taskListener.error("Connection terminated"));
            } else {
                taskListener.getLogger().println("Connection terminated");
            }
            closeChannel();
            try {
                launcher.afterDisconnect(SlaveComputer.this, taskListener);
            } catch (Throwable t) {
                LogRecord lr = new LogRecord(Level.SEVERE,
                        "Launcher {0}'s afterDisconnect method propagated an exception when {1}'s connection was closed: {2}");
                lr.setThrown(t);
                lr.setParameters(new Object[]{launcher, SlaveComputer.this.getName(), t.getMessage()});
                logger.log(lr);
            }
        }
    });
    if(listener!=null)
        channel.addListener(listener);

    String slaveVersion = channel.call(new SlaveVersion());
    log.println("Remoting version: " + slaveVersion);
    VersionNumber agentVersion = new VersionNumber(slaveVersion);
    if (agentVersion.isOlderThan(RemotingVersionInfo.getMinimumSupportedVersion())) {
        log.println(String.format("WARNING: Remoting version is older than a minimum required one (%s). " +
                "Connection will not be rejected, but the compatibility is NOT guaranteed",
                RemotingVersionInfo.getMinimumSupportedVersion()));
    }

    boolean _isUnix = channel.call(new DetectOS());
    log.println(_isUnix? hudson.model.Messages.Slave_UnixSlave():hudson.model.Messages.Slave_WindowsSlave());

    String defaultCharsetName = channel.call(new DetectDefaultCharset());

    Slave node = getNode();
    if (node == null) { // Node has been disabled/removed during the connection
        throw new IOException("Node "+nodeName+" has been deleted during the channel setup");
    }

    String remoteFS = node.getRemoteFS();
    if (Util.isRelativePath(remoteFS)) {
        remoteFS = channel.call(new AbsolutePath(remoteFS));
        log.println("NOTE: Relative remote path resolved to: "+remoteFS);
    }
    if(_isUnix && !remoteFS.contains("/") && remoteFS.contains("\\"))
        log.println("WARNING: "+remoteFS
                +" looks suspiciously like Windows path. Maybe you meant "+remoteFS.replace('\\','/')+"?");
    FilePath root = new FilePath(channel,remoteFS);

    // reference counting problem is known to happen, such as JENKINS-9017, and so as a preventive measure
    // we pin the base classloader so that it'll never get GCed. When this classloader gets released,
    // it'll have a catastrophic impact on the communication.
    channel.pinClassLoader(getClass().getClassLoader());

    channel.call(new SlaveInitializer(DEFAULT_RING_BUFFER_SIZE));
    SecurityContext old = ACL.impersonate(ACL.SYSTEM);
    try {
        for (ComputerListener cl : ComputerListener.all()) {
            cl.preOnline(this,channel,root,taskListener);
        }
    } finally {
        SecurityContextHolder.setContext(old);
    }

    offlineCause = null;

    // update the data structure atomically to prevent others from seeing a channel that's not properly initialized yet
    synchronized(channelLock) {
        if(this.channel!=null) {
            // check again. we used to have this entire method in a big synchronization block,
            // but Channel constructor blocks for an external process to do the connection
            // if CommandLauncher is used, and that cannot be interrupted because it blocks at InputStream.
            // so if the process hangs, it hangs the thread in a lock, and since Hudson will try to relaunch,
            // we'll end up queuing the lot of threads in a pseudo deadlock.
            // This implementation prevents that by avoiding a lock. HUDSON-1705 is likely a manifestation of this.
            channel.close();
            throw new IllegalStateException("Already connected");
        }
        isUnix = _isUnix;
        numRetryAttempt = 0;
        this.channel = channel;
        this.absoluteRemoteFs = remoteFS;
        defaultCharset = Charset.forName(defaultCharsetName);

        synchronized (statusChangeLock) {
            statusChangeLock.notifyAll();
        }
    }
    old = ACL.impersonate(ACL.SYSTEM);
    try {
        for (ComputerListener cl : ComputerListener.all()) {
            cl.onOnline(this,taskListener);
        }
    } finally {
        SecurityContextHolder.setContext(old);
    }
    log.println("Agent successfully connected and online");
    Jenkins.getInstance().getQueue().scheduleMaintenance();
}

@Override
public Channel getChannel() {
    return channel;
}

public Charset getDefaultCharset() {
    return defaultCharset;
}

/**
 * Fetches the recent log records captured on the agent, or an empty list when disconnected.
 */
public List<LogRecord> getLogRecords() throws IOException, InterruptedException {
    if(channel==null)
        return Collections.emptyList();
    else
        return channel.call(new SlaveLogFetcher());
}

/**
 * HTTP endpoint that disconnects this agent with an optional user-supplied message.
 */
@RequirePOST
public HttpResponse doDoDisconnect(@QueryParameter String offlineMessage) throws IOException, ServletException {
    if (channel!=null) {
        //does nothing in case computer is already disconnected
        checkPermission(DISCONNECT);
        offlineMessage = Util.fixEmptyAndTrim(offlineMessage);
        disconnect(new OfflineCause.UserCause(User.current(), offlineMessage));
    }
    return new HttpRedirect(".");
}

@Override
public Future<?> disconnect(OfflineCause cause) {
    super.disconnect(cause);
    return Computer.threadPoolForRemoting.submit(new Runnable() {
        public void run() {
            // do this on another thread so that any lengthy disconnect operation
            // (which could be typical) won't block UI thread.
            launcher.beforeDisconnect(SlaveComputer.this, taskListener);
            closeChannel();
            launcher.afterDisconnect(SlaveComputer.this, taskListener);
        }
    });
}

/**
 * HTTP endpoint that (re)launches the agent, or shows a notice if it is already connected.
 */
@RequirePOST
public void doLaunchSlaveAgent(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
    checkPermission(CONNECT);

    if(channel!=null) {
        req.getView(this,"already-launched.jelly").forward(req, rsp);
        return;
    }

    connect(true);

    // TODO: would be nice to redirect the user to "launching..." wait page,
    // then spend a few seconds there and poll for the completion periodically.
    rsp.sendRedirect("log");
}

/**
 * Attempts a reconnect, backing off after the first few tries.
 */
public void tryReconnect() {
    numRetryAttempt++;
    if(numRetryAttempt<6 || (numRetryAttempt%12)==0) {
        // initially retry several times quickly, and after that, do it infrequently.
        logger.info("Attempting to reconnect "+nodeName);
        connect(true);
    }
}

/**
 * Serves jar files for JNLP agents.
 *
 * @deprecated since 2008-08-18.
 *      This URL binding is no longer used and moved up directly under to {@link jenkins.model.Jenkins},
 *      but it's left here for now just in case some old JNLP agents request it.
 */
@Deprecated
public Slave.JnlpJar getJnlpJars(String fileName) {
    return new Slave.JnlpJar(fileName);
}

@WebMethod(name="slave-agent.jnlp")
public HttpResponse doSlaveAgentJnlp(StaplerRequest req, StaplerResponse res) throws IOException, ServletException {
    return new EncryptedSlaveAgentJnlpFile(this, "slave-agent.jnlp.jelly", getName(), CONNECT);
}

@Override
protected void kill() {
    super.kill();
    closeChannel();

    try {
        log.close();
    } catch (IOException x) {
        LOGGER.log(Level.WARNING, "Failed to close agent log", x);
    }

    try {
        Util.deleteRecursive(getLogDir());
    } catch (IOException ex) {
        logger.log(Level.WARNING, "Unable to delete agent logs", ex);
    }
}

public RetentionStrategy getRetentionStrategy() {
    Slave n = getNode();
    return n==null ? RetentionStrategy.INSTANCE : n.getRetentionStrategy();
}

/**
 * If still connected, disconnect.
 */
private void closeChannel() {
    // TODO: race condition between this and the setChannel method.
    Channel c;
    synchronized (channelLock) {
        c = channel;
        channel = null;
        absoluteRemoteFs = null;
        isUnix = null;
    }
    if (c != null) {
        try {
            c.close();
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Failed to terminate channel to " + getDisplayName(), e);
        }
        for (ComputerListener cl : ComputerListener.all())
            cl.onOffline(this, offlineCause);
    }
}

@Override
protected void setNode(final Node node) {
    super.setNode(node);
    launcher = grabLauncher(node);

    // maybe the configuration was changed to relaunch the agent, so try to re-launch now.
    // "constructed==null" test is an ugly hack to avoid launching before the object is fully
    // constructed.
    if(constructed!=null) {
        if (node instanceof Slave) {
            Queue.withLock(new Runnable() {
                @Override
                public void run() {
                    ((Slave)node).getRetentionStrategy().check(SlaveComputer.this);
                }
            });
        } else {
            connect(false);
        }
    }
}

/**
 * Grabs a {@link ComputerLauncher} out of {@link Node} to keep it in this {@link Computer}.
 * The returned launcher will be set to {@link #launcher} and used to carry out the actual launch operation.
 *
 * <p>
 * Subtypes that needs to decorate {@link ComputerLauncher} can do so by overriding this method.
 * This is useful for {@link SlaveComputer}s for clouds for example, where one normally needs
 * additional pre-launch step (such as waiting for the provisioned node to become available)
 * before the user specified launch step (like SSH connection) kicks in.
 *
 * @see ComputerLauncherFilter
 */
protected ComputerLauncher grabLauncher(Node node) {
    return ((Slave)node).getLauncher();
}

/**
 * Get the agent version
 */
public String getSlaveVersion() throws IOException, InterruptedException {
    return channel.call(new SlaveVersion());
}

/**
 * Get the OS description.
 */
public String getOSDescription() throws IOException, InterruptedException {
    return channel.call(new DetectOS()) ? "Unix" : "Windows";
}

private static final Logger logger = Logger.getLogger(SlaveComputer.class.getName());

// Remote callable reporting the remoting version running on the agent.
private static final class SlaveVersion extends MasterToSlaveCallable<String,IOException> {
    public String call() throws IOException {
        try { return Launcher.VERSION; }
        catch (Throwable ex) { return "< 1.335"; } // Older slave.jar won't have VERSION
    }
}

// Remote callable: true when the agent's path separator indicates a Unix-like OS.
private static final class DetectOS extends MasterToSlaveCallable<Boolean,IOException> {
    public Boolean call() throws IOException {
        return File.pathSeparatorChar==':';
    }
}

// Remote callable resolving a relative path against the agent's working directory.
private static final class AbsolutePath extends MasterToSlaveCallable<String,IOException> {

    private static final long serialVersionUID = 1L;

    private final String relativePath;

    private AbsolutePath(String relativePath) {
        this.relativePath = relativePath;
    }

    public String call() throws IOException {
        return new File(relativePath).getAbsolutePath();
    }
}

// Remote callable reporting the agent JVM's default charset name.
private static final class DetectDefaultCharset extends MasterToSlaveCallable<String,IOException> {
    public String call() throws IOException {
        return Charset.defaultCharset().name();
    }
}

/**
 * Puts the {@link #SLAVE_LOG_HANDLER} into a separate class so that loading this class
 * in JVM doesn't end up loading tons of additional classes.
 */
static final class LogHolder {
    /**
     * This field is used on each agent to record logs on the agent.
     */
    static RingBufferLogHandler SLAVE_LOG_HANDLER;
}

// Runs on the agent right after the channel is established: installs the log handler
// and marks this side of the channel as the agent ("slave") side.
private static class SlaveInitializer extends MasterToSlaveCallable<Void,RuntimeException> {
    final int ringBufferSize;

    public SlaveInitializer(int ringBufferSize) {
        this.ringBufferSize = ringBufferSize;
    }

    public Void call() {
        SLAVE_LOG_HANDLER = new RingBufferLogHandler(ringBufferSize);

        // avoid double installation of the handler. JNLP slaves can reconnect to the master multiple times
        // and each connection gets a different RemoteClassLoader, so we need to evict them by class name,
        // not by their identity.
        for (Handler h : LOGGER.getHandlers()) {
            if (h.getClass().getName().equals(SLAVE_LOG_HANDLER.getClass().getName()))
                LOGGER.removeHandler(h);
        }
        LOGGER.addHandler(SLAVE_LOG_HANDLER);

        // remove Sun PKCS11 provider if present. See http://wiki.jenkins-ci.org/display/JENKINS/Solaris+Issue+6276483
        try {
            Security.removeProvider("SunPKCS11-Solaris");
        } catch (SecurityException e) {
            // ignore this error.
        }

        try {
            getChannelOrFail().setProperty("slave",Boolean.TRUE); // indicate that this side of the channel is the slave side.
        } catch (ChannelClosedException e) {
            throw new IllegalStateException(e);
        }

        return null;
    }
    private static final long serialVersionUID = 1L;
    private static final Logger LOGGER = Logger.getLogger("");
}

/**
 * Obtains a {@link VirtualChannel} that allows some computation to be performed on the master.
 * This method can be called from any thread on the master, or from agent (more precisely,
 * it only works from the remoting request-handling thread in agents, which means if you've started
 * separate thread on agents, that'll fail.)
 *
 * @return null if the calling thread doesn't have any trace of where its master is.
 * @since 1.362
 */
public static VirtualChannel getChannelToMaster() {
    if (Jenkins.getInstanceOrNull()!=null) // check if calling thread is on master or on slave
        return FilePath.localChannel;

    // if this method is called from within the agent computation thread, this should work
    Channel c = Channel.current();
    if (c!=null && Boolean.TRUE.equals(c.getProperty("slave")))
        return c;

    return null;
}

/**
 * Helper method for Jelly.
 */
public static List<SlaveSystemInfo> getSystemInfoExtensions() {
    return SlaveSystemInfo.all();
}

// Remote callable copying the agent-side log ring buffer back to the master.
private static class SlaveLogFetcher extends MasterToSlaveCallable<List<LogRecord>,RuntimeException> {
    public List<LogRecord> call() {
        return new ArrayList<LogRecord>(SLAVE_LOG_HANDLER.getView());
    }
}

// use RingBufferLogHandler class name to configure for backward compatibility
private static final int DEFAULT_RING_BUFFER_SIZE = SystemProperties.getInteger(RingBufferLogHandler.class.getName() + ".defaultSize", 256);

private static final Logger LOGGER = Logger.getLogger(SlaveComputer.class.getName());
}
/** * MIT License * * Copyright (c) 2017 zgqq * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package mah.ui.input;

import mah.action.AbstractAction;
import mah.action.ActionEvent;
import mah.command.CommandManager;
import mah.mode.AbstractMode;
import mah.mode.Mode;
import mah.mode.ModeManager;
import mah.ui.UiManager;
import mah.ui.util.ClipboardUtils;
import mah.ui.window.WindowMode;

/**
 * Mode providing emacs-style editing actions (char/word/line movement,
 * kill/yank-style deletion, undo/redo and query-text helpers) for the input box.
 *
 * Created by zgq on 2017-01-09 10:33
 */
public class InputMode extends AbstractMode {

    public static final String NAME = "input_mode";

    public InputMode(Mode parent) {
        super(NAME, parent);
    }

    // Action names used both for registration and external lookup.
    private static final String FORWARD_CHAR = "ForwardChar";
    private static final String BACKWARD_CHAR = "BackwardChar";
    private static final String DELETE_CHAR = "DeleteChar";
    private static final String FORWARD_WORD = "ForwardWord";
    private static final String BACKWARD_WORD = "BackwardWord";
    private static final String KILL_WORD = "KillWord";
    private static final String BEGINNING_OF_LINE = "BeginningOfLine";
    private static final String END_OF_LINE = "EndOfLine";
    private static final String KILL_LINE = "KillLine";

    /**
     * Registers every editing action this mode offers. Registration order is
     * kept stable deliberately.
     */
    @Override
    public void init() {
        registerAction(new ForwardChar(FORWARD_CHAR));
        registerAction(new BackwardChar(BACKWARD_CHAR));
        registerAction(new DeleteChar(DELETE_CHAR));
        registerAction(new ForwardWord(FORWARD_WORD));
        registerAction(new BackwardWord(BACKWARD_WORD));
        registerAction(new KillWord(KILL_WORD));
        registerAction(new BackwardKillWord("BackwardKillWord"));
        registerAction(new BackwardDeleteChar("BackwardDeleteChar"));
        registerAction(new BeginningOfLine(BEGINNING_OF_LINE));
        registerAction(new EndOfLine(END_OF_LINE));
        registerAction(new KillWholeLine("KillWholeLine"));
        registerAction(new KillLine(KILL_LINE));
        registerAction(new Undo("Undo"));
        registerAction(new Redo("Redo"));
        registerAction(new ClearQueryText("ClearQueryText"));
        registerAction(new CopyQueryText("CopyQueryText"));
    }

    /**
     * Registers this mode (if needed) and makes it the active one.
     */
    public static InputMode triggerMode() {
        InputMode mode = getAndRegisterMode();
        ModeManager.getInstance().triggerMode(mode);
        return mode;
    }

    /**
     * Looks up the registered instance of this mode, registering a fresh one
     * (parented under the window mode) on first use.
     */
    public static InputMode getAndRegisterMode() {
        ModeManager manager = ModeManager.getInstance();
        return (InputMode) manager.getOrRegisterMode(new InputMode(WindowMode.getOrRegisterMode()));
    }

    /**
     * Base class for all input actions: extracts the {@link Input} event source
     * and defers the actual work to the UI thread.
     */
    abstract static class InputAction extends AbstractAction {

        public InputAction(String name) {
            super(name, Input.class);
        }

        @Override
        public void actionPerformed(ActionEvent actionEvent) {
            final Input input = (Input) actionEvent.getSource();
            UiManager.getInstance().runLater(new Runnable() {
                @Override
                public void run() {
                    actionPerformed(input);
                }
            });
        }

        /** Performs the concrete edit; always invoked on the UI thread. */
        protected abstract void actionPerformed(Input source);
    }

    /** Deletes the entire current line. */
    static class KillWholeLine extends InputAction {
        public KillWholeLine(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.killWholeLine();
        }
    }

    /** Deletes from the caret to the end of the line. */
    static class KillLine extends InputAction {
        public KillLine(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.killLine();
        }
    }

    /** Moves the caret to the start of the line. */
    static class BeginningOfLine extends InputAction {
        public BeginningOfLine(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.beginningOfLine();
        }
    }

    /** Moves the caret to the end of the line. */
    static class EndOfLine extends InputAction {
        public EndOfLine(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.endOfLine();
        }
    }

    /** Deletes the word before the caret. */
    static class BackwardKillWord extends InputAction {
        public BackwardKillWord(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.backwardKillWord();
        }
    }

    /** Deletes the word after the caret. */
    static class KillWord extends InputAction {
        public KillWord(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.killWord();
        }
    }

    /** Moves the caret one word forward. */
    static class ForwardWord extends InputAction {
        public ForwardWord(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.forwardWord();
        }
    }

    /** Moves the caret one word backward. */
    static class BackwardWord extends InputAction {
        public BackwardWord(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.backwardWord();
        }
    }

    /** Deletes the character after the caret. */
    static class DeleteChar extends InputAction {
        public DeleteChar(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.deleteChar();
        }
    }

    /** Moves the caret one character forward. */
    static class ForwardChar extends InputAction {
        public ForwardChar(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.forwardChar();
        }
    }

    /** Moves the caret one character backward. */
    static class BackwardChar extends InputAction {
        public BackwardChar(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.backwardChar();
        }
    }

    /** Deletes the character before the caret. */
    static class BackwardDeleteChar extends InputAction {
        public BackwardDeleteChar(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.backwardDeleteChar();
        }
    }

    /** Undoes the last edit. */
    static class Undo extends InputAction {
        public Undo(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.undo();
        }
    }

    /** Redoes the last undone edit. */
    static class Redo extends InputAction {
        public Redo(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            input.redo();
        }
    }

    /**
     * Resets the input box to just the current trigger key (trimmed, plus a
     * trailing space) and places the caret at the end. No-op when there is no
     * active trigger key.
     */
    static class ClearQueryText extends InputAction {
        public ClearQueryText(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            String triggerKey = CommandManager.getInstance().getCurrentTriggerKey();
            if (triggerKey == null) {
                return;
            }
            input.setText(triggerKey.trim() + " ");
            input.setCaretPosition(input.getText().length());
        }
    }

    /**
     * Copies the query portion (text after the trigger key) to the clipboard.
     * No-op unless the trigger key is present and ends with a space.
     */
    static class CopyQueryText extends InputAction {
        public CopyQueryText(String name) {
            super(name);
        }

        @Override
        protected void actionPerformed(Input input) {
            String triggerKey = CommandManager.getInstance().getCurrentTriggerKey();
            if (triggerKey == null || !triggerKey.endsWith(" ")) {
                return;
            }
            String query = input.getText().substring(triggerKey.length(), input.getText().length());
            ClipboardUtils.copy(query);
        }
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
package codetoanalyze.java.infer;

import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.security.DigestOutputStream;
import java.util.zip.CheckedOutputStream;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.GZIPOutputStream;
import java.util.zip.InflaterOutputStream;
import javax.crypto.CipherOutputStream;

/**
 * Static-analysis fixtures for the Infer resource-leak checker, one pair of
 * methods per {@link FilterOutputStream} subclass.
 *
 * <p>The code shape here is intentional and must not be "cleaned up"
 * (e.g. rewritten with try-with-resources): each {@code ...Bad} method closes
 * the stream only inside the {@code try} block, so the stream leaks when
 * {@code write} throws; each {@code ...Ok} method closes it in a
 * {@code finally} block and is expected to produce no report.
 */
public class FilterOutputStreamLeaks {

  // FilterOutputStream tests
  public void filterOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      FilterOutputStream fos = new FilterOutputStream(fis);
      fos.write(arr);
      fos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void filterOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    FilterOutputStream fos = null;
    try {
      fis = new FileOutputStream("file.txt");
      fos = new FilterOutputStream(fis);
      fos.write(arr);
    } catch (IOException e) {
    } finally {
      if (fos != null) fos.close();
    }
  }

  // DataOutputStream tests
  public void dataOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      DataOutputStream dos = new DataOutputStream(fis);
      dos.write(arr);
      dos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void dataOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    DataOutputStream dos = null;
    try {
      fis = new FileOutputStream("file.txt");
      dos = new DataOutputStream(fis);
      dos.write(arr);
    } catch (IOException e) {
    } finally {
      if (dos != null) dos.close();
    }
  }

  // BufferedOutputStream tests
  public void bufferedOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis = null;
    try {
      fis = new FileOutputStream("file.txt");
      BufferedOutputStream bos = new BufferedOutputStream(fis);
      bos.write(arr);
      bos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void bufferedOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    BufferedOutputStream bos = null;
    try {
      fis = new FileOutputStream("file.txt");
      bos = new BufferedOutputStream(fis);
      bos.write(arr);
    } catch (IOException e) {
    } finally {
      if (bos != null) bos.close();
    }
  }

  // CheckedOutputStream tests
  // (null checksum argument is fine for the analyzer; these are never executed)
  public void checkedOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      CheckedOutputStream chos = new CheckedOutputStream(fis, null);
      chos.write(arr);
      chos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void checkedOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    CheckedOutputStream chos = null;
    try {
      fis = new FileOutputStream("file.txt");
      chos = new CheckedOutputStream(fis, null);
      chos.write(arr);
    } catch (IOException e) {
    } finally {
      if (chos != null) chos.close();
    }
  }

  // CipherOutputStream tests
  public void cipherOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      CipherOutputStream cos = new CipherOutputStream(fis, null);
      cos.write(arr);
      cos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void cipherOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    CipherOutputStream cos = null;
    try {
      fis = new FileOutputStream("file.txt");
      cos = new CipherOutputStream(fis, null);
      cos.write(arr);
    } catch (IOException e) {
    } finally {
      if (cos != null) cos.close();
    }
  }

  // DeflaterOutputStream tests
  public void deflaterOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      DeflaterOutputStream dos = new DeflaterOutputStream(fis, null);
      dos.write(arr);
      dos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void deflaterOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    DeflaterOutputStream dos = null;
    try {
      fis = new FileOutputStream("file.txt");
      dos = new DeflaterOutputStream(fis, null);
      dos.write(arr);
    } catch (IOException e) {
    } finally {
      if (dos != null) dos.close();
    }
  }

  // DigestOutputStream tests
  public void digestOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      DigestOutputStream dos = new DigestOutputStream(fis, null);
      dos.write(arr);
      dos.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void digestOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    DigestOutputStream dos = null;
    try {
      fis = new FileOutputStream("file.txt");
      dos = new DigestOutputStream(fis, null);
      dos.write(arr);
    } catch (IOException e) {
    } finally {
      if (dos != null) dos.close();
    }
  }

  // InflaterOutputStream tests
  public void inflaterOutputStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      InflaterOutputStream ios = new InflaterOutputStream(fis, null);
      ios.write(arr);
      ios.close(); // skipped if write() throws -> leak
    } catch (IOException e) {
    }
  }

  public void inflaterOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    InflaterOutputStream ios = null;
    try {
      fis = new FileOutputStream("file.txt");
      ios = new InflaterOutputStream(fis, null);
      ios.write(arr);
    } catch (IOException e) {
    } finally {
      if (ios != null) ios.close();
    }
  }

  // GZipOutputStream tests
  public void gzipOutputStreamNotClosedAfterFlushBad() {
    FileOutputStream fos;
    try {
      fos = new FileOutputStream("file.txt");
      GZIPOutputStream gzipOutputStream = new GZIPOutputStream(fos);
      gzipOutputStream.flush();
      gzipOutputStream.close(); // skipped if flush() throws -> leak
    } catch (IOException e) {
    }
  }

  public void gzipOutputStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fos = null;
    GZIPOutputStream gzipOutputStream = null;
    try {
      fos = new FileOutputStream("file.txt");
      gzipOutputStream = new GZIPOutputStream(fos);
      gzipOutputStream.write(arr);
    } catch (IOException e) {
    } finally {
      // Close the wrapper when it was constructed; otherwise close the raw stream
      // (GZIPOutputStream's constructor can throw after fos was opened).
      if (gzipOutputStream != null) gzipOutputStream.close();
      else if (fos != null) fos.close();
    }
  }

  // PrintStream tests
  // NOTE(review): despite the section name and the "printStream..." method names,
  // these two methods actually exercise InflaterOutputStream, not PrintStream —
  // looks like a copy/paste slip; confirm against the expected-issues list before changing.
  public void printStreamNotClosedAfterWriteBad() {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    try {
      fis = new FileOutputStream("file.txt");
      InflaterOutputStream printer = new InflaterOutputStream(fis, null);
      printer.write(arr);
    } catch (IOException e) {
    }
  }

  public void printStreamClosedAfterWriteOk() throws IOException {
    byte[] arr = {1, 2, 3};
    FileOutputStream fis;
    InflaterOutputStream printer = null;
    try {
      fis = new FileOutputStream("file.txt");
      printer = new InflaterOutputStream(fis, null);
      printer.write(arr);
    } catch (IOException e) {
    } finally {
      if (printer != null) printer.close();
    }
  }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.monitoring.exporter.http;

import java.util.Collections;
import java.util.Map;

import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.ResponseListener;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
import org.mockito.ArgumentCaptor;

import java.io.IOException;
import java.util.function.Supplier;

import static org.elasticsearch.xpack.monitoring.exporter.http.AsyncHttpResourceHelper.whenPerformRequestAsyncWith;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;

/**
 * Tests {@link PublishableHttpResource}.
 */
public class PublishableHttpResourceTests extends AbstractPublishableHttpResourceTestCase {

    private final String ownerType = "ownerthing";
    private final String resourceBasePath = "/_fake";
    private final String resourceName = ".my_thing";
    private final String resourceType = "thingamajig";
    private final Logger logger = mock(Logger.class);
    private final HttpEntity entity = mock(HttpEntity.class);
    private final Supplier<HttpEntity> body = () -> entity;

    // Resource under test; owner/masterTimeout/client/listener come from the base class.
    private final PublishableHttpResource resource =
            new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS);

    public void testCheckForResourceExists() throws IOException {
        assertCheckForResource(successfulCheckStatus(), true, "{} [{}] found on the [{}] {}");
    }

    public void testCheckForResourceDoesNotExist() throws IOException {
        assertCheckForResource(notFoundCheckStatus(), false, "{} [{}] does not exist on the [{}] {}");
    }

    // A status that is neither "exists" nor "not found" is an error: the listener
    // gets null and the failure is logged.
    public void testCheckForResourceUnexpectedResponse() throws IOException {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final Response response = response("GET", endpoint, failedStatus);
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);

        assertCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, null, response);

        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

        verifyNoMoreInteractions(client, logger);
    }

    public void testVersionCheckForResourceExists() {
        assertVersionCheckForResource(successfulCheckStatus(), true, randomInt(), "{} [{}] found on the [{}] {}");
    }

    public void testVersionCheckForResourceDoesNotExist() {
        if (randomBoolean()) {
            // it literally does not exist
            assertVersionCheckForResource(notFoundCheckStatus(), false, randomInt(), "{} [{}] does not exist on the [{}] {}");
        } else {
            // it DOES exist, but the version needs to be replaced
            assertVersionCheckForResource(successfulCheckStatus(), false, randomInt(), "{} [{}] found on the [{}] {}");
        }
    }

    public void testVersionCheckForResourceUnexpectedResponse() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final Response response = response("GET", endpoint, failedStatus);
        final XContent xContent = mock(XContent.class);
        final int minimumVersion = randomInt();
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);

        resource.versionCheckForResource(client, listener, logger, resourceBasePath, resourceName, resourceType, owner, ownerType,
                                         xContent, minimumVersion);

        verifyListener(null);

        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

        verifyNoMoreInteractions(client, logger);
    }

    // A 200 response whose body cannot be parsed for a version (entityForResource
    // with a null "expected") surfaces as an error: listener gets null, two errors logged.
    public void testVersionCheckForResourceMalformedResponse() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus okStatus = successfulCheckStatus();
        final int minimumVersion = randomInt();
        final HttpEntity entity = entityForResource(null, resourceName, minimumVersion);
        final Response response = response("GET", endpoint, okStatus, entity);
        final XContent xContent = mock(XContent.class);
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);

        resource.versionCheckForResource(client, listener, logger, resourceBasePath, resourceName, resourceType, owner, ownerType,
                                         xContent, minimumVersion);

        verifyListener(null);

        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(logger).debug("{} [{}] found on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger, times(2)).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

        verifyNoMoreInteractions(client, logger);
    }

    public void testCheckForResourceErrors() throws IOException {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final ResponseException responseException = responseException("GET", endpoint, failedStatus);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
        // only a ResponseException carries a Response to inspect
        final Response response = e == responseException ? responseException.getResponse() : null;
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, e);

        assertCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, null, response);

        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

        verifyNoMoreInteractions(client, logger);
    }

    public void testPutResourceTrue() {
        assertPutResource(successfulPublishStatus(), true);
    }

    public void testPutResourceFalse() {
        assertPutResource(failedPublishStatus(), false);
    }

    public void testPutResourceFalseWithException() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"));
        final Request request = new Request("PUT", endpoint);
        addParameters(request, resource.getDefaultParameters());
        request.setEntity(entity);
        whenPerformRequestAsyncWith(client, request, e);

        final Map<String, String> parameters = Collections.emptyMap();
        resource.putResource(client, listener, logger, resourceBasePath, resourceName, parameters, body, resourceType, owner, ownerType);

        verifyListener(null);

        verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

        verifyNoMoreInteractions(client, logger);
    }

    public void testDeleteResourceTrue() {
        // deleting something that is already gone (404) also counts as success
        final RestStatus status = randomFrom(successfulCheckStatus(), notFoundCheckStatus());
        assertDeleteResource(status, true);
    }

    public void testDeleteResourceFalse() {
        assertDeleteResource(failedCheckStatus(), false);
    }

    public void testDeleteResourceErrors() {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final RestStatus failedStatus = failedCheckStatus();
        final ResponseException responseException = responseException("DELETE", endpoint, failedStatus);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
        final Map<String, String> deleteParameters = deleteParameters(resource.getDefaultParameters());
        final Request request = new Request("DELETE", endpoint);
        addParameters(request, deleteParameters);
        whenPerformRequestAsyncWith(client, request, e);

        resource.deleteResource(client, listener, logger, resourceBasePath, resourceName, resourceType, owner, ownerType);

        verifyListener(null);

        verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

        verifyNoMoreInteractions(client, logger);
    }

    public void testParameters() {
        assertParameters(resource);
    }

    public void testDoCheckAndPublishIgnoresPublishWhenCheckErrors() {
        // check result "null" (error) must short-circuit publishing
        final PublishableHttpResource resource =
                new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS, null, true);

        resource.doCheckAndPublish(client, listener);

        verifyListener(null);
    }

    public void testDoCheckAndPublish() {
        // not an error (the third state)
        final boolean exists = randomBoolean();
        final boolean publish = randomBoolean();

        final PublishableHttpResource resource =
                new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS, exists, publish);

        resource.doCheckAndPublish(client, listener);

        verifyListener(exists || publish);
    }

    public void testShouldReplaceResourceRethrowsIOException() throws IOException {
        final Response response = mock(Response.class);
        final HttpEntity entity = mock(HttpEntity.class);
        final XContent xContent = mock(XContent.class);

        when(response.getEntity()).thenReturn(entity);
        when(entity.getContent()).thenThrow(new IOException("TEST - expected"));

        expectThrows(IOException.class, () -> resource.shouldReplaceResource(response, xContent, resourceName, randomInt()));
    }

    public void testShouldReplaceResourceThrowsExceptionForMalformedResponse() {
        final Response response = mock(Response.class);
        final HttpEntity entity = entityForResource(null, resourceName, randomInt());
        final XContent xContent = XContentType.JSON.xContent();

        when(response.getEntity()).thenReturn(entity);

        expectThrows(RuntimeException.class, () -> resource.shouldReplaceResource(response, xContent, resourceName, randomInt()));
    }

    public void testShouldReplaceResourceReturnsTrueVersionIsNotExpected() throws IOException {
        final int minimumVersion = randomInt();
        final Response response = mock(Response.class);
        final HttpEntity entity = entityForResource(false, resourceName, minimumVersion);
        final XContent xContent = XContentType.JSON.xContent();

        when(response.getEntity()).thenReturn(entity);

        assertThat(resource.shouldReplaceResource(response, xContent, resourceName, minimumVersion), is(true));
    }

    public void testShouldReplaceResourceChecksVersion() throws IOException {
        final int minimumVersion = randomInt();
        final int version = randomInt();
        final boolean shouldReplace = version < minimumVersion;

        final Response response = mock(Response.class);
        // { "resourceName": { "version": randomLong } }
        final HttpEntity entity =
                new StringEntity("{\"" + resourceName + "\":{\"version\":" + version + "}}", ContentType.APPLICATION_JSON);
        final XContent xContent = XContentType.JSON.xContent();

        when(response.getEntity()).thenReturn(entity);

        assertThat(resource.shouldReplaceResource(response, xContent, resourceName, minimumVersion), is(shouldReplace));
    }

    /**
     * Drives checkForResource for a given HTTP status and verifies the shared logging /
     * mock-interaction pattern. {@code expected} uses three states: TRUE (exists),
     * FALSE (does not exist), null (error).
     */
    @SuppressLoggerChecks(reason = "mock logger used")
    private void assertCheckForResource(final RestStatus status, final Boolean expected, final String debugLogMessage)
            throws IOException {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("GET", endpoint, status);
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);

        assertCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, expected, response);

        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));

        if (expected != null) {
            verify(response).getStatusLine();
        } else {
            // error path additionally inspects the response for the log message
            verify(response).getStatusLine();
            verify(response).getRequestLine();
            verify(response).getHost();
            verify(response).getEntity();
        }

        verify(logger).debug(debugLogMessage, resourceType, resourceName, owner, ownerType);

        verifyNoMoreInteractions(client, response, logger);
    }

    /**
     * Drives versionCheckForResource and verifies the interaction pattern per outcome
     * (exists with acceptable version, needs replacing, does not exist, error).
     */
    @SuppressLoggerChecks(reason = "mock logger used")
    private void assertVersionCheckForResource(final RestStatus status, final Boolean expected, final int minimumVersion,
                                               final String debugLogMessage) {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final boolean shouldReplace = status == RestStatus.OK && expected == Boolean.FALSE;
        final HttpEntity entity = status == RestStatus.OK ? entityForResource(expected, resourceName, minimumVersion) : null;
        final Response response = response("GET", endpoint, status, entity);
        final XContent xContent = XContentType.JSON.xContent();
        final Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getDefaultParameters()));
        whenPerformRequestAsyncWith(client, request, response);

        resource.versionCheckForResource(client, listener, logger, resourceBasePath, resourceName, resourceType, owner, ownerType,
                                         xContent, minimumVersion);

        verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));

        if (shouldReplace || expected) {
            verify(response).getStatusLine();
            verify(response).getEntity();
        } else if (expected == false) {
            verify(response).getStatusLine();
        } else { // expected == null
            verify(response).getStatusLine();
            verify(response).getRequestLine();
            verify(response).getHost();
            verify(response).getEntity();
        }

        verifyListener(expected);
        verify(logger).debug(debugLogMessage, resourceType, resourceName, owner, ownerType);

        verifyNoMoreInteractions(client, response, logger);
    }

    /**
     * Drives putResource and verifies that success logs a debug line while failure
     * propagates a RuntimeException naming the endpoint and status to the logger.
     */
    private void assertPutResource(final RestStatus status, final boolean errorFree) {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("PUT", endpoint, status);
        final Request request = new Request("PUT", endpoint);
        addParameters(request, resource.getDefaultParameters());
        request.setEntity(entity);
        whenPerformRequestAsyncWith(client, request, response);

        final Map<String, String> parameters = Collections.emptyMap();
        resource.putResource(client, listener, logger, resourceBasePath, resourceName, parameters, body, resourceType, owner, ownerType);

        verifyListener(errorFree ? true : null);

        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(response).getStatusLine();

        verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);

        if (errorFree) {
            verify(logger).debug("{} [{}] uploaded to the [{}] {}", resourceType, resourceName, owner, ownerType);
        } else {
            ArgumentCaptor<RuntimeException> e = ArgumentCaptor.forClass(RuntimeException.class);

            verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), e.capture());

            assertThat(e.getValue().getMessage(),
                       is("[" + resourceBasePath + "/" + resourceName + "] responded with [" + status.getStatus() + "]"));
        }

        verifyNoMoreInteractions(client, response, logger, entity);
    }

    /**
     * Calls checkForResource with mocked response checkers and verifies that exactly
     * the checker matching {@code expected} was consulted (neither on error/null).
     */
    @SuppressWarnings("unchecked")
    private void assertCheckForResource(final RestClient client, final Logger logger,
                                        final String resourceBasePath, final String resourceName, final String resourceType,
                                        final Boolean expected, final Response response)
            throws IOException {
        final CheckedFunction<Response, Boolean, IOException> responseChecker = mock(CheckedFunction.class);
        final CheckedFunction<Response, Boolean, IOException> dneResponseChecker = mock(CheckedFunction.class);

        if (expected != null) {
            // invert expected to keep the same value
            when(responseChecker.apply(response)).thenReturn(false == expected);
            when(dneResponseChecker.apply(response)).thenReturn(false == expected);
        }

        resource.checkForResource(client, listener, logger, resourceBasePath, resourceName, resourceType, owner, ownerType,
                                  PublishableHttpResource.GET_EXISTS, PublishableHttpResource.GET_DOES_NOT_EXIST,
                                  responseChecker, dneResponseChecker);

        if (expected == Boolean.TRUE) {
            verify(responseChecker).apply(response);
            verifyZeroInteractions(dneResponseChecker);
        } else if (expected == Boolean.FALSE) {
            verifyZeroInteractions(responseChecker);
            verify(dneResponseChecker).apply(response);
        } else {
            verifyZeroInteractions(responseChecker, dneResponseChecker);
        }

        verifyListener(expected);
    }

    /**
     * Drives deleteResource and verifies the success (debug log, listener true) versus
     * failure (error log with endpoint/status message, listener null) interaction patterns.
     */
    private void assertDeleteResource(final RestStatus status, final boolean expected) {
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("DELETE", endpoint, status);
        final Map<String, String> deleteParameters = deleteParameters(resource.getDefaultParameters());
        final Request request = new Request("DELETE", endpoint);
        addParameters(request, deleteParameters);
        whenPerformRequestAsyncWith(client, request, response);

        resource.deleteResource(client, listener, logger, resourceBasePath, resourceName, resourceType, owner, ownerType);

        verify(client).performRequestAsync(eq(request), any(ResponseListener.class));
        verify(response).getStatusLine();

        verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);

        if (expected) {
            verify(logger).debug("{} [{}] deleted from the [{}] {}", resourceType, resourceName, owner, ownerType);
            verifyListener(true);
        } else {
            ArgumentCaptor<RuntimeException> e = ArgumentCaptor.forClass(RuntimeException.class);

            verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), e.capture());

            assertThat(e.getValue().getMessage(),
                       is("[" + resourceBasePath + "/" + resourceName + "] responded with [" + status.getStatus() + "]"));
            verifyListener(null);
        }

        verifyNoMoreInteractions(client, response, logger, entity);
    }
}
/* * (c) Copyright 2000, 2001, 2002, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * ModelCon.java * * Created on 7 December 2001, 11:00 */ package com.hp.hpl.jena.rdf.model; import java.util.Calendar; import com.hp.hpl.jena.datatypes.RDFDatatype; import com.hp.hpl.jena.graph.Node; /** Convenience methods which extend the {@link Model} interface. * <P>The {@link Model} interface provides a set of primitive operations on * an RDF model. 
This interface extends those methods with a * set of convenience methods.</P> * <p>This interface provides methods supporting typed literals. This means * that methods are provided which will translate a built in type, or an * object to an RDF Literal. This translation is done by invoking the * <CODE>toString()</CODE> method of the object, or its built in equivalent. * The reverse translation is also supported. This is built in for built * in types. Factory objects, provided by the application, are used * for application objects.</p> * <p>This interface provides methods for supporting enhanced resources. An * enhanced resource is a resource to which the application has added * behaviour. RDF containers are examples of enhanced resources built in * to this package. Enhanced resources are supported by encapsulating a * resource created by an implementation in another class which adds * the extra behaviour. Factory objects are used to construct such * enhanced resources.</p> * @author bwm * @version Release='$Name: $' Revision='$Revision: 1.2 $' Date='$Date: 2009/09/28 10:45:11 $' */ public interface ModelCon { /** Return a Resource instance in this model. * * <p>Subsequent operations on the returned object may modify this model.</p> * <p>The resource is assumed to already exist in the model. If it does not, * <CODE>createResource</CODE> should be used instead.</p> * @return a resource instance created by the factory provided * @param uri the URI of the resource * @param f the factory object */ @Deprecated Resource getResource(String uri, ResourceF f) ; /** Return a Property instance in this model. * * <p>Subsequent operations on the returned property may modify this model.</p> * <p>The property is assumed to already exist in the model. If it does not, * <CODE>createProperty</CODE> should be used instead.</p> * @return a property object * @param uri the URI of the property */ Property getProperty(String uri) ; /** Return a Bag instance in this model. 
* * <p>Subsequent operations on the returned bag may modify this model.</p> * <p>The bag is assumed to already exist in the model. If it does not, * <CODE>createBag</CODE> should be used instead.</p> * @return a bag instance * @param uri the URI of the bag. */ Bag getBag(String uri) ; /** Return a bag instance based on a given resource. * * <p> This method enables an application to treat any resource as a bag. * It is in effect an unsafe downcast.</p> * * <p>Subsequent operations on the returned bag may modify this model.</p> * <p>The bag is assumed to already exist in the model. If it does not, * <CODE>createBag</CODE> should be used instead.</p> * @return a bag instance * @param r an untyped Resource instance */ Bag getBag(Resource r) ; /** Return an Alt instance in this model. * * <p>Subsequent operations on the returned object may modify this model.</p> * <p>The alt is assumed to already exist in the model. If it does not, * <CODE>createAlt</CODE> should be used instead.</p> * @return an alt instance * @param uri the URI of the alt */ Alt getAlt(String uri) ; /** Return an Alt instance based on a given resource. * * <p> This method enables an application to treat any resource as an Alt. * It is in effect an unsafe downcast.</p> * * <p>Subsequent operations on the returned Alt may modify this model.</p> * <p>The bag is assumed to already exist in the model. If it does not, * <CODE>createAlt</CODE> should be used instead.</p> * @return an Alt instance * @param r an untyped Resource instance */ Alt getAlt(Resource r) ; /** Return a Seq instance in this model. * * <p>Subsequent operations on the returned bag may modify this model.</p> * <p>The seq is assumed to already exist in the model. If it does not, * <CODE>createSeq</CODE> should be used instead.</p> * @return a seq instance * @param uri the URI of the seq */ Seq getSeq(String uri) ; /** Return a Seq instance based on a given resource. * * <p> This method enables an application to treat any resource as a Seq. 
* It is in effect an unsafe downcast.</p> * * <p>Subsequent operations on the returned Seq may modify this model.</p> * <p>The Seq is assumed to already exist in the model. If it does not, * <CODE>createSeq</CODE> should be used instead.</p> * @return a Seq instance * @param r an untyped Resource instance */ Seq getSeq(Resource r) ; /** Create a new anonymous resource with a given type. * * <p> Subsequent operations on the returned resource may modify this model. * </p> * <p> The resource is created and an rdf:type property added to the model * to specify its type. </p> * @param type the type of the resource to be created. * @return a new anonymous resource linked to this model. */ public Resource createResource(Resource type) ; /** Create or find an RDFNode (a {@link Resource} or a {@link Literal}) from a graph Node. This is provided for users and developers operating at the API/SPI interface, where Resources are constructed from Nodes. Providing this method allows each Model the opportunity to cache node-to-resource maps if it requires. @param n the graph.Node on which to base the Model.RDFNode @return a suitable RDFNode */ public RDFNode getRDFNode( Node n ); /** Create a new resource with a given type. * * <p> Subsequent operations on the returned resource may modify this model. * </p> * <p> The resource is created and an rdf:type property added to the model * to specify its type. </p> * @param type the type of the resource to be created. * @return a new resource linked to this model. * @param uri The URI of the new resource. */ public Resource createResource(String uri, Resource type); /** Create a new anonymous resource using the supplied factory. * * <p> Subsequent operations on the returned resource may modify this model. * </p> * @return a new anonymous resource linked to this model. * @param f A factory object to create the returned object. . 
*/ @Deprecated public Resource createResource(ResourceF f) ; /** Create a new resource using the supplied factory. * * <p> Subsequent operations on the returned resource may modify this model. * </p> * @return a new resource linked to this model. * @param uri the URI of the resource * @param f A factory to create the returned object. . */ @Deprecated public Resource createResource(String uri, ResourceF f) ; /** Create a property. * * <p> Subsequent operations on the returned property may modify this model. * </p> * @param uri the URI of the property * @return a property instance */ public Property createProperty(String uri) ; /** create a literal from a String value. * * @param v the value of the literal * @return a new literal representing the value v */ public Literal createLiteral( String v ); /** create a type literal from a boolean value. * * <p> The value is converted to a string using its <CODE>toString</CODE> * method. </p> * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(boolean v) ; /** create a typed literal from an integer value. * * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(int v) ; /** create a typed literal from an integer value. * * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(long v) ; /** * Create a typed literal of type xsd:dateTime from a Calendar object. */ public Literal createTypedLiteral(Calendar d); /** create a typed literal from a char value. * * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(char v) ; /** create a typed literal from a float value. * * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(float v) ; /** create a typed literal from a double value. 
* * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(double v) ; /** create a typed literal from a String value. * * @param v the value of the literal * @return a new literal representing the value v */ public Literal createTypedLiteral(String v) ; /** create a literal from an Object. * * @return a new literal representing the value v * @param v the value of the literal. */ public Literal createTypedLiteral(Object v) ; /** * Build a typed literal from its lexical form. The * lexical form will be parsed now and the value stored. If * the form is not legal this will throw an exception. * <p> * Note that in preview releases of Jena2 it was also possible to specify * a language type. Changes to the RDF specification mean that this is no longer * legal except for plain literals. To create a plain literal with a language tag * use {@link Model#createLiteral(String, String) createLiteral}. * </p> * * @param lex the lexical form of the literal * @param typeURI the uri of the type of the literal, null for old style "plain" literals * @throws DatatypeFormatException if lex is not a legal form of dtype */ public Literal createTypedLiteral(String lex, String typeURI) ; /** * Build a typed literal from its value form. * <p> * Note that in preview releases of Jena2 it was also possible to specify * a language type. Changes to the RDF specification mean that this is no longer * legal except for plain literals. To create a plain literal with a language tag * use {@link Model#createLiteral(String, String) createLiteral}. * </p> * * @param value the value of the literal * @param typeURI the URI of the type of the literal, null for old style "plain" literals */ public Literal createTypedLiteral(Object value, String typeURI); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. 
*/ public Statement createLiteralStatement( Resource s, Property p, boolean o ); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. */ public Statement createLiteralStatement( Resource s, Property p, float o ); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. */ public Statement createLiteralStatement( Resource s, Property p, double o ); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. */ public Statement createLiteralStatement( Resource s, Property p, long o ); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. */ public Statement createLiteralStatement( Resource s, Property p, int o ); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. */ public Statement createLiteralStatement( Resource s, Property p, char o ); /** Answer a new Statement object (s, p, o') where o' is the typed literal corresponding to o using createTypedLiteral. */ public Statement createLiteralStatement( Resource s, Property p, Object o ); /** Create a Statement instance. * * <p>Subsequent operations on the statement or any of its parts may * modify this model.</p> * <p>Creating a statement does not add it to the set of statements in the * model. </p> * <p>The Object o will be converted to a Literal.</P> * @param s the subject of the statement * @param p the predicate of the statement * @param o is the value to be the object of the statement * @return the new statement */ public Statement createStatement(Resource s, Property p, String o) ; /** Create a Statement instance. * * <p>Subsequent operations on the statement or any of its parts may * modify this model.</p> * <p>Creating a statement does not add it to the set of statements in the * model. 
</p> * <p>The Object o will be converted to a Literal.</P> * @param s the subject of the statement * @param p the predicate of the statement * @param o is the value to be the object of the statement * @param l the language associated with the object * @return the new statement */ public Statement createStatement(Resource s, Property p, String o, String l) ; /** Create a Statement instance. * * <p>Subsequent operations on the statement or any of its parts may * modify this model.</p> * <p>Creating a statement does not add it to the set of statements in the * model. </p> * <p>The Object o will be converted to a Literal.</P> * @param s the subject of the statement * @param p the predicate of the statement * @param o is the value to be the object of the statement * @param wellFormed true if the string is well formed XML * @return the new statement */ public Statement createStatement(Resource s, Property p, String o, boolean wellFormed) ; /** Create a Statement instance. * * <p>Subsequent operations on the statement or any of its parts may * modify this model.</p> * <p>Creating a statement does not add it to the set of statements in the * model. </p> * <p>The Object o will be converted to a Literal.</P> * @param s the subject of the statement * @param p the predicate of the statement * @param o is the value to be the object of the statement * @param l the language associated with the object * @param wellFormed true of the string is well formed XML * @return the new statement */ public Statement createStatement(Resource s, Property p, String o, String l, boolean wellFormed) ; /** Create a new anonymous bag. * * <p>Subsequent operations on the bag or any of its parts may * modify this model.</p> * <p>A statement defining the type of the new bag is added to this model. * </p> * @return a new anonymous bag. */ public Bag createBag() ; /** Create a new bag. 
* * <p>Subsequent operations on the bag or any of its parts may * modify this model.</p> * <p>A statement defining the type of the new bag is added to this model. * </p> * @param uri The URI of the new Bag. * @return a new bag. */ public Bag createBag(String uri) ; /** Create a new anonymous alt. * * <p>Subsequent operations on the alt or any of its parts may * modify this model.</p> * <p>A statement defining the type of the new alt is added to this model. * </p> * @return a new anonymous alt. */ public Alt createAlt() ; /** Create a new alt. * * <p>Subsequent operations on the alt or any of its parts may * modify this model.</p> * <p>A statement defining the type of the new alt is added to this model. * </p> * @param uri The URI of the new alt. * @return a new alt. */ public Alt createAlt(String uri) ; /** Create a new anonymous seq. * * <p>Subsequent operations on the seq or any of its parts may * modify this model.</p> * <p>A statement defining the type of the new seq is added to this model. * </p> * @return a new anonymous seq. */ public Seq createSeq() ; /** Create a new seq. * * <p>Subsequent operations on the seq or any of its parts may * modify this model.</p> * <p>A statement defining the type of the new seq is added to this model. * </p> * @param uri The URI of the new seq. * @return a new seq. */ public Seq createSeq(String uri) ; /** add a statement to this model. * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param o the object of the statement to add */ Model add(Resource s, Property p, RDFNode o) ; /** Add the statement (s, p, createTypedLiteral( o )) to this model and answer this model. */ Model addLiteral( Resource s, Property p, boolean o ); /** Add the statement (s, p, createTypedLiteral( o )) to this model and answer this model. */ Model addLiteral( Resource s, Property p, long o ); /** Add the statement (s, p, createTypedLiteral( o )) to this model and answer this model. 
*/ Model addLiteral( Resource s, Property p, int o ); /** Add the statement (s, p, createTypedLiteral( o )) to this model and answer this model. */ Model addLiteral( Resource s, Property p, char o ) ; /** Add the statement (s, p, o') to the model, where o' is the typed literal corresponding to o. Answer this model. */ Model addLiteral( Resource s, Property p, float o ); /** Add the statement (s, p, o') to the model, where o' is the typed literal corresponding to o. Answer this model. */ Model addLiteral( Resource s, Property p, double o ) ; /** add a statement to this model. * Applications should use typed literals whereever possible. * * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param o the object of the statement to add * @deprecated Freshly (should have been done a while ago) */ @Deprecated Model addLiteral( Resource s, Property p, Object o ); /** add a statement to this model. * * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param o the object of the statement to add */ Model addLiteral( Resource s, Property p, Literal o ); /** add a statement to this model. * * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param o the object of the statement to add */ Model add(Resource s, Property p, String o) ; /** add a statement to this model. * * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param lex the lexcial form of the literal * @param datatype the datatype of the literal */ Model add(Resource s, Property p, String lex, RDFDatatype datatype) ; /** add a statement to this model. 
* * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param o the object of the statement to add * @param wellFormed true if o is well formed XML */ Model add(Resource s, Property p, String o, boolean wellFormed); /** add a statement to this model. * * @return this model * @param s the subject of the statement to add * @param p the predicate of the statement to add * @param o the object of the statement to add * @param l the language associated with the object */ Model add(Resource s, Property p, String o, String l) ; /** remove the statement <code>(s, p, o)</code> from this model and answer this model. None of <code>s, p, o</code> are permitted to be <code>null</code>: for wildcard removal, see <code>removeAll</code>. */ Model remove( Resource s, Property p, RDFNode o ); /** Remove all the Statements returned by an iterator. * @return this model * @param iter the iterator which returns the statements to be removed. */ Model remove(StmtIterator iter) ; /** Remove all the Statements in a given model, including reified statements * @return this model * @param m the model containing the statements to be removed. */ Model remove(Model m) ; /** Remove from this model all the statements found in the given model. If suppressreifications is true, remove the reified statements of m as well. @param m the model containing the statements to remove @param suppressReifications true to remove reified statements too @return this model for cascading */ Model remove( Model m, boolean suppressReifications ); /** Answer a statement iterator that will iterate over all the statements (S, P, O) in this model where S matches <code>subject</code>, P matches <code>predicate</code>, and O matches the typed literal corresponding to <code>object</code>. 
*/ StmtIterator listLiteralStatements( Resource subject, Property predicate, boolean object ); /** Answer a statement iterator that will iterate over all the statements (S, P, O) in this model where S matches <code>subject</code>, P matches <code>predicate</code>, and O matches the typed literal corresponding to <code>object</code>. */ StmtIterator listLiteralStatements( Resource subject, Property predicate, char object ); /** Answer a statement iterator that will iterate over all the statements (S, P, O) in this model where S matches <code>subject</code>, P matches <code>predicate</code>, and O matches the typed literal corresponding to <code>object</code>. */ StmtIterator listLiteralStatements(Resource subject, Property predicate, long object ); /** Answer a statement iterator that will iterate over all the statements (S, P, O) in this model where S matches <code>subject</code>, P matches <code>predicate</code>, and O matches the typed literal corresponding to <code>object</code>. */ StmtIterator listLiteralStatements( Resource subject, Property predicate, float object ); /** Answer a statement iterator that will iterate over all the statements (S, P, O) in this model where S matches <code>subject</code>, P matches <code>predicate</code>, and O matches the typed literal corresponding to <code>object</code>. */ StmtIterator listLiteralStatements(Resource subject, Property predicate, double object ); /** Find all the statements matching a pattern. * <p>Return an iterator over all the statements in a model * that match a pattern. 
The statements selected are those * whose subject matches the <code>subject</code> argument, * whose predicate matches the <code>predicate</code> argument * and whose object matchesthe <code>object</code> argument.</p> * @return an iterator over the subjects * @param subject The subject sought * @param predicate The predicate sought * @param object The value sought */ StmtIterator listStatements( Resource subject, Property predicate, String object ); /** Find all the statements matching a pattern. * <p>Return an iterator over all the statements in a model * that match a pattern. The statements selected are those * whose subject matches the <code>subject</code> argument, * whose predicate matches the <code>predicate</code> argument * and whose object matchesthe <code>object</code> argument. * If an argument is <code>null</code> it matches anything.</p> * @return an iterator over the subjects * @param subject The subject sought * @param predicate The predicate sought * @param object The value sought * @param lang The lang code ofthe string. */ StmtIterator listStatements(Resource subject, Property predicate, String object, String lang) ; /** Answer an iterator [without duplicates] over all the resources in this model which have value o' for property p, where o' is the typed literal corresponding to o. */ ResIterator listResourcesWithProperty( Property p, boolean o ); /** Answer an iterator [without duplicates] over all the resources in this model which have value o' for property p, where o' is the typed literal corresponding to o. */ ResIterator listResourcesWithProperty( Property p, long o ); /** Answer an iterator [without duplicates] over all the resources in this model which have value o' for property p, where o' is the typed literal corresponding to o. 
*/ ResIterator listResourcesWithProperty( Property p, char o ); /** Answer an iterator [without duplicates] over all the resources in this model which have value o' for property p, where o' is the typed literal corresponding to o. */ ResIterator listResourcesWithProperty( Property p, float o ); /** Answer an iterator [without duplicates] over all the resources in this model which have value o' for property p, where o' is the typed literal corresponding to o. */ ResIterator listResourcesWithProperty( Property p, double o ); /** Answer an iterator [without duplicates] over all the resources in this model which have value o' for property p, where o' is the typed literal corresponding to o. */ ResIterator listResourcesWithProperty( Property p, Object o ); /** lists all subjects with a given property and property value. * @return an iterator over the set of subjects * @param p The predicate sought. * @param o The property value sought. */ ResIterator listSubjectsWithProperty( Property p, String o ); /** lists all subjects with a given property and property value. * @return an iterator over the set of subjects * @param p The predicate sought. * @param o The property value sought. * @param l the language associated with the object */ ResIterator listSubjectsWithProperty( Property p, String o, String l ); /** Answer true iff this model contains the statement (s, p, o') where o' is the typed literal corresponding to the value o. */ boolean containsLiteral( Resource s, Property p, boolean o ); /** Answer true iff this model contains the statement (s, p, o') where o' is the typed literal corresponding to the value o. */ boolean containsLiteral( Resource s, Property p, long o ); /** Answer true iff this model contains the statement (s, p, o') where o' is the typed literal corresponding to the value o. 
*/ boolean containsLiteral( Resource s, Property p, int o ); /** Answer true iff this model contains the statement (s, p, o') where o' is the typed literal corresponding to the value o. */ boolean containsLiteral( Resource s, Property p, char o ); /** Answer true iff this model contains (s, p, o') where o' is the typed literal corresponding to o. */ boolean containsLiteral( Resource s, Property p, float o ); /** Answer true iff this model contains the statement (s, p, o') where o' is the typed literal corresponding to the value o. */ boolean containsLiteral( Resource s, Property p, double o ); /** Answer true iff this model contains the statement (s, p, o') where o' is the typed literal corresponding to the value o. */ boolean containsLiteral( Resource s, Property p, Object o ); /** Determine if a statement is present in this model. * @return true if the statement with subject s, property p and object o * is in the model, false otherwise * @param s The subject of the statment tested. * @param p The predicate of the statement tested. * @param o The object of the statement tested. */ boolean contains( Resource s, Property p, String o ); /** Determine if a statement is present in this model. * @return true if the statement with subject s, property p and object o * is in the model, false otherwise * @param s The subject of the statment tested. * @param p The predicate of the statement tested. * @param o The object of the statement tested. * @param l the language associated with the object */ boolean contains( Resource s, Property p, String o, String l ); }
/* * Copyright 2014 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp.parsing; import static com.google.javascript.jscomp.base.JSCompDoubles.isExactInt32; import static java.lang.Double.isNaN; import com.google.common.base.Ascii; import com.google.common.base.Preconditions; import com.google.javascript.jscomp.parsing.ParserRunner.ParseResult; import com.google.javascript.rhino.ErrorReporter; import com.google.javascript.rhino.Msg; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.StaticSourceFile; /** * A parser for the type transformation expressions (TTL-Exp) as in * {@code @template T := TTL-Exp =:} */ public final class TypeTransformationParser { private final String typeTransformationString; private Node typeTransformationAst; private final StaticSourceFile sourceFile; private final ErrorReporter errorReporter; private final int templateLineno; private final int templateCharno; private static final int VAR_ARGS = Integer.MAX_VALUE; /** The classification of the keywords */ public static enum OperationKind { TYPE_CONSTRUCTOR, OPERATION, STRING_PREDICATE, TYPE_PREDICATE, TYPEVAR_PREDICATE } /** Keywords of the type transformation language */ public static enum Keywords { ALL("all", 0, 0, OperationKind.TYPE_CONSTRUCTOR), COND("cond", 3, 3, OperationKind.OPERATION), EQ("eq", 2, 2, OperationKind.TYPE_PREDICATE), ISCTOR("isCtor", 1, 1, OperationKind.TYPE_PREDICATE), ISDEFINED("isDefined", 
1, 1, OperationKind.TYPEVAR_PREDICATE), ISRECORD("isRecord", 1, 1, OperationKind.TYPE_PREDICATE), ISTEMPLATIZED("isTemplatized", 1, 1, OperationKind.TYPE_PREDICATE), ISUNKNOWN("isUnknown", 1, 1, OperationKind.TYPE_PREDICATE), INSTANCEOF("instanceOf", 1, 1, OperationKind.OPERATION), MAPUNION("mapunion", 2, 2, OperationKind.OPERATION), MAPRECORD("maprecord", 2, 2, OperationKind.OPERATION), NONE("none", 0, 0, OperationKind.TYPE_CONSTRUCTOR), PRINTTYPE("printType", 2, 2, OperationKind.OPERATION), PROPTYPE("propType", 2, 2, OperationKind.OPERATION), RAWTYPEOF("rawTypeOf", 1, 1, OperationKind.TYPE_CONSTRUCTOR), SUB("sub", 2, 2, OperationKind.TYPE_PREDICATE), STREQ("streq", 2, 2, OperationKind.STRING_PREDICATE), RECORD("record", 1, VAR_ARGS, OperationKind.TYPE_CONSTRUCTOR), TEMPLATETYPEOF("templateTypeOf", 2, 2, OperationKind.TYPE_CONSTRUCTOR), TYPE("type", 2, VAR_ARGS, OperationKind.TYPE_CONSTRUCTOR), TYPEEXPR("typeExpr", 1, 1, OperationKind.TYPE_CONSTRUCTOR), TYPEOFVAR("typeOfVar", 1, 1, OperationKind.OPERATION), UNION("union", 2, VAR_ARGS, OperationKind.TYPE_CONSTRUCTOR), UNKNOWN("unknown", 0, 0, OperationKind.TYPE_CONSTRUCTOR); public final String name; public final int minParamCount; public final int maxParamCount; public final OperationKind kind; Keywords(String name, int minParamCount, int maxParamCount, OperationKind kind) { this.name = name; this.minParamCount = minParamCount; this.maxParamCount = maxParamCount; this.kind = kind; } } public TypeTransformationParser(String typeTransformationString, StaticSourceFile sourceFile, ErrorReporter errorReporter, int templateLineno, int templateCharno) { this.typeTransformationString = typeTransformationString; this.sourceFile = sourceFile; this.errorReporter = errorReporter; this.templateLineno = templateLineno; this.templateCharno = templateCharno; } public Node getTypeTransformationAst() { return typeTransformationAst; } private void addNewWarning(Msg messageId, String messageArg) { // TODO(lpino): Use the exact lineno 
and charno, it is currently using // the lineno and charno of the parent @template // TODO(lpino): Use only constants as parameters of this method errorReporter.warning( "Bad type annotation. " + messageId.format(messageArg), sourceFile.getName(), templateLineno, templateCharno); } private Keywords nameToKeyword(String s) { return Keywords.valueOf(Ascii.toUpperCase(s)); } private boolean isValidKeyword(String name) { for (Keywords k : Keywords.values()) { if (k.name.equals(name)) { return true; } } return false; } private boolean isOperationKind(String name, OperationKind kind) { return isValidKeyword(name) && nameToKeyword(name).kind == kind; } private boolean isValidStringPredicate(String name) { return isOperationKind(name, OperationKind.STRING_PREDICATE); } private boolean isValidTypePredicate(String name) { return isOperationKind(name, OperationKind.TYPE_PREDICATE); } private boolean isValidTypevarPredicate(String name) { return isOperationKind(name, OperationKind.TYPEVAR_PREDICATE); } private boolean isBooleanOperation(Node n) { return n.isAnd() || n.isOr() || n.isNot(); } private boolean isValidPredicate(String name) { return isValidStringPredicate(name) || isValidTypePredicate(name) || isValidTypevarPredicate(name); } private int getFunctionParamCount(Node n) { Preconditions.checkArgument(n.isFunction(), "Expected a function node, found %s", n); return n.getSecondChild().getChildCount(); } private Node getFunctionBody(Node n) { Preconditions.checkArgument(n.isFunction(), "Expected a function node, found %s", n); return n.getChildAtIndex(2); } private String getCallName(Node n) { Preconditions.checkArgument(n.isCall(), "Expected a call node, found %s", n); return n.getFirstChild().getString(); } private Node getCallArgument(Node n, int i) { Preconditions.checkArgument(n.isCall(), "Expected a call node, found %s", n); return n.getChildAtIndex(i + 1); } private int getCallParamCount(Node n) { Preconditions.checkArgument(n.isCall(), "Expected a call node, found 
%s", n); return n.getChildCount() - 1; } private boolean isTypeVar(Node n) { return n.isName(); } private boolean isTypeName(Node n) { return n.isStringLit(); } private boolean isOperation(Node n) { return n.isCall(); } /** * A valid expression is either: * - NAME for a type variable * - STRING for a type name * - CALL for the other expressions */ private boolean isValidExpression(Node e) { return isTypeVar(e) || isTypeName(e) || isOperation(e); } private void warnInvalid(String msg) { addNewWarning(Msg.JSDOC_TYPETRANSFORMATION_INVALID, msg); } private void warnInvalidExpression(String msg) { addNewWarning(Msg.JSDOC_TYPETRANSFORMATION_INVALID_EXPRESSION, msg); } private void warnMissingParam(String msg) { addNewWarning(Msg.JSDOC_TYPETRANSFORMATION_MISSING_PARAM, msg); } private void warnExtraParam(String msg) { addNewWarning(Msg.JSDOC_TYPETRANSFORMATION_EXTRA_PARAM, msg); } private void warnInvalidInside(String msg) { addNewWarning(Msg.JSDOC_TYPETRANSFORMATION_INVALID_INSIDE, msg); } private boolean checkParameterCount(Node expr, Keywords keyword) { int paramCount = getCallParamCount(expr); if (paramCount < keyword.minParamCount) { warnMissingParam(keyword.name); return false; } if (paramCount > keyword.maxParamCount) { warnExtraParam(keyword.name); return false; } return true; } /** * Takes a type transformation expression, transforms it to an AST using * the ParserRunner of the JSCompiler and then verifies that it is a valid * AST. * @return true if the parsing was successful otherwise it returns false and * at least one warning is reported */ public boolean parseTypeTransformation() { Config config = Config.builder() .setLanguageMode(Config.LanguageMode.ES_NEXT) .setStrictMode(Config.StrictMode.SLOPPY) .build(); // TODO(lpino): ParserRunner reports errors if the expression is not // ES6 valid. We need to abort the validation of the type transformation // whenever an error is reported. 
ParseResult result = ParserRunner.parse( sourceFile, typeTransformationString, config, errorReporter); Node ast = result.ast; // Check that the expression is a script with an expression result if (ast == null || !ast.isScript() || !ast.hasChildren() || !ast.getFirstChild().isExprResult()) { warnInvalidExpression("type transformation"); return false; } Node expr = ast.getFirstFirstChild(); // The AST of the type transformation must correspond to a valid expression if (!validTypeTransformationExpression(expr)) { // No need to add a new warning because the validation does it return false; } fixLineNumbers(expr); // Store the result if the AST is valid typeTransformationAst = expr; return true; } private void fixLineNumbers(Node expr) { expr.setLinenoCharno(expr.getLineno() + templateLineno, expr.getCharno() + templateCharno); for (Node child = expr.getFirstChild(); child != null; child = child.getNext()) { fixLineNumbers(child); } } /** * A template type expression must be of the form type(typename, TTLExp,...) * or type(typevar, TTLExp...) */ private boolean validTemplateTypeExpression(Node expr) { // The expression must have at least three children the type keyword, // a type name (or type variable) and a type expression if (!checkParameterCount(expr, Keywords.TYPE)) { return false; } int paramCount = getCallParamCount(expr); // The first parameter must be a type variable or a type name Node firstParam = getCallArgument(expr, 0); if (!isTypeVar(firstParam) && !isTypeName(firstParam)) { warnInvalid("type name or type variable"); warnInvalidInside("template type operation"); return false; } // The rest of the parameters must be valid type expressions for (int i = 1; i < paramCount; i++) { if (!validTypeTransformationExpression(getCallArgument(expr, i))) { warnInvalidInside("template type operation"); return false; } } return true; } /** * A Union type expression must be a valid type variable or * a union(TTLExp, TTLExp, ...) 
*/ private boolean validUnionTypeExpression(Node expr) { // The expression must have at least three children: The union keyword and // two type expressions if (!checkParameterCount(expr, Keywords.UNION)) { return false; } int paramCount = getCallParamCount(expr); // Check if each of the members of the union is a valid type expression for (int i = 0; i < paramCount; i++) { if (!validTypeTransformationExpression(getCallArgument(expr, i))) { warnInvalidInside("union type"); return false; } } return true; } /** * A none type expression must be of the form: none() */ private boolean validNoneTypeExpression(Node expr) { // The expression must have no children return checkParameterCount(expr, Keywords.NONE); } /** * An all type expression must be of the form: all() */ private boolean validAllTypeExpression(Node expr) { // The expression must have no children return checkParameterCount(expr, Keywords.ALL); } /** * An unknown type expression must be of the form: unknown() */ private boolean validUnknownTypeExpression(Node expr) { // The expression must have no children return checkParameterCount(expr, Keywords.UNKNOWN); } /** * A raw type expression must be of the form rawTypeOf(TTLExp) */ private boolean validRawTypeOfTypeExpression(Node expr) { // The expression must have two children. The rawTypeOf keyword and the // parameter if (!checkParameterCount(expr, Keywords.RAWTYPEOF)) { return false; } // The parameter must be a valid type expression if (!validTypeTransformationExpression(getCallArgument(expr, 0))) { warnInvalidInside(Keywords.RAWTYPEOF.name); return false; } return true; } /** * A template type of expression must be of the form * templateTypeOf(TTLExp, index) */ private boolean validTemplateTypeOfExpression(Node expr) { // The expression must have three children. 
The templateTypeOf keyword, a // templatized type and an index if (!checkParameterCount(expr, Keywords.TEMPLATETYPEOF)) { return false; } // The parameter must be a valid type expression if (!validTypeTransformationExpression(getCallArgument(expr, 0))) { warnInvalidInside(Keywords.TEMPLATETYPEOF.name); return false; } if (!getCallArgument(expr, 1).isNumber()) { warnInvalid("index"); warnInvalidInside(Keywords.TEMPLATETYPEOF.name); return false; } double index = getCallArgument(expr, 1).getDouble(); if (isNaN(index) || !isExactInt32(index)) { warnInvalid("index"); warnInvalidInside(Keywords.TEMPLATETYPEOF.name); return false; } return true; } /** * A record must be a valid type transformation expression or a node of the form: * {prop:TTLExp, prop:TTLExp, ...} * Notice that the values are mandatory and they must be valid type * transformation expressions */ private boolean validRecordParam(Node expr) { if (expr.isObjectLit()) { // Each value of a property must be a valid expression for (Node prop = expr.getFirstChild(); prop != null; prop = prop.getNext()) { if (prop.isShorthandProperty()) { warnInvalid("property, missing type"); return false; } else if (!validTypeTransformationExpression(prop.getFirstChild())) { return false; } } } else if (!validTypeTransformationExpression(expr)) { return false; } return true; } /** * A record type expression must be of the form: * record(RecordExp, RecordExp, ...) */ private boolean validRecordTypeExpression(Node expr) { // The expression must have at least two children. 
The record keyword and // a record expression if (!checkParameterCount(expr, Keywords.RECORD)) { return false; } // Each child must be a valid record for (int i = 0; i < getCallParamCount(expr); i++) { if (!validRecordParam(getCallArgument(expr, i))) { warnInvalidInside(Keywords.RECORD.name); return false; } } return true; } private boolean validNativeTypeExpr(Node expr) { // The expression must have two children: // - The typeExpr keyword // - A string if (!checkParameterCount(expr, Keywords.TYPEEXPR)) { return false; } Node typeString = getCallArgument(expr, 0); if (!typeString.isStringLit()) { warnInvalidExpression("native type"); warnInvalidInside(Keywords.TYPEEXPR.name); return false; } Node typeExpr = JsDocInfoParser.parseTypeString(typeString.getString()); typeString.detach(); expr.addChildToBack(typeExpr); return true; } /** * A TTL type expression must be a union type, a template type, a record type * or any of the type predicates (none, rawTypeOf, templateTypeOf). */ private boolean validTypeExpression(Node expr) { String name = getCallName(expr); Keywords keyword = nameToKeyword(name); switch (keyword) { case TYPE: return validTemplateTypeExpression(expr); case UNION: return validUnionTypeExpression(expr); case NONE: return validNoneTypeExpression(expr); case ALL: return validAllTypeExpression(expr); case UNKNOWN: return validUnknownTypeExpression(expr); case RAWTYPEOF: return validRawTypeOfTypeExpression(expr); case TEMPLATETYPEOF: return validTemplateTypeOfExpression(expr); case RECORD: return validRecordTypeExpression(expr); case TYPEEXPR: return validNativeTypeExpr(expr); default: throw new IllegalStateException("Invalid type expression"); } } private boolean validTypePredicate(Node expr, int paramCount) { // All the types must be valid type expressions for (int i = 0; i < paramCount; i++) { if (!validTypeTransformationExpression(getCallArgument(expr, i))) { warnInvalidInside("boolean"); return false; } } return true; } private boolean 
isValidStringParam(Node expr) { if (!expr.isName() && !expr.isStringLit()) { warnInvalid("string"); return false; } if (expr.getString().isEmpty()) { warnInvalid("string parameter"); return false; } return true; } private boolean validStringPredicate(Node expr, int paramCount) { // Each parameter must be valid string parameter for (int i = 0; i < paramCount; i++) { if (!isValidStringParam(getCallArgument(expr, i))) { warnInvalidInside("boolean"); return false; } } return true; } private boolean validTypevarParam(Node expr) { if (!isTypeVar(expr)) { warnInvalid("name"); return false; } return true; } private boolean validTypevarPredicate(Node expr, int paramCount) { // Each parameter must be valid string parameter for (int i = 0; i < paramCount; i++) { if (!validTypevarParam(getCallArgument(expr, i))) { warnInvalidInside("boolean"); return false; } } return true; } private boolean validBooleanOperation(Node expr) { boolean valid; if (expr.isNot()) { valid = validBooleanExpression(expr.getFirstChild()); } else { valid = validBooleanExpression(expr.getFirstChild()) && validBooleanExpression(expr.getSecondChild()); } if (!valid) { warnInvalidInside("boolean"); return false; } return true; } /** * A boolean expression must be a boolean predicate or a boolean * type predicate */ private boolean validBooleanExpression(Node expr) { if (isBooleanOperation(expr)) { return validBooleanOperation(expr); } if (!isOperation(expr)) { warnInvalidExpression("boolean"); return false; } if (!isValidPredicate(getCallName(expr))) { warnInvalid("boolean predicate"); return false; } Keywords keyword = nameToKeyword(getCallName(expr)); if (!checkParameterCount(expr, keyword)) { return false; } switch (keyword.kind) { case TYPE_PREDICATE: return validTypePredicate(expr, getCallParamCount(expr)); case STRING_PREDICATE: return validStringPredicate(expr, getCallParamCount(expr)); case TYPEVAR_PREDICATE: return validTypevarPredicate(expr, getCallParamCount(expr)); default: throw new 
IllegalStateException("Invalid boolean expression"); } } /** * A conditional type transformation expression must be of the * form cond(BoolExp, TTLExp, TTLExp) */ private boolean validConditionalExpression(Node expr) { // The expression must have four children: // - The cond keyword // - A boolean expression // - A type transformation expression with the 'if' branch // - A type transformation expression with the 'else' branch if (!checkParameterCount(expr, Keywords.COND)) { return false; } // Check for the validity of the boolean and the expressions if (!validBooleanExpression(getCallArgument(expr, 0))) { warnInvalidInside("conditional"); return false; } if (!validTypeTransformationExpression(getCallArgument(expr, 1))) { warnInvalidInside("conditional"); return false; } if (!validTypeTransformationExpression(getCallArgument(expr, 2))) { warnInvalidInside("conditional"); return false; } return true; } /** * A mapunion type transformation expression must be of the form * mapunion(TTLExp, (typevar) => TTLExp). 
*/
  private boolean validMapunionExpression(Node expr) {
    // The expression must have three children:
    // - The mapunion keyword
    // - A union type expression
    // - A map function
    if (!checkParameterCount(expr, Keywords.MAPUNION)) {
      return false;
    }
    // The second child must be a valid union type expression
    if (!validTypeTransformationExpression(getCallArgument(expr, 0))) {
      warnInvalidInside(Keywords.MAPUNION.name);
      return false;
    }
    // The third child must be a function
    if (!getCallArgument(expr, 1).isFunction()) {
      warnInvalid("map function");
      warnInvalidInside(Keywords.MAPUNION.name);
      return false;
    }
    Node mapFn = getCallArgument(expr, 1);
    // The map function must have only one parameter
    int mapFnParamCount = getFunctionParamCount(mapFn);
    if (mapFnParamCount < 1) {
      warnMissingParam("map function");
      warnInvalidInside(Keywords.MAPUNION.name);
      return false;
    }
    if (mapFnParamCount > 1) {
      warnExtraParam("map function");
      warnInvalidInside(Keywords.MAPUNION.name);
      return false;
    }
    // The body must be a valid type transformation expression
    Node mapFnBody = getFunctionBody(mapFn);
    if (!validTypeTransformationExpression(mapFnBody)) {
      warnInvalidInside("map function body");
      return false;
    }
    return true;
  }

  /**
   * A maprecord type transformation expression must be of the form
   * maprecord(TTLExp, (typevar, typevar) => TTLExp).
*/
  private boolean validMaprecordExpression(Node expr) {
    // The expression must have three children:
    // - The maprecord keyword
    // - A type expression
    // - A map function
    if (!checkParameterCount(expr, Keywords.MAPRECORD)) {
      return false;
    }
    // The second child must be a valid expression
    if (!validTypeTransformationExpression(getCallArgument(expr, 0))) {
      warnInvalidInside(Keywords.MAPRECORD.name);
      return false;
    }
    // The third child must be a function
    if (!getCallArgument(expr, 1).isFunction()) {
      warnInvalid("map function");
      warnInvalidInside(Keywords.MAPRECORD.name);
      return false;
    }
    Node mapFn = getCallArgument(expr, 1);
    // The map function must have exactly two parameters
    int mapFnParamCount = getFunctionParamCount(mapFn);
    if (mapFnParamCount < 2) {
      warnMissingParam("map function");
      warnInvalidInside(Keywords.MAPRECORD.name);
      return false;
    }
    if (mapFnParamCount > 2) {
      warnExtraParam("map function");
      warnInvalidInside(Keywords.MAPRECORD.name);
      return false;
    }
    // The body must be a valid type transformation expression
    Node mapFnBody = getFunctionBody(mapFn);
    if (!validTypeTransformationExpression(mapFnBody)) {
      warnInvalidInside("map function body");
      return false;
    }
    return true;
  }

  /**
   * A typeOfVar expression must be of the form typeOfVar('name')
   */
  private boolean validTypeOfVarExpression(Node expr) {
    // The expression must have two children:
    // - The typeOfVar keyword
    // - A string
    if (!checkParameterCount(expr, Keywords.TYPEOFVAR)) {
      return false;
    }
    if (!getCallArgument(expr, 0).isStringLit()) {
      warnInvalid("name");
      warnInvalidInside(Keywords.TYPEOFVAR.name);
      return false;
    }
    return true;
  }

  /**
   * An instanceOf expression must be of the form instanceOf(TTLExp)
   */
  private boolean validInstanceOfExpression(Node expr) {
    // The expression must have two children:
    // - The instanceOf keyword
    // - A type transformation expression
    if (!checkParameterCount(expr, Keywords.INSTANCEOF)) {
      return false;
    }
    if (!validTypeTransformationExpression(getCallArgument(expr, 0))) {
warnInvalidInside(Keywords.INSTANCEOF.name);
      return false;
    }
    return true;
  }

  /**
   * A printType expression must be of the form printType('message', TTLExp).
   */
  private boolean validPrintTypeExpression(Node expr) {
    // The expression must have three children. The printType keyword, a
    // message and a type transformation expression
    if (!checkParameterCount(expr, Keywords.PRINTTYPE)) {
      return false;
    }
    if (!getCallArgument(expr, 0).isStringLit()) {
      warnInvalid("message");
      warnInvalidInside(Keywords.PRINTTYPE.name);
      return false;
    }
    if (!validTypeTransformationExpression(getCallArgument(expr, 1))) {
      warnInvalidInside(Keywords.PRINTTYPE.name);
      return false;
    }
    return true;
  }

  /**
   * A propType expression must be of the form propType('name', TTLExp).
   */
  private boolean validPropTypeExpression(Node expr) {
    // The expression must have three children. The propType keyword,
    // a string and a type transformation expression
    if (!checkParameterCount(expr, Keywords.PROPTYPE)) {
      return false;
    }
    if (!getCallArgument(expr, 0).isStringLit()) {
      warnInvalid("property name");
      warnInvalidInside(Keywords.PROPTYPE.name);
      return false;
    }
    if (!validTypeTransformationExpression(getCallArgument(expr, 1))) {
      warnInvalidInside(Keywords.PROPTYPE.name);
      return false;
    }
    return true;
  }

  /**
   * An operation expression is one of the operation keywords:
   * cond, mapunion, maprecord, typeOfVar, instanceOf, printType or propType
   */
  private boolean validOperationExpression(Node expr) {
    String name = getCallName(expr);
    Keywords keyword = nameToKeyword(name);
    // Dispatch to the validator for the specific operation
    switch (keyword) {
      case COND:
        return validConditionalExpression(expr);
      case MAPUNION:
        return validMapunionExpression(expr);
      case MAPRECORD:
        return validMaprecordExpression(expr);
      case TYPEOFVAR:
        return validTypeOfVarExpression(expr);
      case INSTANCEOF:
        return validInstanceOfExpression(expr);
      case PRINTTYPE:
        return validPrintTypeExpression(expr);
      case PROPTYPE:
        return validPropTypeExpression(expr);
      default:
        throw new IllegalStateException("Invalid type transformation operation");
    }
  }

  /**
   * Checks the structure of the AST of a type transformation expression
   * in @template T := TTLExp =:
   */
  private boolean validTypeTransformationExpression(Node expr) {
    if (!isValidExpression(expr)) {
warnInvalidExpression("type transformation");
      return false;
    }
    // Type variables and type names are valid expressions on their own
    if (isTypeVar(expr) || isTypeName(expr)) {
      return true;
    }
    // Check for valid keyword
    String name = getCallName(expr);
    if (!isValidKeyword(name)) {
      warnInvalidExpression("type transformation");
      return false;
    }
    Keywords keyword = nameToKeyword(name);
    // Check the rest of the expression depending on the kind
    switch (keyword.kind) {
      case TYPE_CONSTRUCTOR:
        return validTypeExpression(expr);
      case OPERATION:
        return validOperationExpression(expr);
      default:
        throw new IllegalStateException("Invalid type transformation expression");
    }
  }
}