gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.tool.syllabus;

import java.io.IOException;
import java.util.Enumeration;

import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.sakaiproject.jsf.util.JsfTool;
import org.sakaiproject.tool.api.ActiveTool;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolException;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.ActiveToolManager;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.util.Web;

/**
 * JSF dispatch servlet for the Syllabus tool that adds support for routing
 * "*.helper" paths (e.g. the file picker) to the matching Sakai helper tool
 * before falling back to normal JsfTool view dispatching.
 *
 * @author <a href="mailto:cwen.iupui.edu">Chen Wen</a>
 * @version $Id$
 */
public class SyllabusFilePickerServlet extends JsfTool
{
	// Path suffix that marks the first path segment as a helper-tool request.
	private static final String HELPER_EXT = ".helper";

	// Request parameters with this prefix are copied into the tool session
	// (with the prefix stripped) before the helper is invoked.
	private static final String HELPER_SESSION_PREFIX = "session.";

	/**
	 * Dispatch the request to a helper tool, a static resource, or a JSF view.
	 * Overrides the JsfTool dispatch to first give helpers a chance to handle
	 * the path; otherwise resolves the target view, issuing a redirect when the
	 * effective path differs from the requested one, and finally forwards to
	 * the JSF view with Sakai's native-URL wrappers and no-cache headers set.
	 *
	 * @param req the HTTP request.
	 * @param res the HTTP response.
	 * @throws ServletException from the downstream dispatcher.
	 * @throws IOException from redirects / the downstream dispatcher.
	 */
	protected void dispatch(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException
	{
		// NOTE: this is a simple path dispatching, taking the path as the view id = jsp file name for the view,
		// with default used if no path and a path prefix as configured.
		// TODO: need to allow other sorts of dispatching, such as pulling out drill-down ids and making them
		// available to the JSF

		// build up the target that will be dispatched to
		String target = req.getPathInfo();

		// see if we have a helper request
		if (sendToHelper(req, res, target))
		{
			return;
		}

		// see if we have a resource request - i.e. a path with an extension, and one that is not the JSF_EXT
		if (isResourceRequest(target))
		{
			// get a dispatcher to the path
			RequestDispatcher resourceDispatcher = getServletContext().getRequestDispatcher(target);
			if (resourceDispatcher != null)
			{
				resourceDispatcher.forward(req, res);
				return;
			}
		}

		if ("Title".equals(req.getParameter("panel")))
		{
			// This allows only one Title JSF for each tool
			target = "/title.jsf";
		}
		else
		{
			ToolSession session = SessionManager.getCurrentToolSession();

			if (target == null || "/".equals(target))
			{
				// no explicit view requested: use the configured default
				// (or, per JsfTool config, the last view visited)
				target = computeDefaultTarget();

				// make sure it's a valid path
				if (!target.startsWith("/"))
				{
					target = "/" + target;
				}

				// now that we've messed with the URL, send a redirect to make it official
				res.sendRedirect(Web.returnUrl(req, target));
				return;
			}

			// see if we want to change the specifically requested view
			String newTarget = redirectRequestedTarget(target);

			// make sure it's a valid path
			if (!newTarget.startsWith("/"))
			{
				newTarget = "/" + newTarget;
			}

			if (!newTarget.equals(target))
			{
				// now that we've messed with the URL, send a redirect to make it official
				res.sendRedirect(Web.returnUrl(req, newTarget));
				return;
			}
			target = newTarget;

			// store this as the last view visited so computeDefaultTarget(true)
			// can return here later (only when the tool is configured for it)
			if (m_defaultToLastView)
			{
				session.setAttribute(LAST_VIEW_VISITED, target);
			}
		}

		// add the configured folder root and extension (if missing)
		target = m_path + target;

		// add the default JSF extension (if we have no extension)
		// lastDot < lastSlash means the only dot is in a directory name, not the leaf
		int lastSlash = target.lastIndexOf("/");
		int lastDot = target.lastIndexOf(".");
		if (lastDot < 0 || lastDot < lastSlash)
		{
			target += JSF_EXT;
		}

		// set the information that can be removed from return URLs
		req.setAttribute(URL_PATH, m_path);
		req.setAttribute(URL_EXT, ".jsp");

		// set the sakai request object wrappers to provide the native, not Sakai set up, URL information
		// - this assures that the FacesServlet can dispatch to the proper view based on the path info
		req.setAttribute(Tool.NATIVE_URL, Tool.NATIVE_URL);

		// TODO: Should setting the HTTP headers be moved up to the portal level as well?
		// Expires one year in the past plus no-cache/no-store: defeat all caching of tool output.
		res.setContentType("text/html; charset=UTF-8");
		res.addDateHeader("Expires", System.currentTimeMillis() - (1000L * 60L * 60L * 24L * 365L));
		res.addDateHeader("Last-Modified", System.currentTimeMillis());
		res.addHeader("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0, post-check=0, pre-check=0");
		res.addHeader("Pragma", "no-cache");

		// dispatch to the target
		/*M_log.debug("dispatching path: " + req.getPathInfo() + " to: " + target + " context: "
				+ getServletContext().getServletContextName());*/
		RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(target);
		dispatcher.forward(req, res);

		// restore the request object
		req.removeAttribute(Tool.NATIVE_URL);
		req.removeAttribute(URL_PATH);
		req.removeAttribute(URL_EXT);
	}

	/**
	 * If the request path names a helper tool ("/&lt;helperId&gt;.helper[/...]"),
	 * copy "session."-prefixed request parameters into the tool session, record
	 * a done-URL for the helper (unless one is already set), and hand the
	 * request to the helper's ActiveTool.
	 *
	 * @param req the HTTP request.
	 * @param res the HTTP response.
	 * @param target the request path info (same value as req.getPathInfo() in dispatch()).
	 * @return true if the request was handled as a helper call, false to continue normal dispatch.
	 * @throws ToolException from the helper tool.
	 */
	protected boolean sendToHelper(HttpServletRequest req, HttpServletResponse res, String target) throws ToolException
	{
		String path = req.getPathInfo();
		if (path == null) path = "/";

		// 0 parts means the path was just "/", otherwise parts[0] = "", parts[1] = item id, parts[2] if present is "edit"...
		String[] parts = path.split("/");

		if (parts.length < 2)
		{
			return false;
		}

		if (!parts[1].endsWith(HELPER_EXT))
		{
			return false;
		}

		ToolSession toolSession = SessionManager.getCurrentToolSession();

		// copy "session.foo=bar" request parameters into the tool session as "foo=bar"
		Enumeration params = req.getParameterNames();
		while (params.hasMoreElements())
		{
			String paramName = (String)params.nextElement();
			if (paramName.startsWith(HELPER_SESSION_PREFIX))
			{
				String attributeName = paramName.substring(HELPER_SESSION_PREFIX.length());
				toolSession.setAttribute(attributeName, req.getParameter(paramName));
			}
		}

		// calc helper id: target starts with "/" followed by parts[1], so indexes
		// into parts[1] are shifted by one in target; this strips the leading "/"
		// and the ".helper" suffix, e.g. "/sakai.filepicker.helper" -> "sakai.filepicker"
		int posEnd = parts[1].lastIndexOf(".");
		String helperId = target.substring(1, posEnd + 1);
		ActiveTool helperTool = ActiveToolManager.getActiveTool(helperId);
		// NOTE(review): getActiveTool may return null for an unknown helper id,
		// which would NPE below - confirm registration is guaranteed upstream.

		// record where the helper should return to when done, unless already set
		if (toolSession.getAttribute(helperTool.getId() + Tool.HELPER_DONE_URL) == null
				&& !target.equals("/sakai.filepicker.helper"))
		{
			toolSession.setAttribute(helperTool.getId() + Tool.HELPER_DONE_URL,
					req.getContextPath() + req.getServletPath() + computeDefaultTarget(true));
		}

		/*comment out for using the global parameter rather than tool-by-tool setting
		SessionState state = UsageSessionService.getSessionState(toolSession.getPlacementId());
		boolean show_other_sites = ServerConfigurationService.getBoolean("syllabus.resources.show_all_collections.helper", true);
		state.setAttribute("resources.allow_user_to_see_all_sites", (new Boolean(show_other_sites)).toString());
		state.setAttribute("resources.user_chooses_to_see_other_sites", (new Boolean(show_other_sites)).toString());
		*/

		// context = everything up to and including the helper segment; toolPath = the rest
		String context = req.getContextPath() + req.getServletPath() + Web.makePath(parts, 1, 2);
		String toolPath = Web.makePath(parts, 2, parts.length);
		helperTool.help(req, res, context, toolPath);

		return true; // was handled as helper call
	}

	/**
	 * Compute the default view target.
	 *
	 * @param lastVisited if true and a last-visited view is recorded for this
	 *        tool placement / user, return that instead of the configured default.
	 * @return the target view path (always "/"-prefixed for the configured default;
	 *         the stored last-visited value is returned as recorded).
	 */
	protected String computeDefaultTarget(boolean lastVisited)
	{
		// setup for the default view as configured
		String target = "/" + m_default;

		// if we are doing lastVisit and there's a last-visited view, for this tool placement / user, use that
		if (lastVisited)
		{
			ToolSession session = SessionManager.getCurrentToolSession();
			String last = (String) session.getAttribute(LAST_VIEW_VISITED);
			if (last != null)
			{
				target = last;
			}
		}

		return target;
	}
}
/**
 * Copyright 2007-2015, Kaazing Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaazing.gateway.transport.http.bridge.filter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Set;

import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.service.IoHandler;
import org.apache.mina.filter.codec.ProtocolDecoder;
import org.apache.mina.filter.codec.ProtocolDecoderException;
import org.jmock.lib.concurrent.Synchroniser;
import org.junit.Test;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.ResourceAddressFactory;
import org.kaazing.gateway.transport.DefaultIoSessionConfigEx;
import org.kaazing.gateway.transport.DefaultTransportMetadata;
import org.kaazing.gateway.transport.http.DefaultHttpCookie;
import org.kaazing.gateway.transport.http.DefaultHttpSession;
import org.kaazing.gateway.transport.http.HttpConnector;
import org.kaazing.gateway.transport.http.HttpCookie;
import org.kaazing.gateway.transport.http.HttpMethod;
import org.kaazing.gateway.transport.http.HttpProtocol;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.HttpVersion;
import org.kaazing.gateway.transport.http.bridge.HttpContentMessage;
import org.kaazing.gateway.transport.http.bridge.HttpMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;
import org.kaazing.gateway.transport.test.Expectations;
import org.kaazing.mina.core.buffer.IoBufferAllocatorEx;
import org.kaazing.mina.core.buffer.IoBufferEx;
import org.kaazing.mina.core.service.IoProcessorEx;
import org.kaazing.mina.core.service.IoServiceEx;
import org.kaazing.mina.core.write.DefaultWriteRequestEx;
import org.kaazing.mina.filter.codec.ProtocolCodecSessionEx;
import org.kaazing.test.util.Mockery;

/**
 * Unit tests for {@link HttpResponseDecoder}: status line / header parsing,
 * Content-Length and chunked bodies, multi-buffer (incomplete) input,
 * Set-Cookie parsing variants, and rejection of a non-HTTP status line.
 */
public class HttpResponseDecoderTest {

    // Use the JDK constant instead of a Charset.forName lookup (StandardCharsets is already imported).
    private static final Charset UTF_8 = StandardCharsets.UTF_8;

    /** Headers-only response with Content-Length: 0 decodes to a message with no content. */
    @Test
    public void decodeHttpResponse() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();

        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK (Test)\r\n" +
                                         "Server: Test\r\n" +
                                         "Content-Length: 0\r\n" +
                                         "\r\n").getBytes());

        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage)session.getDecoderOutputQueue().poll();
        assertEquals(HttpVersion.HTTP_1_1, httpResponse.getVersion());
        assertEquals(HttpStatus.SUCCESS_OK, httpResponse.getStatus());
        // the reason phrase may contain spaces and is preserved verbatim
        assertEquals("OK (Test)", httpResponse.getReason());
        assertNull(httpResponse.getContent());
        assertEquals(Arrays.asList("Test"), httpResponse.getHeaderValues("Server"));

        assertTrue(session.getDecoderOutputQueue().isEmpty());
        decoder.finishDecode(session, session.getDecoderOutput());
        assertTrue(session.getDecoderOutputQueue().isEmpty());
        assertFalse(in.hasRemaining());
    }

    /** Single-buffer response whose Content-Length body arrives in full. */
    @Test
    public void decodeHttpResponseComplete() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();

        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                                         "Content-Length: 55\r\n" +
                                         "\r\n" +
                                         "retry:2500\r\n" +
                                         "event:TCPSend\r\n" +
                                         "id:24\r\n" +
                                         "data:Hello, world\r\n" +
                                         "\r\n").getBytes());

        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage)session.getDecoderOutputQueue().poll();
        HttpContentMessage httpContent = httpResponse.getContent();
        assertTrue(httpContent.isComplete());
        assertEquals("retry:2500\r\n" +
                     "event:TCPSend\r\n" +
                     "id:24\r\n" +
                     "data:Hello, world\r\n" +
                     "\r\n", httpContent.asText(UTF_8.newDecoder()));

        assertTrue(session.getDecoderOutputQueue().isEmpty());
        decoder.finishDecode(session, session.getDecoderOutput());
        assertTrue(session.getDecoderOutputQueue().isEmpty());
        assertFalse(in.hasRemaining());
    }

    /** Body delivered across three buffers; only the last content message is marked complete. */
    @Test
    public void decodeLargeHttpResponseComplete() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();

        int bodyLength1 = 4096;
        int bodyLength2 = 4096;
        int bodyLength3 = 1;
        int contentLength = bodyLength1 + bodyLength2 + bodyLength3;
        String headers = "HTTP/1.1 200 OK\r\n" +
                         "Content-Length: " + contentLength + "\r\n" +
                         "\r\n";

        HttpResponseMessage httpResponse = (HttpResponseMessage) parse(session, decoder, headers);
        HttpContentMessage httpContent = httpResponse.getContent();
        assertFalse(httpContent.isComplete());

        String body1 = getBody(bodyLength1, '1');
        httpContent = (HttpContentMessage) parse(session, decoder, body1);
        assertFalse(httpContent.isComplete());
        assertEquals(body1, httpContent.asText(UTF_8.newDecoder()));

        String body2 = getBody(bodyLength2, '2');
        httpContent = (HttpContentMessage) parse(session, decoder, body2);
        assertFalse(httpContent.isComplete());
        assertEquals(body2, httpContent.asText(UTF_8.newDecoder()));

        String body3 = getBody(bodyLength3, '3');
        httpContent = (HttpContentMessage) parse(session, decoder, body3);
        assertTrue(httpContent.isComplete());
        assertEquals(body3, httpContent.asText(UTF_8.newDecoder()));

        decoder.finishDecode(session, session.getDecoderOutput());
        assertTrue(session.getDecoderOutputQueue().isEmpty());
    }

    /** Builds a body string of {@code size} repetitions of {@code ch}. */
    private static String getBody(int size, char ch) {
        char[] chars = new char[size];
        Arrays.fill(chars, ch);
        return new String(chars);
    }

    /** Feeds one UTF-8 encoded fragment to the decoder and polls exactly one decoded message. */
    private static HttpMessage parse(ProtocolCodecSessionEx session, ProtocolDecoder decoder, String part)
            throws Exception {
        ByteBuffer in = ByteBuffer.wrap((part).getBytes(StandardCharsets.UTF_8));
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());
        assertFalse(in.hasRemaining());
        assertFalse(session.getDecoderOutputQueue().isEmpty());
        return (HttpMessage) session.getDecoderOutputQueue().poll();
    }

    /**
     * A HEAD response carries Content-Length but no body; the decoder must treat
     * the headers-only message as complete. (Method name typo fixed: "Wiht" -> "With".)
     */
    @Test
    public void decodeHeadResponseWithContentLengthButNoContent() throws Exception {
        Mockery context = new Mockery();
        context.setThreadingPolicy(new Synchroniser());
        IoServiceEx httpService = context.mock(IoServiceEx.class);
        IoHandler httpHandler = context.mock(IoHandler.class);
        IoProcessorEx<DefaultHttpSession> processor = context.mock(IoProcessorEx.class);

        context.checking(new Expectations() {{
            allowing(httpService).getTransportMetadata();
            will(returnValue(new DefaultTransportMetadata(HttpProtocol.NAME)));
            allowing(httpService).getHandler();
            will(returnValue(httpHandler));
            allowing(httpService).getSessionConfig();
            will(returnValue(new DefaultIoSessionConfigEx()));
            allowing(httpService).getThreadLocalWriteRequest(with(any(int.class)));
            will(returnValue(new DefaultWriteRequestEx.ShareableWriteRequest()));
        }});

        ResourceAddressFactory addressFactory = ResourceAddressFactory.newResourceAddressFactory();
        ResourceAddress address = addressFactory.newResourceAddress(URI.create("http://localhost:4232/"));
        ResourceAddress remoteAddress = addressFactory.newResourceAddress(URI.create("http://localhost:8080/"));

        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        DefaultHttpSession httpSession =
                new DefaultHttpSession(httpService, processor, address, remoteAddress, session, null);
        httpSession.setMethod(HttpMethod.HEAD);
        // attach the HEAD http session so the decoder knows no body follows
        HttpConnector.HTTP_SESSION_KEY.set(session, httpSession);

        ProtocolDecoder decoder = new HttpResponseDecoder();
        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                                         "Content-Length: 55\r\n" +
                                         "\r\n").getBytes());

        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage)session.getDecoderOutputQueue().poll();
        HttpContentMessage httpContent = httpResponse.getContent();
        assertTrue(httpContent.isComplete());

        assertTrue(session.getDecoderOutputQueue().isEmpty());
        decoder.finishDecode(session, session.getDecoderOutput());
        assertTrue(session.getDecoderOutputQueue().isEmpty());
        assertFalse(in.hasRemaining());
        context.assertIsSatisfied();
    }

    /** Chunked body in one buffer: headers message, one content message, then the terminal empty chunk. */
    @Test
    public void decodeHttpResponseChunkedComplete() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();

        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                                         "Transfer-Encoding: chunked\r\n" +
                                         "\r\n" +
                                         "37\r\n" +
                                         "retry:2500\r\n" +
                                         "event:TCPSend\r\n" +
                                         "id:24\r\n" +
                                         "data:Hello, world\r\n" +
                                         "\r\n" +
                                         "\r\n" +
                                         "0\r\n" +
                                         "\r\n").getBytes());

        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage)session.getDecoderOutputQueue().poll();
        // the headers message carries an empty, not-yet-complete content placeholder
        assertEquals(allocator.wrap(allocator.allocate(0)), httpResponse.getContent().asBuffer());
        assertFalse(httpResponse.getContent().isComplete());
        assertTrue(httpResponse.getCookies().isEmpty());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpContentMessage httpContent = (HttpContentMessage)session.getDecoderOutputQueue().poll();
        assertFalse(httpContent.isComplete());
        assertEquals("retry:2500\r\n" +
                     "event:TCPSend\r\n" +
                     "id:24\r\n" +
                     "data:Hello, world\r\n" +
                     "\r\n", httpContent.asText(UTF_8.newDecoder()));

        // the zero-length terminal chunk is emitted as an empty, complete content message
        assertFalse(session.getDecoderOutputQueue().isEmpty());
        assertEquals(new HttpContentMessage(allocator.wrap(allocator.allocate(0)), true),
                     session.getDecoderOutputQueue().poll());

        assertTrue(session.getDecoderOutputQueue().isEmpty());
        decoder.finishDecode(session, session.getDecoderOutput());
        assertTrue(session.getDecoderOutputQueue().isEmpty());
        assertFalse(in.hasRemaining());
    }

    /** Chunked body split across two buffers; no terminal chunk seen, so content stays incomplete. */
    @Test
    public void decodeHttpResponseIncomplete() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();

        ByteBuffer[] in = new ByteBuffer[] {
            ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                             "Transfer-Encoding: chunked\r\n" +
                             "\r\n").getBytes()),
            ByteBuffer.wrap(("37\r\n" +
                             "retry:2500\r\n" +
                             "event:TCPSend\r\n" +
                             "id:24\r\n" +
                             "data:Hello, world\r\n" +
                             "\r\n" +
                             "\r\n").getBytes())
        };
        IoBufferEx[] buf = new IoBufferEx[] {
            allocator.wrap(in[0]),
            allocator.wrap(in[1]),
        };

        assertTrue(session.getDecoderOutputQueue().isEmpty());

        decoder.decode(session, (IoBuffer) buf[0], session.getDecoderOutput());
        assertFalse(session.getDecoderOutputQueue().isEmpty());
        assertFalse(in[0].hasRemaining());

        HttpResponseMessage httpResponse = (HttpResponseMessage)session.getDecoderOutputQueue().poll();
        assertEquals(allocator.wrap(allocator.allocate(0)), httpResponse.getContent().asBuffer());
        assertFalse(httpResponse.getContent().isComplete());
        assertTrue(httpResponse.getCookies().isEmpty());
        assertTrue(session.getDecoderOutputQueue().isEmpty());

        decoder.decode(session, (IoBuffer) buf[1], session.getDecoderOutput());
        assertFalse(session.getDecoderOutputQueue().isEmpty());
        assertFalse(in[1].hasRemaining());

        HttpContentMessage httpContent = (HttpContentMessage)session.getDecoderOutputQueue().poll();
        assertFalse(httpContent.isComplete());
        assertEquals("retry:2500\r\n" +
                     "event:TCPSend\r\n" +
                     "id:24\r\n" +
                     "data:Hello, world\r\n" +
                     "\r\n", httpContent.asText(UTF_8.newDecoder()));

        assertTrue(session.getDecoderOutputQueue().isEmpty());
        decoder.finishDecode(session, session.getDecoderOutput());
        assertTrue(session.getDecoderOutputQueue().isEmpty());
    }

    /** A Set-Cookie with an empty value ("name=") still yields a parsed cookie. */
    @Test
    public void decodeHttpResponseCookiesNoValue() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();

        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                                         "Server: Apache-Coyote/1.1\r\n" +
                                         "Set-Cookie: name=; Path=/path/; domain=somedomain.com\r\n" +
                                         "Transfer-Encoding: chunked\r\n" +
                                         "\r\n").getBytes());

        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage) session.getDecoderOutputQueue().poll();
        Set<HttpCookie> cookies = httpResponse.getCookies();
        assertFalse("Empty cookies", cookies.isEmpty());
        HttpCookie receivedCookie = cookies.iterator().next();
        HttpCookie expectedCookie = new DefaultHttpCookie("name", "somedomain.com", "/path/", null);
        assertEquals("Wrong parsing of the received cookies", expectedCookie, receivedCookie);
    }

    /** Cookie attributes present but empty (comment=; domain=; ...) parse as absent. */
    @Test
    public void decodeHttpResponseCookiesPropertiesNoValues() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();

        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                                         "Server: Apache-Coyote/1.1\r\n" +
                                         "Set-Cookie: cookieName=testCookie; comment=; domain=; max-age=; path=; version=\r\n" +
                                         "Transfer-Encoding: chunked\r\n" +
                                         "\r\n").getBytes());

        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage) session.getDecoderOutputQueue().poll();
        Set<HttpCookie> cookies = httpResponse.getCookies();
        assertFalse("Empty cookies", cookies.isEmpty());
        HttpCookie receivedCookie = cookies.iterator().next();
        HttpCookie expectedCookie = new DefaultHttpCookie("cookieName", null, null, "testCookie");
        assertEquals("Wrong parsing of the received cookies", expectedCookie, receivedCookie);
    }

    /** All cookie attributes populated: comment, domain, max-age, path, version. */
    @Test
    public void decodeHttpResponseCookiesPropertiesWithValues() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();

        ByteBuffer in = ByteBuffer.wrap(("HTTP/1.1 200 OK\r\n" +
                                         "Server: Apache-Coyote/1.1\r\n" +
                                         "Set-Cookie: cookieName=cookieValue; comment=c; domain=somedomain.com; max-age=2; path=/path/; version=1\r\n" +
                                         "Transfer-Encoding: chunked\r\n" +
                                         "\r\n").getBytes());

        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());

        assertFalse(session.getDecoderOutputQueue().isEmpty());
        HttpResponseMessage httpResponse = (HttpResponseMessage) session.getDecoderOutputQueue().poll();
        Set<HttpCookie> cookies = httpResponse.getCookies();
        assertFalse("Empty cookies", cookies.isEmpty());
        DefaultHttpCookie receivedCookie = (DefaultHttpCookie) cookies.iterator().next();
        DefaultHttpCookie expectedCookie = new DefaultHttpCookie("cookieName", "somedomain.com", "/path/", "cookieValue");
        expectedCookie.setComment("c");
        expectedCookie.setMaxAge(2);
        expectedCookie.setVersion(1);
        assertEquals("Wrong parsing of the received cookies", expectedCookie, receivedCookie);
    }

    /** Garbage bytes that are not an HTTP status line must raise ProtocolDecoderException. */
    @Test(expected = ProtocolDecoderException.class)
    public void decodeIncorrectHttpVersion() throws Exception {
        ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
        ProtocolDecoder decoder = new HttpResponseDecoder();
        IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
        ByteBuffer in = ByteBuffer.wrap(new byte[]{0x03, 0x00, 0x00, 0x00, 0x00});
        IoBufferEx buf = allocator.wrap(in);
        decoder.decode(session, (IoBuffer) buf, session.getDecoderOutput());
    }
}
package com.latin.continuoussr;

import java.util.ArrayList;
import java.util.Locale;

import org.json.JSONArray;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.apache.cordova.CallbackContext;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.media.AudioManager;
import android.os.Handler;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;
import android.os.Bundle;

/**
 * Cordova plugin exposing Android's SpeechRecognizer in a continuous mode:
 * recognition is restarted after every result / error while the activity is
 * visible, results are streamed back through a keep-alive PluginResult, and
 * the music stream is muted while listening to suppress recognizer beeps.
 *
 * Style and such borrowed from the TTS and PhoneListener plugins.
 */
public class ContinuousSpeechRecognizer extends CordovaPlugin {

    private static final String LOG_TAG = ContinuousSpeechRecognizer.class.getSimpleName();
    // constant request code; made final (was a mutable static that was never reassigned)
    private static final int REQUEST_CODE = 1001;

    private CallbackContext callbackContext;
    private LanguageDetailsChecker languageDetailsChecker;
    // removed dead fields (Camera cam, Parameters p, boolean isFlashOn):
    // they were never read or written anywhere in this class
    private SpeechRecognizer sr;
    private Intent intent;
    private AudioManager mAudioManager;
    // volume of STREAM_MUSIC captured before muting, restored in setStreamVolumeBack()
    private int mStreamVolume = 0;

    /**
     * Cordova entry point.
     *
     * @param action          "startRecognize" or "getSupportedLanguages".
     * @param args            for startRecognize: [max matches][language].
     * @param callbackContext callback used for all results of this invocation.
     * @return true if the action was recognized, false otherwise.
     */
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext) {
        // primitive boolean: no need for the Boolean wrapper here
        boolean isValidAction = true;
        this.callbackContext = callbackContext;
        if ("startRecognize".equals(action)) {
            startSpeechRecognitionActivity(args);
        } else if ("getSupportedLanguages".equals(action)) {
            getSupportedLanguages();
        } else {
            this.callbackContext.error("Unknown action: " + action);
            isValidAction = false;
        }
        return isValidAction;
    }

    /** Broadcasts ACTION_GET_LANGUAGE_DETAILS; LanguageDetailsChecker reports back via the callback. */
    private void getSupportedLanguages() {
        if (languageDetailsChecker == null) {
            languageDetailsChecker = new LanguageDetailsChecker(callbackContext);
        }
        Intent detailsIntent = new Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS);
        cordova.getActivity().sendOrderedBroadcast(detailsIntent, null, languageDetailsChecker,
                null, Activity.RESULT_OK, null, null);
    }

    /**
     * Fire an intent to start the speech recognition activity.
     *
     * @param args Argument array with the following string args: [req code][number of matches]
     */
    private void startSpeechRecognitionActivity(JSONArray args) {
        int maxMatches = 0;
        String language = Locale.getDefault().toString();

        try {
            if (args.length() > 0) {
                String temp = args.getString(0);
                maxMatches = Integer.parseInt(temp);
            }
            if (args.length() > 1) {
                language = args.getString(1);
            }
        } catch (Exception e) {
            // bad arguments are logged and defaults are used instead of failing the call
            Log.e(LOG_TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
        }

        intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "voice.recognition.test");
        if (maxMatches > 0) {
            intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
        }

        // SpeechRecognizer must be created and driven on the UI thread
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                sr = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity().getBaseContext());
                sr.setRecognitionListener(new Listener());
                sr.startListening(intent);
            }
        });

        mAudioManager = (AudioManager) cordova.getActivity().getSystemService(Context.AUDIO_SERVICE);
        mStreamVolume = mAudioManager.getStreamVolume(AudioManager.STREAM_MUSIC);
        muteStreamVolume();
    }

    @Override
    public void onResume(boolean b) {
        super.onResume(b);
        AppStatus.activityResumed();
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                sr.startListening(intent);
            }
        });
        muteStreamVolume();
    }

    @Override
    public void onPause(boolean b) {
        super.onPause(b);
        AppStatus.activityPaused();
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                sr.stopListening();
            }
        });
        setStreamVolumeBack();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                if (sr != null) {
                    sr.cancel();
                    sr.destroy();
                    sr = null;
                }
            }
        });
        setStreamVolumeBack();
    }

    /** Mute the music stream to suppress the recognizer's start/stop beeps. */
    private void muteStreamVolume() {
        mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
    }

    /** Restore the saved music-stream volume after a short delay (lets the final beep stay silent). */
    private void setStreamVolumeBack() {
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mStreamVolume, 0);
            }
        }, 300);
    }

    /** Terminates the callback with a final (non-keep-alive) result. Currently unused; kept for API symmetry. */
    private void returnSpeechResults(ArrayList<String> matches) {
        JSONArray jsonMatches = new JSONArray(matches);
        this.callbackContext.success(jsonMatches);
    }

    /** Sends one batch of matches while keeping the callback alive for further batches. */
    private void returnProgressResults(ArrayList<String> matches) {
        JSONArray jsonMatches = new JSONArray(matches);
        PluginResult progressResult = new PluginResult(PluginResult.Status.OK, jsonMatches);
        progressResult.setKeepCallback(true);
        callbackContext.sendPluginResult(progressResult);
    }

    /** Restarts listening after each result/error while visible; streams matches to JS. */
    class Listener implements RecognitionListener {
        public void onReadyForSpeech(Bundle params) {
        }

        public void onBeginningOfSpeech() {
        }

        public void onRmsChanged(float rmsdB) {
        }

        public void onBufferReceived(byte[] buffer) {
        }

        public void onEndOfSpeech() {
        }

        public void onError(int error) {
            // keep the "continuous" behavior: restart on any error while visible
            if (AppStatus.isActivityVisible()) {
                cordova.getActivity().runOnUiThread(new Runnable() {
                    public void run() {
                        sr.startListening(intent);
                    }
                });
            }
        }

        public void onResults(Bundle results) {
            ArrayList<String> matches = new ArrayList<String>();
            // typed list instead of the raw ArrayList + per-element cast;
            // getStringArrayList may return null, which previously NPE'd
            ArrayList<String> data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (data != null) {
                matches.addAll(data);
            }
            if (AppStatus.isActivityVisible()) {
                cordova.getActivity().runOnUiThread(new Runnable() {
                    public void run() {
                        sr.startListening(intent);
                    }
                });
            }
            returnProgressResults(matches);
        }

        public void onPartialResults(Bundle partialResults) {
        }

        public void onEvent(int eventType, Bundle params) {
        }
    }

    /** Tracks whether the hosting activity is in the foreground (drives restart decisions). */
    static class AppStatus {
        private static boolean activityVisible = true;

        public static boolean isActivityVisible() {
            return activityVisible;
        }

        public static void activityResumed() {
            activityVisible = true;
        }

        public static void activityPaused() {
            activityVisible = false;
        }
    }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import 
org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) @Category({IOTests.class, MediumTests.class}) public class TestHFileBlockIndex { @Parameters public static Collection<Object[]> compressionAlgorithms() { return HBaseTestingUtility.COMPRESSION_ALGORITHMS_PARAMETERIZED; } public TestHFileBlockIndex(Compression.Algorithm compr) { this.compr = compr; } private static final Log LOG = LogFactory.getLog(TestHFileBlockIndex.class); private static final int NUM_DATA_BLOCKS = 1000; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final int SMALL_BLOCK_SIZE = 4096; private static final int NUM_KV = 10000; private static FileSystem fs; private Path path; private Random rand; private long rootIndexOffset; private int numRootEntries; private int numLevels; private static final List<byte[]> keys = new ArrayList<byte[]>(); private final Compression.Algorithm compr; private byte[] firstKeyInFile; private Configuration conf; private static final int[] INDEX_CHUNK_SIZES = { 4096, 512, 384 }; private static final int[] EXPECTED_NUM_LEVELS = { 2, 3, 4 }; private static final int[] UNCOMPRESSED_INDEX_SIZES = { 19187, 21813, 23086 }; private static final boolean includesMemstoreTS = true; static { assert INDEX_CHUNK_SIZES.length == EXPECTED_NUM_LEVELS.length; assert INDEX_CHUNK_SIZES.length == UNCOMPRESSED_INDEX_SIZES.length; } @Before 
  public void setUp() throws IOException {
    keys.clear();
    rand = new Random(2389757);
    firstKeyInFile = null;
    conf = TEST_UTIL.getConfiguration();

    // This test requires at least HFile format version 2.
    conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MAX_FORMAT_VERSION);

    fs = HFileSystem.get(conf);
  }

  /** Round-trips an index without tags, then again with tags after a reset. */
  @Test
  public void testBlockIndex() throws IOException {
    testBlockIndexInternals(false);
    clear();
    testBlockIndexInternals(true);
  }

  // Re-initializes the shared state between the two testBlockIndex passes.
  // NOTE(review): this pins format version 3 while setUp() uses
  // HFile.MAX_FORMAT_VERSION — confirm the asymmetry is intentional.
  private void clear() throws IOException {
    keys.clear();
    rand = new Random(2389757);
    firstKeyInFile = null;
    conf = TEST_UTIL.getConfiguration();

    // This test requires at least HFile format version 2.
    conf.setInt(HFile.FORMAT_VERSION_KEY, 3);

    fs = HFileSystem.get(conf);
  }

  // Writes a block index to a fresh file and reads it back, verifying seeks.
  private void testBlockIndexInternals(boolean useTags) throws IOException {
    path = new Path(TEST_UTIL.getDataTestDir(), "block_index_" + compr + useTags);
    writeWholeIndex(useTags);
    readIndex(useTags);
  }

  /**
   * A wrapper around a block reader which only caches the results of the last
   * operation. Not thread-safe.
   */
  private static class BlockReaderWrapper implements HFile.CachingBlockReader {

    private HFileBlock.FSReader realReader;
    // Identity of the most recent read; a repeat of the same triple is a "hit".
    private long prevOffset;
    private long prevOnDiskSize;
    private boolean prevPread;
    private HFileBlock prevBlock;

    // Counters inspected by readIndex() to verify caching behavior.
    public int hitCount = 0;
    public int missCount = 0;

    public BlockReaderWrapper(HFileBlock.FSReader realReader) {
      this.realReader = realReader;
    }

    @Override
    public HFileBlock readBlock(long offset, long onDiskSize,
        boolean cacheBlock, boolean pread, boolean isCompaction,
        boolean updateCacheMetrics, BlockType expectedBlockType,
        DataBlockEncoding expectedDataBlockEncoding)
        throws IOException {
      // Serve from the single-entry "cache" when the request matches the
      // previous one exactly.
      if (offset == prevOffset && onDiskSize == prevOnDiskSize &&
          pread == prevPread) {
        hitCount += 1;
        return prevBlock;
      }

      missCount += 1;
      prevBlock = realReader.readBlockData(offset, onDiskSize, -1, pread);
      prevOffset = offset;
      prevOnDiskSize = onDiskSize;
      prevPread = pread;

      return prevBlock;
    }
  }

  /**
   * Reads the root index back from {@link #path} and seeks to every key
   * written by {@link #writeWholeIndex}, checking the expected hit/miss
   * pattern of the single-entry block cache wrapper.
   */
  private void readIndex(boolean useTags) throws IOException {
    long fileSize = fs.getFileStatus(path).getLen();
    LOG.info("Size of " + path + ": " + fileSize);

    FSDataInputStream istream = fs.open(path);
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(true)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTags)
        .withCompression(compr)
        .build();
    HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(istream,
        fs.getFileStatus(path).getLen(), meta);

    BlockReaderWrapper brw = new BlockReaderWrapper(blockReader);
    HFileBlockIndex.BlockIndexReader indexReader =
        new HFileBlockIndex.BlockIndexReader(
            CellComparator.COMPARATOR, numLevels, brw);

    indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset,
        fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries);

    long prevOffset = -1;
    int i = 0;
    int expectedHitCount = 0;
    int expectedMissCount = 0;
    LOG.info("Total number of keys: " + keys.size());
    for (byte[] key : keys) {
      assertTrue(key != null);
      assertTrue(indexReader != null);
      KeyValue.KeyOnlyKeyValue keyOnlyKey =
          new KeyValue.KeyOnlyKeyValue(key, 0, key.length);
      HFileBlock b =
          indexReader.seekToDataBlock(keyOnlyKey, null, true,
              true, false, null);
      // Keys before the first indexed key must not resolve to any block.
      if (CellComparator.COMPARATOR.compare(keyOnlyKey, firstKeyInFile, 0,
          firstKeyInFile.length) < 0) {
        assertTrue(b == null);
        ++i;
        continue;
      }

      String keyStr = "key #" + i + ", " + Bytes.toStringBinary(key);

      assertTrue("seekToDataBlock failed for " + keyStr, b != null);

      if (prevOffset == b.getOffset()) {
        // Same block as last time: must be a cache hit in the wrapper.
        assertEquals(++expectedHitCount, brw.hitCount);
      } else {
        LOG.info("First key in a new block: " + keyStr + ", block offset: "
            + b.getOffset() + ")");
        // Blocks are visited in increasing offset order; a new block is a miss.
        assertTrue(b.getOffset() > prevOffset);
        assertEquals(++expectedMissCount, brw.missCount);
        prevOffset = b.getOffset();
      }
      ++i;
    }

    istream.close();
  }

  /**
   * Writes {@link #NUM_DATA_BLOCKS} dummy data blocks with one index entry
   * each (16 keys per block, entry keyed on the 9th), interleaving inline
   * index blocks, then the root index. Records offsets/level counts in fields
   * for {@link #readIndex}.
   */
  private void writeWholeIndex(boolean useTags) throws IOException {
    assertEquals(0, keys.size());
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(true)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTags)
        .withCompression(compr)
        .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
        .build();
    HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta);
    FSDataOutputStream outputStream = fs.create(path);
    HFileBlockIndex.BlockIndexWriter biw =
        new HFileBlockIndex.BlockIndexWriter(hbw, null, null);

    for (int i = 0; i < NUM_DATA_BLOCKS; ++i) {
      hbw.startWriting(BlockType.DATA).write(
          String.valueOf(rand.nextInt(1000)).getBytes());
      long blockOffset = outputStream.getPos();
      hbw.writeHeaderAndData(outputStream);

      byte[] firstKey = null;
      byte[] family = Bytes.toBytes("f");
      byte[] qualifier = Bytes.toBytes("q");
      for (int j = 0; j < 16; ++j) {
        byte[] k = new KeyValue(TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j),
            family, qualifier, EnvironmentEdgeManager.currentTime(),
            KeyValue.Type.Put).getKey();
        keys.add(k);
        // The index entry for this block is keyed on the 9th key (j == 8),
        // not the block's smallest key.
        if (j == 8) {
          firstKey = k;
        }
      }
      assertTrue(firstKey != null);
      if (firstKeyInFile == null) {
        firstKeyInFile = firstKey;
      }
      biw.addEntry(firstKey, blockOffset, hbw.getOnDiskSizeWithHeader());

      writeInlineBlocks(hbw, outputStream, biw, false);
    }
    // Flush any remaining inline blocks before writing the root index.
    writeInlineBlocks(hbw, outputStream, biw, true);
    rootIndexOffset = biw.writeIndexBlocks(outputStream);
    outputStream.close();

    numLevels = biw.getNumLevels();
    numRootEntries = biw.getNumRootEntries();

    LOG.info("Index written: numLevels=" + numLevels + ", numRootEntries=" +
        numRootEntries + ", rootIndexOffset=" + rootIndexOffset);
  }

  // Drains the index writer's pending inline blocks into the output stream.
  private void writeInlineBlocks(HFileBlock.Writer hbw,
      FSDataOutputStream outputStream, HFileBlockIndex.BlockIndexWriter biw,
      boolean isClosing) throws IOException {
    while (biw.shouldWriteBlock(isClosing)) {
      long offset = outputStream.getPos();
      biw.writeInlineBlock(hbw.startWriting(biw.getInlineBlockType()));
      hbw.writeHeaderAndData(outputStream);
      biw.blockWritten(offset, hbw.getOnDiskSizeWithHeader(),
          hbw.getUncompressedSizeWithoutHeader());
      LOG.info("Wrote an inline index block at " + offset + ", size " +
          hbw.getOnDiskSizeWithHeader());
    }
  }

  /**
   * Builds a non-root (leaf-format) index block by hand and verifies that
   * {@link BlockIndexReader#binarySearchNonRootIndex} and
   * {@link BlockIndexReader#locateNonRootIndexEntry} find the right entries,
   * including keys that fall between indexed entries.
   */
  @Test
  public void testSecondaryIndexBinarySearch() throws IOException {
    int numTotalKeys = 99;
    assertTrue(numTotalKeys % 2 == 1); // Ensure no one made this even.

    // We only add odd-index keys into the array that we will binary-search.
    int numSearchedKeys = (numTotalKeys - 1) / 2;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);

    dos.writeInt(numSearchedKeys);
    int curAllEntriesSize = 0;
    int numEntriesAdded = 0;

    // Only odd-index elements of this array are used to keep the secondary
    // index entries of the corresponding keys.
    int secondaryIndexEntries[] = new int[numTotalKeys];

    for (int i = 0; i < numTotalKeys; ++i) {
      byte[] k = TestHFileWriterV2.randomOrderedKey(rand, i * 2);
      KeyValue cell = new KeyValue(k, Bytes.toBytes("f"), Bytes.toBytes("q"),
          Bytes.toBytes("val"));
      //KeyValue cell = new KeyValue.KeyOnlyKeyValue(k, 0, k.length);
      keys.add(cell.getKey());

      String msgPrefix = "Key #" + i + " (" + Bytes.toStringBinary(k) + "): ";

      // Pad log prefixes to a fixed width for readable output.
      StringBuilder padding = new StringBuilder();
      while (msgPrefix.length() + padding.length() < 70)
        padding.append(' ');
      msgPrefix += padding;
      if (i % 2 == 1) {
        dos.writeInt(curAllEntriesSize);
        secondaryIndexEntries[i] = curAllEntriesSize;
        LOG.info(msgPrefix + "secondary index entry #" + ((i - 1) / 2) +
            ", offset " + curAllEntriesSize);
        curAllEntriesSize += cell.getKey().length +
            HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;
        ++numEntriesAdded;
      } else {
        secondaryIndexEntries[i] = -1;
        LOG.info(msgPrefix + "not in the searched array");
      }
    }

    // Make sure the keys are increasing.
    for (int i = 0; i < keys.size() - 1; ++i)
      assertTrue(CellComparator.COMPARATOR.compare(
          new KeyValue.KeyOnlyKeyValue(keys.get(i), 0, keys.get(i).length),
          new KeyValue.KeyOnlyKeyValue(keys.get(i + 1), 0,
              keys.get(i + 1).length)) < 0);

    dos.writeInt(curAllEntriesSize);
    assertEquals(numSearchedKeys, numEntriesAdded);
    int secondaryIndexOffset = dos.size();
    // Entry-count int plus (numSearchedKeys + 1) secondary-index ints.
    assertEquals(Bytes.SIZEOF_INT * (numSearchedKeys + 2),
        secondaryIndexOffset);

    // Write each searched entry: (fileOffset, onDiskSize, key).
    for (int i = 1; i <= numTotalKeys - 1; i += 2) {
      assertEquals(dos.size(),
          secondaryIndexOffset + secondaryIndexEntries[i]);
      long dummyFileOffset = getDummyFileOffset(i);
      int dummyOnDiskSize = getDummyOnDiskSize(i);
      LOG.debug("Storing file offset=" + dummyFileOffset + " and onDiskSize=" +
          dummyOnDiskSize + " at offset " + dos.size());
      dos.writeLong(dummyFileOffset);
      dos.writeInt(dummyOnDiskSize);
      LOG.debug("Stored key " + ((i - 1) / 2) + " at offset " + dos.size());
      dos.write(keys.get(i));
    }

    dos.writeInt(curAllEntriesSize);

    ByteBuffer nonRootIndex = ByteBuffer.wrap(baos.toByteArray());
    for (int i = 0; i < numTotalKeys; ++i) {
      byte[] searchKey = keys.get(i);
      byte[] arrayHoldingKey = new byte[searchKey.length +
                                        searchKey.length / 2];

      // To make things a bit more interesting, store the key we are looking
      // for at a non-zero offset in a new array.
      System.arraycopy(searchKey, 0, arrayHoldingKey, searchKey.length / 2,
          searchKey.length);

      KeyValue.KeyOnlyKeyValue cell = new KeyValue.KeyOnlyKeyValue(
          arrayHoldingKey, searchKey.length / 2, searchKey.length);
      int searchResult = BlockIndexReader.binarySearchNonRootIndex(cell,
          nonRootIndex, CellComparator.COMPARATOR);
      String lookupFailureMsg = "Failed to look up key #" + i + " ("
          + Bytes.toStringBinary(searchKey) + ")";

      int expectedResult;
      int referenceItem;

      if (i % 2 == 1) {
        // This key is in the array we search as the element (i - 1) / 2. Make
        // sure we find it.
        expectedResult = (i - 1) / 2;
        referenceItem = i;
      } else {
        // This key is not in the array but between two elements on the array,
        // in the beginning, or in the end. The result should be the previous
        // key in the searched array, or -1 for i = 0.
        expectedResult = i / 2 - 1;
        referenceItem = i - 1;
      }

      assertEquals(lookupFailureMsg, expectedResult, searchResult);

      // Now test we can get the offset and the on-disk-size using a
      // higher-level API function.
      boolean locateBlockResult =
          (BlockIndexReader.locateNonRootIndexEntry(nonRootIndex, cell,
              CellComparator.COMPARATOR) != -1);

      if (i == 0) {
        assertFalse(locateBlockResult);
      } else {
        assertTrue(locateBlockResult);
        // locateNonRootIndexEntry positions the buffer at the found entry.
        String errorMsg = "i=" + i + ", position=" + nonRootIndex.position();
        assertEquals(errorMsg, getDummyFileOffset(referenceItem),
            nonRootIndex.getLong());
        assertEquals(errorMsg, getDummyOnDiskSize(referenceItem),
            nonRootIndex.getInt());
      }
    }
  }

  /**
   * Exercises {@link BlockIndexChunk} serialization size accounting and
   * sub-entry lookup.
   */
  @Test
  public void testBlockIndexChunk() throws IOException {
    BlockIndexChunk c = new BlockIndexChunk();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int N = 1000;
    // NOTE(review): numSubEntriesAt is declared but never written, so the
    // verification loop at the bottom never executes its body (all entries
    // are 0). Presumably `numSubEntriesAt[i] = numSubEntries;` is missing
    // from the loop below — confirm intent.
    int[] numSubEntriesAt = new int[N];
    int numSubEntries = 0;
    for (int i = 0; i < N; ++i) {
      // Serialized sizes reported by the chunk must match actual output sizes.
      baos.reset();
      DataOutputStream dos = new DataOutputStream(baos);
      c.writeNonRoot(dos);
      assertEquals(c.getNonRootSize(), dos.size());

      baos.reset();
      dos = new DataOutputStream(baos);
      c.writeRoot(dos);
      assertEquals(c.getRootSize(), dos.size());

      byte[] k = TestHFileWriterV2.randomOrderedKey(rand, i);
      numSubEntries += rand.nextInt(5) + 1;
      keys.add(k);
      c.add(k, getDummyFileOffset(i), getDummyOnDiskSize(i), numSubEntries);
    }

    // Test the ability to look up the entry that contains a particular
    // deeper-level index block's entry ("sub-entry"), assuming a global
    // 0-based ordering of sub-entries. This is needed for mid-key calculation.
    for (int i = 0; i < N; ++i) {
      for (int j = i == 0 ? 0 : numSubEntriesAt[i - 1];
           j < numSubEntriesAt[i]; ++j) {
        assertEquals(i, c.getEntryBySubEntry(j));
      }
    }
  }

  /** Checks if the HeapSize calculator is within reason */
  @Test
  public void testHeapSizeForBlockIndex() throws IOException {
    Class<HFileBlockIndex.BlockIndexReader> cl =
        HFileBlockIndex.BlockIndexReader.class;
    long expected = ClassSize.estimateBase(cl, false);

    HFileBlockIndex.BlockIndexReader bi =
        new HFileBlockIndex.BlockIndexReader(null, 1);
    long actual = bi.heapSize();

    // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
    // int [] blockDataSizes) are all null they are not going to show up in the
    // HeapSize calculation, so need to remove those array costs from expected.
    expected -= ClassSize.align(3 * ClassSize.ARRAY);

    if (expected != actual) {
      // Re-run with debug output before failing, to aid diagnosis.
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }
  }

  /**
   * Testing block index through the HFile writer/reader APIs. Allows to test
   * setting index block size through configuration, intermediate-level index
   * blocks, and caching index blocks on write.
   *
   * @throws IOException
   */
  @Test
  public void testHFileWriterAndReader() throws IOException {
    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
        "hfile_for_block_index");
    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache blockCache = cacheConf.getBlockCache();

    // One iteration per configured index chunk size / expected level count.
    for (int testI = 0; testI < INDEX_CHUNK_SIZES.length; ++testI) {
      int indexBlockSize = INDEX_CHUNK_SIZES[testI];
      int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];
      LOG.info("Index block size: " + indexBlockSize + ", compression: "
          + compr);
      // Evict all blocks that were cached-on-write by the previous invocation.
      blockCache.evictBlocksByHfileName(hfilePath.getName());

      conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);
      Set<String> keyStrSet = new HashSet<String>();
      byte[][] keys = new byte[NUM_KV][];
      byte[][] values = new byte[NUM_KV][];

      // Write the HFile
      {
        HFileContext meta = new HFileContextBuilder()
            .withBlockSize(SMALL_BLOCK_SIZE)
            .withCompression(compr)
            .build();
        HFile.Writer writer =
            HFile.getWriterFactory(conf, cacheConf)
                .withPath(fs, hfilePath)
                .withFileContext(meta)
                .create();
        Random rand = new Random(19231737);
        byte[] family = Bytes.toBytes("f");
        byte[] qualifier = Bytes.toBytes("q");
        for (int i = 0; i < NUM_KV; ++i) {
          byte[] row = TestHFileWriterV2.randomOrderedKey(rand, i);

          // Key will be interpreted by KeyValue.KEY_COMPARATOR
          KeyValue kv = new KeyValue(row, family, qualifier,
              EnvironmentEdgeManager.currentTime(),
              TestHFileWriterV2.randomValue(rand));
          byte[] k = kv.getKey();
          writer.append(kv);
          keys[i] = k;
          values[i] = CellUtil.cloneValue(kv);
          keyStrSet.add(Bytes.toStringBinary(k));
          // Keys must be written in strictly increasing order.
          if (i > 0) {
            assertTrue((CellComparator.COMPARATOR.compare(kv, keys[i - 1],
                0, keys[i - 1].length)) > 0);
          }
        }
        writer.close();
      }

      // Read the HFile
      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, conf);
      assertEquals(expectedNumLevels,
          reader.getTrailer().getNumDataIndexLevels());

      assertTrue(Bytes.equals(keys[0], reader.getFirstKey()));
      assertTrue(Bytes.equals(keys[NUM_KV - 1], reader.getLastKey()));
      LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));

      // Seek to every key forward then backward, with both pread modes.
      for (boolean pread : new boolean[] { false, true }) {
        HFileScanner scanner = reader.getScanner(true, pread);
        for (int i = 0; i < NUM_KV; ++i) {
          checkSeekTo(keys, scanner, i);
          checkKeyValue("i=" + i, keys[i], values[i], scanner.getKey(),
              scanner.getValue());
        }
        assertTrue(scanner.seekTo());
        for (int i = NUM_KV - 1; i >= 0; --i) {
          checkSeekTo(keys, scanner, i);
          checkKeyValue("i=" + i, keys[i], values[i], scanner.getKey(),
              scanner.getValue());
        }
      }

      // Manually compute the mid-key and validate it.
HFile.Reader reader2 = reader; HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader(); HFileBlock.BlockIterator iter = fsReader.blockRange(0, reader.getTrailer().getLoadOnOpenDataOffset()); HFileBlock block; List<byte[]> blockKeys = new ArrayList<byte[]>(); while ((block = iter.nextBlock()) != null) { if (block.getBlockType() != BlockType.LEAF_INDEX) return; ByteBuffer b = block.getBufferReadOnly(); int n = b.getInt(); // One int for the number of items, and n + 1 for the secondary index. int entriesOffset = Bytes.SIZEOF_INT * (n + 2); // Get all the keys from the leaf index block. S for (int i = 0; i < n; ++i) { int keyRelOffset = b.getInt(Bytes.SIZEOF_INT * (i + 1)); int nextKeyRelOffset = b.getInt(Bytes.SIZEOF_INT * (i + 2)); int keyLen = nextKeyRelOffset - keyRelOffset; int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset + HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD; byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset + keyLen); String blockKeyStr = Bytes.toString(blockKey); blockKeys.add(blockKey); // If the first key of the block is not among the keys written, we // are not parsing the non-root index block format correctly. assertTrue("Invalid block key from leaf-level block: " + blockKeyStr, keyStrSet.contains(blockKeyStr)); } } // Validate the mid-key. 
      // The reader's mid-key must match the first key of the middle
      // leaf-level index block collected above.
      assertEquals(
          Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),
          Bytes.toStringBinary(reader.midkey()));

      // Byte-exact expectation for the total uncompressed index size.
      assertEquals(UNCOMPRESSED_INDEX_SIZES[testI],
          reader.getTrailer().getUncompressedDataIndexSize());

      reader.close();
      // NOTE(review): reader2 is the same object as reader, so this closes
      // the reader a second time — confirm close() is idempotent or drop one.
      reader2.close();
    }
  }

  // Seeks the scanner to keys[i] and asserts an exact match (seekTo == 0).
  private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i)
      throws IOException {
    assertEquals("Failed to seek to key #" + i + " ("
        + Bytes.toStringBinary(keys[i]) + ")", 0,
        scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])));
  }

  // Asserts that the buffer's [arrayOffset, limit) contents equal arr.
  private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr,
      ByteBuffer buf) {
    assertEquals(msgPrefix + ": expected " + Bytes.toStringBinary(arr)
        + ", actual " + Bytes.toStringBinary(buf), 0, Bytes.compareTo(arr, 0,
        arr.length, buf.array(), buf.arrayOffset(), buf.limit()));
  }

  /** Check a key/value pair after it was read by the reader */
  private void checkKeyValue(String msgPrefix, byte[] expectedKey,
      byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) {
    if (!msgPrefix.isEmpty())
      msgPrefix += ". ";

    assertArrayEqualsBuffer(msgPrefix + "Invalid key", expectedKey, keyRead);
    assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue,
        valueRead);
  }
}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.thrift2.client; import java.io.IOException; import java.nio.ByteBuffer; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Future; import java.util.regex.Pattern; import org.apache.commons.lang3.NotImplementedException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CacheEvictionStats; import org.apache.hadoop.hbase.ClusterMetrics; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.RegionMetrics; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.CompactType; import org.apache.hadoop.hbase.client.CompactionState; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.LogQueryFilter; import 
org.apache.hadoop.hbase.client.OnlineLogRecord; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.SnapshotDescription; import org.apache.hadoop.hbase.client.SnapshotType; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.replication.TableCFs; import org.apache.hadoop.hbase.client.security.SecurityCapability; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.quotas.QuotaFilter; import org.apache.hadoop.hbase.quotas.QuotaSettings; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; import org.apache.hadoop.hbase.replication.SyncReplicationState; import org.apache.hadoop.hbase.rsgroup.RSGroupInfo; import org.apache.hadoop.hbase.security.access.GetUserPermissionsRequest; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException; import org.apache.hadoop.hbase.thrift2.ThriftUtilities; import org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor; import org.apache.hadoop.hbase.thrift2.generated.THBaseService; import org.apache.hadoop.hbase.thrift2.generated.TLogQueryFilter; import org.apache.hadoop.hbase.thrift2.generated.TNamespaceDescriptor; import org.apache.hadoop.hbase.thrift2.generated.TOnlineLogRecord; import org.apache.hadoop.hbase.thrift2.generated.TServerName; import org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor; import org.apache.hadoop.hbase.thrift2.generated.TTableName; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.thrift.TException; import org.apache.thrift.transport.TTransport; import org.apache.yetus.audience.InterfaceAudience; 
/**
 * An {@link Admin} implementation backed by a Thrift2 {@code THBaseService}
 * client. Supported operations delegate to the Thrift client, translating
 * arguments/results via {@link ThriftUtilities} and wrapping any
 * {@link TException} in an {@link IOException}. Operations the Thrift2 API
 * does not expose throw {@link NotImplementedException}.
 *
 * <p>Not thread-safe: shares a single Thrift client and transport.
 */
@InterfaceAudience.Private
public class ThriftAdmin implements Admin {

  private THBaseService.Client client;
  private TTransport transport;
  private int operationTimeout;
  private int syncWaitTimeout;
  private Configuration conf;

  /**
   * @param client     connected Thrift2 service client to delegate to
   * @param tTransport transport owned by this admin; closed by {@link #close()}
   * @param conf       source of operation/sync-wait timeouts
   */
  public ThriftAdmin(THBaseService.Client client, TTransport tTransport,
      Configuration conf) {
    this.client = client;
    this.transport = tTransport;
    this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
        HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
    this.syncWaitTimeout = conf.getInt("hbase.client.sync.wait.timeout.msec",
        10 * 60000); // 10min
    this.conf = conf;
  }

  @Override
  public int getOperationTimeout() {
    return operationTimeout;
  }

  @Override
  public int getSyncWaitTimeout() {
    return syncWaitTimeout;
  }

  // Abort is a no-op for the Thrift client; there is no server-side state to
  // tear down beyond closing the transport.
  @Override
  public void abort(String why, Throwable e) {
  }

  @Override
  public boolean isAborted() {
    return false;
  }

  /** Closes the underlying Thrift transport. */
  @Override
  public void close() {
    transport.close();
  }

  @Override
  public Configuration getConfiguration() {
    return conf;
  }

  @Override
  public boolean tableExists(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      return client.tableExists(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public Connection getConnection() {
    throw new NotImplementedException("getConnection not supported in ThriftAdmin");
  }

  // The listTableDescriptors overloads all funnel into the (pattern,
  // includeSysTables) variant below.
  @Override
  public List<TableDescriptor> listTableDescriptors() throws IOException {
    return listTableDescriptors((Pattern) null);
  }

  @Override
  public List<TableDescriptor> listTableDescriptors(boolean includeSysTables)
      throws IOException {
    return listTableDescriptors(null, includeSysTables);
  }

  @Override
  public List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException {
    return listTableDescriptors(pattern, false);
  }

  @Override
  public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables)
      throws IOException {
    try {
      // A null pattern means "all tables"; the Thrift API takes a regex string.
      String regex = (pattern == null ? null : pattern.toString());
      List<TTableDescriptor> tTableDescriptors = client
          .getTableDescriptorsByPattern(regex, includeSysTables);
      return ThriftUtilities.tableDescriptorsFromThrift(tTableDescriptors);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public TableName[] listTableNames() throws IOException {
    return listTableNames(null);
  }

  @Override
  public TableName[] listTableNames(Pattern pattern) throws IOException {
    return listTableNames(pattern, false);
  }

  @Override
  public TableName[] listTableNames(Pattern pattern, boolean includeSysTables)
      throws IOException {
    String regex = (pattern == null ? null : pattern.toString());
    try {
      List<TTableName> tTableNames = client.getTableNamesByPattern(regex, includeSysTables);
      return ThriftUtilities.tableNamesArrayFromThrift(tTableNames);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public TableDescriptor getDescriptor(TableName tableName)
      throws TableNotFoundException, IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      TTableDescriptor tTableDescriptor = client.getTableDescriptor(tTableName);
      return ThriftUtilities.tableDescriptorFromThrift(tTableDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException {
    try {
      List<TTableDescriptor> tTableDescriptors = client
          .getTableDescriptorsByNamespace(Bytes.toString(name));
      return ThriftUtilities.tableDescriptorsFromThrift(tTableDescriptors);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public TableName[] listTableNamesByNamespace(String name) throws IOException {
    try {
      List<TTableName> tTableNames = client.getTableNamesByNamespace(name);
      return ThriftUtilities.tableNamesArrayFromThrift(tTableNames);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  /** Creates a table with a single region (no split keys). */
  @Override
  public void createTable(TableDescriptor desc) throws IOException {
    createTable(desc, null);
  }

  /**
   * Creates a pre-split table with {@code numRegions} regions evenly spanning
   * [startKey, endKey]. Requires {@code numRegions >= 3} and
   * {@code startKey < endKey}.
   */
  @Override
  public void createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions)
      throws IOException {
    if(numRegions < 3) {
      throw new IllegalArgumentException("Must create at least three regions");
    } else if(Bytes.compareTo(startKey, endKey) >= 0) {
      throw new IllegalArgumentException("Start key must be smaller than end key");
    }
    if (numRegions == 3) {
      createTable(desc, new byte[][]{startKey, endKey});
      return;
    }
    // numRegions regions need numRegions - 1 split keys; startKey and endKey
    // account for two of them, so split the range numRegions - 3 more times.
    byte [][] splitKeys = Bytes.split(startKey, endKey, numRegions - 3);
    if(splitKeys == null || splitKeys.length != numRegions - 1) {
      throw new IllegalArgumentException("Unable to split key range into enough regions");
    }
    createTable(desc, splitKeys);
  }

  @Override
  public void createTable(TableDescriptor desc, byte[][] splitKeys) throws IOException {
    TTableDescriptor tTableDescriptor = ThriftUtilities.tableDescriptorFromHBase(desc);
    List<ByteBuffer> splitKeyInBuffer = ThriftUtilities.splitKeyFromHBase(splitKeys);
    try {
      client.createTable(tTableDescriptor, splitKeyInBuffer);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void deleteTable(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      client.deleteTable(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void truncateTable(TableName tableName, boolean preserveSplits) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      client.truncateTable(tTableName, preserveSplits);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void enableTable(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      client.enableTable(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void disableTable(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      client.disableTable(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public boolean isTableEnabled(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      return client.isTableEnabled(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public boolean isTableDisabled(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      return client.isTableDisabled(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public boolean isTableAvailable(TableName tableName) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      return client.isTableAvailable(tTableName);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void addColumnFamily(TableName tableName, ColumnFamilyDescriptor columnFamily)
      throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    TColumnFamilyDescriptor tColumnFamilyDescriptor = ThriftUtilities
        .columnFamilyDescriptorFromHBase(columnFamily);
    try {
      client.addColumnFamily(tTableName, tColumnFamilyDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void deleteColumnFamily(TableName tableName, byte[] columnFamily) throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    try {
      client.deleteColumnFamily(tTableName, ByteBuffer.wrap(columnFamily));
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void modifyColumnFamily(TableName tableName, ColumnFamilyDescriptor columnFamily)
      throws IOException {
    TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName);
    TColumnFamilyDescriptor tColumnFamilyDescriptor = ThriftUtilities
        .columnFamilyDescriptorFromHBase(columnFamily);
    try {
      client.modifyColumnFamily(tTableName, tColumnFamilyDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void modifyTable(TableDescriptor td) throws IOException {
    TTableDescriptor tTableDescriptor = ThriftUtilities
        .tableDescriptorFromHBase(td);
    try {
      client.modifyTable(tTableDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void modifyNamespace(NamespaceDescriptor descriptor) throws IOException {
    TNamespaceDescriptor tNamespaceDescriptor = ThriftUtilities
        .namespaceDescriptorFromHBase(descriptor);
    try {
      client.modifyNamespace(tNamespaceDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void deleteNamespace(String name) throws IOException {
    try {
      client.deleteNamespace(name);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public NamespaceDescriptor getNamespaceDescriptor(String name)
      throws NamespaceNotFoundException, IOException {
    try {
      TNamespaceDescriptor tNamespaceDescriptor = client.getNamespaceDescriptor(name);
      return ThriftUtilities.namespaceDescriptorFromThrift(tNamespaceDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public String[] listNamespaces() throws IOException {
    try {
      List<String> tNamespaces = client.listNamespaces();
      return tNamespaces.toArray(new String[tNamespaces.size()]);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public NamespaceDescriptor[] listNamespaceDescriptors() throws IOException {
    try {
      List<TNamespaceDescriptor> tNamespaceDescriptors = client.listNamespaceDescriptors();
      return ThriftUtilities.namespaceDescriptorsFromThrift(tNamespaceDescriptors);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void createNamespace(NamespaceDescriptor descriptor) throws IOException {
    TNamespaceDescriptor tNamespaceDescriptor = ThriftUtilities
        .namespaceDescriptorFromHBase(descriptor);
    try {
      client.createNamespace(tNamespaceDescriptor);
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  // ---------------------------------------------------------------------
  // Operations below are not exposed by the Thrift2 API and always throw.
  // NOTE(review): several of the messages below say "by pattern" — looks
  // like copy-paste from the pattern-based list methods; confirm and reword
  // in a behavioral change if desired (messages left untouched here).
  // ---------------------------------------------------------------------

  @Override
  public boolean switchRpcThrottle(boolean enable) throws IOException {
    throw new NotImplementedException("switchRpcThrottle by pattern not supported in ThriftAdmin");
  }

  @Override
  public boolean isRpcThrottleEnabled() throws IOException {
    throw new NotImplementedException(
        "isRpcThrottleEnabled by pattern not supported in ThriftAdmin");
  }

  @Override
  public boolean exceedThrottleQuotaSwitch(boolean enable) throws IOException {
    throw new NotImplementedException(
        "exceedThrottleQuotaSwitch by pattern not supported in ThriftAdmin");
  }

  @Override
  public List<TableDescriptor> listTableDescriptors(List<TableName> tableNames)
      throws IOException {
    throw new NotImplementedException("listTableDescriptors not supported in ThriftAdmin"
        + ", use getDescriptor to get descriptors one by one");
  }

  @Override
  public List<RegionInfo> getRegions(ServerName serverName) {
    throw new NotImplementedException("getRegions not supported in ThriftAdmin");
  }

  @Override
  public void flush(TableName tableName) {
    throw new NotImplementedException("flush not supported in ThriftAdmin");
  }

  @Override
  public void flushRegion(byte[] regionName) {
    throw new NotImplementedException("flushRegion not supported in ThriftAdmin");
  }

  @Override
  public void flushRegionServer(ServerName serverName) {
    throw new NotImplementedException("flushRegionServer not supported in ThriftAdmin");
  }

  @Override
  public void compact(TableName tableName) {
    throw new NotImplementedException("compact not supported in ThriftAdmin");
  }

  @Override
  public void compactRegion(byte[] regionName) {
    throw new NotImplementedException("compactRegion not supported in ThriftAdmin");
  }

  @Override
  public void compact(TableName tableName, byte[] columnFamily) {
    throw new NotImplementedException("compact not supported in ThriftAdmin");
  }

  @Override
  public void compactRegion(byte[] regionName, byte[] columnFamily) {
    throw new NotImplementedException("compactRegion not supported in ThriftAdmin");
  }

  @Override
  public void compact(TableName tableName, CompactType compactType) {
    throw new NotImplementedException("compact not supported in
ThriftAdmin"); } @Override public void compact(TableName tableName, byte[] columnFamily, CompactType compactType) { throw new NotImplementedException("compact not supported in ThriftAdmin"); } @Override public void majorCompact(TableName tableName) { throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); } @Override public void majorCompactRegion(byte[] regionName) { throw new NotImplementedException("majorCompactRegion not supported in ThriftAdmin"); } @Override public void majorCompact(TableName tableName, byte[] columnFamily) { throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); } @Override public void majorCompactRegion(byte[] regionName, byte[] columnFamily) { throw new NotImplementedException("majorCompactRegion not supported in ThriftAdmin"); } @Override public void majorCompact(TableName tableName, CompactType compactType) { throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); } @Override public void majorCompact(TableName tableName, byte[] columnFamily, CompactType compactType) { throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); } @Override public Map<ServerName, Boolean> compactionSwitch(boolean switchState, List<String> serverNamesList) { throw new NotImplementedException("compactionSwitch not supported in ThriftAdmin"); } @Override public void compactRegionServer(ServerName serverName) { throw new NotImplementedException("compactRegionServer not supported in ThriftAdmin"); } @Override public void majorCompactRegionServer(ServerName serverName) { throw new NotImplementedException("majorCompactRegionServer not supported in ThriftAdmin"); } @Override public void move(byte[] encodedRegionName) { throw new NotImplementedException("move not supported in ThriftAdmin"); } @Override public void move(byte[] encodedRegionName, ServerName destServerName) { throw new NotImplementedException("move not supported in ThriftAdmin"); } @Override public void 
assign(byte[] regionName) { throw new NotImplementedException("assign not supported in ThriftAdmin"); } @Override public void unassign(byte[] regionName, boolean force) { throw new NotImplementedException("unassign not supported in ThriftAdmin"); } @Override public void offline(byte[] regionName) { throw new NotImplementedException("offline not supported in ThriftAdmin"); } @Override public boolean balancerSwitch(boolean onOrOff, boolean synchronous) { throw new NotImplementedException("balancerSwitch not supported in ThriftAdmin"); } @Override public boolean balance() { throw new NotImplementedException("balance not supported in ThriftAdmin"); } @Override public boolean balance(boolean force) { throw new NotImplementedException("balance not supported in ThriftAdmin"); } @Override public boolean isBalancerEnabled() { throw new NotImplementedException("isBalancerEnabled not supported in ThriftAdmin"); } @Override public CacheEvictionStats clearBlockCache(TableName tableName) { throw new NotImplementedException("clearBlockCache not supported in ThriftAdmin"); } @Override public boolean normalize() { throw new NotImplementedException("normalize not supported in ThriftAdmin"); } @Override public boolean isNormalizerEnabled() { throw new NotImplementedException("isNormalizerEnabled not supported in ThriftAdmin"); } @Override public boolean normalizerSwitch(boolean on) { throw new NotImplementedException("normalizerSwitch not supported in ThriftAdmin"); } @Override public boolean catalogJanitorSwitch(boolean onOrOff) { throw new NotImplementedException("catalogJanitorSwitch not supported in ThriftAdmin"); } @Override public int runCatalogJanitor() { throw new NotImplementedException("runCatalogJanitor not supported in ThriftAdmin"); } @Override public boolean isCatalogJanitorEnabled() { throw new NotImplementedException("isCatalogJanitorEnabled not supported in ThriftAdmin"); } @Override public boolean cleanerChoreSwitch(boolean onOrOff) { throw new 
NotImplementedException("cleanerChoreSwitch not supported in ThriftAdmin"); } @Override public boolean runCleanerChore() { throw new NotImplementedException("runCleanerChore not supported in ThriftAdmin"); } @Override public boolean isCleanerChoreEnabled() { throw new NotImplementedException("isCleanerChoreEnabled not supported in ThriftAdmin"); } @Override public Future<Void> mergeRegionsAsync(byte[] nameOfRegionA, byte[] nameOfRegionB, boolean forcible) { throw new NotImplementedException("mergeRegionsAsync not supported in ThriftAdmin"); } @Override public Future<Void> mergeRegionsAsync(byte[][] nameofRegionsToMerge, boolean forcible) { throw new NotImplementedException("mergeRegionsAsync not supported in ThriftAdmin"); } @Override public void split(TableName tableName) { throw new NotImplementedException("split not supported in ThriftAdmin"); } @Override public void split(TableName tableName, byte[] splitPoint) { throw new NotImplementedException("split not supported in ThriftAdmin"); } @Override public Future<Void> splitRegionAsync(byte[] regionName, byte[] splitPoint) { throw new NotImplementedException("splitRegionAsync not supported in ThriftAdmin"); } @Override public Future<Void> modifyTableAsync(TableDescriptor td) { throw new NotImplementedException("modifyTableAsync not supported in ThriftAdmin"); } @Override public void shutdown() { throw new NotImplementedException("shutdown not supported in ThriftAdmin"); } @Override public void stopMaster() { throw new NotImplementedException("stopMaster not supported in ThriftAdmin"); } @Override public boolean isMasterInMaintenanceMode() { throw new NotImplementedException("isMasterInMaintenanceMode not supported in ThriftAdmin"); } @Override public void stopRegionServer(String hostnamePort) { throw new NotImplementedException("stopRegionServer not supported in ThriftAdmin"); } @Override public ClusterMetrics getClusterMetrics(EnumSet<ClusterMetrics.Option> options) { throw new 
NotImplementedException("getClusterMetrics not supported in ThriftAdmin"); } @Override public List<RegionMetrics> getRegionMetrics(ServerName serverName) { throw new NotImplementedException("getRegionMetrics not supported in ThriftAdmin"); } @Override public List<RegionMetrics> getRegionMetrics(ServerName serverName, TableName tableName) { throw new NotImplementedException("getRegionMetrics not supported in ThriftAdmin"); } @Override public Future<Void> createNamespaceAsync(NamespaceDescriptor descriptor) { throw new NotImplementedException("createNamespaceAsync not supported in ThriftAdmin"); } @Override public Future<Void> modifyNamespaceAsync(NamespaceDescriptor descriptor) { throw new NotImplementedException("modifyNamespaceAsync not supported in ThriftAdmin"); } @Override public List<RegionInfo> getRegions(TableName tableName) { throw new NotImplementedException("getRegions not supported in ThriftAdmin"); } @Override public boolean abortProcedure(long procId, boolean mayInterruptIfRunning) { throw new NotImplementedException("abortProcedure not supported in ThriftAdmin"); } @Override public Future<Boolean> abortProcedureAsync(long procId, boolean mayInterruptIfRunning) { throw new NotImplementedException("abortProcedureAsync not supported in ThriftAdmin"); } @Override public String getProcedures() { throw new NotImplementedException("getProcedures not supported in ThriftAdmin"); } @Override public String getLocks() { throw new NotImplementedException("getLocks not supported in ThriftAdmin"); } @Override public void rollWALWriter(ServerName serverName) { throw new NotImplementedException("rollWALWriter not supported in ThriftAdmin"); } @Override public CompactionState getCompactionState(TableName tableName) { throw new NotImplementedException("getCompactionState not supported in ThriftAdmin"); } @Override public CompactionState getCompactionState(TableName tableName, CompactType compactType) { throw new NotImplementedException("getCompactionState not supported 
in ThriftAdmin"); } @Override public CompactionState getCompactionStateForRegion(byte[] regionName) { throw new NotImplementedException("getCompactionStateForRegion not supported in ThriftAdmin"); } @Override public long getLastMajorCompactionTimestamp(TableName tableName) { throw new NotImplementedException( "getLastMajorCompactionTimestamp not supported in ThriftAdmin"); } @Override public long getLastMajorCompactionTimestampForRegion(byte[] regionName) { throw new NotImplementedException( "getLastMajorCompactionTimestampForRegion not supported in ThriftAdmin"); } @Override public void snapshot(String snapshotName, TableName tableName) { throw new NotImplementedException("snapshot not supported in ThriftAdmin"); } @Override public void snapshot(String snapshotName, TableName tableName, SnapshotType type) { throw new NotImplementedException("snapshot not supported in ThriftAdmin"); } @Override public void snapshot(SnapshotDescription snapshot) { throw new NotImplementedException("snapshot not supported in ThriftAdmin"); } @Override public Future<Void> snapshotAsync(SnapshotDescription snapshot) { throw new NotImplementedException("snapshotAsync not supported in ThriftAdmin"); } @Override public boolean isSnapshotFinished(SnapshotDescription snapshot) { throw new NotImplementedException("isSnapshotFinished not supported in ThriftAdmin"); } @Override public void restoreSnapshot(String snapshotName) { throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); } @Override public void restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot, boolean restoreAcl) { throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); } @Override public Future<Void> cloneSnapshotAsync(String snapshotName, TableName tableName, boolean cloneAcl) throws IOException, TableExistsException, RestoreSnapshotException { throw new NotImplementedException("cloneSnapshotAsync not supported in ThriftAdmin"); } @Override public void 
execProcedure(String signature, String instance, Map<String, String> props) { throw new NotImplementedException("execProcedure not supported in ThriftAdmin"); } @Override public byte[] execProcedureWithReturn(String signature, String instance, Map<String, String> props) { throw new NotImplementedException("execProcedureWithReturn not supported in ThriftAdmin"); } @Override public boolean isProcedureFinished(String signature, String instance, Map<String, String> props) { throw new NotImplementedException("isProcedureFinished not supported in ThriftAdmin"); } @Override public List<SnapshotDescription> listSnapshots() { throw new NotImplementedException("listSnapshots not supported in ThriftAdmin"); } @Override public List<SnapshotDescription> listSnapshots(Pattern pattern) { throw new NotImplementedException("listSnapshots not supported in ThriftAdmin"); } @Override public List<SnapshotDescription> listTableSnapshots(Pattern tableNamePattern, Pattern snapshotNamePattern) { throw new NotImplementedException("listTableSnapshots not supported in ThriftAdmin"); } @Override public void deleteSnapshot(String snapshotName) { throw new NotImplementedException("deleteSnapshot not supported in ThriftAdmin"); } @Override public void deleteSnapshots(Pattern pattern) { throw new NotImplementedException("deleteSnapshots not supported in ThriftAdmin"); } @Override public void deleteTableSnapshots(Pattern tableNamePattern, Pattern snapshotNamePattern) { throw new NotImplementedException("deleteTableSnapshots not supported in ThriftAdmin"); } @Override public void setQuota(QuotaSettings quota) { throw new NotImplementedException("setQuota not supported in ThriftAdmin"); } @Override public List<QuotaSettings> getQuota(QuotaFilter filter) { throw new NotImplementedException("getQuota not supported in ThriftAdmin"); } @Override public CoprocessorRpcChannel coprocessorService() { throw new NotImplementedException("coprocessorService not supported in ThriftAdmin"); } @Override public 
CoprocessorRpcChannel coprocessorService(ServerName serverName) { throw new NotImplementedException("coprocessorService not supported in ThriftAdmin"); } @Override public void updateConfiguration(ServerName server) { throw new NotImplementedException("updateConfiguration not supported in ThriftAdmin"); } @Override public void updateConfiguration() { throw new NotImplementedException("updateConfiguration not supported in ThriftAdmin"); } @Override public List<SecurityCapability> getSecurityCapabilities() { throw new NotImplementedException("getSecurityCapabilities not supported in ThriftAdmin"); } @Override public boolean splitSwitch(boolean enabled, boolean synchronous) { throw new NotImplementedException("splitSwitch not supported in ThriftAdmin"); } @Override public boolean mergeSwitch(boolean enabled, boolean synchronous) { throw new NotImplementedException("mergeSwitch not supported in ThriftAdmin"); } @Override public boolean isSplitEnabled() { throw new NotImplementedException("isSplitEnabled not supported in ThriftAdmin"); } @Override public boolean isMergeEnabled() { throw new NotImplementedException("isMergeEnabled not supported in ThriftAdmin"); } @Override public Future<Void> addReplicationPeerAsync(String peerId, ReplicationPeerConfig peerConfig, boolean enabled) { throw new NotImplementedException("addReplicationPeerAsync not supported in ThriftAdmin"); } @Override public Future<Void> removeReplicationPeerAsync(String peerId) { throw new NotImplementedException("removeReplicationPeerAsync not supported in ThriftAdmin"); } @Override public Future<Void> enableReplicationPeerAsync(String peerId) { throw new NotImplementedException("enableReplicationPeerAsync not supported in ThriftAdmin"); } @Override public Future<Void> disableReplicationPeerAsync(String peerId) { throw new NotImplementedException("disableReplicationPeerAsync not supported in ThriftAdmin"); } @Override public ReplicationPeerConfig getReplicationPeerConfig(String peerId) { throw new 
NotImplementedException("getReplicationPeerConfig not supported in ThriftAdmin"); } @Override public Future<Void> updateReplicationPeerConfigAsync(String peerId, ReplicationPeerConfig peerConfig) { throw new NotImplementedException( "updateReplicationPeerConfigAsync not supported in ThriftAdmin"); } @Override public List<ReplicationPeerDescription> listReplicationPeers() { throw new NotImplementedException("listReplicationPeers not supported in ThriftAdmin"); } @Override public List<ReplicationPeerDescription> listReplicationPeers(Pattern pattern) { throw new NotImplementedException("listReplicationPeers not supported in ThriftAdmin"); } @Override public Future<Void> transitReplicationPeerSyncReplicationStateAsync(String peerId, SyncReplicationState state) { throw new NotImplementedException( "transitReplicationPeerSyncReplicationStateAsync not supported in ThriftAdmin"); } @Override public void decommissionRegionServers(List<ServerName> servers, boolean offload) { throw new NotImplementedException("decommissionRegionServers not supported in ThriftAdmin"); } @Override public List<ServerName> listDecommissionedRegionServers() { throw new NotImplementedException( "listDecommissionedRegionServers not supported in ThriftAdmin"); } @Override public void recommissionRegionServer(ServerName server, List<byte[]> encodedRegionNames) { throw new NotImplementedException("recommissionRegionServer not supported in ThriftAdmin"); } @Override public List<TableCFs> listReplicatedTableCFs() { throw new NotImplementedException("listReplicatedTableCFs not supported in ThriftAdmin"); } @Override public void enableTableReplication(TableName tableName) { throw new NotImplementedException("enableTableReplication not supported in ThriftAdmin"); } @Override public void disableTableReplication(TableName tableName) { throw new NotImplementedException("disableTableReplication not supported in ThriftAdmin"); } @Override public void clearCompactionQueues(ServerName serverName, Set<String> 
queues) { throw new NotImplementedException("clearCompactionQueues not supported in ThriftAdmin"); } @Override public List<ServerName> clearDeadServers(List<ServerName> servers) { throw new NotImplementedException("clearDeadServers not supported in ThriftAdmin"); } @Override public void cloneTableSchema(TableName tableName, TableName newTableName, boolean preserveSplits) { throw new NotImplementedException("cloneTableSchema not supported in ThriftAdmin"); } @Override public Future<Void> createTableAsync(TableDescriptor desc) { throw new NotImplementedException("createTableAsync not supported in ThriftAdmin"); } @Override public Future<Void> createTableAsync(TableDescriptor desc, byte[][] splitKeys) { throw new NotImplementedException("createTableAsync not supported in ThriftAdmin"); } @Override public Future<Void> deleteTableAsync(TableName tableName) { throw new NotImplementedException("deleteTableAsync not supported in ThriftAdmin"); } @Override public Future<Void> truncateTableAsync(TableName tableName, boolean preserveSplits) { throw new NotImplementedException("truncateTableAsync not supported in ThriftAdmin"); } @Override public Future<Void> enableTableAsync(TableName tableName) { throw new NotImplementedException("enableTableAsync not supported in ThriftAdmin"); } @Override public Future<Void> disableTableAsync(TableName tableName) { throw new NotImplementedException("disableTableAsync not supported in ThriftAdmin"); } @Override public Future<Void> deleteColumnFamilyAsync(TableName tableName, byte[] columnFamily) { throw new NotImplementedException("deleteColumnFamilyAsync not supported in ThriftAdmin"); } @Override public Future<Void> addColumnFamilyAsync(TableName tableName, ColumnFamilyDescriptor columnFamily) { throw new NotImplementedException("addColumnFamilyAsync not supported in ThriftAdmin"); } @Override public Future<Void> modifyColumnFamilyAsync(TableName tableName, ColumnFamilyDescriptor columnFamily) { throw new 
NotImplementedException("modifyColumnFamilyAsync not supported in ThriftAdmin"); } @Override public Future<Void> deleteNamespaceAsync(String name) { throw new NotImplementedException("deleteNamespaceAsync not supported in ThriftAdmin"); } @Override public Map<TableName, Long> getSpaceQuotaTableSizes() throws IOException { throw new NotImplementedException("getSpaceQuotaTableSizes not supported in ThriftAdmin"); } @Override public Map<TableName, SpaceQuotaSnapshot> getRegionServerSpaceQuotaSnapshots( ServerName serverName) throws IOException { throw new NotImplementedException( "getRegionServerSpaceQuotaSnapshots not supported in ThriftAdmin"); } @Override public SpaceQuotaSnapshot getCurrentSpaceQuotaSnapshot(String namespace) throws IOException { throw new NotImplementedException("getCurrentSpaceQuotaSnapshot not supported in ThriftAdmin"); } @Override public SpaceQuotaSnapshot getCurrentSpaceQuotaSnapshot(TableName tableName) throws IOException { throw new NotImplementedException("getCurrentSpaceQuotaSnapshot not supported in ThriftAdmin"); } @Override public void grant(UserPermission userPermission, boolean mergeExistingPermissions) { throw new NotImplementedException("grant not supported in ThriftAdmin"); } @Override public void revoke(UserPermission userPermission) { throw new NotImplementedException("revoke not supported in ThriftAdmin"); } @Override public List<UserPermission> getUserPermissions( GetUserPermissionsRequest getUserPermissionsRequest) { throw new NotImplementedException("getUserPermissions not supported in ThriftAdmin"); } @Override public List<Boolean> hasUserPermissions(String userName, List<Permission> permissions) { throw new NotImplementedException("hasUserPermissions not supported in ThriftAdmin"); } @Override public boolean snapshotCleanupSwitch(boolean on, boolean synchronous) { throw new NotImplementedException("snapshotCleanupSwitch not supported in ThriftAdmin"); } @Override public boolean isSnapshotCleanupEnabled() { throw new 
NotImplementedException("isSnapshotCleanupEnabled not supported in ThriftAdmin"); } @Override public List<OnlineLogRecord> getSlowLogResponses(final Set<ServerName> serverNames, final LogQueryFilter logQueryFilter) throws IOException { Set<TServerName> tServerNames = ThriftUtilities.getServerNamesFromHBase(serverNames); TLogQueryFilter tLogQueryFilter = ThriftUtilities.getSlowLogQueryFromHBase(logQueryFilter); try { List<TOnlineLogRecord> tOnlineLogRecords = client.getSlowLogResponses(tServerNames, tLogQueryFilter); return ThriftUtilities.getSlowLogRecordsFromThrift(tOnlineLogRecords); } catch (TException e) { throw new IOException(e); } } @Override public List<Boolean> clearSlowLogResponses(final Set<ServerName> serverNames) throws IOException { Set<TServerName> tServerNames = ThriftUtilities.getServerNamesFromHBase(serverNames); try { return client.clearSlowLogResponses(tServerNames); } catch (TException e) { throw new IOException(e); } } @Override public RSGroupInfo getRSGroup(String groupName) { throw new NotImplementedException("getRSGroup not supported in ThriftAdmin"); } @Override public void moveServersToRSGroup(Set<Address> servers, String targetGroup) { throw new NotImplementedException("moveToRSGroup not supported in ThriftAdmin"); } @Override public void addRSGroup(String groupName) { throw new NotImplementedException("addRSGroup not supported in ThriftAdmin"); } @Override public void removeRSGroup(String groupName) { throw new NotImplementedException("removeRSGroup not supported in ThriftAdmin"); } @Override public boolean balanceRSGroup(String groupName) { throw new NotImplementedException("balanceRSGroup not supported in ThriftAdmin"); } @Override public List<RSGroupInfo> listRSGroups() { throw new NotImplementedException("listRSGroups not supported in ThriftAdmin"); } @Override public RSGroupInfo getRSGroup(Address hostPort) { throw new NotImplementedException("getRSGroup not supported in ThriftAdmin"); } @Override public void 
removeServersFromRSGroup(Set<Address> servers) { throw new NotImplementedException("removeRSGroup not supported in ThriftAdmin"); } @Override public RSGroupInfo getRSGroup(TableName tableName) { throw new NotImplementedException("getRSGroup not supported in ThriftAdmin"); } @Override public void setRSGroup(Set<TableName> tables, String groupName) { throw new NotImplementedException("setRSGroup not supported in ThriftAdmin"); } @Override public Future<Void> splitRegionAsync(byte[] regionName) throws IOException { return splitRegionAsync(regionName, null); } @Override public List<TableName> listTablesInRSGroup(String groupName) throws IOException { throw new NotImplementedException("setRSGroup not supported in ThriftAdmin"); } @Override public Pair<List<String>, List<TableName>> getConfiguredNamespacesAndTablesInRSGroup(String groupName) throws IOException { throw new NotImplementedException("setRSGroup not supported in ThriftAdmin"); } }
package com.soundcloud.android.crop;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.MotionEvent;

import java.util.ArrayList;

/**
 * Image view that layers draggable, resizable crop rectangles
 * ({@link HighlightView}) on top of the zoomable/pannable image provided by
 * {@link ImageViewTouchBase}, keeping every rectangle in sync with each
 * change to the image matrix (layout, zoom, pan).
 */
public class CropImageView extends ImageViewTouchBase {

    /** Highlight rectangles drawn over the image (in practice a single crop area). */
    ArrayList<HighlightView> highlightViews = new ArrayList<HighlightView>();

    /** Highlight currently being dragged or resized; null while no gesture is active. */
    HighlightView motionHighlightView;

    /**
     * Hosting context; {@link #onTouchEvent(MotionEvent)} casts it to
     * {@link CropImageActivity}, so this view must be hosted by that activity.
     * Captured in the constructors so the view is usable immediately after
     * inflation; collaborators may still overwrite the package-visible field.
     */
    Context context;

    /** Last observed touch position, used to compute per-event drag deltas. */
    private float lastX;
    private float lastY;

    /** Edge/corner code (or {@link HighlightView#MOVE}) grabbed at ACTION_DOWN. */
    private int motionEdge;

    public CropImageView(Context context) {
        super(context);
        // FIX: this field was declared but never assigned anywhere in this class,
        // so onTouchEvent() would NPE unless a collaborator injected it externally.
        this.context = context;
    }

    public CropImageView(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context; // FIX: see single-arg constructor
    }

    public CropImageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        this.context = context; // FIX: see single-arg constructor
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        if (bitmapDisplayed.getBitmap() != null) {
            // After a layout pass, re-sync every highlight with the (possibly new)
            // image matrix and re-center the view on the focused highlight.
            for (HighlightView hv : highlightViews) {
                hv.matrix.set(getUnrotatedMatrix());
                hv.invalidate();
                if (hv.hasFocus()) {
                    centerBasedOnHighlightView(hv);
                }
            }
        }
    }

    @Override
    protected void zoomTo(float scale, float centerX, float centerY) {
        super.zoomTo(scale, centerX, centerY);
        // Keep highlights glued to the image as it zooms.
        for (HighlightView hv : highlightViews) {
            hv.matrix.set(getUnrotatedMatrix());
            hv.invalidate();
        }
    }

    @Override
    protected void zoomIn() {
        super.zoomIn();
        for (HighlightView hv : highlightViews) {
            hv.matrix.set(getUnrotatedMatrix());
            hv.invalidate();
        }
    }

    @Override
    protected void zoomOut() {
        super.zoomOut();
        for (HighlightView hv : highlightViews) {
            hv.matrix.set(getUnrotatedMatrix());
            hv.invalidate();
        }
    }

    @Override
    protected void postTranslate(float deltaX, float deltaY) {
        super.postTranslate(deltaX, deltaY);
        // Panning is a pure translation, so translate the highlight matrices
        // instead of rebuilding them from the image matrix.
        for (HighlightView hv : highlightViews) {
            hv.matrix.postTranslate(deltaX, deltaY);
            hv.invalidate();
        }
    }

    /**
     * Routes touch gestures to the hit highlight: ACTION_DOWN selects the
     * highlight/edge, ACTION_MOVE drags or grows it, ACTION_UP finishes the
     * gesture and re-centers the view on the result.
     *
     * @return false while a save is in progress (gesture ignored), true otherwise
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        CropImageActivity cropImageActivity = (CropImageActivity) context;
        if (cropImageActivity.isSaving()) {
            return false; // ignore touches while the crop result is being written out
        }

        switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            // Find the first highlight hit by the touch and start a move/grow gesture.
            for (HighlightView hv : highlightViews) {
                int edge = hv.getHit(event.getX(), event.getY());
                if (edge != HighlightView.GROW_NONE) {
                    motionEdge = edge;
                    motionHighlightView = hv;
                    lastX = event.getX();
                    lastY = event.getY();
                    motionHighlightView.setMode((edge == HighlightView.MOVE)
                            ? HighlightView.ModifyMode.Move
                            : HighlightView.ModifyMode.Grow);
                    break;
                }
            }
            break;
        case MotionEvent.ACTION_UP:
            if (motionHighlightView != null) {
                centerBasedOnHighlightView(motionHighlightView);
                motionHighlightView.setMode(HighlightView.ModifyMode.None);
            }
            motionHighlightView = null;
            break;
        case MotionEvent.ACTION_MOVE:
            if (motionHighlightView != null) {
                // Apply the delta since the last event, then remember the new anchor.
                motionHighlightView.handleMotion(motionEdge,
                        event.getX() - lastX, event.getY() - lastY);
                lastX = event.getX();
                lastY = event.getY();
                ensureVisible(motionHighlightView);
            }
            break;
        }

        switch (event.getAction()) {
        case MotionEvent.ACTION_UP:
            center(true, true);
            break;
        case MotionEvent.ACTION_MOVE:
            // If we're not zoomed then there's no point in even allowing the
            // user to move the image around. This call to center puts it back
            // to the normalized location.
            if (getScale() == 1F) {
                center(true, true);
            }
            break;
        }

        return true;
    }

    /**
     * Pan the displayed image just enough that the cropping rectangle is fully
     * visible inside the view bounds.
     */
    private void ensureVisible(HighlightView hv) {
        Rect r = hv.drawRect;

        // Positive values pan right/down, negative pan left/up; zero means the
        // corresponding edge already fits.
        int panDeltaX1 = Math.max(0, getLeft() - r.left);
        int panDeltaX2 = Math.min(0, getRight() - r.right);
        int panDeltaY1 = Math.max(0, getTop() - r.top);
        int panDeltaY2 = Math.min(0, getBottom() - r.bottom);

        int panDeltaX = panDeltaX1 != 0 ? panDeltaX1 : panDeltaX2;
        int panDeltaY = panDeltaY1 != 0 ? panDeltaY1 : panDeltaY2;

        if (panDeltaX != 0 || panDeltaY != 0) {
            panBy(panDeltaX, panDeltaY);
        }
    }

    /**
     * If the cropping rectangle's size changed significantly (more than ~10%
     * relative zoom difference), change the view's center and scale so the
     * rectangle fills roughly 60% of the view; always pans it into view.
     */
    private void centerBasedOnHighlightView(HighlightView hv) {
        Rect drawRect = hv.drawRect;

        float width = drawRect.width();
        float height = drawRect.height();

        float thisWidth = getWidth();
        float thisHeight = getHeight();

        // Target zoom: crop rect occupies ~60% of the smaller view dimension.
        float z1 = thisWidth / width * .6F;
        float z2 = thisHeight / height * .6F;

        float zoom = Math.min(z1, z2);
        zoom = zoom * this.getScale();
        zoom = Math.max(1F, zoom); // never zoom out below the fitted scale

        if ((Math.abs(zoom - getScale()) / zoom) > .1) {
            // Animate toward the crop rect's center, mapped into view coordinates.
            float[] coordinates = new float[] { hv.cropRect.centerX(), hv.cropRect.centerY() };
            getUnrotatedMatrix().mapPoints(coordinates);
            zoomTo(zoom, coordinates[0], coordinates[1], 300F);
        }

        ensureVisible(hv);
    }

    @Override
    protected void onDraw(@NonNull Canvas canvas) {
        super.onDraw(canvas);
        for (HighlightView mHighlightView : highlightViews) {
            mHighlightView.draw(canvas);
        }
    }

    /** Register a highlight to be drawn and managed by this view. */
    public void add(HighlightView hv) {
        highlightViews.add(hv);
        invalidate();
    }
}
/* * Copyright 2014-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.docksidestage.dockside.dbflute.cbean.bs; import org.dbflute.cbean.AbstractConditionBean; import org.dbflute.cbean.ConditionBean; import org.dbflute.cbean.ConditionQuery; import org.dbflute.cbean.chelper.*; import org.dbflute.cbean.coption.*; import org.dbflute.cbean.dream.*; import org.dbflute.cbean.sqlclause.SqlClause; import org.dbflute.cbean.sqlclause.SqlClauseCreator; import org.dbflute.cbean.scoping.*; import org.dbflute.dbmeta.DBMetaProvider; import org.dbflute.twowaysql.factory.SqlAnalyzerFactory; import org.dbflute.twowaysql.style.BoundDateDisplayTimeZoneProvider; import org.docksidestage.dockside.dbflute.allcommon.DBFluteConfig; import org.docksidestage.dockside.dbflute.allcommon.DBMetaInstanceHandler; import org.docksidestage.dockside.dbflute.allcommon.ImplementedInvokerAssistant; import org.docksidestage.dockside.dbflute.allcommon.ImplementedSqlClauseCreator; import org.docksidestage.dockside.dbflute.cbean.*; import org.docksidestage.dockside.dbflute.cbean.cq.*; /** * The base condition-bean of VENDOR_$_DOLLAR. 
 * @author DBFlute(AutoGenerator)
 */
public class BsVendor$DollarCB extends AbstractConditionBean {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // Lazily created condition-query for the base-point table; see doGetConditionQuery().
    protected Vendor$DollarCQ _conditionQuery;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    /**
     * Create a new condition-bean, copying the framework-wide paging and
     * column-access options from {@link DBFluteConfig} into this instance.
     */
    public BsVendor$DollarCB() {
        if (DBFluteConfig.getInstance().isPagingCountLater()) {
            enablePagingCountLater();
        }
        if (DBFluteConfig.getInstance().isPagingCountLeastJoin()) {
            enablePagingCountLeastJoin();
        }
        if (DBFluteConfig.getInstance().isNonSpecifiedColumnAccessAllowed()) {
            enableNonSpecifiedColumnAccess();
        }
        if (DBFluteConfig.getInstance().isSpecifyColumnRequired()) {
            enableSpecifyColumnRequired();
        }
        xsetSpecifyColumnRequiredExceptDeterminer(DBFluteConfig.getInstance().getSpecifyColumnRequiredExceptDeterminer());
        if (DBFluteConfig.getInstance().isSpecifyColumnRequiredWarningOnly()) {
            xenableSpecifyColumnRequiredWarningOnly();
        }
        if (DBFluteConfig.getInstance().isQueryUpdateCountPreCheck()) {
            enableQueryUpdateCountPreCheck();
        }
    }

    // ===================================================================================
    //                                                                           SqlClause
    //                                                                           =========
    /**
     * Create the SQL clause, delegating to a custom {@link SqlClauseCreator}
     * from {@link DBFluteConfig} when one is configured.
     */
    @Override
    protected SqlClause createSqlClause() {
        SqlClauseCreator creator = DBFluteConfig.getInstance().getSqlClauseCreator();
        if (creator != null) {
            return creator.createSqlClause(this);
        }
        return new ImplementedSqlClauseCreator().createSqlClause(this); // as default
    }

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    @Override
    protected DBMetaProvider getDBMetaProvider() {
        return DBMetaInstanceHandler.getProvider(); // as default
    }

    /** @return The on-database name of the base-point table. (NotNull) */
    public String asTableDbName() {
        return "VENDOR_$_DOLLAR";
    }

    // ===================================================================================
    //                                                                 PrimaryKey Handling
    //                                                                 ===================
    /**
     * Accept the query condition of primary key as equal.
     * @param vendor$DollarId : PK, NotNull, INTEGER(10). (NotNull)
     * @return this. (NotNull)
     */
    public Vendor$DollarCB acceptPK(Integer vendor$DollarId) {
        assertObjectNotNull("vendor$DollarId", vendor$DollarId);
        BsVendor$DollarCB cb = this;
        cb.query().setVendor$DollarId_Equal(vendor$DollarId);
        return (Vendor$DollarCB)this;
    }

    /** Add ascending order-by on the primary key. @return this. (NotNull) */
    public ConditionBean addOrderBy_PK_Asc() {
        query().addOrderBy_Vendor$DollarId_Asc();
        return this;
    }

    /** Add descending order-by on the primary key. @return this. (NotNull) */
    public ConditionBean addOrderBy_PK_Desc() {
        query().addOrderBy_Vendor$DollarId_Desc();
        return this;
    }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    /**
     * Prepare for various queries on the base-point table, e.g.
     * {@code cb.query().setVendor$DollarId_Equal(value)}. Supports equal/range/in-scope,
     * like-search, from-to, (not-)exists referrer, derived-referrer, scalar-condition,
     * order-by and relation queries. (Generated example javadoc condensed.)
     * @return The instance of condition-query for base-point table to set up query. (NotNull)
     */
    public Vendor$DollarCQ query() {
        assertQueryPurpose(); // assert only when user-public query
        return doGetConditionQuery();
    }

    // Exposed for parameter comments and framework-internal use; skips the purpose assertion.
    public Vendor$DollarCQ xdfgetConditionQuery() { // public for parameter comment and internal
        return doGetConditionQuery();
    }

    // Lazily build (once) and return the condition-query instance.
    protected Vendor$DollarCQ doGetConditionQuery() {
        if (_conditionQuery == null) {
            _conditionQuery = createLocalCQ();
        }
        return _conditionQuery;
    }

    // Create the condition-query bound to the base-point alias at nest level 0.
    protected Vendor$DollarCQ createLocalCQ() {
        return xcreateCQ(null, getSqlClause(), getSqlClause().getBasePointAliasName(), 0);
    }

    // Create a condition-query and wire it back to this condition-bean.
    protected Vendor$DollarCQ xcreateCQ(ConditionQuery childQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        Vendor$DollarCQ cq = xnewCQ(childQuery, sqlClause, aliasName, nestLevel);
        cq.xsetBaseCB(this);
        return cq;
    }

    // Bare factory for the condition-query instance (overridable extension point).
    protected Vendor$DollarCQ xnewCQ(ConditionQuery childQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        return new Vendor$DollarCQ(childQuery, sqlClause, aliasName, nestLevel);
    }

    /**
     * {@inheritDoc}
     */
    public ConditionQuery localCQ() {
        return doGetConditionQuery();
    }

    // ===================================================================================
    //                                                                               Union
    //                                                                               =====
    /**
     * Set up 'union' for base-point table. <br>
     * You don't need to call SetupSelect in union-query,
     * because it inherits calls before. (Don't call SetupSelect after here)
     * @param unionCBLambda The callback for query of 'union'. (NotNull)
     */
    public void union(UnionQuery<Vendor$DollarCB> unionCBLambda) {
        final Vendor$DollarCB cb = new Vendor$DollarCB();
        cb.xsetupForUnion(this);
        xsyncUQ(cb);
        // lock/unlock guards the bean while the user lambda builds the union query
        try {
            lock();
            unionCBLambda.query(cb);
        } finally {
            unlock();
        }
        xsaveUCB(cb);
        final Vendor$DollarCQ cq = cb.query();
        query().xsetUnionQuery(cq);
    }

    /**
     * Set up 'union all' for base-point table. <br>
     * You don't need to call SetupSelect in union-query,
     * because it inherits calls before. (Don't call SetupSelect after here)
     * @param unionCBLambda The callback for query of 'union all'. (NotNull)
     */
    public void unionAll(UnionQuery<Vendor$DollarCB> unionCBLambda) {
        final Vendor$DollarCB cb = new Vendor$DollarCB();
        cb.xsetupForUnion(this);
        xsyncUQ(cb);
        try {
            lock();
            unionCBLambda.query(cb);
        } finally {
            unlock();
        }
        xsaveUCB(cb);
        final Vendor$DollarCQ cq = cb.query();
        query().xsetUnionAllQuery(cq);
    }

    // ===================================================================================
    //                                                                         SetupSelect
    //                                                                         ===========
    // [DBFlute-0.7.4]
    // ===================================================================================
    //                                                                             Specify
    //                                                                             =======
    // Lazily created specification handler; see specify().
    protected HpSpecification _specification;

    /**
     * Prepare for SpecifyColumn, (Specify)DerivedReferrer. <br>
     * This method should be called after SetupSelect, e.g.
     * {@code cb.specify().columnVendor$DollarName()}. (Generated example javadoc condensed.)
     * @return The instance of specification. (NotNull)
     */
    public HpSpecification specify() {
        assertSpecifyPurpose();
        if (_specification == null) { _specification = new HpSpecification(this
            , xcreateSpQyCall(() -> true, () -> xdfgetConditionQuery())
            , _purpose, getDBMetaProvider(), xcSDRFnFc()); }
        return _specification;
    }

    public HpColumnSpHandler localSp() {
        return specify();
    }

    // True when at least one column of this table has been explicitly specified.
    public boolean hasSpecifiedLocalColumn() {
        return _specification != null && _specification.hasSpecifiedColumn();
    }

    /**
     * Specification handler for VENDOR_$_DOLLAR: names the columns that can be
     * specified and provides the (Specify)MyselfDerived entry point.
     */
    public static class HpSpecification extends HpAbstractSpecification<Vendor$DollarCQ> {
        public HpSpecification(ConditionBean baseCB, HpSpQyCall<Vendor$DollarCQ> qyCall
                             , HpCBPurpose purpose, DBMetaProvider dbmetaProvider
                             , HpSDRFunctionFactory sdrFuncFactory)
        { super(baseCB, qyCall, purpose, dbmetaProvider, sdrFuncFactory); }

        /**
         * VENDOR_$_DOLLAR_ID: {PK, NotNull, INTEGER(10)}
         * @return The information object of specified column. (NotNull)
         */
        public SpecifiedColumn columnVendor$DollarId() { return doColumn("VENDOR_$_DOLLAR_ID"); }

        /**
         * VENDOR_$_DOLLAR_NAME: {VARCHAR(32)}
         * @return The information object of specified column. (NotNull)
         */
        public SpecifiedColumn columnVendor$DollarName() { return doColumn("VENDOR_$_DOLLAR_NAME"); }

        /** Specify every column of the table. */
        public void everyColumn() { doEveryColumn(); }

        /** Specify all columns except record-meta (common) columns. */
        public void exceptRecordMetaColumn() { doExceptRecordMetaColumn(); }

        @Override
        protected void doSpecifyRequiredColumn() {
            columnVendor$DollarId(); // PK
        }

        @Override
        protected String getTableDbName() { return "VENDOR_$_DOLLAR"; }

        /**
         * Prepare for (Specify)MyselfDerived (SubQuery).
         * @return The object to set up a function for myself table. (NotNull)
         */
        public HpSDRFunction<Vendor$DollarCB, Vendor$DollarCQ> myselfDerived() {
            assertDerived("myselfDerived");
            if (xhasSyncQyCall()) { xsyncQyCall().qy(); } // for sync (for example, this in ColumnQuery)
            return cHSDRF(_baseCB, _qyCall.qy(),
                (String fn, SubQuery<Vendor$DollarCB> sq, Vendor$DollarCQ cq, String al, DerivedReferrerOption op)
                    -> cq.xsmyselfDerive(fn, sq, al, op), _dbmetaProvider);
        }
    }

    // ===================================================================================
    //                                                                        Dream Cruise
    //                                                                        ============
    /**
     * Welcome to the Dream Cruise for condition-bean deep world. <br>
     * This is very specialty so you can get the frontier spirit. Bon voyage!
     * @return The condition-bean for dream cruise, which is linked to main condition-bean.
     */
    public Vendor$DollarCB dreamCruiseCB() {
        Vendor$DollarCB cb = new Vendor$DollarCB();
        cb.xsetupForDreamCruise((Vendor$DollarCB) this);
        return cb;
    }

    protected ConditionBean xdoCreateDreamCruiseCB() {
        return dreamCruiseCB();
    }

    // [DBFlute-0.9.5.3]
    // ===================================================================================
    //                                                                        Column Query
    //                                                                        ============
    /**
     * Set up column-query comparing one column with another, e.g.
     * {@code cb.columnQuery(colCB -> colCB.specify().columnFoo()).lessThan(...)}.
     * (Generated example javadoc condensed.)
     * @param colCBLambda The callback for specify-query of left column. (NotNull)
     * @return The object for setting up operand and right column. (NotNull)
     */
    public HpColQyOperand<Vendor$DollarCB> columnQuery(final SpecifyQuery<Vendor$DollarCB> colCBLambda) {
        return xcreateColQyOperand((rightSp, operand) -> {
            return xcolqy(xcreateColumnQueryCB(), xcreateColumnQueryCB(), colCBLambda, rightSp, operand);
        });
    }

    // Create a fresh condition-bean dedicated to column-query evaluation.
    protected Vendor$DollarCB xcreateColumnQueryCB() {
        Vendor$DollarCB cb = new Vendor$DollarCB();
        cb.xsetupForColumnQuery((Vendor$DollarCB)this);
        return cb;
    }

    // [DBFlute-0.9.6.3]
    // ===================================================================================
    //                                                                       OrScope Query
    //                                                                       =============
    /**
     * Set up the query for or-scope. <br>
     * (Same-column-and-same-condition-key conditions are allowed in or-scope)
     * @param orCBLambda The callback for query of or-condition. (NotNull)
     */
    public void orScopeQuery(OrQuery<Vendor$DollarCB> orCBLambda) {
        xorSQ((Vendor$DollarCB)this, orCBLambda);
    }

    /**
     * Set up the and-part of or-scope. <br>
     * (However nested or-scope query and as-or-split of like-search in and-part are unsupported)
     * @param andCBLambda The callback for query of and-condition. (NotNull)
     */
    public void orScopeQueryAndPart(AndQuery<Vendor$DollarCB> andCBLambda) {
        xorSQAP((Vendor$DollarCB)this, andCBLambda);
    }

    // ===================================================================================
    //                                                                          DisplaySQL
    //                                                                          ==========
    @Override
    protected SqlAnalyzerFactory getSqlAnalyzerFactory() {
        return new ImplementedInvokerAssistant().assistSqlAnalyzerFactory();
    }

    @Override
    protected String getConfiguredLogDatePattern() {
        return DBFluteConfig.getInstance().getLogDatePattern();
    }

    @Override
    protected String getConfiguredLogTimestampPattern() {
        return DBFluteConfig.getInstance().getLogTimestampPattern();
    }

    @Override
    protected String getConfiguredLogTimePattern() {
        return DBFluteConfig.getInstance().getLogTimePattern();
    }

    @Override
    protected BoundDateDisplayTimeZoneProvider getConfiguredLogTimeZoneProvider() {
        return DBFluteConfig.getInstance().getLogTimeZoneProvider();
    }

    // ===================================================================================
    //                                                                       Meta Handling
    //                                                                       =============
    public boolean hasUnionQueryOrUnionAllQuery() {
        return query().hasUnionQueryOrUnionAllQuery();
    }

    // ===================================================================================
    //                                                                        Purpose Type
    //                                                                        ============
    // Synchronize the specification's query-call with the main CB (or a fresh one).
    @Override
    protected void xprepareSyncQyCall(ConditionBean mainCB) {
        final Vendor$DollarCB cb;
        if (mainCB != null) {
            cb = (Vendor$DollarCB)mainCB;
        } else {
            cb = new Vendor$DollarCB();
        }
        specify().xsetSyncQyCall(xcreateSpQyCall(() -> true, () -> cb.query()));
    }

    // ===================================================================================
    //                                                                            Internal
    //                                                                            ========
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xgetConditionBeanClassNameInternally() { return Vendor$DollarCB.class.getName(); }
    protected String xgetConditionQueryClassNameInternally() { return Vendor$DollarCQ.class.getName(); }
    protected String xgetSubQueryClassNameInternally() { return SubQuery.class.getName(); }
    protected String xgetConditionOptionClassNameInternally() { return ConditionOption.class.getName(); }
}
/* * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.oauth.config; import org.apache.axiom.om.OMElement; import org.apache.axis2.util.JavaUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.oltu.oauth2.as.issuer.MD5Generator; import org.apache.oltu.oauth2.as.issuer.OAuthIssuer; import org.apache.oltu.oauth2.as.issuer.OAuthIssuerImpl; import org.apache.oltu.oauth2.as.validator.AuthorizationCodeValidator; import org.apache.oltu.oauth2.as.validator.ClientCredentialValidator; import org.apache.oltu.oauth2.as.validator.CodeValidator; import org.apache.oltu.oauth2.as.validator.PasswordValidator; import org.apache.oltu.oauth2.as.validator.RefreshTokenValidator; import org.apache.oltu.oauth2.as.validator.TokenValidator; import org.apache.oltu.oauth2.common.message.types.GrantType; import org.apache.oltu.oauth2.common.message.types.ResponseType; import org.apache.oltu.oauth2.common.validators.OAuthValidator; import org.wso2.carbon.identity.core.util.IdentityConfigParser; import org.wso2.carbon.identity.core.util.IdentityCoreConstants; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.oauth.common.IDTokenResponseValidator; import org.wso2.carbon.identity.oauth.common.IDTokenTokenResponseValidator; import 
org.wso2.carbon.identity.oauth.common.OAuthConstants; import org.wso2.carbon.identity.oauth.common.SAML2GrantValidator; import org.wso2.carbon.identity.oauth.tokenprocessor.PlainTextPersistenceProcessor; import org.wso2.carbon.identity.oauth.tokenprocessor.TokenPersistenceProcessor; import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception; import org.wso2.carbon.identity.oauth2.authz.handlers.ResponseTypeHandler; import org.wso2.carbon.identity.oauth2.token.handlers.clientauth.ClientAuthenticationHandler; import org.wso2.carbon.identity.oauth2.token.handlers.grant.AuthorizationGrantHandler; import org.wso2.carbon.identity.oauth2.token.handlers.grant.saml.SAML2TokenCallbackHandler; import org.wso2.carbon.identity.oauth2.validators.OAuth2ScopeValidator; import org.wso2.carbon.identity.openidconnect.CustomClaimsCallbackHandler; import org.wso2.carbon.identity.openidconnect.IDTokenBuilder; import org.wso2.carbon.utils.CarbonUtils; import javax.servlet.http.HttpServletRequest; import javax.xml.namespace.QName; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; /** * Runtime representation of the OAuth Configuration as configured through * identity.xml */ public class OAuthServerConfiguration { private static final String CONFIG_ELEM_OAUTH = "OAuth"; // Grant Handler Classes private static final String AUTHORIZATION_CODE_GRANT_HANDLER_CLASS = "org.wso2.carbon.identity.oauth2.token.handlers.grant.AuthorizationCodeHandler"; private static final String CLIENT_CREDENTIALS_GRANT_HANDLER_CLASS = "org.wso2.carbon.identity.oauth2.token.handlers.grant.ClientCredentialsGrantHandler"; private static final String PASSWORD_GRANT_HANDLER_CLASS = "org.wso2.carbon.identity.oauth2.token.handlers.grant.PasswordGrantHandler"; private static final String 
REFRESH_TOKEN_GRANT_HANDLER_CLASS = "org.wso2.carbon.identity.oauth2.token.handlers.grant.RefreshGrantHandler"; private static final String SAML20_BEARER_GRANT_HANDLER_CLASS = "org.wso2.carbon.identity.oauth2.token.handlers.grant.saml.SAML2BearerGrantHandler"; private static final String IWA_NTLM_BEARER_GRANT_HANDLER_CLASS = "org.wso2.carbon.identity.oauth2.token.handlers.grant.iwa.ntlm.NTLMAuthenticationGrantHandler"; private static Log log = LogFactory.getLog(OAuthServerConfiguration.class); private static OAuthServerConfiguration instance; private static String oauth1RequestTokenUrl = null; private static String oauth1AuthorizeUrl = null; private static String oauth1AccessTokenUrl = null; private static String oauth2AuthzEPUrl = null; private static String oauth2TokenEPUrl = null; private static String oauth2UserInfoEPUrl = null; private static String oidcConsentPageUrl = null; private static String oauth2ConsentPageUrl = null; private static String oauth2ErrorPageUrl = null; private long authorizationCodeValidityPeriodInSeconds = 300; private long userAccessTokenValidityPeriodInSeconds = 3600; private long applicationAccessTokenValidityPeriodInSeconds = 3600; private long refreshTokenValidityPeriodInSeconds = 24L * 3600; private long timeStampSkewInSeconds = 300; private String tokenPersistenceProcessorClassName = "org.wso2.carbon.identity.oauth.tokenprocessor.PlainTextPersistenceProcessor"; private String oauthTokenGeneratorClassName; private OAuthIssuer oauthTokenGenerator; private boolean cacheEnabled = true; private boolean isRefreshTokenRenewalEnabled = true; private boolean assertionsUserNameEnabled = false; private boolean accessTokenPartitioningEnabled = false; private String accessTokenPartitioningDomains = null; private TokenPersistenceProcessor persistenceProcessor = null; private Set<OAuthCallbackHandlerMetaData> callbackHandlerMetaData = new HashSet<>(); private Map<String, String> supportedGrantTypeClassNames = new HashMap<>(); private Map<String, 
AuthorizationGrantHandler> supportedGrantTypes; private Map<String, String> supportedGrantTypeValidatorNames = new HashMap<>(); private Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> supportedGrantTypeValidators; private Map<String, String> supportedResponseTypeClassNames = new HashMap<>(); private Map<String, ResponseTypeHandler> supportedResponseTypes; private Map<String, String> supportedResponseTypeValidatorNames = new HashMap<>(); private Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> supportedResponseTypeValidators; private String[] supportedClaims = null; private Map<String, Properties> supportedClientAuthHandlerData = new HashMap<>(); private List<ClientAuthenticationHandler> supportedClientAuthHandlers; private String saml2TokenCallbackHandlerName = null; private SAML2TokenCallbackHandler saml2TokenCallbackHandler = null; private Map<String, String> tokenValidatorClassNames = new HashMap(); private boolean isAuthContextTokGenEnabled = false; private String tokenGeneratorImplClass = "org.wso2.carbon.identity.oauth2.token.JWTTokenGenerator"; private String claimsRetrieverImplClass = "org.wso2.carbon.identity.oauth2.token.DefaultClaimsRetriever"; private String consumerDialectURI = "http://wso2.org/claims"; private String signatureAlgorithm = "SHA256withRSA"; private String authContextTTL = "15L"; // OpenID Connect configurations private String openIDConnectIDTokenBuilderClassName = "org.wso2.carbon.identity.openidconnect.DefaultIDTokenBuilder"; private String openIDConnectIDTokenCustomClaimsHanlderClassName = "org.wso2.carbon.identity.openidconnect.SAMLAssertionClaimsCallback"; private IDTokenBuilder openIDConnectIDTokenBuilder = null; private CustomClaimsCallbackHandler openidConnectIDTokenCustomClaimsCallbackHandler = null; private String openIDConnectIDTokenIssuerIdentifier = null; private String openIDConnectIDTokenSubClaim = "http://wso2.org/claims/fullname"; private String openIDConnectSkipUserConsent = "true"; 
private String openIDConnectIDTokenExpiration = "300";
// UserInfo endpoint wiring: claim dialect, claim retriever, request/token validators, response builder.
private String openIDConnectUserInfoEndpointClaimDialect = "http://wso2.org/claims";
private String openIDConnectUserInfoEndpointClaimRetriever =
        "org.wso2.carbon.identity.oauth.endpoint.user.impl.UserInfoUserStoreClaimRetriever";
private String openIDConnectUserInfoEndpointRequestValidator =
        "org.wso2.carbon.identity.oauth.endpoint.user.impl.UserInforRequestDefaultValidator";
private String openIDConnectUserInfoEndpointAccessTokenValidator =
        "org.wso2.carbon.identity.oauth.endpoint.user.impl.UserInfoISAccessTokenValidator";
private String openIDConnectUserInfoEndpointResponseBuilder =
        "org.wso2.carbon.identity.oauth.endpoint.user.impl.UserInfoJSONResponseBuilder";

// Optional scope validator configured via <ScopeValidator>; see parseScopeValidator().
private OAuth2ScopeValidator oAuth2ScopeValidator;

// Private: configuration is read exactly once through the getInstance() singleton.
private OAuthServerConfiguration() {
    buildOAuthServerConfiguration();
}

/**
 * Returns the singleton configuration instance, creating and populating it on
 * first use (double-checked locking on the class object).
 */
public static OAuthServerConfiguration getInstance() {
    CarbonUtils.checkSecurity();
    if (instance == null) {
        synchronized (OAuthServerConfiguration.class) {
            if (instance == null) {
                instance = new OAuthServerConfiguration();
            }
        }
    }
    return instance;
}

// Reads the <OAuth> element of identity.xml and populates every configuration field.
// Missing <OAuth> element only logs a warning; all defaults then remain in effect.
private void buildOAuthServerConfiguration() {
    IdentityConfigParser configParser = IdentityConfigParser.getInstance();
    OMElement oauthElem = configParser.getConfigElement(CONFIG_ELEM_OAUTH);
    if (oauthElem == null) {
        warnOnFaultyConfiguration("OAuth element is not available.");
        return;
    }
    // read callback handler configurations
    parseOAuthCallbackHandlers(oauthElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.OAUTH_CALLBACK_HANDLERS)));
    // get the token validators by type
    parseTokenValidators(oauthElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.TOKEN_VALIDATORS)));
    // Get the configured scope validator
    OMElement scopeValidatorElem = oauthElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.SCOPE_VALIDATOR));
    if (scopeValidatorElem != null) {
        parseScopeValidator(scopeValidatorElem);
    }
    // read default timeout periods
    parseDefaultValidityPeriods(oauthElem);
    // read OAuth URLs
    parseOAuthURLs(oauthElem);
    // read caching configurations
    parseCachingConfiguration(oauthElem);
    // read refresh token renewal config
    parseRefreshTokenRenewalConfiguration(oauthElem);
    // read token persistence processor config
    parseTokenPersistenceProcessorConfig(oauthElem);
    // read supported grant types
    parseSupportedGrantTypesConfig(oauthElem);
    // read supported response types
    parseSupportedResponseTypesConfig(oauthElem);
    // read supported client authentication handlers
    parseSupportedClientAuthHandlersConfig(oauthElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.CLIENT_AUTH_HANDLERS)));
    // read SAML2 grant config
    parseSAML2GrantConfig(oauthElem);
    // read JWT generator config
    parseAuthorizationContextTokenGeneratorConfig(oauthElem);
    // read the assertions user name config
    parseEnableAssertionsUserNameConfig(oauthElem);
    // read access token partitioning config
    parseAccessTokenPartitioningConfig(oauthElem);
    // read access token partitioning domains config
    parseAccessTokenPartitioningDomainsConfig(oauthElem);
    // read openid connect configurations
    parseOpenIDConnectConfig(oauthElem);
    // parse OAuth 2.0 token generator
    parseOAuthTokenGeneratorConfig(oauthElem);
}

public Set<OAuthCallbackHandlerMetaData> getCallbackHandlerMetaData() {
    return callbackHandlerMetaData;
}

public String getOAuth1RequestTokenUrl() {
    return oauth1RequestTokenUrl;
}

public String getOAuth1AuthorizeUrl() {
    return oauth1AuthorizeUrl;
}

public String getOAuth1AccessTokenUrl() {
    return oauth1AccessTokenUrl;
}

public String getOAuth2AuthzEPUrl() {
    return oauth2AuthzEPUrl;
}

public String getOAuth2TokenEPUrl() {
    return oauth2TokenEPUrl;
}

public String getOauth2UserInfoEPUrl() {
    return oauth2UserInfoEPUrl;
}

/**
 * instantiate the OAuth token generator. to override the default implementation, one can specify the custom class
 * in the identity.xml.
* * @return */ public OAuthIssuer getOAuthTokenGenerator() { if (oauthTokenGenerator == null) { synchronized (this) { if (oauthTokenGenerator == null) { try { if (oauthTokenGeneratorClassName != null) { Class clazz = this.getClass().getClassLoader().loadClass(oauthTokenGeneratorClassName); oauthTokenGenerator = (OAuthIssuer) clazz.newInstance(); log.info("An instance of " + oauthTokenGeneratorClassName + " is created for OAuth token generation."); } else { oauthTokenGenerator = new OAuthIssuerImpl(new MD5Generator()); log.info("The default OAuth token issuer will be used. No custom token generator is set."); } } catch (Exception e) { String errorMsg = "Error when instantiating the OAuthIssuer : " + tokenPersistenceProcessorClassName + ". Defaulting to OAuthIssuerImpl"; log.error(errorMsg, e); oauthTokenGenerator = new OAuthIssuerImpl(new MD5Generator()); } } } } return oauthTokenGenerator; } public String getOIDCConsentPageUrl() { return oidcConsentPageUrl; } public String getOauth2ConsentPageUrl() { return oauth2ConsentPageUrl; } public String getOauth2ErrorPageUrl() { return oauth2ErrorPageUrl; } public long getAuthorizationCodeValidityPeriodInSeconds() { return authorizationCodeValidityPeriodInSeconds; } public long getUserAccessTokenValidityPeriodInSeconds() { return userAccessTokenValidityPeriodInSeconds; } public long getApplicationAccessTokenValidityPeriodInSeconds() { return applicationAccessTokenValidityPeriodInSeconds; } public long getRefreshTokenValidityPeriodInSeconds() { return refreshTokenValidityPeriodInSeconds; } public long getTimeStampSkewInSeconds() { return timeStampSkewInSeconds; } public boolean isCacheEnabled() { return cacheEnabled; } public boolean isRefreshTokenRenewalEnabled() { return isRefreshTokenRenewalEnabled; } public Map<String, AuthorizationGrantHandler> getSupportedGrantTypes() { if (supportedGrantTypes == null) { synchronized (this) { if (supportedGrantTypes == null) { Map<String, AuthorizationGrantHandler> 
supportedGrantTypesTemp = new Hashtable<>(); for (Map.Entry<String, String> entry : supportedGrantTypeClassNames.entrySet()) { AuthorizationGrantHandler authzGrantHandler = null; try { authzGrantHandler = (AuthorizationGrantHandler) Class.forName(entry.getValue()).newInstance(); authzGrantHandler.init(); } catch (InstantiationException e) { log.error("Error instantiating " + entry.getValue(), e); } catch (IllegalAccessException e) { log.error("Illegal access to " + entry.getValue(), e); } catch (ClassNotFoundException e) { log.error("Cannot find class: " + entry.getValue(), e); } catch (IdentityOAuth2Exception e) { log.error("Error while initializing " + entry.getValue(), e); } supportedGrantTypesTemp.put(entry.getKey(), authzGrantHandler); supportedGrantTypes = supportedGrantTypesTemp; } } } } return supportedGrantTypes; } /** * Returns a map of supported grant type validators that are configured in identity.xml. * This method loads default grant type validator classes for PASSWORD, CLIENT_CREDENTIALS, AUTHORIZATION_CODE, * REFRESH_TOKEN and SAML20_BEARER grant types and also loads validator classes configured in identity.xml for * custom grant types under /Server/OAuth/SupportedGrantTypes/GrantTypeValidatorImplClass element. * A validator class defined under this element should be an implementation of org.apache.amber.oauth2.common * .validators.OAuthValidator * * @return a map of <Grant type, Oauth validator class> */ public Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> getSupportedGrantTypeValidators() { if (supportedGrantTypeValidators == null) { synchronized (this) { if (supportedGrantTypeValidators == null) { Map<String,Class<? 
extends OAuthValidator<HttpServletRequest>>> supportedGrantTypeValidatorsTemp = new Hashtable<>(); // Load default grant type validators supportedGrantTypeValidatorsTemp .put(GrantType.PASSWORD.toString(), PasswordValidator.class); supportedGrantTypeValidatorsTemp.put(GrantType.CLIENT_CREDENTIALS.toString(), ClientCredentialValidator.class); supportedGrantTypeValidatorsTemp.put(GrantType.AUTHORIZATION_CODE.toString(), AuthorizationCodeValidator.class); supportedGrantTypeValidatorsTemp.put(GrantType.REFRESH_TOKEN.toString(), RefreshTokenValidator.class); supportedGrantTypeValidatorsTemp.put( org.wso2.carbon.identity.oauth.common.GrantType.SAML20_BEARER .toString(), SAML2GrantValidator.class); if (supportedGrantTypeValidatorNames != null) { // Load configured grant type validators for (Map.Entry<String, String> entry : supportedGrantTypeValidatorNames.entrySet()) { try { @SuppressWarnings("unchecked") Class<? extends OAuthValidator<HttpServletRequest>> oauthValidatorClass = (Class<? extends OAuthValidator<HttpServletRequest>>) Class .forName(entry.getValue()); supportedGrantTypeValidatorsTemp .put(entry.getKey(), oauthValidatorClass); } catch (ClassNotFoundException e) { log.error("Cannot find class: " + entry.getValue(), e); } catch (ClassCastException e) { log.error("Cannot cast class: " + entry.getValue(), e); } } } supportedGrantTypeValidators = supportedGrantTypeValidatorsTemp; } } } return supportedGrantTypeValidators; } public Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> getSupportedResponseTypeValidators() { if (supportedResponseTypeValidators == null) { synchronized (this) { if (supportedResponseTypeValidators == null) { Map<String, Class<? 
extends OAuthValidator<HttpServletRequest>>> supportedResponseTypeValidatorsTemp = new Hashtable<>(); // Load default grant type validators supportedResponseTypeValidatorsTemp .put(ResponseType.CODE.toString(), CodeValidator.class); supportedResponseTypeValidatorsTemp.put(ResponseType.TOKEN.toString(), TokenValidator.class); supportedResponseTypeValidatorsTemp.put("id_token", IDTokenResponseValidator.class); supportedResponseTypeValidatorsTemp.put("id_token token", IDTokenTokenResponseValidator.class); if (supportedResponseTypeValidatorNames != null) { // Load configured grant type validators for (Map.Entry<String, String> entry : supportedResponseTypeValidatorNames .entrySet()) { try { @SuppressWarnings("unchecked") Class<? extends OAuthValidator<HttpServletRequest>> oauthValidatorClass = (Class<? extends OAuthValidator<HttpServletRequest>>) Class .forName(entry.getValue()); supportedResponseTypeValidatorsTemp .put(entry.getKey(), oauthValidatorClass); } catch (ClassNotFoundException e) { log.error("Cannot find class: " + entry.getValue(), e); } catch (ClassCastException e) { log.error("Cannot cast class: " + entry.getValue(), e); } } supportedResponseTypeValidators = supportedResponseTypeValidatorsTemp; } } } } return supportedResponseTypeValidators; } public Map<String, ResponseTypeHandler> getSupportedResponseTypes() { if (supportedResponseTypes == null) { synchronized (this) { if (supportedResponseTypes == null) { Map<String,ResponseTypeHandler> supportedResponseTypesTemp = new Hashtable<>(); for (Map.Entry<String, String> entry : supportedResponseTypeClassNames.entrySet()) { ResponseTypeHandler responseTypeHandler = null; try { responseTypeHandler = (ResponseTypeHandler) Class.forName(entry.getValue()).newInstance(); responseTypeHandler.init(); } catch (InstantiationException e) { log.error("Error instantiating " + entry.getValue(), e); } catch (IllegalAccessException e) { log.error("Illegal access to " + entry.getValue(), e); } catch (ClassNotFoundException 
e) { log.error("Cannot find class: " + entry.getValue(), e); } catch (IdentityOAuth2Exception e) { log.error("Error while initializing " + entry.getValue(), e); } supportedResponseTypesTemp.put(entry.getKey(), responseTypeHandler); } supportedResponseTypes = supportedResponseTypesTemp; } } } return supportedResponseTypes; } public String[] getSupportedClaims() { return supportedClaims; } public List<ClientAuthenticationHandler> getSupportedClientAuthHandlers() { if (supportedClientAuthHandlers == null) { synchronized (this) { if (supportedClientAuthHandlers == null) { List<ClientAuthenticationHandler> supportedClientAuthHandlersTemp = new ArrayList<>(); for (Map.Entry<String, Properties> entry : supportedClientAuthHandlerData.entrySet()) { ClientAuthenticationHandler clientAuthenticationHandler = null; try { clientAuthenticationHandler = (ClientAuthenticationHandler) Class.forName(entry.getKey()).newInstance(); clientAuthenticationHandler.init(entry.getValue()); supportedClientAuthHandlersTemp.add(clientAuthenticationHandler); //Exceptions necessarily don't have to break the flow since there are cases //runnable without client auth handlers } catch (InstantiationException e) { log.error("Error instantiating " + entry, e); } catch (IllegalAccessException e) { log.error("Illegal access to " + entry, e); } catch (ClassNotFoundException e) { log.error("Cannot find class: " + entry, e); } catch (IdentityOAuth2Exception e) { log.error("Error while initializing " + entry, e); } supportedClientAuthHandlers = supportedClientAuthHandlersTemp; } } } } return supportedClientAuthHandlers; } public SAML2TokenCallbackHandler getSAML2TokenCallbackHandler() { if (StringUtils.isBlank(saml2TokenCallbackHandlerName)) { return null; } if (saml2TokenCallbackHandler == null) { synchronized (SAML2TokenCallbackHandler.class) { if (saml2TokenCallbackHandler == null) { try { Class clazz = Thread.currentThread().getContextClassLoader() .loadClass(saml2TokenCallbackHandlerName); 
saml2TokenCallbackHandler = (SAML2TokenCallbackHandler) clazz.newInstance(); } catch (ClassNotFoundException e) { log.error("Error while instantiating the SAML2TokenCallbackHandler ", e); } catch (InstantiationException e) { log.error("Error while instantiating the SAML2TokenCallbackHandler ", e); } catch (IllegalAccessException e) { log.error("Error while instantiating the SAML2TokenCallbackHandler ", e); } } } } return saml2TokenCallbackHandler; } public Map<String, String> getTokenValidatorClassNames() { return tokenValidatorClassNames; } public boolean isAccessTokenPartitioningEnabled() { return accessTokenPartitioningEnabled; } public boolean isUserNameAssertionEnabled() { return assertionsUserNameEnabled; } public String getAccessTokenPartitioningDomains() { return accessTokenPartitioningDomains; } private QName getQNameWithIdentityNS(String localPart) { return new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, localPart); } public boolean isAuthContextTokGenEnabled() { return isAuthContextTokGenEnabled; } public String getTokenGeneratorImplClass() { return tokenGeneratorImplClass; } public String getSignatureAlgorithm() { return signatureAlgorithm; } public String getConsumerDialectURI() { return consumerDialectURI; } public String getClaimsRetrieverImplClass() { return claimsRetrieverImplClass; } public String getAuthorizationContextTTL() { return authContextTTL; } public TokenPersistenceProcessor getPersistenceProcessor() throws IdentityOAuth2Exception { if (persistenceProcessor == null) { synchronized (this) { if (persistenceProcessor == null) { try { Class clazz = this.getClass().getClassLoader() .loadClass(tokenPersistenceProcessorClassName); persistenceProcessor = (TokenPersistenceProcessor) clazz.newInstance(); if (log.isDebugEnabled()) { log.debug("An instance of " + tokenPersistenceProcessorClassName + " is created for OAuthServerConfiguration."); } } catch (Exception e) { String errorMsg = "Error when instantiating the 
TokenPersistenceProcessor : " + tokenPersistenceProcessorClassName + ". Defaulting to PlainTextPersistenceProcessor"; log.error(errorMsg, e); persistenceProcessor = new PlainTextPersistenceProcessor(); } } } } return persistenceProcessor; } /** * Return an instance of the IDToken builder * * @return */ public IDTokenBuilder getOpenIDConnectIDTokenBuilder() { if (openIDConnectIDTokenBuilder == null) { synchronized (IDTokenBuilder.class) { if (openIDConnectIDTokenBuilder == null) { try { Class clazz = Thread.currentThread().getContextClassLoader() .loadClass(openIDConnectIDTokenBuilderClassName); openIDConnectIDTokenBuilder = (IDTokenBuilder) clazz.newInstance(); } catch (ClassNotFoundException e) { log.error("Error while instantiating the IDTokenBuilder ", e); } catch (InstantiationException e) { log.error("Error while instantiating the IDTokenBuilder ", e); } catch (IllegalAccessException e) { log.error("Error while instantiating the IDTokenBuilder ", e); } } } } return openIDConnectIDTokenBuilder; } /** * Returns the custom claims builder for the IDToken * * @return */ public CustomClaimsCallbackHandler getOpenIDConnectCustomClaimsCallbackHandler() { if (openidConnectIDTokenCustomClaimsCallbackHandler == null) { synchronized (CustomClaimsCallbackHandler.class) { if (openidConnectIDTokenCustomClaimsCallbackHandler == null) { try { Class clazz = Thread.currentThread().getContextClassLoader() .loadClass(openIDConnectIDTokenCustomClaimsHanlderClassName); openidConnectIDTokenCustomClaimsCallbackHandler = (CustomClaimsCallbackHandler) clazz.newInstance(); } catch (ClassNotFoundException e) { log.error("Error while instantiating the IDTokenBuilder ", e); } catch (InstantiationException e) { log.error("Error while instantiating the IDTokenBuilder ", e); } catch (IllegalAccessException e) { log.error("Error while instantiating the IDTokenBuilder ", e); } } } } return openidConnectIDTokenCustomClaimsCallbackHandler; } /** * @return the openIDConnectIDTokenIssuer */ public 
String getOpenIDConnectIDTokenIssuerIdentifier() {
    return openIDConnectIDTokenIssuerIdentifier;
}

public String getOpenIDConnectIDTokenSubjectClaim() {
    return openIDConnectIDTokenSubClaim;
}

/**
 * Returns if skip user consent enabled or not
 *
 * @return
 */
public boolean getOpenIDConnectSkipeUserConsentConfig() {
    return "true".equalsIgnoreCase(openIDConnectSkipUserConsent);
}

/**
 * @return the openIDConnectIDTokenExpiration
 */
public String getOpenIDConnectIDTokenExpiration() {
    return openIDConnectIDTokenExpiration;
}

public String getOpenIDConnectUserInfoEndpointClaimDialect() {
    return openIDConnectUserInfoEndpointClaimDialect;
}

public String getOpenIDConnectUserInfoEndpointClaimRetriever() {
    return openIDConnectUserInfoEndpointClaimRetriever;
}

public String getOpenIDConnectUserInfoEndpointRequestValidator() {
    return openIDConnectUserInfoEndpointRequestValidator;
}

public String getOpenIDConnectUserInfoEndpointAccessTokenValidator() {
    return openIDConnectUserInfoEndpointAccessTokenValidator;
}

public String getOpenIDConnectUserInfoEndpointResponseBuilder() {
    return openIDConnectUserInfoEndpointResponseBuilder;
}

// Reads every <OAuthCallbackHandler> child, builds its metadata, and registers it.
// Warns (but does not fail) when the parent element or all handlers are missing.
private void parseOAuthCallbackHandlers(OMElement callbackHandlersElem) {
    if (callbackHandlersElem == null) {
        warnOnFaultyConfiguration("OAuthCallbackHandlers element is not available.");
        return;
    }
    Iterator callbackHandlers =
            callbackHandlersElem.getChildrenWithLocalName(ConfigElements.OAUTH_CALLBACK_HANDLER);
    int callbackHandlerCount = 0;
    if (callbackHandlers != null) {
        for (; callbackHandlers.hasNext(); ) {
            OAuthCallbackHandlerMetaData cbHandlerMetadata =
                    buildAuthzCallbackHandlerMetadata((OMElement) callbackHandlers.next());
            if (cbHandlerMetadata != null) {
                callbackHandlerMetaData.add(cbHandlerMetadata);
                if (log.isDebugEnabled()) {
                    log.debug("OAuthCallbackHandlerMetadata was added. Class : " + cbHandlerMetadata.getClassName());
                }
                callbackHandlerCount++;
            }
        }
    }
    // if no callback handlers are registered, print a WARN
    if (!(callbackHandlerCount > 0)) {
        warnOnFaultyConfiguration("No OAuthCallbackHandler elements were found.");
    }
}

// Records each <TokenValidator> as a (type -> validator class name) entry; the
// instances are created elsewhere from tokenValidatorClassNames.
private void parseTokenValidators(OMElement tokenValidators) {
    if (tokenValidators == null) {
        return;
    }
    Iterator validators = tokenValidators.getChildrenWithLocalName(ConfigElements.TOKEN_VALIDATOR);
    if (validators != null) {
        for (; validators.hasNext(); ) {
            OMElement validator = (OMElement) validators.next();
            if (validator != null) {
                String clazzName = validator.getAttributeValue(new QName(ConfigElements.TOKEN_CLASS_ATTR));
                String type = validator.getAttributeValue(new QName(ConfigElements.TOKEN_TYPE_ATTR));
                tokenValidatorClassNames.put(type, clazzName);
            }
        }
    }
}

// Instantiates the configured OAuth2 scope validator and applies its optional
// space-separated "scopes to skip" attribute. Load failures are logged only.
private void parseScopeValidator(OMElement scopeValidatorElem) {
    String scopeValidatorClazz = scopeValidatorElem.getAttributeValue(new QName(ConfigElements.SCOPE_CLASS_ATTR));
    String scopesToSkipAttr = scopeValidatorElem.getAttributeValue(new QName(ConfigElements.SKIP_SCOPE_ATTR));
    try {
        Class clazz = Thread.currentThread().getContextClassLoader().loadClass(scopeValidatorClazz);
        OAuth2ScopeValidator scopeValidator = (OAuth2ScopeValidator) clazz.newInstance();
        if (scopesToSkipAttr != null && !"".equals(scopesToSkipAttr)) {
            //Split the scopes attr by a -space- character and create the set (avoid duplicates).
Set<String> scopesToSkip = new HashSet<String>(Arrays.asList(scopesToSkipAttr.split(" ")));
            scopeValidator.setScopesToSkip(scopesToSkip);
        }
        setoAuth2ScopeValidator(scopeValidator);
    } catch (ClassNotFoundException e) {
        log.error("Class not found in build path " + scopeValidatorClazz, e);
    } catch (InstantiationException e) {
        log.error("Class initialization error " + scopeValidatorClazz, e);
    } catch (IllegalAccessException e) {
        log.error("Class access error " + scopeValidatorClazz, e);
    }
}

// Logs a configuration problem with a standard prefix.
private void warnOnFaultyConfiguration(String logMsg) {
    log.warn("Error in OAuth Configuration. " + logMsg);
}

/**
 * Builds metadata for one <OAuthCallbackHandler> element: the mandatory Class
 * attribute, an optional Priority child, and optional Properties children.
 * Returns null (after logging an error) when the Class attribute is missing.
 */
private OAuthCallbackHandlerMetaData buildAuthzCallbackHandlerMetadata(OMElement omElement) {
    // read the class attribute which is mandatory
    String className = omElement.getAttributeValue(new QName(ConfigElements.CALLBACK_CLASS));
    if (className == null) {
        log.error("Mandatory attribute \"Class\" is not present in the "
                + "AuthorizationCallbackHandler element. "
                + "AuthorizationCallbackHandler will not be registered.");
        return null;
    }
    // read the priority element, if it is not there, use the default
    // priority of 1
    int priority = OAuthConstants.OAUTH_AUTHZ_CB_HANDLER_DEFAULT_PRIORITY;
    OMElement priorityElem =
            omElement.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.CALLBACK_PRIORITY));
    if (priorityElem != null) {
        priority = Integer.parseInt(priorityElem.getText());
    }
    if (log.isDebugEnabled()) {
        log.debug("Priority level of : " + priority + " is set for the "
                + "AuthorizationCallbackHandler with the class : " + className);
    }
    // read the additional properties.
OMElement paramsElem =
            omElement.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.CALLBACK_PROPERTIES));
    Properties properties = null;
    if (paramsElem != null) {
        Iterator paramItr = paramsElem.getChildrenWithLocalName(ConfigElements.CALLBACK_PROPERTY);
        properties = new Properties();
        if (log.isDebugEnabled()) {
            log.debug("Registering Properties for AuthorizationCallbackHandler class : " + className);
        }
        for (; paramItr.hasNext(); ) {
            OMElement paramElem = (OMElement) paramItr.next();
            String paramName = paramElem.getAttributeValue(new QName(ConfigElements.CALLBACK_ATTR_NAME));
            String paramValue = paramElem.getText();
            properties.put(paramName, paramValue);
            if (log.isDebugEnabled()) {
                log.debug("Property name : " + paramName + ", Property Value : " + paramValue);
            }
        }
    }
    return new OAuthCallbackHandlerMetaData(className, properties, priority);
}

// Reads the default validity periods (seconds) and timestamp skew from identity.xml;
// fields keep their defaults when an element is absent.
private void parseDefaultValidityPeriods(OMElement oauthConfigElem) {
    // set the authorization code default timeout
    OMElement authzCodeTimeoutElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.AUTHORIZATION_CODE_DEFAULT_VALIDITY_PERIOD));
    if (authzCodeTimeoutElem != null) {
        authorizationCodeValidityPeriodInSeconds = Long.parseLong(authzCodeTimeoutElem.getText());
    }
    // set the user access token default timeout
    OMElement accessTokTimeoutElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.USER_ACCESS_TOKEN_DEFAULT_VALIDITY_PERIOD));
    if (accessTokTimeoutElem != null) {
        userAccessTokenValidityPeriodInSeconds = Long.parseLong(accessTokTimeoutElem.getText());
    }
    // set the application access token default timeout
    OMElement applicationAccessTokTimeoutElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.APPLICATION_ACCESS_TOKEN_VALIDATION_PERIOD));
    if (applicationAccessTokTimeoutElem != null) {
        applicationAccessTokenValidityPeriodInSeconds =
                Long.parseLong(applicationAccessTokTimeoutElem.getText());
    }
    // set the refresh token default timeout (comment previously copy-pasted from the block above)
    OMElement refreshTokenTimeoutElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.REFRESH_TOKEN_VALIDITY_PERIOD));
    if (refreshTokenTimeoutElem != null) {
        refreshTokenValidityPeriodInSeconds = Long.parseLong(refreshTokenTimeoutElem.getText().trim());
    }
    // set the allowed timestamp skew
    OMElement timeStampSkewElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.TIMESTAMP_SKEW));
    if (timeStampSkewElem != null) {
        timeStampSkewInSeconds = Long.parseLong(timeStampSkewElem.getText());
    }
    if (log.isDebugEnabled()) {
        if (authzCodeTimeoutElem == null) {
            log.debug("\"Authorization Code Default Timeout\" element was not available "
                    + "in identity.xml. Continuing with the default value.");
        }
        if (accessTokTimeoutElem == null) {
            log.debug("\"Access Token Default Timeout\" element was not available "
                    + "in identity.xml. Continuing with the default value.");
        }
        if (refreshTokenTimeoutElem == null) {
            log.debug("\"Refresh Token Default Timeout\" element was not available "
                    + "in identity.xml. Continuing with the default value.");
        }
        if (timeStampSkewElem == null) {
            log.debug("\"Default Timestamp Skew\" element was not available "
                    + "in identity.xml. Continuing with the default value.");
        }
        // Fix: the removed nested isDebugEnabled() check was redundant, and all of these
        // values are seconds — the messages previously claimed "ms.".
        log.debug("Authorization Code Default Timeout is set to : "
                + authorizationCodeValidityPeriodInSeconds + "s.");
        log.debug("User Access Token Default Timeout is set to "
                + userAccessTokenValidityPeriodInSeconds + "s.");
        log.debug("Application Access Token Default Timeout is set to "
                + applicationAccessTokenValidityPeriodInSeconds + "s.");
        log.debug("Refresh Token validity period is set to " + refreshTokenValidityPeriodInSeconds + "s.");
        log.debug("Default TimestampSkew is set to " + timeStampSkewInSeconds + "s.");
    }
}

// Reads the OAuth 1.0a / OAuth 2.0 endpoint and page URLs, expanding URL
// placeholders; blank or missing elements leave the corresponding field null.
private void parseOAuthURLs(OMElement oauthConfigElem) {
    OMElement elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH1_REQUEST_TOKEN_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth1RequestTokenUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH1_AUTHORIZE_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth1AuthorizeUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH1_ACCESS_TOKEN_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth1AccessTokenUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH2_AUTHZ_EP_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth2AuthzEPUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH2_TOKEN_EP_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth2TokenEPUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
ConfigElements.OAUTH2_USERINFO_EP_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth2UserInfoEPUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH2_CONSENT_PAGE_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth2ConsentPageUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OIDC_CONSENT_PAGE_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oidcConsentPageUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
    elem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.OAUTH2_ERROR_PAGE_URL));
    if (elem != null) {
        if (StringUtils.isNotBlank(elem.getText())) {
            oauth2ErrorPageUrl = IdentityUtil.fillURLPlaceholders(elem.getText());
        }
    }
}

// Reads the <EnableCache> flag (default: true).
private void parseCachingConfiguration(OMElement oauthConfigElem) {
    OMElement enableCacheElem =
            oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.ENABLE_CACHE));
    if (enableCacheElem != null) {
        cacheEnabled = Boolean.parseBoolean(enableCacheElem.getText());
    }
    if (log.isDebugEnabled()) {
        log.debug("Enable OAuth Cache was set to : " + cacheEnabled);
    }
}

// Reads whether a refresh grant issues a new refresh token (default: true).
private void parseRefreshTokenRenewalConfiguration(OMElement oauthConfigElem) {
    OMElement enableRefreshTokenRenewalElem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(
            ConfigElements.RENEW_REFRESH_TOKEN_FOR_REFRESH_GRANT));
    if (enableRefreshTokenRenewalElem != null) {
        isRefreshTokenRenewalEnabled = Boolean.parseBoolean(enableRefreshTokenRenewalElem.getText());
    }
    if (log.isDebugEnabled()) {
        log.debug("RenewRefreshTokenForRefreshGrant was set to : " + isRefreshTokenRenewalEnabled);
    }
}

// Reads the access-token partitioning flag (default: false).
private void parseAccessTokenPartitioningConfig(OMElement oauthConfigElem) {
    OMElement enableAccessTokenPartitioningElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.ENABLE_ACCESS_TOKEN_PARTITIONING));
    if (enableAccessTokenPartitioningElem != null) {
        accessTokenPartitioningEnabled = Boolean.parseBoolean(enableAccessTokenPartitioningElem.getText());
    }
    if (log.isDebugEnabled()) {
        log.debug("Enable OAuth Access Token Partitioning was set to : " + accessTokenPartitioningEnabled);
    }
}

// Reads the raw partitioning-domains value (kept as configured, no parsing here).
private void parseAccessTokenPartitioningDomainsConfig(OMElement oauthConfigElem) {
    OMElement enableAccessTokenPartitioningElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.ACCESS_TOKEN_PARTITIONING_DOMAINS));
    if (enableAccessTokenPartitioningElem != null) {
        accessTokenPartitioningDomains = enableAccessTokenPartitioningElem.getText();
    }
    if (log.isDebugEnabled()) {
        log.debug("Enable OAuth Access Token Partitioning Domains was set to : "
                + accessTokenPartitioningDomains);
    }
}

// Reads the nested <EnableAssertions>/<UserName> flag (default: false).
private void parseEnableAssertionsUserNameConfig(OMElement oauthConfigElem) {
    OMElement enableAssertionsElem =
            oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.ENABLE_ASSERTIONS));
    if (enableAssertionsElem != null) {
        OMElement enableAssertionsUserNameElem = enableAssertionsElem.getFirstChildWithName(
                getQNameWithIdentityNS(ConfigElements.ENABLE_ASSERTIONS_USERNAME));
        if (enableAssertionsUserNameElem != null) {
            assertionsUserNameEnabled = Boolean.parseBoolean(enableAssertionsUserNameElem.getText());
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Enable Assertions-UserName was set to : " + assertionsUserNameEnabled);
    }
}

// Reads the token persistence processor class name; a blank element keeps the
// plain-text default.
private void parseTokenPersistenceProcessorConfig(OMElement oauthConfigElem) {
    OMElement persistenceprocessorConfigElem = oauthConfigElem.getFirstChildWithName(
            getQNameWithIdentityNS(ConfigElements.TOKEN_PERSISTENCE_PROCESSOR));
    if (persistenceprocessorConfigElem != null
            && StringUtils.isNotBlank(persistenceprocessorConfigElem.getText())) {
        tokenPersistenceProcessorClassName = persistenceprocessorConfigElem.getText().trim();
    }
    if
(log.isDebugEnabled()) { log.debug("Token Persistence Processor was set to : " + tokenPersistenceProcessorClassName); } } /** * parse the configuration to load the class name of the OAuth 2.0 token generator. * this is a global configuration at the moment. * @param oauthConfigElem */ private void parseOAuthTokenGeneratorConfig(OMElement oauthConfigElem) { OMElement tokenGeneratorClassConfigElem = oauthConfigElem .getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OAUTH_TOKEN_GENERATOR)); if (tokenGeneratorClassConfigElem != null && !"".equals(tokenGeneratorClassConfigElem.getText().trim())) { oauthTokenGeneratorClassName = tokenGeneratorClassConfigElem.getText().trim(); if (log.isDebugEnabled()) { log.debug("OAuth token generator is set to : " + oauthTokenGeneratorClassName); } } else { if (log.isDebugEnabled()) { log.debug("The default OAuth token issuer will be used. No custom token generator is set."); } } } private void parseSupportedGrantTypesConfig(OMElement oauthConfigElem) { OMElement supportedGrantTypesElem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SUPPORTED_GRANT_TYPES)); if (supportedGrantTypesElem != null) { Iterator<OMElement> iterator = supportedGrantTypesElem .getChildrenWithName(getQNameWithIdentityNS(ConfigElements.SUPPORTED_GRANT_TYPE)); while (iterator.hasNext()) { OMElement supportedGrantTypeElement = iterator.next(); OMElement grantTypeNameElement = supportedGrantTypeElement .getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.GRANT_TYPE_NAME)); String grantTypeName = null; if (grantTypeNameElement != null) { grantTypeName = grantTypeNameElement.getText(); } OMElement authzGrantHandlerClassNameElement = supportedGrantTypeElement .getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.GRANT_TYPE_HANDLER_IMPL_CLASS)); String authzGrantHandlerImplClass = null; if (authzGrantHandlerClassNameElement != null) { authzGrantHandlerImplClass = authzGrantHandlerClassNameElement.getText(); } if 
(!StringUtils.isEmpty(grantTypeName) && !StringUtils.isEmpty(authzGrantHandlerImplClass)) { supportedGrantTypeClassNames.put(grantTypeName, authzGrantHandlerImplClass); OMElement authzGrantValidatorClassNameElement = supportedGrantTypeElement.getFirstChildWithName( getQNameWithIdentityNS(ConfigElements.GRANT_TYPE_VALIDATOR_IMPL_CLASS)); String authzGrantValidatorImplClass = null; if (authzGrantValidatorClassNameElement != null) { authzGrantValidatorImplClass = authzGrantValidatorClassNameElement.getText(); } if (!StringUtils.isEmpty(authzGrantValidatorImplClass)) { supportedGrantTypeValidatorNames.put(grantTypeName, authzGrantValidatorImplClass); } } } } else { // if this element is not present, assume the default case. log.warn("\'SupportedGrantTypes\' element not configured in identity.xml. " + "Therefore instantiating default grant type handlers"); Map<String, String> defaultGrantTypes = new HashMap<>(5); defaultGrantTypes.put(GrantType.AUTHORIZATION_CODE.toString(), AUTHORIZATION_CODE_GRANT_HANDLER_CLASS); defaultGrantTypes.put(GrantType.CLIENT_CREDENTIALS.toString(), CLIENT_CREDENTIALS_GRANT_HANDLER_CLASS); defaultGrantTypes.put(GrantType.PASSWORD.toString(), PASSWORD_GRANT_HANDLER_CLASS); defaultGrantTypes.put(GrantType.REFRESH_TOKEN.toString(), REFRESH_TOKEN_GRANT_HANDLER_CLASS); defaultGrantTypes.put(org.wso2.carbon.identity.oauth.common.GrantType.SAML20_BEARER.toString(), SAML20_BEARER_GRANT_HANDLER_CLASS); defaultGrantTypes.put(org.wso2.carbon.identity.oauth.common.GrantType.IWA_NTLM.toString(), IWA_NTLM_BEARER_GRANT_HANDLER_CLASS); supportedGrantTypeClassNames.putAll(defaultGrantTypes); } if (log.isDebugEnabled()) { for (Map.Entry entry : supportedGrantTypeClassNames.entrySet()) { String grantTypeName = entry.getKey().toString(); String authzGrantHandlerImplClass = entry.getValue().toString(); log.debug(grantTypeName + "supported by" + authzGrantHandlerImplClass); } } } private void parseSupportedResponseTypesConfig(OMElement oauthConfigElem) { OMElement 
supportedRespTypesElem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SUPPORTED_RESP_TYPES)); if (supportedRespTypesElem != null) { Iterator<OMElement> iterator = supportedRespTypesElem.getChildrenWithName(getQNameWithIdentityNS(ConfigElements.SUPPORTED_RESP_TYPE)); while (iterator.hasNext()) { OMElement supportedResponseTypeElement = iterator.next(); OMElement responseTypeNameElement = supportedResponseTypeElement. getFirstChildWithName( getQNameWithIdentityNS(ConfigElements.RESP_TYPE_NAME)); String responseTypeName = null; if (responseTypeNameElement != null) { responseTypeName = responseTypeNameElement.getText(); } OMElement responseTypeHandlerImplClassElement = supportedResponseTypeElement.getFirstChildWithName( getQNameWithIdentityNS(ConfigElements.RESP_TYPE_HANDLER_IMPL_CLASS)); String responseTypeHandlerImplClass = null; if (responseTypeHandlerImplClassElement != null) { responseTypeHandlerImplClass = responseTypeHandlerImplClassElement.getText(); } if (responseTypeName != null && !"".equals(responseTypeName) && responseTypeHandlerImplClass != null && !"".equals(responseTypeHandlerImplClass)) { supportedResponseTypeClassNames.put(responseTypeName, responseTypeHandlerImplClass); } } } else { // if this element is not present, assume the default case. log.warn("\'SupportedResponseTypes\' element not configured in identity.xml. 
" + "Therefore instantiating default response type handlers"); Map<String, String> defaultResponseTypes = new HashMap<>(4); defaultResponseTypes.put(ResponseType.CODE.toString(), "org.wso2.carbon.identity.oauth2.authz.handlers.CodeResponseTypeHandler"); defaultResponseTypes.put(ResponseType.TOKEN.toString(), "org.wso2.carbon.identity.oauth2.authz.handlers.TokenResponseTypeHandler"); defaultResponseTypes.put("id_token", "org.wso2.carbon.identity.oauth2.authz.handlers.TokenResponseTypeHandler"); defaultResponseTypes.put("id_token token", "org.wso2.carbon.identity.oauth2.authz.handlers.TokenResponseTypeHandler"); supportedResponseTypeClassNames.putAll(defaultResponseTypes); } if (log.isDebugEnabled()) { for (Map.Entry entry : supportedResponseTypeClassNames.entrySet()) { String responseTypeName = entry.getKey().toString(); String authzHandlerImplClass = entry.getValue().toString(); log.debug(responseTypeName + "supported by" + authzHandlerImplClass); } } } private void parseSupportedClientAuthHandlersConfig(OMElement clientAuthElement) { if (clientAuthElement != null) { Iterator<OMElement> iterator = clientAuthElement.getChildrenWithLocalName( ConfigElements.CLIENT_AUTH_HANDLER_IMPL_CLASS); while (iterator.hasNext()) { OMElement supportedClientAuthHandler = iterator.next(); Iterator<OMElement> confProperties = supportedClientAuthHandler .getChildrenWithLocalName(ConfigElements.CLIENT_AUTH_PROPERTY); Properties properties = null; while (confProperties.hasNext()) { properties = new Properties(); OMElement paramElem = confProperties.next(); String paramName = paramElem.getAttributeValue( new QName(ConfigElements.CLIENT_AUTH_NAME)); String paramValue = paramElem.getText(); properties.put(paramName, paramValue); if (log.isDebugEnabled()) { log.debug("Property name : " + paramName + ", Property Value : " + paramValue); } } String clientAuthHandlerImplClass = supportedClientAuthHandler.getAttributeValue( new QName(ConfigElements.CLIENT_AUTH_CLASS)); if 
(StringUtils.isEmpty(clientAuthHandlerImplClass)) { log.error("Mandatory attribute \"Class\" is not present in the " + "ClientAuthHandler element. "); return; } if (properties != null) { supportedClientAuthHandlerData.put(clientAuthHandlerImplClass, properties); } else { supportedClientAuthHandlerData.put(clientAuthHandlerImplClass, new Properties()); } } } else { // if this element is not present, assume the default case. log.warn("\'SupportedClientAuthMethods\' element not configured in identity.xml. " + "Therefore instantiating default client authentication handlers"); Map<String, Properties> defaultClientAuthHandlers = new HashMap<>(1); defaultClientAuthHandlers.put( ConfigElements.DEFAULT_CLIENT_AUTHENTICATOR, new Properties()); supportedClientAuthHandlerData.putAll(defaultClientAuthHandlers); } if (log.isDebugEnabled()) { for (Map.Entry<String, Properties> clazz : supportedClientAuthHandlerData.entrySet()) { log.debug("Supported client authentication method " + clazz.getKey()); } } } private void parseSAML2GrantConfig(OMElement oauthConfigElem) { OMElement saml2GrantElement = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SAML2_GRANT)); OMElement saml2TokenHandlerElement = null; if (saml2GrantElement != null) { saml2TokenHandlerElement = saml2GrantElement.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SAML2_TOKEN_HANDLER)); } if (saml2TokenHandlerElement != null && StringUtils.isNotBlank(saml2TokenHandlerElement.getText())) { saml2TokenCallbackHandlerName = saml2TokenHandlerElement.getText().trim(); } } private void parseAuthorizationContextTokenGeneratorConfig(OMElement oauthConfigElem) { OMElement authContextTokGenConfigElem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.AUTHORIZATION_CONTEXT_TOKEN_GENERATION)); if (authContextTokGenConfigElem != null) { OMElement enableJWTGenerationConfigElem = 
authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.ENABLED)); if (enableJWTGenerationConfigElem != null) { String enableJWTGeneration = enableJWTGenerationConfigElem.getText().trim(); if (enableJWTGeneration != null && JavaUtils.isTrueExplicitly(enableJWTGeneration)) { isAuthContextTokGenEnabled = true; if (authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.TOKEN_GENERATOR_IMPL_CLASS)) != null) { tokenGeneratorImplClass = authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.TOKEN_GENERATOR_IMPL_CLASS)) .getText().trim(); } if (authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.CLAIMS_RETRIEVER_IMPL_CLASS)) != null) { claimsRetrieverImplClass = authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.CLAIMS_RETRIEVER_IMPL_CLASS)) .getText().trim(); } if (authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.CONSUMER_DIALECT_URI)) != null) { consumerDialectURI = authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.CONSUMER_DIALECT_URI)) .getText().trim(); } if (authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SIGNATURE_ALGORITHM)) != null) { signatureAlgorithm = authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SIGNATURE_ALGORITHM)) .getText().trim(); } if (authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SECURITY_CONTEXT_TTL)) != null) { authContextTTL = authContextTokGenConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SECURITY_CONTEXT_TTL)) .getText().trim(); } } } } if (log.isDebugEnabled()) { if (isAuthContextTokGenEnabled) { log.debug("JWT Generation is enabled"); } else { log.debug("JWT Generation is disabled"); } } } private void parseOpenIDConnectConfig(OMElement oauthConfigElem) { 
OMElement openIDConnectConfigElem = oauthConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT)); if (openIDConnectConfigElem != null) { if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_BUILDER)) != null) { openIDConnectIDTokenBuilderClassName = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_BUILDER)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_CUSTOM_CLAIM_CALLBACK_HANDLER)) != null) { openIDConnectIDTokenCustomClaimsHanlderClassName = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_CUSTOM_CLAIM_CALLBACK_HANDLER)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_SUB_CLAIM)) != null) { openIDConnectIDTokenSubClaim = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_SUB_CLAIM)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_SKIP_USER_CONSENT)) != null) { openIDConnectSkipUserConsent = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_SKIP_USER_CONSENT)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_ISSUER_ID)) != null) { openIDConnectIDTokenIssuerIdentifier = IdentityUtil.fillURLPlaceholders( openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS( ConfigElements.OPENID_CONNECT_IDTOKEN_ISSUER_ID)).getText().trim()); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_EXPIRATION)) != null) { openIDConnectIDTokenExpiration = 
openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_IDTOKEN_EXPIRATION)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_CLAIM_DIALECT)) != null) { openIDConnectUserInfoEndpointClaimDialect = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_CLAIM_DIALECT)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_CLAIM_RETRIEVER)) != null) { openIDConnectUserInfoEndpointClaimRetriever = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_CLAIM_RETRIEVER)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_REQUEST_VALIDATOR)) != null) { openIDConnectUserInfoEndpointRequestValidator = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_REQUEST_VALIDATOR)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_ACCESS_TOKEN_VALIDATOR)) != null) { openIDConnectUserInfoEndpointAccessTokenValidator = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_ACCESS_TOKEN_VALIDATOR)) .getText().trim(); } if (openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_RESPONSE_BUILDER)) != null) { openIDConnectUserInfoEndpointResponseBuilder = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.OPENID_CONNECT_USERINFO_ENDPOINT_RESPONSE_BUILDER)) .getText().trim(); } if 
(openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SUPPORTED_CLAIMS)) != null) { String supportedClaimStr = openIDConnectConfigElem.getFirstChildWithName(getQNameWithIdentityNS(ConfigElements.SUPPORTED_CLAIMS)) .getText().trim(); if (log.isDebugEnabled()) { log.debug("Supported Claims : " + supportedClaimStr); } if (StringUtils.isNotEmpty(supportedClaimStr)) { supportedClaims = supportedClaimStr.split(","); } } } } public OAuth2ScopeValidator getoAuth2ScopeValidator() { return oAuth2ScopeValidator; } public void setoAuth2ScopeValidator(OAuth2ScopeValidator oAuth2ScopeValidator) { this.oAuth2ScopeValidator = oAuth2ScopeValidator; } /** * Localpart names for the OAuth configuration in identity.xml. */ private class ConfigElements { // URLs public static final String OAUTH1_REQUEST_TOKEN_URL = "OAuth1RequestTokenUrl"; public static final String OAUTH1_AUTHORIZE_URL = "OAuth1AuthorizeUrl"; public static final String OAUTH1_ACCESS_TOKEN_URL = "OAuth1AccessTokenUrl"; public static final String OAUTH2_AUTHZ_EP_URL = "OAuth2AuthzEPUrl"; public static final String OAUTH2_TOKEN_EP_URL = "OAuth2TokenEPUrl"; public static final String OAUTH2_USERINFO_EP_URL = "OAuth2UserInfoEPUrl"; public static final String OAUTH2_CONSENT_PAGE_URL = "OAuth2ConsentPage"; public static final String OAUTH2_ERROR_PAGE_URL = "OAuth2ErrorPage"; public static final String OIDC_CONSENT_PAGE_URL = "OIDCConsentPage"; // JWT Generator public static final String AUTHORIZATION_CONTEXT_TOKEN_GENERATION = "AuthorizationContextTokenGeneration"; public static final String ENABLED = "Enabled"; public static final String TOKEN_GENERATOR_IMPL_CLASS = "TokenGeneratorImplClass"; public static final String CLAIMS_RETRIEVER_IMPL_CLASS = "ClaimsRetrieverImplClass"; public static final String CONSUMER_DIALECT_URI = "ConsumerDialectURI"; public static final String SIGNATURE_ALGORITHM = "SignatureAlgorithm"; public static final String SECURITY_CONTEXT_TTL = "AuthorizationContextTTL"; 
public static final String ENABLE_ASSERTIONS = "EnableAssertions"; public static final String ENABLE_ASSERTIONS_USERNAME = "UserName"; public static final String ENABLE_ACCESS_TOKEN_PARTITIONING = "EnableAccessTokenPartitioning"; public static final String ACCESS_TOKEN_PARTITIONING_DOMAINS = "AccessTokenPartitioningDomains"; // OpenIDConnect configurations public static final String OPENID_CONNECT = "OpenIDConnect"; public static final String OPENID_CONNECT_IDTOKEN_BUILDER = "IDTokenBuilder"; public static final String OPENID_CONNECT_IDTOKEN_SUB_CLAIM = "IDTokenSubjectClaim"; public static final String OPENID_CONNECT_IDTOKEN_ISSUER_ID = "IDTokenIssuerID"; public static final String OPENID_CONNECT_IDTOKEN_EXPIRATION = "IDTokenExpiration"; public static final String OPENID_CONNECT_SKIP_USER_CONSENT = "SkipUserConsent"; public static final String OPENID_CONNECT_USERINFO_ENDPOINT_CLAIM_DIALECT = "UserInfoEndpointClaimDialect"; public static final String OPENID_CONNECT_USERINFO_ENDPOINT_CLAIM_RETRIEVER = "UserInfoEndpointClaimRetriever"; public static final String OPENID_CONNECT_USERINFO_ENDPOINT_REQUEST_VALIDATOR = "UserInfoEndpointRequestValidator"; public static final String OPENID_CONNECT_USERINFO_ENDPOINT_ACCESS_TOKEN_VALIDATOR = "UserInfoEndpointAccessTokenValidator"; public static final String OPENID_CONNECT_USERINFO_ENDPOINT_RESPONSE_BUILDER = "UserInfoEndpointResponseBuilder"; public static final String OPENID_CONNECT_IDTOKEN_CUSTOM_CLAIM_CALLBACK_HANDLER = "IDTokenCustomClaimsCallBackHandler"; public static final String SUPPORTED_CLAIMS = "OpenIDConnectClaims"; // Callback handler related configuration elements private static final String OAUTH_CALLBACK_HANDLERS = "OAuthCallbackHandlers"; private static final String OAUTH_CALLBACK_HANDLER = "OAuthCallbackHandler"; private static final String CALLBACK_CLASS = "Class"; private static final String CALLBACK_PRIORITY = "Priority"; private static final String CALLBACK_PROPERTIES = "Properties"; private static final 
String CALLBACK_PROPERTY = "Property"; private static final String CALLBACK_ATTR_NAME = "Name"; private static final String TOKEN_VALIDATORS = "TokenValidators"; private static final String TOKEN_VALIDATOR = "TokenValidator"; private static final String TOKEN_TYPE_ATTR = "type"; private static final String TOKEN_CLASS_ATTR = "class"; private static final String SCOPE_VALIDATOR = "OAuthScopeValidator"; private static final String SCOPE_CLASS_ATTR = "class"; private static final String SKIP_SCOPE_ATTR = "scopesToSkip"; // Default timestamp skew private static final String TIMESTAMP_SKEW = "TimestampSkew"; // Default validity periods private static final String AUTHORIZATION_CODE_DEFAULT_VALIDITY_PERIOD = "AuthorizationCodeDefaultValidityPeriod"; private static final String USER_ACCESS_TOKEN_DEFAULT_VALIDITY_PERIOD = "UserAccessTokenDefaultValidityPeriod"; private static final String APPLICATION_ACCESS_TOKEN_VALIDATION_PERIOD = "AccessTokenDefaultValidityPeriod"; private static final String REFRESH_TOKEN_VALIDITY_PERIOD = "RefreshTokenValidityPeriod"; // Enable/Disable cache private static final String ENABLE_CACHE = "EnableOAuthCache"; // Enable/Disable refresh token renewal on each refresh_token grant request private static final String RENEW_REFRESH_TOKEN_FOR_REFRESH_GRANT = "RenewRefreshTokenForRefreshGrant"; // TokenPersistenceProcessor private static final String TOKEN_PERSISTENCE_PROCESSOR = "TokenPersistenceProcessor"; // Token issuer generator. 
private static final String OAUTH_TOKEN_GENERATOR = "OAuthTokenGenerator"; // Supported Grant Types private static final String SUPPORTED_GRANT_TYPES = "SupportedGrantTypes"; private static final String SUPPORTED_GRANT_TYPE = "SupportedGrantType"; private static final String GRANT_TYPE_NAME = "GrantTypeName"; private static final String GRANT_TYPE_HANDLER_IMPL_CLASS = "GrantTypeHandlerImplClass"; private static final String GRANT_TYPE_VALIDATOR_IMPL_CLASS = "GrantTypeValidatorImplClass"; // Supported Client Authentication Methods private static final String CLIENT_AUTH_HANDLERS = "ClientAuthHandlers"; private static final String CLIENT_AUTH_HANDLER_IMPL_CLASS = "ClientAuthHandler"; private static final String STRICT_CLIENT_AUTHENTICATION = "StrictClientCredentialValidation"; private static final String CLIENT_AUTH_CLASS = "Class"; private static final String DEFAULT_CLIENT_AUTHENTICATOR = "org.wso2.carbon.identity.oauth2.token.handlers.clientauth.BasicAuthClientAuthHandler"; private static final String CLIENT_AUTH_PROPERTY = "Property"; private static final String CLIENT_AUTH_NAME = "Name"; // Supported Response Types private static final String SUPPORTED_RESP_TYPES = "SupportedResponseTypes"; private static final String SUPPORTED_RESP_TYPE = "SupportedResponseType"; private static final String RESP_TYPE_NAME = "ResponseTypeName"; private static final String RESP_TYPE_HANDLER_IMPL_CLASS = "ResponseTypeHandlerImplClass"; // SAML2 assertion profile configurations private static final String SAML2_GRANT = "SAML2Grant"; private static final String SAML2_TOKEN_HANDLER = "SAML2TokenHandler"; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.vaidya.statistics.job; import java.text.ParseException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Hashtable; import java.util.Map; import java.util.regex.Pattern; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo; /** * */ public class JobStatistics implements JobStatisticsInterface { /* * Pattern for parsing the COUNTERS */ private static final Pattern _pattern = Pattern.compile("[[^,]?]+"); //"[[^,]?]+" /* * Job configuration */ private JobConf _jobConf; /** * @param jobConf the jobConf to set */ void setJobConf(JobConf jobConf) { this._jobConf = jobConf; // TODO: Add job conf to _job array } /* * Aggregated Job level counters */ private JobHistoryParser.JobInfo 
_jobInfo; /* * Job stats */ private java.util.Hashtable<Enum, String> _job; /** * @param jobConf the jobConf to set */ public JobConf getJobConf() { return this._jobConf; } /* * Get Job Counters of type long */ public long getLongValue(Enum key) { if (this._job.get(key) == null) { return (long)0; } else { return Long.parseLong(this._job.get(key)); } } /* * Get job Counters of type Double */ public double getDoubleValue(Enum key) { if (this._job.get(key) == null) { return (double)0; } else { return Double.parseDouble(this._job.get(key)); } } /* * Get Job Counters of type String */ public String getStringValue(Enum key) { if (this._job.get(key) == null) { return ""; } else { return this._job.get(key); } } /* * Set key value of type long */ public void setValue(Enum key, long value) { this._job.put(key, Long.toString(value)); } /* * Set key value of type double */ public void setValue(Enum key, double value) { this._job.put(key, Double.toString(value)); } /* * Set key value of type String */ public void setValue(Enum key, String value) { this._job.put(key, value); } /* * Map Task List (Sorted by task id) */ private ArrayList<MapTaskStatistics> _mapTaskList = new ArrayList<MapTaskStatistics>(); /* * Reduce Task List (Sorted by task id) */ private ArrayList<ReduceTaskStatistics> _reduceTaskList = new ArrayList<ReduceTaskStatistics>(); /* * Ctor: */ public JobStatistics (JobConf jobConf, JobInfo jobInfo) throws ParseException { this._jobConf = jobConf; this._jobInfo = jobInfo; this._job = new Hashtable<Enum, String>(); populate_Job(this._job, jobInfo); populate_MapReduceTaskLists(this._mapTaskList, this._reduceTaskList, jobInfo.getAllTasks()); // Add the Job Type: MAP_REDUCE, MAP_ONLY if (getLongValue(JobKeys.TOTAL_REDUCES) == 0) { this._job.put(JobKeys.JOBTYPE,"MAP_ONLY"); } else { this._job.put(JobKeys.JOBTYPE,"MAP_REDUCE"); } } /* * */ private void populate_MapReduceTaskLists (ArrayList<MapTaskStatistics> mapTaskList, ArrayList<ReduceTaskStatistics> reduceTaskList, 
Map<TaskID, TaskInfo> taskMap) throws ParseException { int num_tasks = taskMap.entrySet().size(); // DO we need these lists? // List<TaskAttemptInfo> successfulMapAttemptList = // new ArrayList<TaskAttemptInfo>(); // List<TaskAttemptInfo> successfulReduceAttemptList = // new ArrayList<TaskAttemptInfo>(); for (JobHistoryParser.TaskInfo taskInfo: taskMap.values()) { if (taskInfo.getTaskType().equals(TaskType.MAP)) { MapTaskStatistics mapT = new MapTaskStatistics(); TaskAttemptInfo successfulAttempt = getLastSuccessfulTaskAttempt(taskInfo); mapT.setValue(MapTaskKeys.TASK_ID, successfulAttempt.getAttemptId().getTaskID().toString()); mapT.setValue(MapTaskKeys.ATTEMPT_ID, successfulAttempt.getAttemptId().toString()); mapT.setValue(MapTaskKeys.HOSTNAME, successfulAttempt.getTrackerName()); mapT.setValue(MapTaskKeys.TASK_TYPE, successfulAttempt.getTaskType().toString()); mapT.setValue(MapTaskKeys.STATUS, successfulAttempt.getTaskStatus().toString()); mapT.setValue(MapTaskKeys.START_TIME, successfulAttempt.getStartTime()); mapT.setValue(MapTaskKeys.FINISH_TIME, successfulAttempt.getFinishTime()); mapT.setValue(MapTaskKeys.SPLITS, taskInfo.getSplitLocations()); mapT.setValue(MapTaskKeys.TRACKER_NAME, successfulAttempt.getTrackerName()); mapT.setValue(MapTaskKeys.STATE_STRING, successfulAttempt.getState()); mapT.setValue(MapTaskKeys.HTTP_PORT, successfulAttempt.getHttpPort()); mapT.setValue(MapTaskKeys.ERROR, successfulAttempt.getError()); parseAndAddMapTaskCounters(mapT, successfulAttempt.getCounters().toString()); mapTaskList.add(mapT); // Add number of task attempts mapT.setValue(MapTaskKeys.NUM_ATTEMPTS, (new Integer(taskInfo.getAllTaskAttempts().size())).toString()); // Add EXECUTION_TIME = FINISH_TIME - START_TIME long etime = mapT.getLongValue(MapTaskKeys.FINISH_TIME) - mapT.getLongValue(MapTaskKeys.START_TIME); mapT.setValue(MapTaskKeys.EXECUTION_TIME, (new Long(etime)).toString()); }else if (taskInfo.getTaskType().equals(TaskType.REDUCE)) { ReduceTaskStatistics 
reduceT = new ReduceTaskStatistics(); TaskAttemptInfo successfulAttempt = getLastSuccessfulTaskAttempt(taskInfo); reduceT.setValue(ReduceTaskKeys.TASK_ID, successfulAttempt.getAttemptId().getTaskID().toString()); reduceT.setValue(ReduceTaskKeys.ATTEMPT_ID, successfulAttempt.getAttemptId().toString()); reduceT.setValue(ReduceTaskKeys.HOSTNAME, successfulAttempt.getTrackerName()); reduceT.setValue(ReduceTaskKeys.TASK_TYPE, successfulAttempt.getTaskType().toString()); reduceT.setValue(ReduceTaskKeys.STATUS, successfulAttempt.getTaskStatus().toString()); reduceT.setValue(ReduceTaskKeys.START_TIME, successfulAttempt.getStartTime()); reduceT.setValue(ReduceTaskKeys.FINISH_TIME, successfulAttempt.getFinishTime()); reduceT.setValue(ReduceTaskKeys.SHUFFLE_FINISH_TIME, successfulAttempt.getShuffleFinishTime()); reduceT.setValue(ReduceTaskKeys.SORT_FINISH_TIME, successfulAttempt.getSortFinishTime()); reduceT.setValue(ReduceTaskKeys.SPLITS, ""); reduceT.setValue(ReduceTaskKeys.TRACKER_NAME, successfulAttempt.getTrackerName()); reduceT.setValue(ReduceTaskKeys.STATE_STRING, successfulAttempt.getState()); reduceT.setValue(ReduceTaskKeys.HTTP_PORT, successfulAttempt.getHttpPort()); parseAndAddReduceTaskCounters(reduceT, successfulAttempt.getCounters().toString()); reduceTaskList.add(reduceT); // Add number of task attempts reduceT.setValue(ReduceTaskKeys.NUM_ATTEMPTS, (new Integer(taskInfo.getAllTaskAttempts().size())).toString()); // Add EXECUTION_TIME = FINISH_TIME - START_TIME long etime1 = reduceT.getLongValue(ReduceTaskKeys.FINISH_TIME) - reduceT.getLongValue(ReduceTaskKeys.START_TIME); reduceT.setValue(ReduceTaskKeys.EXECUTION_TIME, (new Long(etime1)).toString()); } else if (taskInfo.getTaskType().equals(TaskType.JOB_CLEANUP) || taskInfo.getTaskType().equals(TaskType.JOB_SETUP)) { //System.out.println("INFO: IGNORING TASK TYPE : "+task.get(Keys.TASK_TYPE)); } else { System.err.println("UNKNOWN TASK TYPE : "+taskInfo.getTaskType()); } } } /* * Get last successful task attempt 
to be added in the stats */ private TaskAttemptInfo getLastSuccessfulTaskAttempt(TaskInfo task) { for (TaskAttemptInfo ai: task.getAllTaskAttempts().values()) { if (ai.getTaskStatus().equals(TaskStatus.State.SUCCEEDED.toString())) { return ai; } } return null; } /* * Popuate the job stats */ private void populate_Job (Hashtable<Enum, String> job, JobInfo jobInfo) throws ParseException { job.put(JobKeys.FINISH_TIME, String.valueOf(jobInfo.getFinishTime())); job.put(JobKeys.JOBID, jobInfo.getJobId().toString()); job.put(JobKeys.JOBNAME, jobInfo.getJobname()); job.put(JobKeys.USER, jobInfo.getUsername()); job.put(JobKeys.JOBCONF, jobInfo.getJobConfPath()); job.put(JobKeys.SUBMIT_TIME, String.valueOf(jobInfo.getSubmitTime())); job.put(JobKeys.LAUNCH_TIME, String.valueOf(jobInfo.getLaunchTime())); job.put(JobKeys.TOTAL_MAPS, String.valueOf(jobInfo.getTotalMaps())); job.put(JobKeys.TOTAL_REDUCES, String.valueOf(jobInfo.getTotalReduces())); job.put(JobKeys.FAILED_MAPS, String.valueOf(jobInfo.getFailedMaps())); job.put(JobKeys.FAILED_REDUCES, String.valueOf(jobInfo.getFailedReduces())); job.put(JobKeys.FINISHED_MAPS, String.valueOf(jobInfo.getFinishedMaps())); job.put(JobKeys.FINISHED_REDUCES, String.valueOf(jobInfo.getFinishedReduces())); job.put(JobKeys.STATUS, jobInfo.getJobStatus().toString()); job.put(JobKeys.JOB_PRIORITY, jobInfo.getPriority()); parseAndAddJobCounters(job, jobInfo.getTotalCounters().toString()); } /* * Parse and add the job counters */ private void parseAndAddJobCounters(Hashtable<Enum, String> job, String counters) throws ParseException { Counters cnt = Counters.fromEscapedCompactString(counters); for (java.util.Iterator<Counters.Group> grps = cnt.iterator(); grps.hasNext(); ) { Counters.Group grp = grps.next(); //String groupname = "<" + grp.getName() + ">::<" + grp.getDisplayName() + ">"; for (java.util.Iterator<Counters.Counter> mycounters = grp.iterator(); mycounters.hasNext(); ) { Counters.Counter counter = mycounters.next(); //String 
countername = "<"+counter.getName()+">::<"+counter.getDisplayName()+">::<"+counter.getValue()+">"; //System.err.println("groupName:"+groupname+",countername: "+countername); String countername = grp.getDisplayName()+"."+counter.getDisplayName(); String value = (new Long(counter.getValue())).toString(); String[] parts = {countername,value}; //System.err.println("part0:<"+parts[0]+">,:part1 <"+parts[1]+">"); if (parts[0].equals("FileSystemCounters.FILE_BYTES_READ")) { job.put(JobKeys.FILE_BYTES_READ, parts[1]); } else if (parts[0].equals("FileSystemCounters.FILE_BYTES_WRITTEN")) { job.put(JobKeys.FILE_BYTES_WRITTEN, parts[1]); } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_READ")) { job.put(JobKeys.HDFS_BYTES_READ, parts[1]); } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_WRITTEN")) { job.put(JobKeys.HDFS_BYTES_WRITTEN, parts[1]); } else if (parts[0].equals("Job Counters .Launched map tasks")) { job.put(JobKeys.LAUNCHED_MAPS, parts[1]); } else if (parts[0].equals("Job Counters .Launched reduce tasks")) { job.put(JobKeys.LAUNCHED_REDUCES, parts[1]); } else if (parts[0].equals("Job Counters .Data-local map tasks")) { job.put(JobKeys.DATALOCAL_MAPS, parts[1]); } else if (parts[0].equals("Job Counters .Rack-local map tasks")) { job.put(JobKeys.RACKLOCAL_MAPS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map input records")) { job.put(JobKeys.MAP_INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map output records")) { job.put(JobKeys.MAP_OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map input bytes")) { job.put(JobKeys.MAP_INPUT_BYTES, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map output bytes")) { job.put(JobKeys.MAP_OUTPUT_BYTES, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Combine input records")) { job.put(JobKeys.COMBINE_INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Combine output records")) { 
job.put(JobKeys.COMBINE_OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce input groups")) { job.put(JobKeys.REDUCE_INPUT_GROUPS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce input records")) { job.put(JobKeys.REDUCE_INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce output records")) { job.put(JobKeys.REDUCE_OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Spilled Records")) { job.put(JobKeys.SPILLED_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce shuffle bytes")) { job.put(JobKeys.SHUFFLE_BYTES, parts[1]); } else { System.err.println("JobCounterKey:<"+parts[0]+"> ==> NOT INCLUDED IN PERFORMANCE ADVISOR"); } } } } /* * Parse and add the Map task counters */ private void parseAndAddMapTaskCounters(MapTaskStatistics mapTask, String counters) throws ParseException { Counters cnt = Counters.fromEscapedCompactString(counters); for (java.util.Iterator<Counters.Group> grps = cnt.iterator(); grps.hasNext(); ) { Counters.Group grp = grps.next(); //String groupname = "<" + grp.getName() + ">::<" + grp.getDisplayName() + ">"; for (java.util.Iterator<Counters.Counter> mycounters = grp.iterator(); mycounters.hasNext(); ) { Counters.Counter counter = mycounters.next(); //String countername = "<"+counter.getName()+">::<"+counter.getDisplayName()+">::<"+counter.getValue()+">"; //System.out.println("groupName:"+groupname+",countername: "+countername); String countername = grp.getDisplayName()+"."+counter.getDisplayName(); String value = (new Long(counter.getValue())).toString(); String[] parts = {countername,value}; //System.out.println("part0:"+parts[0]+",:part1 "+parts[1]); if (parts[0].equals("FileSystemCounters.FILE_BYTES_READ")) { mapTask.setValue(MapTaskKeys.FILE_BYTES_READ, parts[1]); } else if (parts[0].equals("FileSystemCounters.FILE_BYTES_WRITTEN")) { mapTask.setValue(MapTaskKeys.FILE_BYTES_WRITTEN, parts[1]); } else if 
(parts[0].equals("FileSystemCounters.HDFS_BYTES_READ")) { mapTask.setValue(MapTaskKeys.HDFS_BYTES_READ, parts[1]); } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_WRITTEN")) { mapTask.setValue(MapTaskKeys.HDFS_BYTES_WRITTEN, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map input records")) { mapTask.setValue(MapTaskKeys.INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map output records")) { mapTask.setValue(MapTaskKeys.OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Map output bytes")) { mapTask.setValue(MapTaskKeys.OUTPUT_BYTES, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Combine input records")) { mapTask.setValue(MapTaskKeys.COMBINE_INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Combine output records")) { mapTask.setValue(MapTaskKeys.COMBINE_OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Spilled Records")) { mapTask.setValue(MapTaskKeys.SPILLED_RECORDS, parts[1]); } else if (parts[0].equals("FileInputFormatCounters.BYTES_READ")) { mapTask.setValue(MapTaskKeys.INPUT_BYTES, parts[1]); } else { System.err.println("MapCounterKey:<"+parts[0]+"> ==> NOT INCLUDED IN PERFORMANCE ADVISOR MAP TASK"); } } } } /* * Parse and add the reduce task counters */ private void parseAndAddReduceTaskCounters(ReduceTaskStatistics reduceTask, String counters) throws ParseException { Counters cnt = Counters.fromEscapedCompactString(counters); for (java.util.Iterator<Counters.Group> grps = cnt.iterator(); grps.hasNext(); ) { Counters.Group grp = grps.next(); //String groupname = "<" + grp.getName() + ">::<" + grp.getDisplayName() + ">"; for (java.util.Iterator<Counters.Counter> mycounters = grp.iterator(); mycounters.hasNext(); ) { Counters.Counter counter = mycounters.next(); //String countername = "<"+counter.getName()+">::<"+counter.getDisplayName()+">::<"+counter.getValue()+">"; 
//System.out.println("groupName:"+groupname+",countername: "+countername); String countername = grp.getDisplayName()+"."+counter.getDisplayName(); String value = (new Long(counter.getValue())).toString(); String[] parts = {countername,value}; //System.out.println("part0:"+parts[0]+",:part1 "+parts[1]); if (parts[0].equals("FileSystemCounters.FILE_BYTES_READ")) { reduceTask.setValue(ReduceTaskKeys.FILE_BYTES_READ, parts[1]); } else if (parts[0].equals("FileSystemCounters.FILE_BYTES_WRITTEN")) { reduceTask.setValue(ReduceTaskKeys.FILE_BYTES_WRITTEN, parts[1]); } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_READ")) { reduceTask.setValue(ReduceTaskKeys.HDFS_BYTES_READ, parts[1]); } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_WRITTEN")) { reduceTask.setValue(ReduceTaskKeys.HDFS_BYTES_WRITTEN, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce input records")) { reduceTask.setValue(ReduceTaskKeys.INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce output records")) { reduceTask.setValue(ReduceTaskKeys.OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Combine input records")) { reduceTask.setValue(ReduceTaskKeys.COMBINE_INPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Combine output records")) { reduceTask.setValue(ReduceTaskKeys.COMBINE_OUTPUT_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce input groups")) { reduceTask.setValue(ReduceTaskKeys.INPUT_GROUPS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Spilled Records")) { reduceTask.setValue(ReduceTaskKeys.SPILLED_RECORDS, parts[1]); } else if (parts[0].equals("Map-Reduce Framework.Reduce shuffle bytes")) { reduceTask.setValue(ReduceTaskKeys.SHUFFLE_BYTES, parts[1]); } else { System.err.println("ReduceCounterKey:<"+parts[0]+"> ==> NOT INCLUDED IN PERFORMANCE ADVISOR REDUCE TASK"); } } } } /* * Print the Job Execution Statistics * TODO: split to pring job, 
map/reduce task list and individual map/reduce task stats */ public void printJobExecutionStatistics() { /* * Print Job Counters */ System.out.println("JOB COUNTERS *********************************************"); int size = this._job.size(); java.util.Iterator<Map.Entry<Enum, String>> kv = this._job.entrySet().iterator(); for (int i = 0; i < size; i++) { Map.Entry<Enum, String> entry = (Map.Entry<Enum, String>) kv.next(); Enum key = entry.getKey(); String value = entry.getValue(); System.out.println("Key:<" + key.name() + ">, value:<"+ value +">"); } /* * */ System.out.println("MAP COUNTERS *********************************************"); int size1 = this._mapTaskList.size(); for (int i = 0; i < size1; i++) { System.out.println("MAP TASK *********************************************"); this._mapTaskList.get(i).printKeys(); } /* * */ System.out.println("REDUCE COUNTERS *********************************************"); int size2 = this._mapTaskList.size(); for (int i = 0; i < size2; i++) { System.out.println("REDUCE TASK *********************************************"); this._reduceTaskList.get(i).printKeys(); } } /* * Hash table keeping sorted lists of map tasks based on the specific map task key */ private Hashtable <Enum, ArrayList<MapTaskStatistics>> _sortedMapTaskListsByKey = new Hashtable<Enum, ArrayList<MapTaskStatistics>>(); /* * @return mapTaskList : ArrayList of MapTaskStatistics * @param mapTaskSortKey : Specific counter key used for sorting the task list * @param datatype : indicates the data type of the counter key used for sorting * If sort key is null then by default map tasks are sorted using map task ids. */ public synchronized ArrayList<MapTaskStatistics> getMapTaskList(Enum mapTaskSortKey, KeyDataType dataType) { /* * If mapTaskSortKey is null then use the task id as a key. 
*/ if (mapTaskSortKey == null) { mapTaskSortKey = MapTaskKeys.TASK_ID; } if (this._sortedMapTaskListsByKey.get(mapTaskSortKey) == null) { ArrayList<MapTaskStatistics> newList = (ArrayList<MapTaskStatistics>)this._mapTaskList.clone(); this._sortedMapTaskListsByKey.put(mapTaskSortKey, this.sortMapTasksByKey(newList, mapTaskSortKey, dataType)); } return this._sortedMapTaskListsByKey.get(mapTaskSortKey); } private ArrayList<MapTaskStatistics> sortMapTasksByKey (ArrayList<MapTaskStatistics> mapTasks, Enum key, Enum dataType) { MapCounterComparator mcc = new MapCounterComparator(key, dataType); Collections.sort (mapTasks, mcc); return mapTasks; } private class MapCounterComparator implements Comparator<MapTaskStatistics> { public Enum _sortKey; public Enum _dataType; public MapCounterComparator(Enum key, Enum dataType) { this._sortKey = key; this._dataType = dataType; } // Comparator interface requires defining compare method. public int compare(MapTaskStatistics a, MapTaskStatistics b) { if (this._dataType == KeyDataType.LONG) { long aa = a.getLongValue(this._sortKey); long bb = b.getLongValue(this._sortKey); if (aa<bb) return -1; if (aa==bb) return 0; if (aa>bb) return 1; } else { return a.getStringValue(this._sortKey).compareToIgnoreCase(b.getStringValue(this._sortKey)); } return 0; } } /* * Reduce Array List sorting */ private Hashtable <Enum, ArrayList<ReduceTaskStatistics>> _sortedReduceTaskListsByKey = new Hashtable<Enum,ArrayList<ReduceTaskStatistics>>(); /* * @return reduceTaskList : ArrayList of ReduceTaskStatistics * @param reduceTaskSortKey : Specific counter key used for sorting the task list * @param dataType : indicates the data type of the counter key used for sorting * If sort key is null then, by default reduce tasks are sorted using task ids. */ public synchronized ArrayList<ReduceTaskStatistics> getReduceTaskList (Enum reduceTaskSortKey, KeyDataType dataType) { /* * If reduceTaskSortKey is null then use the task id as a key. 
*/ if (reduceTaskSortKey == null) { reduceTaskSortKey = ReduceTaskKeys.TASK_ID; } if (this._sortedReduceTaskListsByKey.get(reduceTaskSortKey) == null) { ArrayList<ReduceTaskStatistics> newList = (ArrayList<ReduceTaskStatistics>)this._reduceTaskList.clone(); this._sortedReduceTaskListsByKey.put(reduceTaskSortKey, this.sortReduceTasksByKey(newList, reduceTaskSortKey, dataType)); } return this._sortedReduceTaskListsByKey.get(reduceTaskSortKey); } private ArrayList<ReduceTaskStatistics> sortReduceTasksByKey (ArrayList<ReduceTaskStatistics> reduceTasks, Enum key, Enum dataType) { ReduceCounterComparator rcc = new ReduceCounterComparator(key, dataType); Collections.sort (reduceTasks, rcc); return reduceTasks; } private class ReduceCounterComparator implements Comparator<ReduceTaskStatistics> { public Enum _sortKey; public Enum _dataType; //either long or string public ReduceCounterComparator(Enum key, Enum dataType) { this._sortKey = key; this._dataType = dataType; } // Comparator interface requires defining compare method. public int compare(ReduceTaskStatistics a, ReduceTaskStatistics b) { if (this._dataType == KeyDataType.LONG) { long aa = a.getLongValue(this._sortKey); long bb = b.getLongValue(this._sortKey); if (aa<bb) return -1; if (aa==bb) return 0; if (aa>bb) return 1; } else { return a.getStringValue(this._sortKey).compareToIgnoreCase(b.getStringValue(this._sortKey)); } return 0; } } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.metadata;

import com.facebook.presto.Session;
import com.facebook.presto.spi.CatalogSchemaName;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorId;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.SystemTable;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.spi.block.BlockEncodingSerde;
import com.facebook.presto.spi.connector.ConnectorCapabilities;
import com.facebook.presto.spi.connector.ConnectorOutputMetadata;
import com.facebook.presto.spi.function.SqlFunction;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.security.GrantInfo;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.Privilege;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ComputedStatistics;
import com.facebook.presto.spi.statistics.TableStatistics;
import com.facebook.presto.spi.statistics.TableStatisticsMetadata;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.sql.planner.PartitioningHandle;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.slice.Slice;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;

/**
 * Test stub for {@code Metadata}: every method throws
 * {@link UnsupportedOperationException} (except {@code isLegacyGetLayoutSupported},
 * which returns {@code true}). Tests subclass this (or use {@link #dummyMetadata()})
 * and override only the methods they need.
 */
public abstract class AbstractMockMetadata implements Metadata {
    /** Returns a Metadata instance on which every call fails. */
    public static Metadata dummyMetadata() { return new AbstractMockMetadata() {}; }

    // ----- type and function registry -----
    @Override public void verifyComparableOrderableContract() { throw new UnsupportedOperationException(); }
    @Override public Type getType(TypeSignature signature) { throw new UnsupportedOperationException(); }
    @Override public List<SqlFunction> listFunctions(Session session) { throw new UnsupportedOperationException(); }
    @Override public void registerBuiltInFunctions(List<? extends BuiltInFunction> functions) { throw new UnsupportedOperationException(); }

    // ----- schema / table resolution -----
    @Override public boolean schemaExists(Session session, CatalogSchemaName schema) { throw new UnsupportedOperationException(); }
    @Override public List<String> listSchemaNames(Session session, String catalogName) { throw new UnsupportedOperationException(); }
    @Override public Optional<TableHandle> getTableHandle(Session session, QualifiedObjectName tableName) { throw new UnsupportedOperationException(); }
    @Override public Optional<TableHandle> getTableHandleForStatisticsCollection(Session session, QualifiedObjectName tableName, Map<String, Object> analyzeProperties) { throw new UnsupportedOperationException(); }
    @Override public Optional<SystemTable> getSystemTable(Session session, QualifiedObjectName tableName) { throw new UnsupportedOperationException(); }

    // ----- layouts and partitioning -----
    @Override public TableLayoutResult getLayout(Session session, TableHandle tableHandle, Constraint<ColumnHandle> constraint, Optional<Set<ColumnHandle>> desiredColumns) { throw new UnsupportedOperationException(); }
    @Override public TableLayout getLayout(Session session, TableHandle handle) { throw new UnsupportedOperationException(); }
    @Override public TableHandle getAlternativeTableHandle(Session session, TableHandle tableHandle, PartitioningHandle partitioningHandle) { throw new UnsupportedOperationException(); }
    // Only non-throwing method: defaults the mock to the legacy getLayout path.
    @Override public boolean isLegacyGetLayoutSupported(Session session, TableHandle tableHandle) { return true; }
    @Override public Optional<PartitioningHandle> getCommonPartitioning(Session session, PartitioningHandle left, PartitioningHandle right) { throw new UnsupportedOperationException(); }
    @Override public boolean isRefinedPartitioningOver(Session session, PartitioningHandle a, PartitioningHandle b) { throw new UnsupportedOperationException(); }
    @Override public PartitioningHandle getPartitioningHandleForExchange(Session session, String catalogName, int partitionCount, List<Type> partitionTypes) { throw new UnsupportedOperationException(); }

    // ----- table metadata and statistics -----
    @Override public Optional<Object> getInfo(Session session, TableHandle handle) { throw new UnsupportedOperationException(); }
    @Override public TableMetadata getTableMetadata(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public TableStatistics getTableStatistics(Session session, TableHandle tableHandle, List<ColumnHandle> columnHandles, Constraint<ColumnHandle> constraint) { throw new UnsupportedOperationException(); }
    @Override public List<QualifiedObjectName> listTables(Session session, QualifiedTablePrefix prefix) { throw new UnsupportedOperationException(); }
    @Override public Map<String, ColumnHandle> getColumnHandles(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public ColumnMetadata getColumnMetadata(Session session, TableHandle tableHandle, ColumnHandle columnHandle) { throw new UnsupportedOperationException(); }
    @Override public TupleDomain<ColumnHandle> toExplainIOConstraints(Session session, TableHandle tableHandle, TupleDomain<ColumnHandle> constraints) { throw new UnsupportedOperationException(); }
    @Override public Map<QualifiedObjectName, List<ColumnMetadata>> listTableColumns(Session session, QualifiedTablePrefix prefix) { throw new UnsupportedOperationException(); }

    // ----- DDL -----
    @Override public void createSchema(Session session, CatalogSchemaName schema, Map<String, Object> properties) { throw new UnsupportedOperationException(); }
    @Override public void dropSchema(Session session, CatalogSchemaName schema) { throw new UnsupportedOperationException(); }
    @Override public void renameSchema(Session session, CatalogSchemaName source, String target) { throw new UnsupportedOperationException(); }
    @Override public void createTable(Session session, String catalogName, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) { throw new UnsupportedOperationException(); }
    @Override public TableHandle createTemporaryTable(Session session, String catalogName, List<ColumnMetadata> columns, Optional<PartitioningMetadata> partitioningMetadata) { throw new UnsupportedOperationException(); }
    @Override public void renameTable(Session session, TableHandle tableHandle, QualifiedObjectName newTableName) { throw new UnsupportedOperationException(); }
    @Override public void renameColumn(Session session, TableHandle tableHandle, ColumnHandle source, String target) { throw new UnsupportedOperationException(); }
    @Override public void addColumn(Session session, TableHandle tableHandle, ColumnMetadata column) { throw new UnsupportedOperationException(); }
    @Override public void dropTable(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }

    // ----- writes: create table / insert -----
    @Override public Optional<NewTableLayout> getNewTableLayout(Session session, String catalogName, ConnectorTableMetadata tableMetadata) { throw new UnsupportedOperationException(); }
    @Override public Optional<NewTableLayout> getPreferredShuffleLayoutForNewTable(Session session, String catalogName, ConnectorTableMetadata tableMetadata) { throw new UnsupportedOperationException(); }
    @Override public OutputTableHandle beginCreateTable(Session session, String catalogName, ConnectorTableMetadata tableMetadata, Optional<NewTableLayout> layout) { throw new UnsupportedOperationException(); }
    @Override public Optional<ConnectorOutputMetadata> finishCreateTable(Session session, OutputTableHandle tableHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics) { throw new UnsupportedOperationException(); }
    @Override public Optional<NewTableLayout> getInsertLayout(Session session, TableHandle target) { throw new UnsupportedOperationException(); }
    @Override public Optional<NewTableLayout> getPreferredShuffleLayoutForInsert(Session session, TableHandle target) { throw new UnsupportedOperationException(); }

    // ----- statistics collection (ANALYZE) -----
    @Override public TableStatisticsMetadata getStatisticsCollectionMetadataForWrite(Session session, String catalogName, ConnectorTableMetadata tableMetadata) { throw new UnsupportedOperationException(); }
    @Override public TableStatisticsMetadata getStatisticsCollectionMetadata(Session session, String catalogName, ConnectorTableMetadata tableMetadata) { throw new UnsupportedOperationException(); }
    @Override public AnalyzeTableHandle beginStatisticsCollection(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public void finishStatisticsCollection(Session session, AnalyzeTableHandle tableHandle, Collection<ComputedStatistics> computedStatistics) { throw new UnsupportedOperationException(); }

    // ----- query lifecycle -----
    @Override public void beginQuery(Session session, Set<ConnectorId> connectors) { throw new UnsupportedOperationException(); }
    @Override public void cleanupQuery(Session session) { throw new UnsupportedOperationException(); }
    @Override public InsertTableHandle beginInsert(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public Optional<ConnectorOutputMetadata> finishInsert(Session session, InsertTableHandle tableHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics) { throw new UnsupportedOperationException(); }

    // ----- updates / deletes -----
    @Override public ColumnHandle getUpdateRowIdColumnHandle(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public boolean supportsMetadataDelete(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public OptionalLong metadataDelete(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public TableHandle beginDelete(Session session, TableHandle tableHandle) { throw new UnsupportedOperationException(); }
    @Override public void finishDelete(Session session, TableHandle tableHandle, Collection<Slice> fragments) { throw new UnsupportedOperationException(); }

    // ----- catalogs and views -----
    @Override public Optional<ConnectorId> getCatalogHandle(Session session, String catalogName) { throw new UnsupportedOperationException(); }
    @Override public Map<String, ConnectorId> getCatalogNames(Session session) { throw new UnsupportedOperationException(); }
    @Override public List<QualifiedObjectName> listViews(Session session, QualifiedTablePrefix prefix) { throw new UnsupportedOperationException(); }
    @Override public Map<QualifiedObjectName, ViewDefinition> getViews(Session session, QualifiedTablePrefix prefix) { throw new UnsupportedOperationException(); }
    @Override public Optional<ViewDefinition> getView(Session session, QualifiedObjectName viewName) { throw new UnsupportedOperationException(); }
    @Override public void createView(Session session, String catalogName, ConnectorTableMetadata viewMetadata, String viewData, boolean replace) { throw new UnsupportedOperationException(); }
    @Override public void dropView(Session session, QualifiedObjectName viewName) { throw new UnsupportedOperationException(); }
    @Override public Optional<ResolvedIndex> resolveIndex(Session session, TableHandle tableHandle, Set<ColumnHandle> indexableColumns, Set<ColumnHandle> outputColumns, TupleDomain<ColumnHandle> tupleDomain) { throw new UnsupportedOperationException(); }

    // ----- roles and privileges -----
    @Override public void createRole(Session session, String role, Optional<PrestoPrincipal> grantor, String catalog) { throw new UnsupportedOperationException(); }
    @Override public void dropRole(Session session, String role, String catalog) { throw new UnsupportedOperationException(); }
    @Override public Set<String> listRoles(Session session, String catalog) { throw new UnsupportedOperationException(); }
    @Override public void grantRoles(Session session, Set<String> roles, Set<PrestoPrincipal> grantees, boolean withAdminOption, Optional<PrestoPrincipal> grantor, String catalog) { throw new UnsupportedOperationException(); }
    @Override public void revokeRoles(Session session, Set<String> roles, Set<PrestoPrincipal> grantees, boolean adminOptionFor, Optional<PrestoPrincipal> grantor, String catalog) { throw new UnsupportedOperationException(); }
    @Override public Set<RoleGrant> listApplicableRoles(Session session, PrestoPrincipal principal, String catalog) { throw new UnsupportedOperationException(); }
    @Override public Set<String> listEnabledRoles(Session session, String catalog) { throw new UnsupportedOperationException(); }
    @Override public Set<RoleGrant> listRoleGrants(Session session, String catalog, PrestoPrincipal principal) { throw new UnsupportedOperationException(); }
    @Override public void grantTablePrivileges(Session session, QualifiedObjectName tableName, Set<Privilege> privileges, PrestoPrincipal grantee, boolean grantOption) { throw new UnsupportedOperationException(); }
    @Override public void revokeTablePrivileges(Session session, QualifiedObjectName tableName, Set<Privilege> privileges, PrestoPrincipal grantee, boolean grantOption) { throw new UnsupportedOperationException(); }
    @Override public List<GrantInfo> listTablePrivileges(Session session, QualifiedTablePrefix prefix) { throw new UnsupportedOperationException(); }

    // ----- page sink commit -----
    @Override public ListenableFuture<Void> commitPageSinkAsync(Session session, OutputTableHandle tableHandle, Collection<Slice> fragments) { throw new UnsupportedOperationException(); }
    @Override public ListenableFuture<Void> commitPageSinkAsync(Session session, InsertTableHandle tableHandle, Collection<Slice> fragments) { throw new UnsupportedOperationException(); }

    // ----- managers / registries accessors -----
    @Override public FunctionManager getFunctionManager() { throw new UnsupportedOperationException(); }
    @Override public ProcedureRegistry getProcedureRegistry() { throw new UnsupportedOperationException(); }
    @Override public TypeManager getTypeManager() { throw new UnsupportedOperationException(); }
    @Override public BlockEncodingSerde getBlockEncodingSerde() { throw new UnsupportedOperationException(); }
    @Override public SessionPropertyManager getSessionPropertyManager() { throw new UnsupportedOperationException(); }
    @Override public SchemaPropertyManager getSchemaPropertyManager() { throw new UnsupportedOperationException(); }
    @Override public TablePropertyManager getTablePropertyManager() { throw new UnsupportedOperationException(); }
    @Override public ColumnPropertyManager getColumnPropertyManager() { throw new UnsupportedOperationException(); }
    @Override public AnalyzePropertyManager getAnalyzePropertyManager() { throw new UnsupportedOperationException(); }

    // ----- misc -----
    @Override public void dropColumn(Session session, TableHandle tableHandle, ColumnHandle column) { throw new UnsupportedOperationException(); }
    @Override public boolean catalogExists(Session session, String catalogName) { throw new UnsupportedOperationException(); }
    @Override public Set<ConnectorCapabilities> getConnectorCapabilities(Session session, ConnectorId catalogName) { throw new UnsupportedOperationException(); }
}
package org.mitre.synthea.engine; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.mitre.synthea.engine.ExpressedSymptom.SymptomInfo; import org.mitre.synthea.engine.ExpressedSymptom.SymptomSource; import org.mitre.synthea.export.JSONSkip; import org.mitre.synthea.world.agents.Person; public class ExpressedConditionRecord implements Cloneable, Serializable { private static final long serialVersionUID = 4322116644425686900L; // this class contains basic info regarding an expressed conditions. // such as the onset time and end time public class ConditionPeriod implements Cloneable, Serializable { private static final long serialVersionUID = 4322116644425686901L; private Long onsetTime; private Long endTime; public ConditionPeriod(Long onsetTime) { this.onsetTime = onsetTime; this.endTime = null; } public ConditionPeriod(Long onsetTime, Long endTime) { this.onsetTime = onsetTime; this.endTime = endTime; } public ConditionPeriod clone() { return new ConditionPeriod(this.onsetTime, this.endTime); } public Long getEndTime() { return endTime; } public void setEndTime(Long endTime) { this.endTime = endTime; } public Long getOnsetTime() { return onsetTime; } } /** * A condition with a set of onset and end time entries. */ public class OnsetCondition implements Cloneable, Serializable { private static final long serialVersionUID = 4322116644425686902L; // name of the condition private String name; private List<ConditionPeriod> timeInfos; public OnsetCondition(String name) { this.name = name; timeInfos = new LinkedList<ConditionPeriod>(); } /** * Create a shallow copy of this object. 
*/ public OnsetCondition clone() { OnsetCondition data = new OnsetCondition(this.name); data.timeInfos.addAll(this.timeInfos); return data; } public String getName() { return name; } public List<ConditionPeriod> getTimeInfos() { return timeInfos; } /** * Get the last recorded onset time. */ public Long getLastOnsetTime() { if (timeInfos.isEmpty()) { return null; } else { int size = timeInfos.size(); return timeInfos.get(size - 1).getOnsetTime(); } } /** * Get the last recorded end time. */ public Long getLastEndTime() { if (timeInfos.isEmpty()) { return null; } else { int size = timeInfos.size(); return timeInfos.get(size - 1).getEndTime(); } } public void addNewEntry(long onsetTime) { ConditionPeriod entry = new ConditionPeriod(Long.valueOf(onsetTime), null); timeInfos.add(entry); } /** * Set the end time the last entry. */ public void endLastEntry(long time) { int size = timeInfos.size(); if (size > 0) { timeInfos.get(size - 1).setEndTime(Long.valueOf(time)); } } } /** * Used to record condition onset by modules. */ public class ModuleConditions implements Cloneable, Serializable { private static final long serialVersionUID = 4322116644425686903L; // source from which the conditions are onset private String source; /** Data structure for storing onset conditions (init_time, end_time).*/ private Map<String, OnsetCondition> onsetConditions; /** Data structure for storing mapping from state to condition names * This is useful when facing ConditionEnd.conditionOnSet attribute*/ private Map<String, String> state2conditionMapping; /** * Create new instance for the specified module name. */ public ModuleConditions(String source) { this.source = source; onsetConditions = new ConcurrentHashMap<String, OnsetCondition>(); state2conditionMapping = new ConcurrentHashMap<String, String>(); } /** * Create a shallow copy of this instance. 
*/ public ModuleConditions clone() { ModuleConditions data = new ModuleConditions(this.source); data.state2conditionMapping.putAll(this.state2conditionMapping); data.onsetConditions.putAll(this.onsetConditions); return data; } /** * Record the onset of a condition. */ public void onsetCondition(String condition, String state, long time) { if (!onsetConditions.containsKey(condition)) { onsetConditions.put(condition, new OnsetCondition(condition)); } OnsetCondition onsetCondition = onsetConditions.get(condition); onsetCondition.addNewEntry(time); state2conditionMapping.put(state, condition); } /** * Record the end of a condition. */ public void endCondition(String condition, long time) { if (onsetConditions.containsKey(condition)) { onsetConditions.get(condition).endLastEntry(time); } } /** * Get the last recorded onset time. */ public Long getConditionLastOnsetTime(String condition) { if (onsetConditions.containsKey(condition)) { return onsetConditions.get(condition).getLastOnsetTime(); } return null; } /** * Get the last recorded end time. */ public Long getConditionLastEndTime(String condition) { if (onsetConditions.containsKey(condition)) { return onsetConditions.get(condition).getLastEndTime(); } return null; } /** * Get the condition for the supplied state. */ public String getConditionFromState(String state) { if (state2conditionMapping.containsKey(state)) { return state2conditionMapping.get(state); } return null; } /** * Get the recorded conditions and onset/end information. * @return a map of condition name to onset/end records. 
*/ public Map<String, OnsetCondition> getOnsetConditions() { return onsetConditions; } } // this class represents a condition with its associated symptoms public class ConditionWithSymptoms implements Cloneable, Serializable { private static final long serialVersionUID = 4322116644425686904L; private String conditionName; private Long onsetTime; private Long endTime; // Data structure for storing symptoms and associated values during the condition private Map<String, List<Integer>> symptoms; /** * Create a new instance for the supplied condition name, onset and end times. */ public ConditionWithSymptoms(String name, Long onsetTime, Long endTime) { this.conditionName = name; this.onsetTime = onsetTime; this.endTime = endTime; this.symptoms = new ConcurrentHashMap<String, List<Integer>>(); } /** * Create a shallow copy of this instance. */ public ConditionWithSymptoms clone() { ConditionWithSymptoms data = new ConditionWithSymptoms(conditionName, onsetTime, endTime); data.symptoms.putAll(this.symptoms); return data; } /** * Record a symptom for the supplied module. * @param name symptom name. * @param symptomSource module origin of the symptom. */ public void addSymptoms(String name, SymptomSource symptomSource) { Map<Long, SymptomInfo> timedTypedSymptoms = symptomSource.getTimeInfos(); // get the value that correspond to the all times belonging // to the interval [begin, end] of the condition if any. 
List<Long> allTimes = new ArrayList<Long>(); for (Long time : timedTypedSymptoms.keySet()) { boolean greatThanBegin = time >= onsetTime; boolean lowThanEnd = (endTime != null && time <= endTime) || (endTime == null); if (greatThanBegin && lowThanEnd) { allTimes.add(time); } } if (allTimes.size() > 0) { Collections.sort(allTimes); if (!symptoms.containsKey(name)) { symptoms.put(name, new ArrayList<Integer>()); } for (Long time : allTimes) { Integer value = timedTypedSymptoms.get(time).getValue(); symptoms.get(name).add(value); } } } public Long getOnsetTime() { return onsetTime; } public Long getEndTime() { return endTime; } public String getConditionName() { return conditionName; } public Map<String, List<Integer>> getSymptoms() { return symptoms; } } // a map: module.name -> Conditions private Map<String, ModuleConditions> sources; @JSONSkip Person person; public ExpressedConditionRecord(Person person) { this.person = person; sources = new ConcurrentHashMap<String, ModuleConditions>(); } /** * Create a shallow clone of this instance. */ public ExpressedConditionRecord clone() { ExpressedConditionRecord data = new ExpressedConditionRecord(this.person); data.sources.putAll(this.sources); return data; } public Map<String, ModuleConditions> getSources() { return sources; } /** * Method that is used to update the onsetConditions field when * a ConditionOnset state is processed. */ public void onConditionOnset(String module, String state, String condition, long time) { if (!sources.containsKey(module)) { sources.put(module, new ModuleConditions(module)); } ModuleConditions moduleConditions = sources.get(module); moduleConditions.onsetCondition(condition, state, time); } /** * Method that is used to retrieve the last time a condition * has been onset from a given module. 
*/
public Long getConditionLastOnsetTimeFromModule(String module, String condition) {
  ModuleConditions moduleConditions = sources.get(module);
  return moduleConditions == null ? null
      : moduleConditions.getConditionLastOnsetTime(condition);
}

/**
 * Retrieve the last time a ConditionEnd state was processed for the given
 * condition by the given module, or null when unknown.
 */
public Long getConditionLastEndTimeFromModule(String module, String condition) {
  ModuleConditions moduleConditions = sources.get(module);
  return moduleConditions == null ? null
      : moduleConditions.getConditionLastEndTime(condition);
}

/**
 * Retrieve the condition name from a state name, or null when unknown.
 * Useful when dealing with the ConditionEnd.conditionOnSet attribute.
 */
public String getConditionFromState(String module, String state) {
  ModuleConditions moduleConditions = sources.get(module);
  return moduleConditions == null ? null
      : moduleConditions.getConditionFromState(state);
}

/**
 * Update the bookkeeping when a ConditionEnd state is processed. A no-op
 * when the module never onset any condition.
 */
public void onConditionEnd(String module, String condition, long time) {
  ModuleConditions moduleConditions = sources.get(module);
  if (moduleConditions != null) {
    moduleConditions.endCondition(condition, time);
  }
}

/**
 * Get the symptoms that were expressed as parts of
 * the conditions the person suffers from.
 * The returned data is a map of [time: List of ConditionWithSymptoms].
 * It captures the conditions a person has suffered from together
 * with the related symptoms at different age/time.
*/
public Map<Long, List<ConditionWithSymptoms>> getConditionSymptoms() {
  Map<String, ExpressedSymptom> expressedSymptoms = person.getExpressedSymptoms();
  Map<Long, List<ConditionWithSymptoms>> result =
      new ConcurrentHashMap<Long, List<ConditionWithSymptoms>>();
  for (Map.Entry<String, ModuleConditions> moduleEntry : sources.entrySet()) {
    String module = moduleEntry.getKey();
    Map<String, OnsetCondition> onsetConditions =
        moduleEntry.getValue().getOnsetConditions();
    for (Map.Entry<String, OnsetCondition> conditionEntry : onsetConditions.entrySet()) {
      String condition = conditionEntry.getKey();
      // One entry per onset/end period of this condition.
      for (ConditionPeriod period : conditionEntry.getValue().getTimeInfos()) {
        Long begin = period.getOnsetTime();
        Long end = period.getEndTime();
        List<ConditionWithSymptoms> atOnset = result.get(begin);
        if (atOnset == null) {
          atOnset = new LinkedList<ConditionWithSymptoms>();
          result.put(begin, atOnset);
        }
        ConditionWithSymptoms conditionWithSymptoms =
            new ConditionWithSymptoms(condition, begin, end);
        // Attach every symptom this module expressed during the period.
        for (Map.Entry<String, ExpressedSymptom> symptomEntry : expressedSymptoms.entrySet()) {
          Map<String, SymptomSource> symptomSources =
              symptomEntry.getValue().getSources();
          if (symptomSources.containsKey(module)) {
            conditionWithSymptoms.addSymptoms(
                symptomEntry.getKey(), symptomSources.get(module));
          }
        }
        atOnset.add(conditionWithSymptoms);
      }
    }
  }
  return result;
}
}
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies; import org.eclipse.gef.commands.Command; import org.eclipse.gmf.runtime.emf.type.core.requests.CreateElementRequest; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.APIResourceEndpointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.AddressEndPointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.AddressingEndpointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.AggregateMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.BAMMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.BeanMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.BuilderMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CacheMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CallMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CallTemplateMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CalloutMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ClassMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CloneMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CloudConnectorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CloudConnectorOperationCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CommandMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ConditionalRouterMediatorCreateCommand; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DBLookupMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DBReportMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DataMapperMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DefaultEndPointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DropMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EJBMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EnqueueMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EnrichMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EntitlementMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EventMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FailoverEndPointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FastXSLTMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FaultMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FilterMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ForEachMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.HTTPEndpointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.HeaderMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.IterateMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.LoadBalanceEndPointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.LogMediatorCreateCommand; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.LoopBackMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.NamedEndpointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.OAuthMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.PayloadFactoryMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.PropertyMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.PublishEventMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RMSequenceMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RecipientListEndPointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RespondMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RouterMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RuleMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ScriptMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SendMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SequenceCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SmooksMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SpringMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.StoreMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SwitchMediatorCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.TemplateEndpointCreateCommand; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ThrottleMediatorCreateCommand; 
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.TransactionMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.URLRewriteMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ValidateMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.WSDLEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.XQueryMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.XSLTMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;

/**
 * Semantic edit policy for the mediator-flow compartment (MediatorFlow_3758):
 * maps each requested element type to the command that creates the
 * corresponding model element in this compartment.
 *
 * NOTE(review): GMF-generated code (see the @generated tags) — prefer
 * regenerating from the model over hand-editing.
 *
 * @generated
 */
public class MediatorFlowMediatorFlowCompartment23ItemSemanticEditPolicy extends
        EsbBaseItemSemanticEditPolicy {

    /**
     * @generated
     */
    public MediatorFlowMediatorFlowCompartment23ItemSemanticEditPolicy() {
        super(EsbElementTypes.MediatorFlow_3758);
    }

    /**
     * Returns the creation command matching the requested element type, or
     * delegates to the superclass when the type is not handled here.
     *
     * @generated
     */
    protected Command getCreateCommand(CreateElementRequest req) {
        // Mediators.
        if (EsbElementTypes.DropMediator_3491 == req.getElementType()) {
            return getGEFWrapper(new DropMediatorCreateCommand(req));
        }
        if (EsbElementTypes.PropertyMediator_3492 == req.getElementType()) {
            return getGEFWrapper(new PropertyMediatorCreateCommand(req));
        }
        if (EsbElementTypes.ThrottleMediator_3493 == req.getElementType()) {
            return getGEFWrapper(new ThrottleMediatorCreateCommand(req));
        }
        if (EsbElementTypes.FilterMediator_3494 == req.getElementType()) {
            return getGEFWrapper(new FilterMediatorCreateCommand(req));
        }
        if (EsbElementTypes.LogMediator_3495 == req.getElementType()) {
            return getGEFWrapper(new LogMediatorCreateCommand(req));
        }
        if (EsbElementTypes.EnrichMediator_3496 == req.getElementType()) {
            return getGEFWrapper(new EnrichMediatorCreateCommand(req));
        }
        if (EsbElementTypes.XSLTMediator_3497 == req.getElementType()) {
            return getGEFWrapper(new XSLTMediatorCreateCommand(req));
        }
        if (EsbElementTypes.SwitchMediator_3498 == req.getElementType()) {
            return getGEFWrapper(new SwitchMediatorCreateCommand(req));
        }
        if (EsbElementTypes.Sequence_3503 == req.getElementType()) {
            return getGEFWrapper(new SequenceCreateCommand(req));
        }
        if (EsbElementTypes.EventMediator_3504 == req.getElementType()) {
            return getGEFWrapper(new EventMediatorCreateCommand(req));
        }
        if (EsbElementTypes.EntitlementMediator_3505 == req.getElementType()) {
            return getGEFWrapper(new EntitlementMediatorCreateCommand(req));
        }
        if (EsbElementTypes.ClassMediator_3506 == req.getElementType()) {
            return getGEFWrapper(new ClassMediatorCreateCommand(req));
        }
        if (EsbElementTypes.SpringMediator_3507 == req.getElementType()) {
            return getGEFWrapper(new SpringMediatorCreateCommand(req));
        }
        if (EsbElementTypes.ScriptMediator_3508 == req.getElementType()) {
            return getGEFWrapper(new ScriptMediatorCreateCommand(req));
        }
        if (EsbElementTypes.FaultMediator_3509 == req.getElementType()) {
            return getGEFWrapper(new FaultMediatorCreateCommand(req));
        }
        if (EsbElementTypes.XQueryMediator_3510 == req.getElementType()) {
            return getGEFWrapper(new XQueryMediatorCreateCommand(req));
        }
        if (EsbElementTypes.CommandMediator_3511 == req.getElementType()) {
            return getGEFWrapper(new CommandMediatorCreateCommand(req));
        }
        if (EsbElementTypes.DBLookupMediator_3512 == req.getElementType()) {
            return getGEFWrapper(new DBLookupMediatorCreateCommand(req));
        }
        if (EsbElementTypes.DBReportMediator_3513 == req.getElementType()) {
            return getGEFWrapper(new DBReportMediatorCreateCommand(req));
        }
        if (EsbElementTypes.SmooksMediator_3514 == req.getElementType()) {
            return getGEFWrapper(new SmooksMediatorCreateCommand(req));
        }
        if (EsbElementTypes.SendMediator_3515 == req.getElementType()) {
            return getGEFWrapper(new SendMediatorCreateCommand(req));
        }
        if (EsbElementTypes.HeaderMediator_3516 == req.getElementType()) {
            return getGEFWrapper(new HeaderMediatorCreateCommand(req));
        }
        if (EsbElementTypes.CloneMediator_3517 == req.getElementType()) {
            return getGEFWrapper(new CloneMediatorCreateCommand(req));
        }
        if (EsbElementTypes.CacheMediator_3518 == req.getElementType()) {
            return getGEFWrapper(new CacheMediatorCreateCommand(req));
        }
        if (EsbElementTypes.IterateMediator_3519 == req.getElementType()) {
            return getGEFWrapper(new IterateMediatorCreateCommand(req));
        }
        if (EsbElementTypes.CalloutMediator_3520 == req.getElementType()) {
            return getGEFWrapper(new CalloutMediatorCreateCommand(req));
        }
        if (EsbElementTypes.TransactionMediator_3521 == req.getElementType()) {
            return getGEFWrapper(new TransactionMediatorCreateCommand(req));
        }
        if (EsbElementTypes.RMSequenceMediator_3522 == req.getElementType()) {
            return getGEFWrapper(new RMSequenceMediatorCreateCommand(req));
        }
        if (EsbElementTypes.RuleMediator_3523 == req.getElementType()) {
            return getGEFWrapper(new RuleMediatorCreateCommand(req));
        }
        if (EsbElementTypes.OAuthMediator_3524 == req.getElementType()) {
            return getGEFWrapper(new OAuthMediatorCreateCommand(req));
        }
        if (EsbElementTypes.AggregateMediator_3525 == req.getElementType()) {
            return getGEFWrapper(new AggregateMediatorCreateCommand(req));
        }
        if (EsbElementTypes.StoreMediator_3588 == req.getElementType()) {
            return getGEFWrapper(new StoreMediatorCreateCommand(req));
        }
        if (EsbElementTypes.BuilderMediator_3591 == req.getElementType()) {
            return getGEFWrapper(new BuilderMediatorCreateCommand(req));
        }
        if (EsbElementTypes.CallTemplateMediator_3594 == req.getElementType()) {
            return getGEFWrapper(new CallTemplateMediatorCreateCommand(req));
        }
        if (EsbElementTypes.PayloadFactoryMediator_3597 == req.getElementType()) {
            return getGEFWrapper(new PayloadFactoryMediatorCreateCommand(req));
        }
        if (EsbElementTypes.EnqueueMediator_3600 == req.getElementType()) {
            return getGEFWrapper(new EnqueueMediatorCreateCommand(req));
        }
        if (EsbElementTypes.URLRewriteMediator_3620 == req.getElementType()) {
            return getGEFWrapper(new URLRewriteMediatorCreateCommand(req));
        }
        if (EsbElementTypes.ValidateMediator_3623 == req.getElementType()) {
            return getGEFWrapper(new ValidateMediatorCreateCommand(req));
        }
        if (EsbElementTypes.RouterMediator_3628 == req.getElementType()) {
            return getGEFWrapper(new RouterMediatorCreateCommand(req));
        }
        if (EsbElementTypes.ConditionalRouterMediator_3635 == req.getElementType()) {
            return getGEFWrapper(new ConditionalRouterMediatorCreateCommand(req));
        }
        if (EsbElementTypes.BAMMediator_3680 == req.getElementType()) {
            return getGEFWrapper(new BAMMediatorCreateCommand(req));
        }
        if (EsbElementTypes.BeanMediator_3683 == req.getElementType()) {
            return getGEFWrapper(new BeanMediatorCreateCommand(req));
        }
        if (EsbElementTypes.EJBMediator_3686 == req.getElementType()) {
            return getGEFWrapper(new EJBMediatorCreateCommand(req));
        }
        // Endpoints.
        if (EsbElementTypes.DefaultEndPoint_3609 == req.getElementType()) {
            return getGEFWrapper(new DefaultEndPointCreateCommand(req));
        }
        if (EsbElementTypes.AddressEndPoint_3610 == req.getElementType()) {
            return getGEFWrapper(new AddressEndPointCreateCommand(req));
        }
        if (EsbElementTypes.FailoverEndPoint_3611 == req.getElementType()) {
            return getGEFWrapper(new FailoverEndPointCreateCommand(req));
        }
        if (EsbElementTypes.RecipientListEndPoint_3692 == req.getElementType()) {
            return getGEFWrapper(new RecipientListEndPointCreateCommand(req));
        }
        if (EsbElementTypes.WSDLEndPoint_3612 == req.getElementType()) {
            return getGEFWrapper(new WSDLEndPointCreateCommand(req));
        }
        if (EsbElementTypes.NamedEndpoint_3660 == req.getElementType()) {
            return getGEFWrapper(new NamedEndpointCreateCommand(req));
        }
        if (EsbElementTypes.LoadBalanceEndPoint_3613 == req.getElementType()) {
            return getGEFWrapper(new LoadBalanceEndPointCreateCommand(req));
        }
        if (EsbElementTypes.APIResourceEndpoint_3674 == req.getElementType()) {
            return getGEFWrapper(new APIResourceEndpointCreateCommand(req));
        }
        if (EsbElementTypes.AddressingEndpoint_3689 == req.getElementType()) {
            return getGEFWrapper(new AddressingEndpointCreateCommand(req));
        }
        if (EsbElementTypes.HTTPEndpoint_3709 == req.getElementType()) {
            return getGEFWrapper(new HTTPEndpointCreateCommand(req));
        }
        if (EsbElementTypes.TemplateEndpoint_3716 == req.getElementType()) {
            return getGEFWrapper(new TemplateEndpointCreateCommand(req));
        }
        // Cloud connectors and remaining mediators.
        if (EsbElementTypes.CloudConnector_3719 == req.getElementType()) {
            return getGEFWrapper(new CloudConnectorCreateCommand(req));
        }
        if (EsbElementTypes.CloudConnectorOperation_3722 == req.getElementType()) {
            return getGEFWrapper(new CloudConnectorOperationCreateCommand(req));
        }
        if (EsbElementTypes.LoopBackMediator_3736 == req.getElementType()) {
            return getGEFWrapper(new LoopBackMediatorCreateCommand(req));
        }
        if (EsbElementTypes.RespondMediator_3739 == req.getElementType()) {
            return getGEFWrapper(new RespondMediatorCreateCommand(req));
        }
        if (EsbElementTypes.CallMediator_3742 == req.getElementType()) {
            return getGEFWrapper(new CallMediatorCreateCommand(req));
        }
        if (EsbElementTypes.DataMapperMediator_3761 == req.getElementType()) {
            return getGEFWrapper(new DataMapperMediatorCreateCommand(req));
        }
        if (EsbElementTypes.FastXSLTMediator_3764 == req.getElementType()) {
            return getGEFWrapper(new FastXSLTMediatorCreateCommand(req));
        }
        if (EsbElementTypes.ForEachMediator_3780 == req.getElementType()) {
            return getGEFWrapper(new ForEachMediatorCreateCommand(req));
        }
        if (EsbElementTypes.PublishEventMediator_3785 == req.getElementType()) {
            return getGEFWrapper(new PublishEventMediatorCreateCommand(req));
        }
        return super.getCreateCommand(req);
    }

}
/** * Licensed to Cloudera, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Cloudera, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cloudera.flume.agent.diskfailover; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.cloudera.flume.conf.Context; import com.cloudera.flume.core.Event; import com.cloudera.flume.core.EventImpl; import com.cloudera.flume.core.EventSink; import com.cloudera.flume.core.EventSinkDecorator; import com.cloudera.flume.core.EventSource; import com.cloudera.flume.handlers.hdfs.SeqfileEventSink; import com.cloudera.flume.handlers.hdfs.SeqfileEventSource; import com.cloudera.flume.handlers.rolling.RollSink; import com.cloudera.flume.handlers.rolling.RollTrigger; import com.cloudera.flume.handlers.rolling.Tagger; import com.cloudera.flume.reporter.ReportEvent; import com.cloudera.flume.reporter.ReportUtil; import com.cloudera.flume.reporter.Reportable; import com.cloudera.util.FileUtil; import com.google.common.base.Preconditions; /** * This 
class is responsible for managing where faults are detected and future
 * messages need to be made durable until the data has been sent one hop
 * downstream.
 *
 * When there are error conditions, IllegalStateExceptions,
 * IllegalArgumentExceptions and NullPointerExceptions can be thrown.
 */
public class NaiveFileFailoverManager implements DiskFailoverManager,
    Reportable {
  static final Logger LOG = LoggerFactory
      .getLogger(NaiveFileFailoverManager.class);

  // This is the state of the node: batch tag -> lifecycle record for every
  // batch this manager is currently tracking.
  final private ConcurrentHashMap<String, DFOData> table = new ConcurrentHashMap<String, DFOData>();

  // Queues of batch tags, one per on-disk lifecycle stage.
  final private LinkedBlockingQueue<String> writingQ = new LinkedBlockingQueue<String>();
  final private LinkedBlockingQueue<String> loggedQ = new LinkedBlockingQueue<String>();
  final private LinkedBlockingQueue<String> sendingQ = new LinkedBlockingQueue<String>();

  // Subdirectory names, relative to the manager's base dir, backing each stage.
  public static final String IMPORTDIR = "dfo_import";
  public static final String WRITINGDIR = "dfo_writing";
  public static final String LOGGEDDIR = "dfo_logged";
  public static final String ERRORDIR = "dfo_error";
  public static final String SENDINGDIR = "dfo_sending";

  // Internal statistics
  private AtomicLong writingCount = new AtomicLong(0); // # of batches currently being written.
private AtomicLong loggedCount = new AtomicLong(0); // # of batches logged // (durably written // locally) private AtomicLong sendingCount = new AtomicLong(0); // # of messages sending private AtomicLong sentCount = new AtomicLong(0); // # of messages resent private AtomicLong importedCount = new AtomicLong(0); // # of batches imported private AtomicLong retryCount = new AtomicLong(0); // # of batches retried private AtomicLong recoverCount = new AtomicLong(0); // # batches recovered private AtomicLong errCount = new AtomicLong(0); // # batches with errors private AtomicLong writingEvtCount = new AtomicLong(0); private AtomicLong readEvtCount = new AtomicLong(0); // 'closed' is the state where no data can be inserted and no data can be // drained from this decorator. // 'closing' is the state after a close is requested where the values can // still be drained by the subordinate thread, but no new data can be // inserted. This is necessary for clean closes. enum ManagerState { INIT, OPEN, CLOSED, CLOSING }; volatile ManagerState state = ManagerState.INIT; static class DFOData { State s; String tag; DFOData(String tag) { this.s = State.WRITING; this.tag = tag; } static DFOData recovered(String tag) { DFOData data = new DFOData(tag); data.s = State.LOGGED; return data; } }; File importDir, writingDir, loggedDir, sendingDir, errorDir; public NaiveFileFailoverManager(File baseDir) { File writingDir = new File(baseDir, WRITINGDIR); File loggedDir = new File(baseDir, LOGGEDDIR); File xmitableDir = new File(baseDir, SENDINGDIR); File errDir = new File(baseDir, ERRORDIR); Preconditions.checkNotNull(writingDir); Preconditions.checkNotNull(loggedDir); Preconditions.checkNotNull(xmitableDir); Preconditions.checkNotNull(errDir); this.importDir = new File(baseDir, IMPORTDIR); this.writingDir = writingDir; this.loggedDir = loggedDir; this.sendingDir = xmitableDir; this.errorDir = errDir; state = ManagerState.CLOSED; } synchronized public void open() throws IOException { // TODO 
(jon) be less strict. ?? need to return on and figure out why this is // wrong, add // latches. // Preconditions.checkState(state == ManagerState.CLOSED, // "Must be in CLOSED state to open, currently " + state); // make the dirs if they do not exist if (!FileUtil.makeDirs(importDir)) { throw new IOException("Unable to create import dir: " + importDir); } if (!FileUtil.makeDirs(writingDir)) { throw new IOException("Unable to create writing dir: " + writingDir); } if (!FileUtil.makeDirs(loggedDir)) { throw new IOException("Unable to create logged dir: " + loggedDir); } if (!FileUtil.makeDirs(sendingDir)) { throw new IOException("Unable to create sending dir: " + sendingDir); } if (!FileUtil.makeDirs(errorDir)) { throw new IOException("Unable to create error dir: " + sendingDir); } state = ManagerState.OPEN; } public Collection<String> getWritingTags() { return Collections.unmodifiableCollection(writingQ); } public Collection<String> getLoggedTags() { return Collections.unmodifiableCollection(loggedQ); } public Collection<String> getSendingTags() { return Collections.unmodifiableCollection(sendingQ); } synchronized public void close() throws IOException { if (state == ManagerState.CLOSED) { LOG.warn("Double close (which is ok)"); } if (state == ManagerState.CLOSING) { LOG.warn("Close while in closing state, odd"); } state = ManagerState.CLOSING; } /** * This looks at directory structure and recovers state based on where files * are in the file system. * * For a first cut, we will just get everything into the logged state and * restart from there. Optimizations will recover at finer grain and be more * performant. */ synchronized public void recover() throws IOException { // move all writing into the logged dir. 
for (String f : writingDir.list()) { File old = new File(writingDir, f); if (!old.isFile() || !old.renameTo(new File(loggedDir, f))) { throw new IOException("Unable to recover - couldn't rename " + old + " to " + loggedDir + f); } LOG.debug("Recover moved " + f + " from WRITING to LOGGED"); } // move all sending into the logged dir for (String f : sendingDir.list()) { File old = new File(sendingDir, f); if (!old.isFile() || !old.renameTo(new File(loggedDir, f))) { throw new IOException("Unable to recover - couldn't rename " + old + " to " + loggedDir + f); } LOG.debug("Recover moved " + f + " from SENDING to LOGGED"); } // add all logged to loggedQ and table for (String f : loggedDir.list()) { // File log = new File(loggedDir, f); DFOData data = DFOData.recovered(f); table.put(f, data); loggedQ.add(f); recoverCount.incrementAndGet(); LOG.debug("Recover loaded " + f); } // carry on now on your merry way. } /** * Returns a new sink when the roller asks for a new one. */ synchronized public EventSink newWritingSink(final Tagger tagger) throws IOException { File dir = getDir(State.WRITING); final String tag = tagger.newTag(); EventSink curSink = new SeqfileEventSink(new File(dir, tag) .getAbsoluteFile()); writingQ.add(tag); DFOData data = new DFOData(tag); table.put(tag, data); writingCount.incrementAndGet(); return new EventSinkDecorator<EventSink>(curSink) { @Override synchronized public void append(Event e) throws IOException, InterruptedException { synchronized (NaiveFileFailoverManager.this) { getSink().append(e); writingEvtCount.incrementAndGet(); } } @Override synchronized public void close() throws IOException, InterruptedException { synchronized (NaiveFileFailoverManager.this) { super.close(); if (!writingQ.contains(tag)) { LOG.warn("Already changed tag " + tag + " out of WRITING state"); return; } LOG.info("File lives in " + getFile(tag)); changeState(tag, State.WRITING, State.LOGGED); loggedCount.incrementAndGet(); } } }; } /** * This instantiates a roller 
where all input is sent to. */
  @Override
  public RollSink getEventSink(Context ctx, final RollTrigger t)
      throws IOException {
    // NaiveFileFailover is just a place holder
    return new RollSink(ctx, "NaiveFileFailover", t, 250) {
      @Override
      public EventSink newSink(Context ctx) throws IOException {
        // TODO (jon) clean this up -- want to deprecate Tagger
        return newWritingSink(t.getTagger());
      }
    };
  }

  /**
   * Maps a state to the queue of tags in that state.
   *
   * This is private and not thread safe. Returns null for IMPORT and any
   * other state that has no associated queue.
   */
  private LinkedBlockingQueue<String> getQueue(State state) {
    Preconditions.checkNotNull(state,
        "Attempted to get queue for invalid null state");
    switch (state) {
    case WRITING:
      return writingQ;
    case LOGGED:
      return loggedQ;
    case SENDING:
      return sendingQ;
    case IMPORT:
    default:
      return null;
    }
  }

  /**
   * Maps a state to the directory holding files in that state.
   *
   * This is private and not thread safe. Returns null for unknown states.
   */
  private File getDir(State state) {
    Preconditions.checkNotNull(state,
        "Attempted to get dir for invalid null state");
    switch (state) {
    case IMPORT:
      return importDir;
    case WRITING:
      return writingDir;
    case LOGGED:
      return loggedDir;
    case SENDING:
      return sendingDir;
    case ERROR:
      return errorDir;
    default:
      return null;
    }
  }

  /**
   * Resolves a tag to its on-disk file, based on the tag's current state.
   *
   * This is private and not thread safe.
   */
  private File getFile(String tag) {
    Preconditions.checkNotNull(tag, "Attempted to get file for empty tag");
    DFOData data = table.get(tag);
    Preconditions.checkNotNull(data, "Data for tag " + tag + " was empty");

    File dir = getDir(data.s);
    return new File(dir, tag);
  }

  /**
   * Transitions a tag from oldState to newState, moving its backing file to
   * the new state's directory so the change survives a crash. A null oldState
   * accepts whatever the current state is. SENT is terminal: the file is
   * deleted instead of moved.
   *
   * This can throw both IOExceptions and runtime exceptions due to
   * Preconditions failures.
   *
   * According to the link below, Solaris (I assume POSIX/linux) does atomic
   * rename but Windows does not guarantee it.
   * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4017593 To be truly
   * correct, I need to check the return value (will likely fail in unix if
   * moving from one volume to another instead of just within same volume)
   */
  synchronized void changeState(String tag, State oldState, State newState)
      throws IOException {
    DFOData data = table.get(tag);
    Preconditions.checkArgument(data != null, "Tag " + tag + " has no data");
    Preconditions.checkArgument(tag.equals(data.tag),
        "Data associated with tag didn't match tag " + tag);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Change " + data.s + "/" + oldState + " to " + newState
          + " : " + tag);
    }

    // null allows any previous state.
    if (oldState == null) {
      oldState = data.s;
    }
    Preconditions.checkState(data.s == oldState, "Expected state to be "
        + oldState + " but was " + data.s);

    if (oldState == State.ERROR) {
      throw new IllegalStateException("Cannot move from error state");
    }

    // SENT is terminal state, no where to move, just delete it.
    if (newState == State.SENT) {
      getQueue(oldState).remove(tag);
      File sentFile = getFile(tag);
      data.s = newState;
      if (!sentFile.delete()) {
        LOG.error("Couldn't delete " + sentFile
            + " - can be safely manually deleted");
      }
      // TODO (jon) need to eventually age off sent files entry to not
      // exhaust memory
      return;
    }

    // move files to other directories to making state change durable.
    File orig = getFile(tag);
    File newf = new File(getDir(newState), tag);
    boolean success = orig.renameTo(newf);
    if (!success) {
      // BUGFIX: added the missing space before "failed!".
      throw new IOException("Move " + orig + " -> " + newf + " failed!");
    }

    // is successful, update queues.
    LOG.debug("old state is " + oldState);
    getQueue(oldState).remove(tag);
    BlockingQueue<String> q = getQueue(newState);
    if (q != null) {
      q.add(tag);
    }
    data.s = newState;
  }

  /**
   * This decorator wraps sources and updates state transitions for the
   * different sets of data. It intercepts exceptions from its sources and
   * moves the batch to error state.
   */
  class StateChangeDeco extends EventSource.Base {
    final String tag;
    final EventSource src;

    public StateChangeDeco(EventSource src, String tag) {
      Preconditions.checkNotNull(src, "StateChangeDeco called with null src");
      this.src = src;
      this.tag = tag;
    }

    /** Opens the wrapped source; on failure the batch is moved to ERROR. */
    @Override
    public void open() throws IOException, InterruptedException {
      try {
        src.open();
      } catch (IOException ioe) {
        changeState(tag, State.SENDING, State.ERROR);
        errCount.incrementAndGet();
        // TODO(jon) Eat the exception?
        throw ioe;
      }
    }

    /**
     * Closes the wrapped source and marks the batch SENT; on failure the
     * batch is moved to ERROR instead.
     */
    @Override
    public void close() throws IOException, InterruptedException {
      try {
        src.close();
        changeState(tag, State.SENDING, State.SENT);
        sentCount.incrementAndGet();
      } catch (IOException ioe) {
        LOG.warn("close had a problem " + src, ioe);
        changeState(tag, null, State.ERROR);
        throw ioe; // rethrow this
      }
    }

    /**
     * Reads the next event from the wrapped source, stripping the roll tag;
     * on failure the batch is moved to ERROR.
     */
    @Override
    public Event next() throws IOException, InterruptedException {
      try {
        Event e = src.next();
        if (e != null) {
          readEvtCount.incrementAndGet();
          // TODO make the roll tag a parameter so that we don't have to
          // remove it here.
          e = EventImpl.unselect(e, RollSink.DEFAULT_ROLL_TAG);
        }
        updateEventProcessingStats(e);
        return e;
      } catch (IOException ioe) {
        LOG.warn("next had a problem " + src, ioe);
        changeState(tag, null, State.ERROR);
        errCount.incrementAndGet();
        throw ioe;
      }
    }

    @Override
    public void getReports(String namePrefix, Map<String, ReportEvent> reports) {
      super.getReports(namePrefix, reports);
      src.getReports(namePrefix + getName() + ".", reports);
    }
  }

  /**
   * This gets a valid seqfile event source. If open fails, it just cleans
   * that file up and moves on to the next.
   *
   * Will block unless this manager has been told to close. When closed will
   * return null.
   */
  public EventSource getUnsentSource() throws IOException {
    synchronized (this) {
      if (state == ManagerState.CLOSED) {
        return null;
      }
    }

    // need to get a current file?
    String sendingTag = null;
    try {
      while (sendingTag == null) {
        sendingTag = loggedQ.poll(200, TimeUnit.MILLISECONDS);
        // exit condition is when closed is flagged and the queue is empty.
        // this checks on the queues needs to be atomic.
        if (sendingTag == null) {
          synchronized (this) {
            if (state == ManagerState.CLOSING && writingQ.isEmpty()
                && loggedQ.isEmpty() && sendingQ.isEmpty()) {
              state = ManagerState.CLOSED;
              // this manager is now closed and the queues are empty.
              return null;
            }
          }
        }
      }
    } catch (InterruptedException e) {
      // BUGFIX: restore the interrupt flag before converting to IOException
      // so callers further up can still observe the interruption.
      Thread.currentThread().interrupt();
      LOG.error("interrupted", e);
      throw new IOException(e);
    }

    LOG.info("opening new file for " + sendingTag);
    changeState(sendingTag, State.LOGGED, State.SENDING);
    sendingCount.incrementAndGet();
    File curFile = getFile(sendingTag);
    EventSource curSource = new SeqfileEventSource(curFile.getAbsolutePath());
    return new StateChangeDeco(curSource, sendingTag);
  }

  /**
   * change something that is sent and not acknowledged to logged state so
   * that the normal mechanisms eventually retry sending it.
   */
  synchronized public void retry(String tag) throws IOException {
    // Yuck. This is like a CAS right now.
    DFOData data = table.get(tag);
    if (data != null) {
      if (data.s == State.SENDING || data.s == State.LOGGED) {
        // BUGFIX: fixed "happend" typo in the log message.
        LOG.warn("There was a race that happened with SENT vs SENDING states");
        return;
      }
    }
    changeState(tag, State.SENT, State.LOGGED);
    retryCount.incrementAndGet();
  }

  @Override
  public EventSource getEventSource() throws IOException {
    return new DiskFailoverSource(this);
  }

  /**
   * This is a hook that imports external files to the dfo bypassing the
   * default append. Each file in the import dir is registered in recovered
   * state and enqueued on the logged queue.
   *
   * NOTE(review): files are only enqueued here, not physically moved out of
   * importDir -- presumably the state machine handles the move; confirm.
   */
  synchronized public void importData() throws IOException {
    // move all writing into the logged dir.
    String[] files = importDir.list();
    if (files == null) {
      // BUGFIX: File.list() returns null if the dir is missing or unreadable;
      // previously this caused a NullPointerException in the for-each.
      throw new IOException("Unable to list import dir " + importDir);
    }
    // The method is already synchronized on this, so the previous inner
    // synchronized (this) block was redundant and has been removed.
    for (String fn : files) {
      // add to logging queue
      DFOData data = DFOData.recovered(fn);
      table.put(fn, data);
      loggedQ.add(fn);
      importedCount.incrementAndGet();
    }
  }

  /**
   * Returns true if the dfo log is empty, false if there remain events saved
   * off.
   */
  @Override
  public synchronized boolean isEmpty() {
    return writingQ.isEmpty() && loggedQ.isEmpty() && sendingQ.isEmpty();
  }

  @Override
  public String getName() {
    return "NaiveDiskFailover";
  }

  /** Snapshot of counters and queue depths for reporting. */
  @Override
  synchronized public ReportEvent getMetrics() {
    ReportEvent rpt = new ReportEvent(getName());

    // historical counts
    rpt.setLongMetric(A_IMPORTED, importedCount.get());
    rpt.setLongMetric(A_WRITING, writingCount.get());
    rpt.setLongMetric(A_LOGGED, loggedCount.get());
    rpt.setLongMetric(A_SENDING, sendingCount.get());
    rpt.setLongMetric(A_ERROR, errCount.get());
    rpt.setLongMetric(A_RECOVERED, recoverCount.get());

    // Waiting to send
    rpt.setLongMetric(A_IN_LOGGED, loggedQ.size());

    // message counts
    rpt.setLongMetric(A_MSG_WRITING, writingEvtCount.get());
    rpt.setLongMetric(A_MSG_READ, readEvtCount.get());

    return rpt;
  }

  @Override
  public Map<String, Reportable> getSubMetrics() {
    return ReportUtil.noChildren();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.jobmaster; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.JobStatus; import org.apache.flink.api.common.time.Time; import org.apache.flink.runtime.checkpoint.StandaloneCheckpointRecoveryFactory; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.clusterframework.types.ResourceProfile; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.heartbeat.HeartbeatServices; import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobGraphTestUtils; import org.apache.flink.runtime.jobmanager.OnCompletionActions; import org.apache.flink.runtime.jobmaster.utils.JobMasterBuilder; import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService; import org.apache.flink.runtime.leaderretrieval.SettableLeaderRetrievalService; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.rpc.TestingRpcServiceResource; import 
org.apache.flink.runtime.taskexecutor.AccumulatorReport; import org.apache.flink.runtime.taskexecutor.ExecutionDeploymentReport; import org.apache.flink.runtime.taskexecutor.TaskExecutorGateway; import org.apache.flink.runtime.taskexecutor.TaskExecutorToJobManagerHeartbeatPayload; import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGateway; import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder; import org.apache.flink.runtime.taskexecutor.slot.SlotOffer; import org.apache.flink.runtime.taskmanager.LocalUnresolvedTaskManagerLocation; import org.apache.flink.runtime.taskmanager.UnresolvedTaskManagerLocation; import org.apache.flink.runtime.util.TestingFatalErrorHandlerResource; import org.apache.flink.testutils.TestingUtils; import org.apache.flink.util.TestLogger; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; /** Tests for the execution deployment-reconciliation logic in the {@link JobMaster}. 
*/
public class JobMasterExecutionDeploymentReconciliationTest extends TestLogger {

    private static final Time testingTimeout = Time.seconds(10L);

    // Heartbeat intervals are effectively disabled so the tests drive
    // heartbeats manually via heartbeatFromTaskManager.
    private final HeartbeatServices heartbeatServices =
            new HeartbeatServices(Integer.MAX_VALUE, Integer.MAX_VALUE);

    private final TestingHighAvailabilityServices haServices =
            new TestingHighAvailabilityServices();
    private final SettableLeaderRetrievalService resourceManagerLeaderRetriever =
            new SettableLeaderRetrievalService();
    private final TestingLeaderElectionService resourceManagerLeaderElectionService =
            new TestingLeaderElectionService();

    @ClassRule
    public static final TestingRpcServiceResource RPC_SERVICE_RESOURCE =
            new TestingRpcServiceResource();

    @Rule
    public final TestingFatalErrorHandlerResource testingFatalErrorHandlerResource =
            new TestingFatalErrorHandlerResource();

    /** Wires the testing HA services used by every JobMaster built in this test. */
    @Before
    public void setup() {
        haServices.setResourceManagerLeaderRetriever(resourceManagerLeaderRetriever);
        haServices.setResourceManagerLeaderElectionService(resourceManagerLeaderElectionService);
        haServices.setCheckpointRecoveryFactory(new StandaloneCheckpointRecoveryFactory());
    }

    /**
     * Tests how the job master handles unknown/missing executions: a heartbeat whose
     * deployment report contains an unknown execution ID (and omits the deployed one)
     * should trigger a cancel call for the unknown deployment, stop tracking the
     * deployed execution, and fail the job.
     */
    @Test
    public void testExecutionDeploymentReconciliation() throws Exception {
        JobMasterBuilder.TestingOnCompletionActions onCompletionActions =
                new JobMasterBuilder.TestingOnCompletionActions();

        TestingExecutionDeploymentTrackerWrapper deploymentTrackerWrapper =
                new TestingExecutionDeploymentTrackerWrapper();
        final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph();
        JobMaster jobMaster =
                createAndStartJobMaster(onCompletionActions, deploymentTrackerWrapper, jobGraph);
        JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);
        RPC_SERVICE_RESOURCE
                .getTestingRpcService()
                .registerGateway(jobMasterGateway.getAddress(), jobMasterGateway);

        final CompletableFuture<ExecutionAttemptID> taskCancellationFuture =
                new CompletableFuture<>();
        TaskExecutorGateway taskExecutorGateway = createTaskExecutorGateway(taskCancellationFuture);
        LocalUnresolvedTaskManagerLocation localUnresolvedTaskManagerLocation =
                new LocalUnresolvedTaskManagerLocation();
        registerTaskExecutorAndOfferSlots(
                jobMasterGateway,
                jobGraph.getJobID(),
                taskExecutorGateway,
                localUnresolvedTaskManagerLocation);

        // blocks until the single task of the no-op job has been deployed
        ExecutionAttemptID deployedExecution =
                deploymentTrackerWrapper.getTaskDeploymentFuture().get();
        assertFalse(taskCancellationFuture.isDone());

        ExecutionAttemptID unknownDeployment = new ExecutionAttemptID();
        // the deployment report is missing the just deployed task, but contains the ID of some
        // other unknown deployment
        // the job master should cancel the unknown deployment, and fail the job
        jobMasterGateway.heartbeatFromTaskManager(
                localUnresolvedTaskManagerLocation.getResourceID(),
                new TaskExecutorToJobManagerHeartbeatPayload(
                        new AccumulatorReport(Collections.emptyList()),
                        new ExecutionDeploymentReport(Collections.singleton(unknownDeployment))));

        assertThat(taskCancellationFuture.get(), is(unknownDeployment));
        assertThat(deploymentTrackerWrapper.getStopFuture().get(), is(deployedExecution));
        assertThat(
                onCompletionActions
                        .getJobReachedGloballyTerminalStateFuture()
                        .get()
                        .getArchivedExecutionGraph()
                        .getState(),
                is(JobStatus.FAILED));
    }

    /**
     * Tests that the job master does not issue a cancel call if the heartbeat reports an execution
     * for which the deployment was not yet acknowledged.
     */
    @Test
    public void testExecutionDeploymentReconciliationForPendingExecution() throws Exception {
        TestingExecutionDeploymentTrackerWrapper deploymentTrackerWrapper =
                new TestingExecutionDeploymentTrackerWrapper();
        final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph();
        JobMaster jobMaster = createAndStartJobMaster(deploymentTrackerWrapper, jobGraph);
        JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);
        RPC_SERVICE_RESOURCE
                .getTestingRpcService()
                .registerGateway(jobMasterGateway.getAddress(), jobMasterGateway);

        // the submit acknowledgement is withheld until after the heartbeat below,
        // keeping the deployment in "pending" state
        final CompletableFuture<ExecutionAttemptID> taskSubmissionFuture = new CompletableFuture<>();
        final CompletableFuture<ExecutionAttemptID> taskCancellationFuture =
                new CompletableFuture<>();
        final CompletableFuture<Acknowledge> taskSubmissionAcknowledgeFuture =
                new CompletableFuture<>();
        TaskExecutorGateway taskExecutorGateway =
                createTaskExecutorGateway(
                        taskCancellationFuture, taskSubmissionFuture, taskSubmissionAcknowledgeFuture);
        LocalUnresolvedTaskManagerLocation localUnresolvedTaskManagerLocation =
                new LocalUnresolvedTaskManagerLocation();
        registerTaskExecutorAndOfferSlots(
                jobMasterGateway,
                jobGraph.getJobID(),
                taskExecutorGateway,
                localUnresolvedTaskManagerLocation);

        ExecutionAttemptID pendingExecutionId = taskSubmissionFuture.get();

        // the execution has not been acknowledged yet by the TaskExecutor, but we already allow the
        // ID to be in the heartbeat payload
        jobMasterGateway.heartbeatFromTaskManager(
                localUnresolvedTaskManagerLocation.getResourceID(),
                new TaskExecutorToJobManagerHeartbeatPayload(
                        new AccumulatorReport(Collections.emptyList()),
                        new ExecutionDeploymentReport(Collections.singleton(pendingExecutionId))));

        taskSubmissionAcknowledgeFuture.complete(Acknowledge.get());

        deploymentTrackerWrapper.getTaskDeploymentFuture().get();
        assertFalse(taskCancellationFuture.isDone());
    }

    /** Convenience overload that uses no-op completion actions. */
    private JobMaster createAndStartJobMaster(
            ExecutionDeploymentTracker executionDeploymentTracker, JobGraph jobGraph)
            throws Exception {
        return createAndStartJobMaster(
                new JobMasterBuilder.TestingOnCompletionActions(),
                executionDeploymentTracker,
                jobGraph);
    }

    /** Builds and starts a JobMaster using the shared testing services of this class. */
    private JobMaster createAndStartJobMaster(
            OnCompletionActions onCompletionActions,
            ExecutionDeploymentTracker executionDeploymentTracker,
            JobGraph jobGraph)
            throws Exception {

        JobMaster jobMaster =
                new JobMasterBuilder(jobGraph, RPC_SERVICE_RESOURCE.getTestingRpcService())
                        .withFatalErrorHandler(
                                testingFatalErrorHandlerResource.getFatalErrorHandler())
                        .withHighAvailabilityServices(haServices)
                        .withHeartbeatServices(heartbeatServices)
                        .withExecutionDeploymentTracker(executionDeploymentTracker)
                        .withOnCompletionActions(onCompletionActions)
                        .createJobMaster();

        jobMaster.start();

        return jobMaster;
    }

    /** Gateway whose task submissions are acknowledged immediately. */
    private TaskExecutorGateway createTaskExecutorGateway(
            CompletableFuture<ExecutionAttemptID> taskCancellationFuture) {
        return createTaskExecutorGateway(
                taskCancellationFuture,
                new CompletableFuture<>(),
                CompletableFuture.completedFuture(Acknowledge.get()));
    }

    /**
     * Builds a testing TaskExecutor gateway that records cancel/submit calls into the given
     * futures and registers it with the shared RPC service.
     */
    private TaskExecutorGateway createTaskExecutorGateway(
            CompletableFuture<ExecutionAttemptID> taskCancellationFuture,
            CompletableFuture<ExecutionAttemptID> taskSubmissionFuture,
            CompletableFuture<Acknowledge> taskSubmissionResponse) {
        TestingTaskExecutorGateway taskExecutorGateway =
                new TestingTaskExecutorGatewayBuilder()
                        .setAddress(UUID.randomUUID().toString())
                        .setCancelTaskFunction(
                                executionAttemptId -> {
                                    taskCancellationFuture.complete(executionAttemptId);
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                })
                        .setSubmitTaskConsumer(
                                (tdd, ignored) -> {
                                    taskSubmissionFuture.complete(tdd.getExecutionAttemptId());
                                    return taskSubmissionResponse;
                                })
                        .createTestingTaskExecutorGateway();

        RPC_SERVICE_RESOURCE
                .getTestingRpcService()
                .registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);

        return taskExecutorGateway;
    }

    /** Registers the TaskExecutor at the JobMaster and offers it a single slot. */
    private void registerTaskExecutorAndOfferSlots(
            JobMasterGateway jobMasterGateway,
            JobID jobId,
            TaskExecutorGateway taskExecutorGateway,
            UnresolvedTaskManagerLocation taskManagerLocation)
            throws ExecutionException, InterruptedException {
        jobMasterGateway
                .registerTaskManager(
                        jobId,
                        TaskManagerRegistrationInformation.create(
                                taskExecutorGateway.getAddress(),
                                taskManagerLocation,
                                TestingUtils.zeroUUID()),
                        testingTimeout)
                .get();

        Collection<SlotOffer> slotOffers =
                Collections.singleton(new SlotOffer(new AllocationID(), 0, ResourceProfile.ANY));
        jobMasterGateway
                .offerSlots(taskManagerLocation.getResourceID(), slotOffers, testingTimeout)
                .get();
    }

    /**
     * Delegating ExecutionDeploymentTracker that additionally exposes futures completed on the
     * first completed deployment and the first stopped deployment, for test synchronization.
     */
    private static class TestingExecutionDeploymentTrackerWrapper
            implements ExecutionDeploymentTracker {
        private final ExecutionDeploymentTracker originalTracker;
        private final CompletableFuture<ExecutionAttemptID> taskDeploymentFuture;
        private final CompletableFuture<ExecutionAttemptID> stopFuture;

        private TestingExecutionDeploymentTrackerWrapper() {
            this(new DefaultExecutionDeploymentTracker());
        }

        private TestingExecutionDeploymentTrackerWrapper(
                ExecutionDeploymentTracker originalTracker) {
            this.originalTracker = originalTracker;
            this.taskDeploymentFuture = new CompletableFuture<>();
            this.stopFuture = new CompletableFuture<>();
        }

        @Override
        public void startTrackingPendingDeploymentOf(
                ExecutionAttemptID executionAttemptId, ResourceID host) {
            originalTracker.startTrackingPendingDeploymentOf(executionAttemptId, host);
        }

        @Override
        public void completeDeploymentOf(ExecutionAttemptID executionAttemptId) {
            originalTracker.completeDeploymentOf(executionAttemptId);
            taskDeploymentFuture.complete(executionAttemptId);
        }

        @Override
        public void stopTrackingDeploymentOf(ExecutionAttemptID executionAttemptId) {
            originalTracker.stopTrackingDeploymentOf(executionAttemptId);
            stopFuture.complete(executionAttemptId);
        }

        @Override
        public Map<ExecutionAttemptID, ExecutionDeploymentState> getExecutionsOn(ResourceID host) {
            return originalTracker.getExecutionsOn(host);
        }

        public CompletableFuture<ExecutionAttemptID> getTaskDeploymentFuture() {
            return taskDeploymentFuture;
        }

        public CompletableFuture<ExecutionAttemptID> getStopFuture() {
            return stopFuture;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.security.util.crypto; import org.apache.nifi.security.util.crypto.scrypt.Scrypt; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.math.BigInteger; import java.util.concurrent.TimeUnit; /** * Provides an implementation of {@code Scrypt} for secure password hashing. * <p> * One <strong>critical</strong> difference is that this implementation uses a * <strong>static universal</strong> salt unless instructed otherwise, which provides * strict determinism across nodes in a cluster. The purpose for this is to allow for * blind equality comparison of sensitive values hashed on different nodes (with * potentially different {@code nifi.sensitive.props.key} values) during flow inheritance * (see {@code FingerprintFactory}). * <p> * The resulting output is referred to as a <em>hash</em> to be consistent with {@link SecureHasher} terminology, * but the length parameter is clarified as the <em>derived key length</em> {@code dkLen} in Scrypt terms, not to be * confused with the internal concept of <em>hash length</em> for the PBKDF2 cryptographic hash function (CHF) primitive (SHA-256). 
*/ public class ScryptSecureHasher extends AbstractSecureHasher { private static final Logger logger = LoggerFactory.getLogger(ScryptSecureHasher.class); /** * These values can be calculated automatically using the code {@see ScryptCipherProviderGroovyTest#calculateMinimumParameters} or manually updated by a maintainer */ private static final int DEFAULT_N = Double.valueOf(Math.pow(2, 14)).intValue(); private static final int DEFAULT_R = 8; private static final int DEFAULT_P = 1; private static final int DEFAULT_DK_LENGTH = 32; private static final int DEFAULT_SALT_LENGTH = Scrypt.getDefaultSaltLength(); private static final int MIN_P = 1; private static final int MIN_DK_LENGTH = 1; private static final int MIN_N = 1; private static final int MIN_R = 1; private static final int MAX_R = Double.valueOf(Math.pow(2, 31)).intValue() - 1; private static final int MIN_SALT_LENGTH = 8; private static final int MAX_SALT_LENGTH = Double.valueOf(Math.pow(2, 31)).intValue() - 1; private final int n; private final int r; private final int p; private final int dkLength; /** * Instantiates an Scrypt secure hasher using the default cost parameters * ({@code N = }{@link #DEFAULT_N}, * {@code r = }{@link #DEFAULT_R}, * {@code p = }{@link #DEFAULT_R}, * {@code dkLen = }{@link #DEFAULT_DK_LENGTH}). A static salt is also used. */ public ScryptSecureHasher() { this(DEFAULT_N, DEFAULT_R, DEFAULT_P, DEFAULT_DK_LENGTH, 0); } /** * Instantiates an Scrypt secure hasher using the default cost parameters and specified derived key length * @param dkLength Derived Key Length */ public ScryptSecureHasher(final int dkLength) { this(DEFAULT_N, DEFAULT_R, DEFAULT_P, dkLength, 0); } /** * Instantiates an Scrypt secure hasher using the provided cost parameters. A static * {@link #DEFAULT_SALT_LENGTH} byte salt will be generated on every hash request. 
* * @param n number of iterations (power of 2 from {@code 1 to 2^(128 * r / 8)}) * @param r the block size of memory ({@code > 0}) * @param p parallelization factor from ({@code 1 to ((2^32-1) * 32) / (128 * r)}) * @param dkLength the output length in bytes ({@code 1 to (2^32 - 1) * 32}) */ public ScryptSecureHasher(int n, int r, int p, int dkLength) { this(n, r, p, dkLength, 0); } /** * Instantiates an Scrypt secure hasher using the provided cost parameters. A unique * salt of the specified length will be generated on every hash request. * * @param n number of iterations (power of 2 from {@code 1 to 2^(128 * r / 8)}) * @param r the block size of memory ({@code > 0}) * @param p parallelization factor from ({@code 1 to ((2^32-1) * 32) / (128 * r)}) * @param dkLength the output length in bytes ({@code 1 to (2^32 - 1) * 32}) * @param saltLength the salt length in bytes {@code >= 8}) */ public ScryptSecureHasher(int n, int r, int p, int dkLength, int saltLength) { validateParameters(n, r, p, dkLength, saltLength); this.n = n; this.r = r; this.p = p; this.dkLength = dkLength; this.saltLength = saltLength; } /** * Enforces valid Scrypt secure hasher cost parameters are provided. 
* * @param n number of iterations (power of 2 from {@code 1 to 2^(128 * r / 8)}) * @param r the block size of memory ({@code > 0}) * @param p parallelization factor from ({@code 1 to ((2^32-1) * 32) / (128 * r)}) * @param dkLength the output length in bytes ({@code 1 to (2^32 - 1) * 32}) * @param saltLength the salt length in bytes {@code >= 8}) */ private void validateParameters(Integer n, Integer r, int p, Integer dkLength, Integer saltLength) { // Check r first because it is not dependent on other parameters if (!isRValid(r)) { logger.error("The provided block size r {} ( * 128 bytes) is outside the boundary of 1 to 2^31 - 1.", r); throw new IllegalArgumentException("Invalid r is not within the memory boundary."); } if (!isNValid(n, r)) { logger.error("The iteration count N {} is outside the boundary of powers of 2 from 1 to 2^(128 * r / 8).", n); throw new IllegalArgumentException("Invalid N exceeds the iterations boundary."); } if (!isPValid(p, r)) { logger.error("The provided parallelization factor {} is outside the boundary of 1 to ((2^32 - 1) * 32) / (128 * r).", p); throw new IllegalArgumentException("Invalid p exceeds the parallelism boundary."); } if (!isDKLengthValid(dkLength)) { logger.error("The provided hash length {} is outside the boundary of 1 to (2^32 - 1) * 32.", dkLength); throw new IllegalArgumentException("Invalid hash length is not within the dkLength boundary."); } initializeSalt(saltLength); } /** * Internal method to hash the raw bytes. * * @param input the raw bytes to hash (can be length 0) * @return the generated hash */ byte[] hash(byte[] input) { // Contains only the raw salt byte[] rawSalt = getSalt(); return hash(input, rawSalt); } /** * Internal method to hash the raw bytes. 
* * @param input the raw bytes to hash (can be length 0) * @param rawSalt the raw bytes to salt * @return the generated hash */ byte[] hash(byte[] input, byte[] rawSalt) { logger.debug("Creating {} byte Scrypt hash with salt [{}]", dkLength, org.bouncycastle.util.encoders.Hex.toHexString(rawSalt)); if (!isSaltLengthValid(rawSalt.length)) { throw new IllegalArgumentException("The salt length (" + rawSalt.length + " bytes) is invalid"); } final long startNanos = System.nanoTime(); byte[] hash = Scrypt.scrypt(input, rawSalt, n, r, p, dkLength * 8); final long generateNanos = System.nanoTime(); final long totalDurationMillis = TimeUnit.NANOSECONDS.toMillis(generateNanos - startNanos); logger.debug("Generated Scrypt hash in {} ms", totalDurationMillis); return hash; } /** * Returns true if the provided iteration count N is within boundaries. The lower bound >= 1 and the * upper bound <= 2^(128 * r / 8). * * @param n number of iterations * @param r the blocksize parameter * @return true if iterations is within boundaries */ protected static boolean isNValid(Integer n, int r) { if (n < DEFAULT_N) { logger.warn("The provided iteration count N {} is below the recommended minimum {}.", n, DEFAULT_N); } return n >= MIN_N && n <= Double.valueOf(Math.pow(2, (128 * r / 8.0))).intValue(); } /** * Returns true if the provided block size in bytes is within boundaries. The lower bound >= 1 and the * upper bound <= 2^32 - 1. * * @param r the integer number * 128 B used * @return true if r is within boundaries */ protected static boolean isRValid(int r) { if (r < DEFAULT_R) { logger.warn("The provided r size {} * 128 B is below the recommended minimum {}.", r, DEFAULT_R); } return r >= MIN_R && r <= MAX_R; } /** * Returns true if the provided parallelization factor is within boundaries. The lower bound >= 1 and the * upper bound <= ((2^32 - 1) * 32) / (128 * r). 
* * @param p degree of parallelism * @param r the blocksize parameter * @return true if parallelism is within boundaries */ protected static boolean isPValid(int p, int r) { if (p < DEFAULT_P) { logger.warn("The provided parallelization factor {} is below the recommended minimum {}.", p, DEFAULT_P); } long dividend = Double.valueOf((Math.pow(2, 32) - 1) * 32).longValue(); int divisor = 128 * r; BigInteger MAX_P = new BigInteger(String.valueOf(dividend)).divide(new BigInteger(String.valueOf(divisor))); logger.debug("Calculated maximum p value as (2^32 - 1) * 32 [{}] / (128 * r) [{}] = {}", dividend, divisor, MAX_P.intValue()); return p >= MIN_P && p <= MAX_P.intValue(); } /** * Returns whether the provided hash (derived key) length is within boundaries. The lower bound >= 1 and the * upper bound <= (2^32 - 1) * 32. * * @param dkLength the output length in bytes * @return true if dkLength is within boundaries */ protected static boolean isDKLengthValid(Integer dkLength) { return dkLength >= MIN_DK_LENGTH && dkLength <= UPPER_BOUNDARY; } /** * Returns the algorithm-specific default salt length in bytes. * * @return the Scrypt default salt length */ @Override public int getDefaultSaltLength() { return DEFAULT_SALT_LENGTH; } @Override public int getMinSaltLength() { return MIN_SALT_LENGTH; } @Override public int getMaxSaltLength() { return MAX_SALT_LENGTH; } @Override String getAlgorithmName() { return "Scrypt"; } @Override boolean acceptsEmptyInput() { return false; } }
package controllers;

import actions.User;
import com.avaje.ebean.Ebean;
import models.Difficulty;
import models.Event;
import models.Game;
import models.Mode;
import models.Platform;
import models.Ship;
import models.Stage;
import play.data.Form;
import play.mvc.Controller;
import play.mvc.Result;
import views.html.game_read;
import views.html.game_update;

import java.util.Map;
import java.util.Scanner;

import static play.data.Form.form;

/**
 * Play controller for reading, creating and updating {@link models.Game} records.
 *
 * <p>{@link #save()} expands newline-separated values stored in the first element of each child
 * collection (platforms, difficulties, ships, modes, stages) into individual rows; the
 * {@code add*} endpoints append a single row each from form fields.
 */
public class GameController extends Controller {

    /**
     * Renders the read-only game view. The {@code name} parameter exists only to build pretty
     * URLs and is intentionally unused.
     */
    public static Result indexWithName(models.Game game, String name) {
        // Guard against unresolved ids, consistent with index(): 404 instead of an NPE/500.
        if (game == null) {
            return notFound();
        }
        return ok(game_read.render(game));
    }

    /** Renders the read-only game view, or 404 when the id did not resolve. */
    public static Result index(models.Game game) {
        if (game == null) {
            return notFound();
        }
        return ok(game_read.render(game));
    }

    /** Renders the edit view for an existing game. */
    public static Result update(models.Game game) {
        if (game == null) {
            return notFound();
        }
        // NOTE(review): this only rejects users who are neither authenticated nor administrators.
        // If the intent is "must be authenticated AND an administrator", the condition should
        // use || -- confirm before changing, as callers may rely on current behaviour.
        if (!User.current().isAuthenticated() && !User.current().isAdministrator()) {
            return unauthorized();
        }
        return ok(game_update.render(game));
    }

    /** Renders the empty game-creation form. */
    public static Result create() {
        return ok(views.html.game_create.render());
    }

    /** Renders the event-creation form for the given game. */
    public static Result createEventOf(models.Game game) {
        return ok(views.html.event_create.render(game, form(Event.class)));
    }

    /**
     * Creates an event for {@code originalGame} by cloning the game (title prefixed with the
     * event name in brackets) together with its difficulties, stages, modes, platforms and ships.
     */
    public static Result saveEvent(models.Game originalGame) {
        Form<models.Event> form = new Form<Event>(models.Event.class).bindFromRequest();
        models.Event event = form.get();
        Game game = new Game("[" + event.name + "] " + originalGame.title, originalGame.cover, originalGame.thread);
        if (originalGame.hasDifficulties()) {
            for (Difficulty difficulty : originalGame.difficulties) {
                game.difficulties.add(new Difficulty(difficulty.name));
            }
        }
        if (originalGame.hasStages()) {
            for (Stage stage : originalGame.stages) {
                game.stages.add(new Stage(stage.name));
            }
        }
        if (originalGame.hasModes()) {
            for (Mode mode : originalGame.modes) {
                game.modes.add(new Mode(mode.name, mode.scoreType));
            }
        }
        if (originalGame.hasPlatforms()) {
            for (Platform platform : originalGame.platforms) {
                game.platforms.add(new Platform(platform.name));
            }
        }
        // NOTE(review): singular hasShip() while the loop copies all ships -- presumably a
        // model-side naming quirk meaning "has at least one ship"; verify against models.Game.
        if (originalGame.hasShip()) {
            for (Ship ship : originalGame.ships) {
                game.ships.add(new Ship(ship.name));
            }
        }
        event.game = game;
        Ebean.save(event);
        return index(game);
    }

    /**
     * Persists a new game from the bound form, expanding the newline-separated child
     * collections first.
     */
    public static Result save() {
        Form<models.Game> form = new Form<models.Game>(models.Game.class).bindFromRequest();
        models.Game game = form.get();
        createPlatforms(game);
        createDifficulties(game);
        createShips(game);
        createModes(game);
        createStages(game);
        game.save();
        return index(game);
    }

    /** Appends a single platform taken from the "platform" form field. */
    public static Result addPlatform(Game game) {
        String platformName = request().body().asFormUrlEncoded().get("platform")[0];
        createPlatform(game, platformName);
        game.save();
        return update(game);
    }

    /** Appends a single difficulty from the "difficulty" and "sortOrder" form fields. */
    public static Result addDifficulty(Game game) {
        Map<String, String[]> data = request().body().asFormUrlEncoded();
        String difficultyName = data.get("difficulty")[0];
        int sortOrder = Integer.parseInt(data.get("sortOrder")[0]);
        createDifficulty(game, sortOrder, difficultyName);
        game.save();
        return update(game);
    }

    /** Appends a single mode from the "mode" and "sortOrder" form fields. */
    public static Result addMode(Game game) {
        Map<String, String[]> data = request().body().asFormUrlEncoded();
        String modeName = data.get("mode")[0];
        int sortOrder = Integer.parseInt(data.get("sortOrder")[0]);
        createMode(game, sortOrder, modeName);
        game.save();
        return update(game);
    }

    /** Appends a single ship from the "ship" and "sortOrder" form fields. */
    public static Result addShip(Game game) {
        Map<String, String[]> data = request().body().asFormUrlEncoded();
        String shipName = data.get("ship")[0];
        int sortOrder = Integer.parseInt(data.get("sortOrder")[0]);
        createShip(game, sortOrder, shipName);
        game.save();
        return update(game);
    }

    /** Appends a single stage from the "stage" and "sortOrder" form fields. */
    public static Result addStage(Game game) {
        Map<String, String[]> data = request().body().asFormUrlEncoded();
        String stageName = data.get("stage")[0];
        int sortOrder = Integer.parseInt(data.get("sortOrder")[0]);
        createStage(game, sortOrder, stageName);
        game.save();
        return update(game);
    }

    private static void createPlatform(Game game, String platformName) {
        game.platforms.add(new Platform(platformName));
    }

    /**
     * Expands the newline-separated platform names held in the first platform entry. When that
     * entry is blank, a single placeholder platform (" ") is added and the original entry is
     * kept, matching the legacy behaviour.
     */
    private static void createPlatforms(models.Game game) {
        // Scanner is Closeable -- release it deterministically.
        try (Scanner sc = new Scanner(game.platforms.get(0).name)) {
            if (sc.hasNextLine()) {
                while (sc.hasNextLine()) {
                    createPlatform(game, sc.nextLine().trim());
                }
                game.platforms.remove(0);
            } else {
                createPlatform(game, " ");
            }
        }
    }

    /** Expands the newline-separated difficulty names held in the first difficulty entry. */
    private static void createDifficulties(models.Game game) {
        int index = 0;
        try (Scanner sc = new Scanner(game.difficulties.get(0).name)) {
            game.difficulties.remove(0);
            while (sc.hasNextLine()) {
                createDifficulty(game, index, sc.nextLine().trim());
                index++;
            }
        }
    }

    private static void createDifficulty(Game game, int index, String difficultyName) {
        Difficulty difficulty = new Difficulty(difficultyName);
        difficulty.sortOrder = String.valueOf(index);
        game.difficulties.add(difficulty);
    }

    /** Expands the newline-separated mode names held in the first mode entry. */
    private static void createModes(models.Game game) {
        int index = 0;
        try (Scanner sc = new Scanner(game.modes.get(0).name)) {
            game.modes.remove(0);
            while (sc.hasNextLine()) {
                createMode(game, index, sc.nextLine().trim());
                index++;
            }
        }
    }

    private static void createMode(Game game, int index, String modeName) {
        Mode mode = new Mode(modeName);
        mode.sortOrder = String.valueOf(index);
        game.modes.add(mode);
    }

    /** Expands the newline-separated ship names held in the first ship entry. */
    private static void createShips(models.Game game) {
        int index = 0;
        try (Scanner sc = new Scanner(game.ships.get(0).name)) {
            game.ships.remove(0);
            while (sc.hasNextLine()) {
                createShip(game, index, sc.nextLine().trim());
                index++;
            }
        }
    }

    private static void createShip(Game game, int index, String shipName) {
        Ship ship = new Ship(shipName);
        ship.sortOrder = String.valueOf(index);
        game.ships.add(ship);
    }

    private static void createStage(Game game, int index, String stageName) {
        Stage stage = new Stage(stageName);
        // Stage.sortOrder takes a long value (the original used Integer.longValue()).
        stage.sortOrder = (long) index;
        game.stages.add(stage);
    }

    /**
     * Expands the newline-separated stage names held in the first stage entry. When that entry
     * is blank, a single placeholder stage (" ") is added instead; either way the first entry
     * is removed, matching the legacy behaviour.
     */
    private static void createStages(models.Game game) {
        long index = 0L;
        try (Scanner sc = new Scanner(game.stages.get(0).name)) {
            game.stages.remove(0);
            if (sc.hasNextLine()) {
                while (sc.hasNextLine()) {
                    Stage stage = new Stage(sc.nextLine().trim());
                    stage.sortOrder = index;
                    game.stages.add(stage);
                    index++;
                }
            } else {
                Stage stage = new Stage(" ");
                game.stages.add(stage);
                stage.sortOrder = index;
            }
        }
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.progress.util;

import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.util.*;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.Topic;
import org.jetbrains.annotations.CalledInAny;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Utilities for running background tasks under a {@link ProgressIndicator}: running with a
 * bounded synchronous wait before falling back to asynchronous delivery, tying indicator
 * cancellation to a {@link Disposable}'s lifetime, and dispose-safe message-bus publishing.
 */
public class BackgroundTaskUtil {
  private static final Logger LOG = Logger.getInstance(BackgroundTaskUtil.class);

  /** Convenience overload using the default progress-dialog postpone time and forceEDT = false. */
  @NotNull
  @CalledInAwt
  public static ProgressIndicator executeAndTryWait(@NotNull Function<? super ProgressIndicator, /*@NotNull*/ ? extends Runnable> backgroundTask,
                                                    @Nullable Runnable onSlowAction) {
    return executeAndTryWait(backgroundTask, onSlowAction, ProgressWindow.DEFAULT_PROGRESS_DIALOG_POSTPONE_TIME_MILLIS, false);
  }

  /**
   * Executor to perform <i>possibly</i> long operation on pooled thread.
   * If computation was performed within given time frame,
   * the computed callback will be executed synchronously (avoiding unnecessary <tt>invokeLater()</tt>).
   * In this case, {@code onSlowAction} will not be executed at all.
   * <ul>
   * <li> If the computation is fast, execute callback synchronously.
   * <li> If the computation is slow, execute <tt>onSlowAction</tt> synchronously. When the computation is completed, execute callback in EDT.
   * </ul><p>
   * It can be used to reduce blinking when background task might be completed fast.<br>
   * A Simple approach:
   * <pre>
   * onSlowAction.run() // show "Loading..."
   * executeOnPooledThread({
   *   Runnable callback = backgroundTask(); // some background computations
   *   invokeLater(callback); // apply changes
   * });
   * </pre>
   * will lead to "Loading..." visible between current moment and execution of invokeLater() event.
   * This period can be very short and looks like 'jumping' if background operation is fast.
   */
  @NotNull
  @CalledInAwt
  public static ProgressIndicator executeAndTryWait(@NotNull Function<? super ProgressIndicator, /*@NotNull*/ ? extends Runnable> backgroundTask,
                                                    @Nullable Runnable onSlowAction,
                                                    long waitMillis,
                                                    boolean forceEDT) {
    ModalityState modality = ModalityState.current();

    if (forceEDT) {
      // Run everything synchronously on the current (EDT) thread; cancellation surfaces as PCE.
      ProgressIndicator indicator = new EmptyProgressIndicator(modality);
      try {
        Runnable callback = backgroundTask.fun(indicator);
        finish(callback, indicator);
      }
      catch (ProcessCanceledException ignore) {
        // cancellation is an expected outcome here, not an error
      }
      catch (Throwable t) {
        LOG.error(t);
      }
      return indicator;
    }
    else {
      // Run on a pooled thread; if it finishes within waitMillis the callback runs here,
      // otherwise it is delivered later via invokeLater under the captured modality.
      Pair<Runnable, ProgressIndicator> pair = computeInBackgroundAndTryWait(
        backgroundTask,
        (callback, indicator) -> ApplicationManager.getApplication().invokeLater(() -> finish(callback, indicator), modality),
        modality,
        waitMillis);

      Runnable callback = pair.first;
      ProgressIndicator indicator = pair.second;

      if (callback != null) {
        // Computation finished within the wait window: apply synchronously.
        finish(callback, indicator);
      }
      else {
        // Computation is still running: let the caller show its "slow" UI.
        if (onSlowAction != null) onSlowAction.run();
      }

      return indicator;
    }
  }

  /** Runs the result callback unless the indicator was cancelled in the meantime. */
  @CalledInAwt
  private static void finish(@NotNull Runnable result, @NotNull ProgressIndicator indicator) {
    if (!indicator.isCanceled()) result.run();
  }

  /**
   * Try to compute value in background and abort computation if it takes too long.
   * <ul>
   * <li> If the computation is fast, return computed value.
   * <li> If the computation is slow, abort computation (cancel ProgressIndicator).
   * </ul>
   */
  @Nullable
  @CalledInAwt
  public static <T> T tryComputeFast(@NotNull Function<? super ProgressIndicator, ? extends T> backgroundTask,
                                     long waitMillis) {
    Pair<T, ProgressIndicator> pair = computeInBackgroundAndTryWait(
      backgroundTask,
      (result, indicator) -> {
      },
      ModalityState.defaultModalityState(),
      waitMillis);

    T result = pair.first;
    ProgressIndicator indicator = pair.second;

    // Cancel unconditionally: either we already have the result, or we give up on it.
    indicator.cancel();
    return result;
  }

  /**
   * Computes the value on a pooled thread; returns it synchronously when it completes within
   * {@code waitMillis}, otherwise returns null and later passes it to {@code asyncCallback}.
   */
  @Nullable
  @CalledInAny
  public static <T> T computeInBackgroundAndTryWait(@NotNull Computable<? extends T> computable,
                                                    @NotNull Consumer<? super T> asyncCallback,
                                                    long waitMillis) {
    Pair<T, ProgressIndicator> pair = computeInBackgroundAndTryWait(
      indicator -> computable.compute(),
      (result, indicator) -> asyncCallback.consume(result),
      ModalityState.defaultModalityState(),
      waitMillis
    );
    return pair.first;
  }

  /**
   * Compute value in background and try wait for its completion.
   * <ul>
   * <li> If the computation is fast, return computed value synchronously. Callback will not be called in this case.
   * <li> If the computation is slow, return <tt>null</tt>. When the computation is completed, pass the value to the callback.
   * </ul>
   * Callback will be executed on the same thread as the background task.
   */
  @NotNull
  @CalledInAny
  private static <T> Pair<T, ProgressIndicator> computeInBackgroundAndTryWait(@NotNull Function<? super ProgressIndicator, ? extends T> task,
                                                                              @NotNull PairConsumer<? super T, ? super ProgressIndicator> asyncCallback,
                                                                              @NotNull ModalityState modality,
                                                                              long waitMillis) {
    ProgressIndicator indicator = new EmptyProgressIndicator(modality);
    indicator.start();

    // Helper arbitrates (via CAS) whether the waiting thread or the pooled thread owns delivery.
    Helper<T> helper = new Helper<>();

    ApplicationManager.getApplication().executeOnPooledThread(() -> ProgressManager.getInstance().runProcess(() -> {
      T result = task.fun(indicator);
      if (!helper.setResult(result)) {
        // The waiter already gave up -- deliver asynchronously on this (pooled) thread.
        asyncCallback.consume(result, indicator);
      }
    }, indicator));

    T result = null;
    if (helper.await(waitMillis)) {
      result = helper.getResult();
    }

    return Pair.create(result, indicator);
  }

  /**
   * An alternative to plain {@link Application#executeOnPooledThread(Runnable)} which wraps the task in a process with a
   * {@link ProgressIndicator} which gets cancelled when the given disposable is disposed. <br/><br/>
   *
   * This allows to stop a lengthy background activity by calling {@link ProgressManager#checkCanceled()}
   * and avoid Already Disposed exceptions (in particular, because checkCanceled() is called in {@link ServiceManager#getService(Class)}.
   */
  @NotNull
  @CalledInAny
  public static ProgressIndicator executeOnPooledThread(@NotNull Disposable parent, @NotNull Runnable runnable) {
    ProgressIndicator indicator = new EmptyProgressIndicator();
    indicator.start();

    CompletableFuture<?> future = CompletableFuture.runAsync(() -> {
      ProgressManager.getInstance().runProcess(runnable, indicator);
    }, AppExecutorUtil.getAppExecutorService());

    // On disposal: cancel the indicator, then wait briefly for the task to acknowledge.
    Disposable disposable = () -> {
      if (indicator.isRunning()) indicator.cancel();
      try {
        future.get(1, TimeUnit.SECONDS);
      }
      catch (ExecutionException e) {
        if (e.getCause() instanceof ProcessCanceledException) {
          // ignore: expected cancellation
        }
        else {
          LOG.error(e);
        }
      }
      catch (InterruptedException | TimeoutException e) {
        LOG.error("Couldn't await background process on disposal: " + runnable);
      }
    };
    if (!registerIfParentNotDisposed(parent, disposable)) {
      // Parent already disposed: never start semantics -- cancel immediately.
      indicator.cancel();
      return indicator;
    }
    // Clean up the disposable once the task completes, so it doesn't linger on the parent.
    future.whenComplete((o, e) -> Disposer.dispose(disposable));
    return indicator;
  }

  /** Runnable variant of {@link #runUnderDisposeAwareIndicator(Disposable, Computable)}. */
  @CalledInAny
  public static void runUnderDisposeAwareIndicator(@NotNull Disposable parent, @NotNull Runnable task) {
    runUnderDisposeAwareIndicator(parent, () -> {
      task.run();
      return null;
    });
  }

  /**
   * Runs {@code task} on the current thread under an indicator that is cancelled when
   * {@code parent} is disposed; throws {@link ProcessCanceledException} if already disposed.
   */
  @CalledInAny
  public static <T> T runUnderDisposeAwareIndicator(@NotNull Disposable parent, @NotNull Computable<T> task) {
    ProgressIndicator indicator = new EmptyProgressIndicator(ModalityState.defaultModalityState());
    indicator.start();

    Disposable disposable = () -> {
      if (indicator.isRunning()) indicator.cancel();
    };
    if (!registerIfParentNotDisposed(parent, disposable)) {
      indicator.cancel();
      throw new ProcessCanceledException();
    }

    try {
      return ProgressManager.getInstance().runProcess(task, indicator);
    }
    finally {
      Disposer.dispose(disposable);
    }
  }

  /**
   * Registers {@code disposable} on {@code parent} under a read action; returns false (without
   * registering) when the parent is already disposed.
   */
  private static boolean registerIfParentNotDisposed(@NotNull Disposable parent, @NotNull Disposable disposable) {
    return ReadAction.compute(() -> {
      if (Disposer.isDisposed(parent)) return false;
      try {
        Disposer.register(parent, disposable);
        return true;
      }
      catch (IncorrectOperationException ioe) {
        LOG.error(ioe);
        return false;
      }
    });
  }

  /**
   * Wraps {@link MessageBus#syncPublisher(Topic)} in a dispose check,
   * and throws a {@link ProcessCanceledException} if the project is disposed,
   * instead of throwing an assertion which would happen otherwise.
   *
   * @see #syncPublisher(Topic)
   */
  @CalledInAny
  @NotNull
  public static <L> L syncPublisher(@NotNull Project project, @NotNull Topic<L> topic) throws ProcessCanceledException {
    return ReadAction.compute(() -> {
      if (project.isDisposed()) throw new ProcessCanceledException();
      return project.getMessageBus().syncPublisher(topic);
    });
  }

  /**
   * Wraps {@link MessageBus#syncPublisher(Topic)} in a dispose check,
   * and throws a {@link ProcessCanceledException} if the application is disposed,
   * instead of throwing an assertion which would happen otherwise.
   *
   * @see #syncPublisher(Project, Topic)
   */
  @CalledInAny
  @NotNull
  public static <L> L syncPublisher(@NotNull Topic<L> topic) throws ProcessCanceledException {
    return ReadAction.compute(() -> {
      if (ApplicationManager.getApplication().isDisposed()) throw new ProcessCanceledException();
      return ApplicationManager.getApplication().getMessageBus().syncPublisher(topic);
    });
  }

  /**
   * Hand-off between the waiting thread and the pooled thread: whichever thread wins the CAS
   * away from INITIAL_STATE decides who delivers the result.
   */
  private static class Helper<T> {
    // Sentinels distinguish "no result yet" and "waiter gave up" from a real (possibly null) result.
    private static final Object INITIAL_STATE = ObjectUtils.sentinel("INITIAL_STATE");
    private static final Object SLOW_OPERATION_STATE = ObjectUtils.sentinel("SLOW_OPERATION_STATE");

    private final Semaphore mySemaphore = new Semaphore(0);
    private final AtomicReference<Object> myResultRef = new AtomicReference<>(INITIAL_STATE);

    /**
     * @return true if computation was fast, and callback should be handled by other thread
     */
    public boolean setResult(T result) {
      boolean isFast = myResultRef.compareAndSet(INITIAL_STATE, result);
      // Always release so a waiter blocked in await() wakes up.
      mySemaphore.release();
      return isFast;
    }

    /**
     * @return true if computation was fast, and callback should be handled by current thread
     */
    public boolean await(long waitMillis) {
      try {
        // Result of tryAcquire is deliberately ignored: the CAS below is the real arbiter,
        // covering both timeout and spurious-interrupt paths.
        mySemaphore.tryAcquire(waitMillis, TimeUnit.MILLISECONDS);
      }
      catch (InterruptedException ignore) {
      }
      return !myResultRef.compareAndSet(INITIAL_STATE, SLOW_OPERATION_STATE);
    }

    public T getResult() {
      Object result = myResultRef.get();
      // Only legal after await() returned true, i.e. a real result was stored.
      assert result != INITIAL_STATE && result != SLOW_OPERATION_STATE;
      //noinspection unchecked
      return (T)result;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.admin.jmx.internal;

import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.UnknownHostException;
import java.rmi.NoSuchObjectException;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.RMIServerSocketFactory;
import java.rmi.server.UnicastRemoteObject;

/**
 * Implementation of {@link RMIRegistryServiceMBean} that manages the lifecycle of an in-process
 * RMI registry, optionally bound to a specific host address.
 */
public class RMIRegistryService implements RMIRegistryServiceMBean {

  /** Host address the registry binds to; null/blank means all interfaces. */
  private String host;

  /** Port the registry listens on. */
  private int port;

  /** The live registry instance; created by {@link #start()}. */
  private Registry registry;

  /** Socket factory binding server sockets to {@link #host}; null when no host was given. */
  private RMIServerSocketFactory ssf;

  /** True between a successful {@link #start()} and a successful {@link #stop()}. */
  private boolean isRunning;

  /**
   * Configures the registry to use the default port {@link Registry#REGISTRY_PORT}.
   */
  public RMIRegistryService() {
    this(Registry.REGISTRY_PORT);
  }

  /**
   * Configures the registry to use the given port.
   *
   * @param port to run RMI Registry on
   */
  public RMIRegistryService(int port) {
    setPort(port);
  }

  /**
   * Configures the registry to use the given port and host bind address.
   *
   * @param host to bind RMI Registry to
   * @param port to run RMI Registry on
   * @throws UnknownHostException if no IP address can be resolved for the given host while
   *         creating the server socket factory
   */
  public RMIRegistryService(String host, int port) throws UnknownHostException {
    setPort(port);
    setHost(host);
    boolean hasBindAddress = host != null && !host.trim().isEmpty();
    if (hasBindAddress) {
      ssf = new RMIServerSocketFactoryImpl(host);
    }
  }

  /**
   * Returns the host on which rmiregistry listens for incoming connections.
   *
   * @return the host on which rmiregistry listens for incoming connections
   */
  @Override
  public String getHost() {
    return host;
  }

  /**
   * Sets the bind host; only allowed while the registry is stopped.
   *
   * @param host the host on which rmiregistry listens for incoming connections
   */
  protected void setHost(String host) {
    if (isRunning()) {
      throw new IllegalStateException("RMIRegistryService is running, cannot change the host");
    }
    this.host = host;
  }

  /**
   * Returns the port on which rmiregistry listens for incoming connections.
   *
   * @return the port on which rmiregistry listens for incoming connections
   */
  @Override
  public int getPort() {
    return port;
  }

  /**
   * Sets the listen port; only allowed while the registry is stopped.
   *
   * @param port the port on which rmiregistry listens for incoming connections
   */
  protected void setPort(int port) {
    if (isRunning()) {
      throw new IllegalStateException("RMIRegistryService is running, cannot change the port");
    }
    this.port = port;
  }

  /**
   * Starts this MBean: rmiregistry can now accept incoming calls. Idempotent: calling start()
   * while already running is a no-op.
   *
   * @see #stop
   * @see #isRunning
   */
  @Override
  public synchronized void start() throws RemoteException {
    if (isRunning()) {
      return; // already started; nothing to do
    }
    registry = (ssf != null)
        ? LocateRegistry.createRegistry(port, null /* RMIClientSocketFactory */, ssf)
        : LocateRegistry.createRegistry(port);
    isRunning = true;
  }

  /**
   * Returns whether this MBean has been started and not yet stopped.
   *
   * @return whether this MBean has been started and not yet stopped.
   * @see #start
   */
  @Override
  public synchronized boolean isRunning() {
    return isRunning;
  }

  /**
   * Stops this MBean: rmiregistry cannot accept anymore incoming calls.
   *
   * @see #start
   */
  @Override
  public synchronized void stop() throws NoSuchObjectException {
    if (isRunning()) {
      // unexportObject returns true on success; remain "running" if it reports failure.
      boolean unexported = UnicastRemoteObject.unexportObject(registry, true);
      isRunning = !unexported;
    }
  }

  /**
   * Returns an array of the names bound in the rmiregistry.
   *
   * @return an array of the names bound in the rmiregistry
   * @see java.rmi.registry.Registry#list()
   */
  @Override
  public String[] list() throws RemoteException {
    ensureRunning();
    return registry.list();
  }

  /**
   * Removes the binding for the specified <code>name</code> in the rmiregistry.
   *
   * @see java.rmi.registry.Registry#unbind(String)
   */
  @Override
  public void unbind(String name) throws RemoteException, NotBoundException {
    ensureRunning();
    registry.unbind(name);
  }

  /** Rejects registry operations while the service is stopped. */
  private void ensureRunning() {
    if (!isRunning()) {
      throw new IllegalStateException("RMIRegistryService is not running");
    }
  }
}


/**
 * Custom {@link RMIServerSocketFactory} that binds every created server socket to a fixed
 * address.
 */
class RMIServerSocketFactoryImpl implements RMIServerSocketFactory {

  /** Address all ServerSockets produced by this factory are bound to. */
  private final InetAddress bindAddress;

  /**
   * Constructs a RMIServerSocketFactory bound to the given address.
   *
   * @param rmiBindAddress String representation of the address to bind the ServerSockets to
   * @throws UnknownHostException if no IP address can be resolved for the given host string
   */
  /* default */ RMIServerSocketFactoryImpl(String rmiBindAddress) throws UnknownHostException {
    bindAddress = InetAddress.getByName(rmiBindAddress);
  }

  /**
   * Create a server socket on the specified port (port 0 indicates an anonymous port).
   *
   * @param port the port number
   * @return the server socket on the specified port
   * @exception IOException if an I/O error occurs during server socket creation
   */
  @Override
  public ServerSocket createServerSocket(int port) throws IOException {
    // A backlog of 0 tells the JDK to use its default queue length.
    return new ServerSocket(port, 0, bindAddress);
  }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.navigation; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.TargetElementUtil; import com.intellij.codeInsight.documentation.DocumentationManager; import com.intellij.codeInsight.documentation.DocumentationManagerProtocol; import com.intellij.codeInsight.hint.HintManager; import com.intellij.codeInsight.hint.HintManagerImpl; import com.intellij.codeInsight.hint.HintUtil; import com.intellij.codeInsight.navigation.actions.GotoDeclarationAction; import com.intellij.codeInsight.navigation.actions.GotoTypeDeclarationAction; import com.intellij.ide.IdeTooltipManager; import com.intellij.ide.util.EditSourceUtil; import com.intellij.lang.documentation.DocumentationProvider; import com.intellij.navigation.ItemPresentation; import com.intellij.navigation.NavigationItem; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.actionSystem.impl.ActionButton; import com.intellij.openapi.actionSystem.impl.PresentationFactory; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.AbstractProjectComponent; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.LogicalPosition; import 
com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.event.*; import com.intellij.openapi.editor.ex.util.EditorUtil; import com.intellij.openapi.editor.markup.HighlighterLayer; import com.intellij.openapi.editor.markup.HighlighterTargetArea; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.FileEditorManagerAdapter; import com.intellij.openapi.fileEditor.FileEditorManagerEvent; import com.intellij.openapi.fileEditor.FileEditorManagerListener; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapManager; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.util.ProgressIndicatorBase; import com.intellij.openapi.progress.util.ProgressIndicatorUtils; import com.intellij.openapi.progress.util.ReadTask; import com.intellij.openapi.project.DumbAwareRunnable; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.psi.*; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil; import com.intellij.psi.search.searches.DefinitionsScopedSearch; import com.intellij.psi.util.PsiUtilCore; import com.intellij.ui.HintListener; import com.intellij.ui.LightweightHint; import com.intellij.ui.ScreenUtil; import 
com.intellij.ui.components.JBLayeredPane; import com.intellij.usageView.UsageViewShortNameLocation; import com.intellij.usageView.UsageViewTypeLocation; import com.intellij.usageView.UsageViewUtil; import com.intellij.util.Alarm; import com.intellij.util.Consumer; import com.intellij.util.Processor; import com.intellij.util.ui.UIUtil; import gnu.trove.TIntArrayList; import org.intellij.lang.annotations.JdkConstants; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import javax.swing.event.HyperlinkEvent; import javax.swing.event.HyperlinkListener; import java.awt.*; import java.awt.event.*; import java.util.ArrayList; import java.util.Collections; import java.util.EventObject; import java.util.List; public class CtrlMouseHandler extends AbstractProjectComponent { private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.navigation.CtrlMouseHandler"); private static final AbstractDocumentationTooltipAction[] ourTooltipActions = {new ShowQuickDocAtPinnedWindowFromTooltipAction()}; private final EditorColorsManager myEditorColorsManager; private HighlightersSet myHighlighter; @JdkConstants.InputEventMask private int myStoredModifiers = 0; private TooltipProvider myTooltipProvider = null; private final FileEditorManager myFileEditorManager; private final DocumentationManager myDocumentationManager; @Nullable private Point myPrevMouseLocation; private LightweightHint myHint; public enum BrowseMode {None, Declaration, TypeDeclaration, Implementation} private final KeyListener myEditorKeyListener = new KeyAdapter() { @Override public void keyPressed(final KeyEvent e) { handleKey(e); } @Override public void keyReleased(final KeyEvent e) { handleKey(e); } private void handleKey(final KeyEvent e) { int modifiers = e.getModifiers(); if (modifiers == myStoredModifiers) { return; } BrowseMode browseMode = getBrowseMode(modifiers); if (browseMode == 
// NOTE(review): SOURCE is line-mangled; this span opens inside an anonymous
// listener declared before the first visible line. The leading fragment below
// is reproduced unchanged (tokens identical), only re-formatted and commented.
BrowseMode.None) {
          disposeHighlighter();
          cancelPreviousTooltip();
        }
        else {
          TooltipProvider tooltipProvider = myTooltipProvider;
          if (tooltipProvider != null) {
            // Browse mode changed mid-hover: drop the stale highlighter before
            // recomputing the tooltip for the new mode at the same position.
            if (browseMode != tooltipProvider.getBrowseMode()) {
              disposeHighlighter();
            }
            myStoredModifiers = modifiers;
            cancelPreviousTooltip();
            myTooltipProvider = new TooltipProvider(tooltipProvider.myEditor, tooltipProvider.myPosition);
            myTooltipProvider.execute(browseMode);
          }
        }
      }
    };

  // Tear down the hyperlink highlighter and pending tooltip whenever the
  // selected file editor changes.
  private final FileEditorManagerListener myFileEditorManagerListener = new FileEditorManagerAdapter() {
    @Override
    public void selectionChanged(@NotNull FileEditorManagerEvent e) {
      disposeHighlighter();
      cancelPreviousTooltip();
    }
  };

  // Scrolling/resizing invalidates the on-screen hint position.
  private final VisibleAreaListener myVisibleAreaListener = new VisibleAreaListener() {
    @Override
    public void visibleAreaChanged(VisibleAreaEvent e) {
      disposeHighlighter();
      cancelPreviousTooltip();
    }
  };

  // A mouse release ends the ctrl-hover gesture.
  private final EditorMouseAdapter myEditorMouseAdapter = new EditorMouseAdapter() {
    @Override
    public void mouseReleased(EditorMouseEvent e) {
      disposeHighlighter();
      cancelPreviousTooltip();
    }
  };

  // Main driver: on every mouse move, decide whether a ctrl/meta-hover tooltip
  // should be (re)computed for the position under the pointer.
  private final EditorMouseMotionListener myEditorMouseMotionListener = new EditorMouseMotionAdapter() {
    @Override
    public void mouseMoved(final EditorMouseEvent e) {
      if (e.isConsumed() || !myProject.isInitialized() || myProject.isDisposed()) {
        return;
      }
      MouseEvent mouseEvent = e.getMouseEvent();

      Point prevLocation = myPrevMouseLocation;
      myPrevMouseLocation = mouseEvent.getLocationOnScreen();
      // Keep the current hint while the pointer is over it or moving towards it.
      if (isMouseOverTooltip(mouseEvent.getLocationOnScreen())
          || ScreenUtil.isMovementTowards(prevLocation, mouseEvent.getLocationOnScreen(), getHintBounds())) {
        return;
      }
      cancelPreviousTooltip();

      myStoredModifiers = mouseEvent.getModifiers();
      BrowseMode browseMode = getBrowseMode(myStoredModifiers);

      if (browseMode == BrowseMode.None) {
        disposeHighlighter();
        return;
      }

      Editor editor = e.getEditor();
      // Ignore events from editors belonging to a different project.
      if (editor.getProject() != null && editor.getProject() != myProject) return;
      PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
      PsiFile psiFile = documentManager.getPsiFile(editor.getDocument());
      Point point = new Point(mouseEvent.getPoint());
      if (documentManager.isCommitted(editor.getDocument())) {
        // when document is committed, try to check injected stuff - it's fast
        int offset = editor.logicalPositionToOffset(editor.xyToLogicalPosition(point));
        editor = InjectedLanguageUtil.getEditorForInjectedLanguageNoCommit(editor, psiFile, offset);
      }

      LogicalPosition pos = editor.xyToLogicalPosition(point);
      int offset = editor.logicalPositionToOffset(pos);
      int selStart = editor.getSelectionModel().getSelectionStart();
      int selEnd = editor.getSelectionModel().getSelectionEnd();

      // Never turn text inside an active selection into a hyperlink.
      if (offset >= selStart && offset < selEnd) {
        disposeHighlighter();
        return;
      }

      myTooltipProvider = new TooltipProvider(editor, pos);
      myTooltipProvider.execute(browseMode);
    }
  };

  // Cancels the in-flight tooltip computation, if any.
  private void cancelPreviousTooltip() {
    if (myTooltipProvider != null) {
      myTooltipProvider.dispose();
      myTooltipProvider = null;
    }
  }

  // Alarm used to generate the full quick-doc text on a pooled thread.
  @NotNull private final Alarm myDocAlarm;

  public CtrlMouseHandler(final Project project,
                          StartupManager startupManager,
                          EditorColorsManager colorsManager,
                          FileEditorManager fileEditorManager,
                          @NotNull DocumentationManager documentationManager,
                          @NotNull final EditorFactory editorFactory) {
    super(project);
    myEditorColorsManager = colorsManager;
    // Listener registration is deferred until after project startup.
    startupManager.registerPostStartupActivity(new DumbAwareRunnable() {
      @Override
      public void run() {
        EditorEventMulticaster eventMulticaster = editorFactory.getEventMulticaster();
        eventMulticaster.addEditorMouseListener(myEditorMouseAdapter, project);
        eventMulticaster.addEditorMouseMotionListener(myEditorMouseMotionListener, project);
        eventMulticaster.addCaretListener(new CaretAdapter() {
          @Override
          public void caretPositionChanged(CaretEvent e) {
            if (myHint != null) {
              // Keep the docked quick-doc tool window in sync with the caret.
              myDocumentationManager.updateToolwindowContext();
            }
          }
        }, project);
      }
    });
    myFileEditorManager = fileEditorManager;
    myDocumentationManager = documentationManager;
    myDocAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, myProject);
  }

  @Override
  @NotNull
  public String getComponentName() {
    return "CtrlMouseHandler";
  }

  // True when the given screen point lies inside the currently shown hint.
  private boolean isMouseOverTooltip(@NotNull Point mouseLocationOnScreen) {
    Rectangle bounds = getHintBounds();
    return bounds != null && bounds.contains(mouseLocationOnScreen);
  }

  // Screen bounds of the visible hint component, or null when no hint shows.
  @Nullable
  private Rectangle getHintBounds() {
    LightweightHint hint = myHint;
    if (hint == null) {
      return null;
    }
    JComponent hintComponent = hint.getComponent();
    if (!hintComponent.isShowing()) {
      return null;
    }
    return new Rectangle(hintComponent.getLocationOnScreen(), hintComponent.getSize());
  }

  // Maps the pressed modifier mask to a browse mode via the active keymap,
  // with plain Ctrl/Meta falling back to go-to-declaration.
  @NotNull
  private static BrowseMode getBrowseMode(@JdkConstants.InputEventMask int modifiers) {
    if (modifiers != 0) {
      final Keymap activeKeymap = KeymapManager.getInstance().getActiveKeymap();
      if (KeymapUtil.matchActionMouseShortcutsModifiers(activeKeymap, modifiers, IdeActions.ACTION_GOTO_DECLARATION)) return BrowseMode.Declaration;
      if (KeymapUtil.matchActionMouseShortcutsModifiers(activeKeymap, modifiers, IdeActions.ACTION_GOTO_TYPE_DECLARATION)) return BrowseMode.TypeDeclaration;
      if (KeymapUtil.matchActionMouseShortcutsModifiers(activeKeymap, modifiers, IdeActions.ACTION_GOTO_IMPLEMENTATION)) return BrowseMode.Implementation;
      if (modifiers == InputEvent.CTRL_MASK || modifiers == InputEvent.META_MASK) return BrowseMode.Declaration;
    }
    return BrowseMode.None;
  }

  @Nullable
  @TestOnly
  public static String getInfo(PsiElement element, PsiElement atPointer) {
    return generateInfo(element, atPointer).text;
  }

  @Nullable
  @TestOnly
  public static String getInfo(@NotNull Editor editor, BrowseMode browseMode) {
    Project project = editor.getProject();
    if (project == null) return null;
    PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
    if (file == null) return null;
    Info info = getInfoAt(project, editor, file, editor.getCaretModel().getOffset(), browseMode);
    // NOTE(review): this return statement continues on the next physical
    // SOURCE line; the fragment boundary is preserved.
    return info == null ?
null : info.getInfo().text;
  }

  // Builds the quick-navigate DocInfo for the given target/origin pair,
  // falling back to DocInfo.EMPTY when no text can be produced.
  @NotNull
  private static DocInfo generateInfo(PsiElement element, PsiElement atPointer) {
    final DocumentationProvider documentationProvider = DocumentationManager.getProviderFromElement(element, atPointer);
    String result = doGenerateInfo(element, atPointer, documentationProvider);
    return result == null ? DocInfo.EMPTY : new DocInfo(result, documentationProvider, element);
  }

  // Tries several strategies in order: provider quick-navigate info, file URL
  // (for PsiFile targets), usage-view description, navigation-item text.
  @Nullable
  private static String doGenerateInfo(@NotNull PsiElement element,
                                       @NotNull PsiElement atPointer,
                                       @NotNull DocumentationProvider documentationProvider) {
    String info = documentationProvider.getQuickNavigateInfo(element, atPointer);
    if (info != null) {
      return info;
    }

    if (element instanceof PsiFile) {
      final VirtualFile virtualFile = ((PsiFile)element).getVirtualFile();
      if (virtualFile != null) {
        return virtualFile.getPresentableUrl();
      }
    }

    info = getQuickNavigateInfo(element);
    if (info != null) {
      return info;
    }

    if (element instanceof NavigationItem) {
      final ItemPresentation presentation = ((NavigationItem)element).getPresentation();
      if (presentation != null) {
        return presentation.getPresentableText();
      }
    }

    return null;
  }

  // Formats: <type> "<name>" [<file>] from the usage-view descriptions;
  // null when the element has no short name.
  @Nullable
  private static String getQuickNavigateInfo(PsiElement element) {
    final String name = ElementDescriptionUtil.getElementDescription(element, UsageViewShortNameLocation.INSTANCE);
    if (StringUtil.isEmpty(name)) return null;
    final String typeName = ElementDescriptionUtil.getElementDescription(element, UsageViewTypeLocation.INSTANCE);
    final PsiFile file = element.getContainingFile();
    final StringBuilder sb = new StringBuilder();
    if (StringUtil.isNotEmpty(typeName)) sb.append(typeName).append(" ");
    sb.append("\"").append(name).append("\"");
    if (file != null && file.isPhysical()) {
      sb.append(" [").append(file.getName()).append("]");
    }
    return sb.toString();
  }

  // Base class for whatever the handler highlights under the mouse:
  // the element at the pointer plus the text ranges to underline.
  private abstract static class Info {
    @NotNull protected final PsiElement myElementAtPointer;
    @NotNull private final List<TextRange> myRanges;

    public Info(@NotNull PsiElement elementAtPointer, @NotNull List<TextRange> ranges) {
      myElementAtPointer = elementAtPointer;
      myRanges = ranges;
    }

    public Info(@NotNull PsiElement elementAtPointer) {
      this(elementAtPointer, Collections.singletonList(getReferenceRange(elementAtPointer)));
    }

    @NotNull
    private static TextRange getReferenceRange(@NotNull PsiElement elementAtPointer) {
      int textOffset = elementAtPointer.getTextOffset();
      final TextRange range = elementAtPointer.getTextRange();
      if (textOffset < range.getStartOffset() || textOffset < 0) {
        // Defensive: a broken PSI implementation reported an offset outside
        // its own range; log the error and clamp to the range start.
        LOG.error("Invalid text offset " + textOffset + " of element " + elementAtPointer + " of " + elementAtPointer.getClass());
        textOffset = range.getStartOffset();
      }
      return new TextRange(textOffset, range.getEndOffset());
    }

    boolean isSimilarTo(@NotNull Info that) {
      return Comparing.equal(myElementAtPointer, that.myElementAtPointer) && myRanges.equals(that.myRanges);
    }

    @NotNull
    public List<TextRange> getRanges() {
      return myRanges;
    }

    @NotNull
    public abstract DocInfo getInfo();

    public abstract boolean isValid(@NotNull Document document);

    public abstract void showDocInfo(@NotNull DocumentationManager docManager);

    // All stored ranges must still fit inside the (possibly edited) document.
    protected boolean rangesAreCorrect(@NotNull Document document) {
      final TextRange docRange = new TextRange(0, document.getTextLength());
      for (TextRange range : getRanges()) {
        if (!docRange.contains(range)) return false;
      }
      return true;
    }
  }

  private static void showDumbModeNotification(@NotNull Project project) {
    DumbService.getInstance(project).showDumbModeNotification("Element information is not available during index update");
  }

  // Info for a reference with exactly one resolved target element.
  // NOTE(review): the rest of this class continues on the next physical
  // SOURCE line; the fragment boundary is preserved.
  private static class InfoSingle extends Info {
    @NotNull private final PsiElement myTargetElement;

    public InfoSingle(@NotNull PsiElement elementAtPointer, @NotNull PsiElement targetElement) {
      super(elementAtPointer);
      myTargetElement = targetElement;
    }

    public InfoSingle(@NotNull PsiReference ref, @NotNull final PsiElement targetElement) {
      super(ref.getElement(), ReferenceRange.getAbsoluteRanges(ref));
      myTargetElement = targetElement;
    }
    // Generation must run in a read action; index unavailability degrades to
    // a dumb-mode notification instead of an error.
    @Override
    @NotNull
    public DocInfo getInfo() {
      return ApplicationManager.getApplication().runReadAction(new Computable<DocInfo>() {
        @Override
        public DocInfo compute() {
          try {
            return generateInfo(myTargetElement, myElementAtPointer);
          }
          catch (IndexNotReadyException e) {
            showDumbModeNotification(myTargetElement.getProject());
            return DocInfo.EMPTY;
          }
        }
      });
    }

    @Override
    public boolean isValid(@NotNull Document document) {
      if (!myTargetElement.isValid()) return false;
      if (!myElementAtPointer.isValid()) return false;
      // A self-reference is not a navigable link.
      if (myTargetElement == myElementAtPointer) return false;
      return rangesAreCorrect(document);
    }

    @Override
    public void showDocInfo(@NotNull DocumentationManager docManager) {
      docManager.showJavaDocInfo(myTargetElement, myElementAtPointer, null);
      docManager.setAllowContentUpdateFromContext(false);
    }
  }

  // Info for an ambiguous reference with several candidate targets.
  private static class InfoMultiple extends Info {
    public InfoMultiple(@NotNull final PsiElement elementAtPointer) {
      super(elementAtPointer);
    }

    public InfoMultiple(@NotNull final PsiElement elementAtPointer, @NotNull PsiReference ref) {
      super(elementAtPointer, ReferenceRange.getAbsoluteRanges(ref));
    }

    @Override
    @NotNull
    public DocInfo getInfo() {
      return new DocInfo(CodeInsightBundle.message("multiple.implementations.tooltip"), null, null);
    }

    @Override
    public boolean isValid(@NotNull Document document) {
      return rangesAreCorrect(document);
    }

    @Override
    public void showDocInfo(@NotNull DocumentationManager docManager) {
      // Do nothing
    }
  }

  @Nullable
  private Info getInfoAt(@NotNull final Editor editor, @NotNull PsiFile file, int offset, @NotNull BrowseMode browseMode) {
    return getInfoAt(myProject, editor, file, offset, browseMode);
  }

  // Core dispatch: computes what to highlight at the given offset for the
  // given browse mode (declaration / type declaration / implementation),
  // or null when nothing navigable is under the pointer.
  @Nullable
  private static Info getInfoAt(@NotNull Project project,
                                @NotNull final Editor editor,
                                @NotNull PsiFile file,
                                int offset,
                                @NotNull BrowseMode browseMode) {
    PsiElement targetElement = null;

    if (browseMode == BrowseMode.TypeDeclaration) {
      try {
        targetElement = GotoTypeDeclarationAction.findSymbolType(editor, offset);
      }
      catch (IndexNotReadyException e) {
        showDumbModeNotification(project);
      }
    }
    else if (browseMode == BrowseMode.Declaration) {
      final PsiReference ref = TargetElementUtil.findReference(editor, offset);
      final List<PsiElement> resolvedElements = ref == null ? Collections.<PsiElement>emptyList() : resolve(ref);
      final PsiElement resolvedElement = resolvedElements.size() == 1 ? resolvedElements.get(0) : null;

      final PsiElement[] targetElements = GotoDeclarationAction.findTargetElementsNoVS(project, editor, offset, false);
      final PsiElement elementAtPointer = file.findElementAt(TargetElementUtil.adjustOffset(file, editor.getDocument(), offset));

      if (targetElements != null) {
        if (targetElements.length == 0) {
          return null;
        }
        else if (targetElements.length == 1) {
          if (targetElements[0] != resolvedElement && elementAtPointer != null && targetElements[0].isPhysical()) {
            return ref != null ? new InfoSingle(ref, targetElements[0]) : new InfoSingle(elementAtPointer, targetElements[0]);
          }
        }
        else {
          return elementAtPointer != null ? new InfoMultiple(elementAtPointer) : null;
        }
      }

      if (resolvedElements.size() == 1) {
        return new InfoSingle(ref, resolvedElements.get(0));
      }
      if (resolvedElements.size() > 1) {
        return elementAtPointer != null ? new InfoMultiple(elementAtPointer, ref) : null;
      }
    }
    else if (browseMode == BrowseMode.Implementation) {
      final PsiElement element = TargetElementUtil.getInstance().findTargetElement(editor, ImplementationSearcher.getFlags(), offset);
      PsiElement[] targetElements = new ImplementationSearcher() {
        @Override
        @NotNull
        protected PsiElement[] searchDefinitions(final PsiElement element, Editor editor) {
          final List<PsiElement> found = new ArrayList<PsiElement>(2);
          // Stop after the second hit: we only need "one" vs "many".
          DefinitionsScopedSearch.search(element, getSearchScope(element, editor)).forEach(new Processor<PsiElement>() {
            @Override
            public boolean process(final PsiElement psiElement) {
              found.add(psiElement);
              return found.size() != 2;
            }
          });
          return PsiUtilCore.toPsiElementArray(found);
        }
      }.searchImplementations(editor, element, offset);
      if (targetElements.length > 1) {
        PsiElement elementAtPointer = file.findElementAt(offset);
        if (elementAtPointer != null) {
          return new InfoMultiple(elementAtPointer);
        }
        return null;
      }
      if (targetElements.length == 1) {
        Navigatable descriptor = EditSourceUtil.getDescriptor(targetElements[0]);
        if (descriptor == null || !descriptor.canNavigate()) {
          return null;
        }
        targetElement = targetElements[0];
      }
    }

    if (targetElement != null && targetElement.isPhysical()) {
      PsiElement elementAtPointer = file.findElementAt(offset);
      if (elementAtPointer != null) {
        return new InfoSingle(elementAtPointer, targetElement);
      }
    }

    // Fallback: offer "show usages" for a named element under the pointer.
    final PsiNameIdentifierOwner element = GotoDeclarationAction.findElementToShowUsagesOf(editor, offset);
    if (element != null) {
      PsiElement identifier = element.getNameIdentifier();
      if (identifier != null && identifier.isValid()) {
        return new Info(identifier) {
          @Override
          public void showDocInfo(@NotNull DocumentationManager docManager) {
          }

          @NotNull
          @Override
          public DocInfo getInfo() {
            String name = UsageViewUtil.getType(element) + " '" + UsageViewUtil.getShortName(element) + "'";
            return new DocInfo("Show usages of " + name, null, element);
          }

          @Override
          public boolean isValid(@NotNull Document document) {
            return element.isValid();
          }
        };
      }
    }
    return null;
  }

  // Resolves a reference to its target element(s), handling poly-variant
  // references whose single resolve() comes back null.
  @NotNull
  private static List<PsiElement> resolve(@NotNull PsiReference ref) {
    // IDEA-56727 try resolve first as in GotoDeclarationAction
    PsiElement resolvedElement = ref.resolve();

    if (resolvedElement == null && ref instanceof PsiPolyVariantReference) {
      List<PsiElement> result = new ArrayList<PsiElement>();
      final ResolveResult[] psiElements = ((PsiPolyVariantReference)ref).multiResolve(false);
      for (ResolveResult resolveResult : psiElements) {
        if (resolveResult.getElement() != null) {
          result.add(resolveResult.getElement());
        }
      }
      return result;
    }
    return resolvedElement == null ? Collections.<PsiElement>emptyList() : Collections.singletonList(resolvedElement);
  }

  // Removes the hyperlink highlighter (restoring cursor/listeners) and hides
  // any visible hints.
  private void disposeHighlighter() {
    if (myHighlighter != null) {
      myHighlighter.uninstall();
      HintManager.getInstance().hideAllHints();
      myHighlighter = null;
    }
  }

  // Asynchronously generates the full quick-doc text for an already-visible
  // hint and swaps it into the hint component on the EDT when ready.
  private void fulfillDocInfo(@NotNull final String header,
                              @NotNull final DocumentationProvider provider,
                              @NotNull final PsiElement originalElement,
                              @NotNull final PsiElement anchorElement,
                              @NotNull final Consumer<String> newTextConsumer,
                              @NotNull final LightweightHint hint) {
    myDocAlarm.cancelAllRequests();
    myDocAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        final Ref<String> fullTextRef = new Ref<String>();
        final Ref<String> qualifiedNameRef = new Ref<String>();
        ApplicationManager.getApplication().runReadAction(new Runnable() {
          @Override
          public void run() {
            if (anchorElement.isValid() && originalElement.isValid()) {
              try {
                fullTextRef.set(provider.generateDoc(anchorElement, originalElement));
              }
              catch (IndexNotReadyException e) {
                fullTextRef.set("Documentation is not available while indexing is in progress");
              }
              if (anchorElement instanceof PsiQualifiedNamedElement) {
                qualifiedNameRef.set(((PsiQualifiedNamedElement)anchorElement).getQualifiedName());
              }
            }
          }
        });
        String fullText = fullTextRef.get();
        if (fullText == null) {
          return;
        }
        // NOTE(review): this statement continues on the next physical SOURCE
        // line; the fragment boundary is preserved.
        final String updatedText =
DocPreviewUtil.buildPreview(header, qualifiedNameRef.get(), fullText);
        final String newHtml = HintUtil.prepareHintText(updatedText, HintUtil.getInformationHint());
        UIUtil.invokeLaterIfNeeded(new Runnable() {
          @Override
          public void run() {
            // There is a possible case that quick doc control width is changed, e.g. it contained text
            // like 'public final class String implements java.io.Serializable, java.lang.Comparable<java.lang.String>' and
            // new text replaces fully-qualified class names by hyperlinks with short name.
            // That's why we might need to update the control size. We assume that the hint component is located at the
            // layered pane, so, the algorithm is to find an ancestor layered pane and apply new size for the target component.
            JComponent component = hint.getComponent();
            Dimension oldSize = component.getPreferredSize();
            newTextConsumer.consume(newHtml);
            final int widthIncrease;
            if (component instanceof QuickDocInfoPane) {
              int buttonWidth = ((QuickDocInfoPane)component).getButtonWidth();
              widthIncrease = calculateWidthIncrease(buttonWidth, updatedText);
            }
            else {
              widthIncrease = 0;
            }
            if (oldSize == null) {
              return;
            }
            Dimension newSize = component.getPreferredSize();
            if (newSize.width + widthIncrease == oldSize.width) {
              return;
            }
            component.setPreferredSize(new Dimension(newSize.width + widthIncrease, newSize.height));

            // We're assuming here that there are two possible hint representation modes: popup and layered pane.
            if (hint.isRealPopup()) {
              TooltipProvider tooltipProvider = myTooltipProvider;
              if (tooltipProvider != null) {
                // There is a possible case that 'raw' control was rather wide but the 'rich' one is narrower. That's why we try to
                // re-show the hint here. Benefits: there is a possible case that we'll be able to show nice layered pane-based balloon;
                // the popup will be re-positioned according to the new width.
                hint.hide();
                tooltipProvider.showHint(new LightweightHint(component));
              }
              else {
                component.setPreferredSize(new Dimension(newSize.width + widthIncrease, oldSize.height));
                hint.pack();
              }
              return;
            }

            // Layered-pane mode: widen the top-level layered pane child so the
            // wider content still fits.
            Container topLevelLayeredPaneChild = null;
            boolean adjustBounds = false;
            for (Container current = component.getParent(); current != null; current = current.getParent()) {
              if (current instanceof JLayeredPane) {
                adjustBounds = true;
                break;
              }
              else {
                topLevelLayeredPaneChild = current;
              }
            }
            if (adjustBounds && topLevelLayeredPaneChild != null) {
              Rectangle bounds = topLevelLayeredPaneChild.getBounds();
              topLevelLayeredPaneChild.setBounds(bounds.x, bounds.y, bounds.width + newSize.width + widthIncrease - oldSize.width, bounds.height);
            }
          }
        });
      }
    }, 0);
  }

  /**
   * It's possible that we need to expand quick doc control's width in order to provide better visual representation
   * (see https://youtrack.jetbrains.com/issue/IDEA-101425). This method calculates that width expand.
   *
   * @param buttonWidth icon button's width
   * @param updatedText text which will be shown at the quick doc control
   * @return width increase to apply to the target quick doc control (zero if no additional width increase is required)
   */
  private static int calculateWidthIncrease(int buttonWidth, String updatedText) {
    int maxLineWidth = 0;
    TIntArrayList lineWidths = new TIntArrayList();
    for (String lineText : StringUtil.split(updatedText, "<br/>")) {
      String html = HintUtil.prepareHintText(lineText, HintUtil.getInformationHint());
      int width = new JLabel(html).getPreferredSize().width;
      maxLineWidth = Math.max(maxLineWidth, width);
      lineWidths.add(width);
    }

    if (!lineWidths.isEmpty()) {
      // The button row overlays the trailing space of the first line; widen
      // only by the part of the button width that does not already fit there.
      int firstLineAvailableTrailingWidth = maxLineWidth - lineWidths.get(0);
      if (firstLineAvailableTrailingWidth >= buttonWidth) {
        return 0;
      }
      else {
        return buttonWidth - firstLineAvailableTrailingWidth;
      }
    }
    return 0;
  }

  // Computes and shows the ctrl-hover tooltip for one mouse position.
  // NOTE(review): this field declaration continues on the next physical
  // SOURCE line; the fragment boundary is preserved.
  private class TooltipProvider {
    @NotNull private final Editor myEditor;
    @NotNull private final LogicalPosition myPosition;
    private
BrowseMode myBrowseMode;
    private boolean myDisposed;
    private final ProgressIndicator myProgress = new ProgressIndicatorBase();

    TooltipProvider(@NotNull Editor editor, @NotNull LogicalPosition pos) {
      myEditor = editor;
      myPosition = pos;
    }

    void dispose() {
      myDisposed = true;
      myProgress.cancel();
    }

    public BrowseMode getBrowseMode() {
      return myBrowseMode;
    }

    // Schedules the info lookup as a cancellable background read task.
    void execute(@NotNull BrowseMode browseMode) {
      myBrowseMode = browseMode;

      Document document = myEditor.getDocument();
      final PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
      if (file == null) return;

      PsiDocumentManager.getInstance(myProject).commitAllDocuments();
      if (EditorUtil.inVirtualSpace(myEditor, myPosition)) {
        disposeHighlighter();
        return;
      }

      final int offset = myEditor.logicalPositionToOffset(myPosition);

      int selStart = myEditor.getSelectionModel().getSelectionStart();
      int selEnd = myEditor.getSelectionModel().getSelectionEnd();

      // No tooltip inside an active selection.
      if (offset >= selStart && offset < selEnd) return;

      ProgressIndicatorUtils.scheduleWithWriteActionPriority(myProgress, new ReadTask() {
        @Override
        public void computeInReadAction(@NotNull ProgressIndicator indicator) {
          doExecute(file, offset);
        }

        @Override
        public void onCanceled(@NotNull ProgressIndicator indicator) {
        }
      });
    }

    private void doExecute(@NotNull PsiFile file, int offset) {
      final Info info;
      try {
        info = getInfoAt(myEditor, file, offset, myBrowseMode);
        if (info == null) return;
      }
      catch (IndexNotReadyException e) {
        showDumbModeNotification(myProject);
        return;
      }
      ApplicationManager.getApplication().invokeLater(new Runnable() {
        @Override
        public void run() {
          // Skip if this provider was cancelled or the editor went away.
          if (myDisposed || myEditor.isDisposed() || !myEditor.getComponent().isShowing()) return;
          showHint(info);
        }
      });
    }

    // Installs the hyperlink highlighter and shows the tooltip for the info.
    private void showHint(@NotNull Info info) {
      if (myDisposed || myEditor.isDisposed()) return;
      Component internalComponent = myEditor.getContentComponent();
      if (myHighlighter != null) {
        if (!info.isSimilarTo(myHighlighter.getStoredInfo())) {
          disposeHighlighter();
        }
        else {
          // highlighter already set
          internalComponent.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
          return;
        }
      }

      if (!info.isValid(myEditor.getDocument())) {
        return;
      }

      myHighlighter = installHighlighterSet(info, myEditor);

      DocInfo docInfo = info.getInfo();

      if (docInfo.text == null) return;

      if (myDocumentationManager.hasActiveDockedDocWindow()) {
        info.showDocInfo(myDocumentationManager);
      }

      HyperlinkListener hyperlinkListener = docInfo.docProvider == null
                                            ? null
                                            : new QuickDocHyperlinkListener(docInfo.docProvider, info.myElementAtPointer);
      final Ref<QuickDocInfoPane> quickDocPaneRef = new Ref<QuickDocInfoPane>();
      // Forward mouse enter/exit to the quick-doc pane so it can toggle its
      // overlay buttons (the pane is created only later, hence the Ref).
      MouseListener mouseListener = new MouseAdapter() {
        @Override
        public void mouseEntered(MouseEvent e) {
          QuickDocInfoPane pane = quickDocPaneRef.get();
          if (pane != null) {
            pane.mouseEntered(e);
          }
        }

        @Override
        public void mouseExited(MouseEvent e) {
          QuickDocInfoPane pane = quickDocPaneRef.get();
          if (pane != null) {
            pane.mouseExited(e);
          }
        }

        @Override
        public void mouseClicked(MouseEvent e) {
        }
      };
      Ref<Consumer<String>> newTextConsumerRef = new Ref<Consumer<String>>();
      JComponent label = HintUtil.createInformationLabel(docInfo.text, hyperlinkListener, mouseListener, newTextConsumerRef);
      Consumer<String> newTextConsumer = newTextConsumerRef.get();
      QuickDocInfoPane quickDocPane = null;
      if (docInfo.documentationAnchor != null) {
        quickDocPane = new QuickDocInfoPane(docInfo.documentationAnchor, info.myElementAtPointer, label);
        quickDocPaneRef.set(quickDocPane);
      }
      JComponent hintContent = quickDocPane == null ? label : quickDocPane;
      final LightweightHint hint = new LightweightHint(hintContent);
      myHint = hint;
      hint.addHintListener(new HintListener() {
        @Override
        public void hintHidden(EventObject event) {
          myHint = null;
        }
      });
      myDocAlarm.cancelAllRequests();
      if (newTextConsumer != null && docInfo.docProvider != null && docInfo.documentationAnchor != null) {
        // Upgrade the quick preview to the full documentation asynchronously.
        fulfillDocInfo(docInfo.text, docInfo.docProvider, info.myElementAtPointer, docInfo.documentationAnchor, newTextConsumer, hint);
      }

      showHint(hint);
    }

    public void showHint(@NotNull LightweightHint hint) {
      final HintManagerImpl hintManager = HintManagerImpl.getInstanceImpl();
      Point p = HintManagerImpl.getHintPosition(hint, myEditor, myPosition, HintManager.ABOVE);
      hintManager.showEditorHint(hint, myEditor, p,
                                 HintManager.HIDE_BY_ANY_KEY | HintManager.HIDE_BY_TEXT_CHANGE | HintManager.HIDE_BY_SCROLLING,
                                 0, false,
                                 HintManagerImpl.createHintHint(myEditor, p, hint, HintManager.ABOVE).setContentActive(false));
    }
  }

  // Installs reference-hyperlink highlighters and swaps the cursor to a hand;
  // returns the set needed to undo everything later.
  // NOTE(review): the last statement continues on the next physical SOURCE
  // line; the fragment boundary is preserved.
  @NotNull
  private HighlightersSet installHighlighterSet(@NotNull Info info, @NotNull Editor editor) {
    final JComponent internalComponent = editor.getContentComponent();
    internalComponent.addKeyListener(myEditorKeyListener);
    editor.getScrollingModel().addVisibleAreaListener(myVisibleAreaListener);
    final Cursor cursor = internalComponent.getCursor();
    internalComponent.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
    myFileEditorManager.addFileEditorManagerListener(myFileEditorManagerListener);

    List<RangeHighlighter> highlighters = new ArrayList<RangeHighlighter>();
    TextAttributes attributes = myEditorColorsManager.getGlobalScheme().getAttributes(EditorColors.REFERENCE_HYPERLINK_COLOR);
    for (TextRange range : info.getRanges()) {
      TextAttributes attr = NavigationUtil.patchAttributesColor(attributes, range, editor);
      final RangeHighlighter highlighter = editor.getMarkupModel().addRangeHighlighter(range.getStartOffset(), range.getEndOffset(),
                                                                                      HighlighterLayer.SELECTION + 1,
                                                                                      attr,
                                                                                      HighlighterTargetArea.EXACT_RANGE);
      highlighters.add(highlighter);
    }

    return new HighlightersSet(highlighters, editor, cursor, info);
  }

  // Everything needed to roll back installHighlighterSet().
  private class HighlightersSet {
    @NotNull private final List<RangeHighlighter> myHighlighters;
    @NotNull private final Editor myHighlighterView;
    @NotNull private final Cursor myStoredCursor;
    @NotNull private final Info myStoredInfo;

    private HighlightersSet(@NotNull List<RangeHighlighter> highlighters,
                            @NotNull Editor highlighterView,
                            @NotNull Cursor storedCursor,
                            @NotNull Info storedInfo) {
      myHighlighters = highlighters;
      myHighlighterView = highlighterView;
      myStoredCursor = storedCursor;
      myStoredInfo = storedInfo;
    }

    // Removes the highlighters and restores cursor and listeners.
    public void uninstall() {
      for (RangeHighlighter highlighter : myHighlighters) {
        highlighter.dispose();
      }

      Component internalComponent = myHighlighterView.getContentComponent();
      internalComponent.setCursor(myStoredCursor);
      internalComponent.removeKeyListener(myEditorKeyListener);
      myHighlighterView.getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener);
      myFileEditorManager.removeFileEditorManagerListener(myFileEditorManagerListener);
    }

    @NotNull
    public Info getStoredInfo() {
      return myStoredInfo;
    }
  }

  // Immutable payload of a tooltip: text plus optional provider/anchor used
  // to later fetch the full documentation.
  private static class DocInfo {
    public static final DocInfo EMPTY = new DocInfo(null, null, null);

    @Nullable public final String text;
    @Nullable public final DocumentationProvider docProvider;
    @Nullable public final PsiElement documentationAnchor;

    DocInfo(@Nullable String text, @Nullable DocumentationProvider provider, @Nullable PsiElement documentationAnchor) {
      this.text = text;
      docProvider = provider;
      this.documentationAnchor = documentationAnchor;
    }
  }

  // Layered pane that overlays quick-doc action buttons on top of the hint
  // label; the buttons become visible only while the mouse is over the pane.
  private class QuickDocInfoPane extends JBLayeredPane {
    private static final int BUTTON_HGAP = 5;

    @NotNull private final List<JComponent> myButtons = new ArrayList<JComponent>();
    @NotNull private final JComponent myBaseDocControl;

    private final int myMinWidth;
    private final int myMinHeight;
    private final int myButtonWidth;

    QuickDocInfoPane(@NotNull PsiElement documentationAnchor,
                     @NotNull PsiElement elementUnderMouse,
                     @NotNull JComponent baseDocControl) {
      myBaseDocControl = baseDocControl;

      PresentationFactory presentationFactory = new PresentationFactory();
      for (AbstractDocumentationTooltipAction action : ourTooltipActions) {
        Icon icon = action.getTemplatePresentation().getIcon();
        Dimension minSize = new Dimension(icon.getIconWidth(), icon.getIconHeight());
        myButtons.add(new ActionButton(action, presentationFactory.getPresentation(action), IdeTooltipManager.IDE_TOOLTIP_PLACE, minSize));
        action.setDocInfo(documentationAnchor, elementUnderMouse);
      }
      Collections.reverse(myButtons);

      setPreferredSize(baseDocControl.getPreferredSize());
      setMaximumSize(baseDocControl.getMaximumSize());
      setMinimumSize(baseDocControl.getMinimumSize());
      setBackground(baseDocControl.getBackground());

      // Layer 0: the doc label; layer 1: the (initially hidden) buttons.
      add(baseDocControl, Integer.valueOf(0));
      int minWidth = 0;
      int minHeight = 0;
      int buttonWidth = 0;
      for (JComponent button : myButtons) {
        button.setBorder(null);
        button.setBackground(baseDocControl.getBackground());
        add(button, Integer.valueOf(1));
        button.setVisible(false);
        Dimension preferredSize = button.getPreferredSize();
        minWidth += preferredSize.width;
        minHeight = Math.max(minHeight, preferredSize.height);
        buttonWidth = Math.max(buttonWidth, preferredSize.width);
      }
      myButtonWidth = buttonWidth;

      int margin = 2;
      myMinWidth = minWidth + margin * 2 + (myButtons.size() - 1) * BUTTON_HGAP;
      myMinHeight = minHeight + margin * 2;
    }

    public int getButtonWidth() {
      return myButtonWidth;
    }

    @Override
    public Dimension getPreferredSize() {
      return expandIfNecessary(myBaseDocControl.getPreferredSize());
    }

    @Override
    public void setPreferredSize(Dimension preferredSize) {
      super.setPreferredSize(preferredSize);
      myBaseDocControl.setPreferredSize(preferredSize);
    }

    @Override
    public Dimension getMinimumSize() {
      return expandIfNecessary(myBaseDocControl.getMinimumSize());
    }

    @Override
    public Dimension getMaximumSize() {
      return expandIfNecessary(myBaseDocControl.getMaximumSize());
    }

    // Grows the base size just enough to also fit the button row.
    @NotNull
    private Dimension expandIfNecessary(@NotNull Dimension base) {
      if (base.width >= myMinWidth && base.height >= myMinHeight) {
        return base;
      }
      return new Dimension(Math.max(myMinWidth, base.width), Math.max(myMinHeight, base.height));
    }

    @Override
    public void doLayout() {
      Rectangle bounds = getBounds();
      myBaseDocControl.setBounds(new Rectangle(0, 0, bounds.width, bounds.height));

      // Buttons are laid out right-to-left along the top edge.
      int x = bounds.width;
      for (JComponent button : myButtons) {
        Dimension buttonSize = button.getPreferredSize();
        x -= buttonSize.width;
        button.setBounds(x, 0, buttonSize.width, buttonSize.height);
        x -= BUTTON_HGAP;
      }
    }

    public void mouseEntered(@NotNull MouseEvent e) {
      processStateChangeIfNecessary(e.getLocationOnScreen(), true);
    }

    public void mouseExited(@NotNull MouseEvent e) {
      processStateChangeIfNecessary(e.getLocationOnScreen(), false);
    }

    private void processStateChangeIfNecessary(@NotNull Point mouseScreenLocation, boolean mouseEntered) {
      // Don't show 'view quick doc' buttons if docked quick doc control is already active.
      if (myDocumentationManager.hasActiveDockedDocWindow()) {
        return;
      }
      // Skip event triggered when mouse leaves action button area.
      if (!mouseEntered && new Rectangle(getLocationOnScreen(), getSize()).contains(mouseScreenLocation)) {
        return;
      }
      for (JComponent button : myButtons) {
        button.setVisible(mouseEntered);
      }
    }
  }

  // Resolves documentation protocol links clicked inside the hint and opens
  // quick doc for the linked element.
  private class QuickDocHyperlinkListener implements HyperlinkListener {
    @NotNull private final DocumentationProvider myProvider;
    @NotNull private final PsiElement myContext;

    QuickDocHyperlinkListener(@NotNull DocumentationProvider provider, @NotNull PsiElement context) {
      myProvider = provider;
      myContext = context;
    }

    @Override
    public void hyperlinkUpdate(@NotNull HyperlinkEvent e) {
      if (e.getEventType() != HyperlinkEvent.EventType.ACTIVATED) {
        return;
      }

      String description = e.getDescription();
      if (StringUtil.isEmpty(description) || !description.startsWith(DocumentationManagerProtocol.PSI_ELEMENT_PROTOCOL)) {
        return;
      }

      String elementName = e.getDescription().substring(DocumentationManagerProtocol.PSI_ELEMENT_PROTOCOL.length());

      final PsiElement targetElement = myProvider.getDocumentationElementForLink(PsiManager.getInstance(myProject), elementName, myContext);
      if (targetElement != null) {
        LightweightHint hint = myHint;
        if (hint != null) {
          // false alarm - hide the current hint before showing the doc window
          hint.hide(true);
        }
        myDocumentationManager.showJavaDocInfo(targetElement, myContext, null);
      }
    }
  }
}
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.ParcelFileDescriptor;
import android.util.Log;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

import java.io.File;
import java.nio.ByteBuffer;

// JNI bridge that decodes an encoded audio file (handed over as a raw file
// descriptor) into PCM for Web Audio, using the platform MediaCodec APIs.
@JNINamespace("media")
class WebAudioMediaCodecBridge {
    static final String LOG_TAG = "WebAudioMediaCodec";
    // TODO(rtoy): What is the correct timeout value for reading
    // from a file in memory?
    static final long TIMEOUT_MICROSECONDS = 500;

    // Creates a scratch file in the app cache dir to hold decoded output.
    @CalledByNative
    private static String createTempFile(Context ctx) throws java.io.IOException {
        File outputDirectory = ctx.getCacheDir();
        File outputFile = File.createTempFile("webaudio", ".dat", outputDirectory);
        return outputFile.getAbsolutePath();
    }

    // Decodes the audio data behind |inputFD| (exactly |dataSize| bytes) and
    // streams the PCM back to native code. Returns false on any setup or
    // decode failure.
    // NOTE(review): this method is truncated at the end of the visible chunk
    // (it continues past the last physical SOURCE line and is reproduced
    // unchanged up to the cut).
    // NOTE(review): encodedFD is detachFd()'d on the early failure paths but
    // apparently not on the later configure()/start()/getBuffers() failure
    // returns — verify against the full file whether the fd leaks there.
    @SuppressWarnings("deprecation")
    @CalledByNative
    private static boolean decodeAudioFile(Context ctx, long nativeMediaCodecBridge,
            int inputFD, long dataSize) {

        if (dataSize < 0 || dataSize > 0x7fffffff) return false;

        MediaExtractor extractor = new MediaExtractor();

        ParcelFileDescriptor encodedFD;
        encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
        try {
            extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
        } catch (Exception e) {
            e.printStackTrace();
            encodedFD.detachFd();
            return false;
        }

        if (extractor.getTrackCount() <= 0) {
            encodedFD.detachFd();
            return false;
        }

        MediaFormat format = extractor.getTrackFormat(0);

        // Number of channels specified in the file
        int inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

        // Number of channels the decoder will provide. (Not
        // necessarily the same as inputChannelCount.  See
        // crbug.com/266006.)
        int outputChannelCount = inputChannelCount;

        int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        String mime = format.getString(MediaFormat.KEY_MIME);

        long durationMicroseconds = 0;
        if (format.containsKey(MediaFormat.KEY_DURATION)) {
            try {
                durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
            } catch (Exception e) {
                Log.d(LOG_TAG, "Cannot get duration");
            }
        }

        // If the duration is too long, set to 0 to force the caller
        // not to preallocate space.  See crbug.com/326856.
        // FIXME: What should be the limit? We're arbitrarily using
        // about 2148 sec (35.8 min).
        if (durationMicroseconds > 0x7fffffff) {
            durationMicroseconds = 0;
        }

        Log.d(LOG_TAG, "Initial: Tracks: " + extractor.getTrackCount()
                + " Format: " + format);

        // Create decoder
        MediaCodec codec;
        try {
            codec = MediaCodec.createDecoderByType(mime);
        } catch (Exception e) {
            Log.w(LOG_TAG, "Failed to create MediaCodec for mime type: " + mime);
            encodedFD.detachFd();
            return false;
        }

        try {
            codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
        } catch (Exception e) {
            Log.w(LOG_TAG, "Unable to configure codec for format " + format, e);
            return false;
        }
        try {
            codec.start();
        } catch (Exception e) {
            Log.w(LOG_TAG, "Unable to start()", e);
            return false;
        }

        ByteBuffer[] codecInputBuffers;
        try {
            codecInputBuffers = codec.getInputBuffers();
        } catch (Exception e) {
            Log.w(LOG_TAG, "getInputBuffers() failed", e);
            return false;
        }
        ByteBuffer[] codecOutputBuffers;
        try {
            codecOutputBuffers = codec.getOutputBuffers();
        } catch (Exception e) {
            Log.w(LOG_TAG, "getOutputBuffers() failed", e);
            return false;
        }

        // A track must be selected and will be used to read samples.
        extractor.selectTrack(0);

        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        boolean destinationInitialized = false;
        boolean decodedSuccessfully = true;

        // Keep processing until the output is done.
        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                // Input side
                int inputBufIndex;
                try {
                    inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
                } catch (Exception e) {
                    Log.w(LOG_TAG, "dequeueInputBuffer(" + TIMEOUT_MICROSECONDS + ") failed.", e);
                    decodedSuccessfully = false;
                    break;
                }

                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];

                    int sampleSize = extractor.readSampleData(dstBuf, 0);
                    long presentationTimeMicroSec = 0;

                    if (sampleSize < 0) {
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeMicroSec = extractor.getSampleTime();
                    }

                    try {
                        codec.queueInputBuffer(inputBufIndex,
                                0, /* offset */
                                sampleSize,
                                presentationTimeMicroSec,
                                sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                    } catch (Exception e) {
                        Log.w(LOG_TAG, "queueInputBuffer(" + inputBufIndex
                                + ", 0, " + sampleSize
                                + ", " + presentationTimeMicroSec
                                + ", " + (sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0)
                                + ") failed.", e);
                        decodedSuccessfully = false;
                        break;
                    }

                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }

            // Output side
            MediaCodec.BufferInfo info = new BufferInfo();
            final int outputBufIndex;
            try {
                outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
            } catch (Exception e) {
                Log.w(LOG_TAG, "dequeueOutputBuffer(" + info + ", " + TIMEOUT_MICROSECONDS + ") failed");
                e.printStackTrace();
                decodedSuccessfully = false;
                break;
            }

            if (outputBufIndex >= 0) {
                ByteBuffer buf = codecOutputBuffers[outputBufIndex];

                if (!destinationInitialized) {
                    // Initialize the destination as late as possible to
                    // catch any changes in format. But be sure to
                    // initialize it BEFORE we send any decoded audio,
                    // and only initialize once.
                    // NOTE(review): truncated here — the remainder of this
                    // method lies beyond the visible chunk.
Log.d(LOG_TAG, "Final: Rate: " + sampleRate + " Channels: " + inputChannelCount + " Mime: " + mime + " Duration: " + durationMicroseconds + " microsec"); nativeInitializeDestination(nativeMediaCodecBridge, inputChannelCount, sampleRate, durationMicroseconds); destinationInitialized = true; } if (destinationInitialized && info.size > 0) { nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size, inputChannelCount, outputChannelCount); } buf.clear(); codec.releaseOutputBuffer(outputBufIndex, false /* render */); if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { sawOutputEOS = true; } } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { codecOutputBuffers = codec.getOutputBuffers(); } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { MediaFormat newFormat = codec.getOutputFormat(); outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); sampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); Log.d(LOG_TAG, "output format changed to " + newFormat); } } encodedFD.detachFd(); codec.stop(); codec.release(); codec = null; return decodedSuccessfully; } private static native void nativeOnChunkDecoded( long nativeWebAudioMediaCodecBridge, ByteBuffer buf, int size, int inputChannelCount, int outputChannelCount); private static native void nativeInitializeDestination( long nativeWebAudioMediaCodecBridge, int inputChannelCount, int sampleRate, long durationMicroseconds); }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http.websocketx;

import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.util.CharsetUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

import java.net.URI;

/**
 * <p>
 * Performs client side opening and closing handshakes for web socket specification version <a
 * href="http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-17" >draft-ietf-hybi-thewebsocketprotocol-
 * 17</a>
 * </p>
 */
public class WebSocketClientHandshaker13 extends WebSocketClientHandshaker {

    private static final InternalLogger logger = InternalLoggerFactory.getInstance(WebSocketClientHandshaker13.class);

    // GUID appended to the client nonce before hashing, fixed by RFC 6455.
    public static final String MAGIC_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";

    // Base64(SHA1(key + MAGIC_GUID)) computed in newHandshakeRequest() and
    // checked against Sec-WebSocket-Accept in verify().
    private String expectedChallengeResponseString;

    private final boolean allowExtensions;
    private final boolean performMasking;
    private final boolean allowMaskMismatch;

    /**
     * Creates a new instance.
     *
     * @param webSocketURL
     *            URL for web socket communications. e.g "ws://myhost.com/mypath". Subsequent web socket frames will be
     *            sent to this URL.
     * @param version
     *            Version of web socket specification to use to connect to the server
     * @param subprotocol
     *            Sub protocol request sent to the server.
     * @param allowExtensions
     *            Allow extensions to be used in the reserved bits of the web socket frame
     * @param customHeaders
     *            Map of custom headers to add to the client request
     * @param maxFramePayloadLength
     *            Maximum length of a frame's payload
     */
    public WebSocketClientHandshaker13(URI webSocketURL, WebSocketVersion version, String subprotocol,
                                       boolean allowExtensions, HttpHeaders customHeaders, int maxFramePayloadLength) {
        this(webSocketURL, version, subprotocol, allowExtensions, customHeaders, maxFramePayloadLength,
                true, false);
    }

    /**
     * Creates a new instance.
     *
     * @param webSocketURL
     *            URL for web socket communications. e.g "ws://myhost.com/mypath". Subsequent web socket frames will be
     *            sent to this URL.
     * @param version
     *            Version of web socket specification to use to connect to the server
     * @param subprotocol
     *            Sub protocol request sent to the server.
     * @param allowExtensions
     *            Allow extensions to be used in the reserved bits of the web socket frame
     * @param customHeaders
     *            Map of custom headers to add to the client request
     * @param maxFramePayloadLength
     *            Maximum length of a frame's payload
     * @param performMasking
     *            Whether to mask all written websocket frames. This must be set to true in order to be fully compatible
     *            with the websocket specifications. Client applications that communicate with a non-standard server
     *            which doesn't require masking might set this to false to achieve a higher performance.
     * @param allowMaskMismatch
     *            Allows to loosen the masking requirement on received frames. When this is set to true then also
     *            frames which are not masked properly according to the standard will still be accepted.
     */
    public WebSocketClientHandshaker13(URI webSocketURL, WebSocketVersion version, String subprotocol,
                                       boolean allowExtensions, HttpHeaders customHeaders, int maxFramePayloadLength,
                                       boolean performMasking, boolean allowMaskMismatch) {
        super(webSocketURL, version, subprotocol, customHeaders, maxFramePayloadLength);
        this.allowExtensions = allowExtensions;
        this.performMasking = performMasking;
        this.allowMaskMismatch = allowMaskMismatch;
    }

    /**
     * <p>
     * Sends the opening request to the server:
     * </p>
     *
     * <pre>
     * GET /chat HTTP/1.1
     * Host: server.example.com
     * Upgrade: websocket
     * Connection: Upgrade
     * Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==
     * Sec-WebSocket-Origin: http://example.com
     * Sec-WebSocket-Protocol: chat, superchat
     * Sec-WebSocket-Version: 13
     * </pre>
     *
     */
    @Override
    protected FullHttpRequest newHandshakeRequest() {
        // Get path
        URI wsURL = uri();
        String path = wsURL.getPath();
        if (wsURL.getQuery() != null && !wsURL.getQuery().isEmpty()) {
            path = wsURL.getPath() + '?' + wsURL.getQuery();
        }

        if (path == null || path.isEmpty()) {
            path = "/";
        }

        // Get 16 bit nonce and base 64 encode it
        byte[] nonce = WebSocketUtil.randomBytes(16);
        String key = WebSocketUtil.base64(nonce);

        // Precompute the expected Sec-WebSocket-Accept value for verify().
        String acceptSeed = key + MAGIC_GUID;
        byte[] sha1 = WebSocketUtil.sha1(acceptSeed.getBytes(CharsetUtil.US_ASCII));
        expectedChallengeResponseString = WebSocketUtil.base64(sha1);

        if (logger.isDebugEnabled()) {
            logger.debug(
                    "WebSocket version 13 client handshake key: {}, expected response: {}",
                    key, expectedChallengeResponseString);
        }

        // Format request
        int wsPort = wsURL.getPort();
        // check if the URI contained a port if not set the correct one depending on the schema.
        // See https://github.com/netty/netty/pull/1558
        if (wsPort == -1) {
            if ("wss".equals(wsURL.getScheme())) {
                wsPort = 443;
            } else {
                wsPort = 80;
            }
        }

        FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, path);
        HttpHeaders headers = request.headers();

        headers.add(HttpHeaderNames.UPGRADE, HttpHeaderValues.WEBSOCKET)
               .add(HttpHeaderNames.CONNECTION, HttpHeaderValues.UPGRADE)
               .add(HttpHeaderNames.SEC_WEBSOCKET_KEY, key)
               .add(HttpHeaderNames.HOST, wsURL.getHost() + ':' + wsPort);

        String originValue = "http://" + wsURL.getHost();
        if (wsPort != 80 && wsPort != 443) {
            // if the port is not standard (80/443) its needed to add the port to the header.
            // See http://tools.ietf.org/html/rfc6454#section-6.2
            originValue = originValue + ':' + wsPort;
        }
        headers.add(HttpHeaderNames.SEC_WEBSOCKET_ORIGIN, originValue);

        String expectedSubprotocol = expectedSubprotocol();
        if (expectedSubprotocol != null && !expectedSubprotocol.isEmpty()) {
            headers.add(HttpHeaderNames.SEC_WEBSOCKET_PROTOCOL, expectedSubprotocol);
        }

        headers.add(HttpHeaderNames.SEC_WEBSOCKET_VERSION, "13");

        if (customHeaders != null) {
            headers.add(customHeaders);
        }
        return request;
    }

    /**
     * <p>
     * Process server response:
     * </p>
     *
     * <pre>
     * HTTP/1.1 101 Switching Protocols
     * Upgrade: websocket
     * Connection: Upgrade
     * Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=
     * Sec-WebSocket-Protocol: chat
     * </pre>
     *
     * @param response
     *            HTTP response returned from the server for the request sent by newHandshakeRequest().
     * @throws WebSocketHandshakeException
     */
    @Override
    protected void verify(FullHttpResponse response) {
        final HttpResponseStatus status = HttpResponseStatus.SWITCHING_PROTOCOLS;
        final HttpHeaders headers = response.headers();

        if (!response.status().equals(status)) {
            throw new WebSocketHandshakeException("Invalid handshake response getStatus: " + response.status());
        }

        CharSequence upgrade = headers.get(HttpHeaderNames.UPGRADE);
        if (!HttpHeaderValues.WEBSOCKET.contentEqualsIgnoreCase(upgrade)) {
            throw new WebSocketHandshakeException("Invalid handshake response upgrade: " + upgrade);
        }

        CharSequence connection = headers.get(HttpHeaderNames.CONNECTION);
        if (!HttpHeaderValues.UPGRADE.contentEqualsIgnoreCase(connection)) {
            throw new WebSocketHandshakeException("Invalid handshake response connection: " + connection);
        }

        // NOTE(review): accept is a CharSequence while the expected value is a
        // String; equals() only matches if the header value is a String at
        // runtime — confirm headers.get() always returns String here.
        CharSequence accept = headers.get(HttpHeaderNames.SEC_WEBSOCKET_ACCEPT);
        if (accept == null || !accept.equals(expectedChallengeResponseString)) {
            throw new WebSocketHandshakeException(String.format(
                    "Invalid challenge. Actual: %s. Expected: %s", accept, expectedChallengeResponseString));
        }
    }

    @Override
    protected WebSocketFrameDecoder newWebsocketDecoder() {
        return new WebSocket13FrameDecoder(false, allowExtensions, maxFramePayloadLength(), allowMaskMismatch);
    }

    @Override
    protected WebSocketFrameEncoder newWebSocketEncoder() {
        return new WebSocket13FrameEncoder(performMasking);
    }
}
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.aurora.common.stats;

import java.lang.management.ClassLoadingMXBean;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.management.ThreadMXBean;
import java.util.Map;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

import org.apache.aurora.common.quantity.Amount;
import org.apache.aurora.common.quantity.Data;
import org.apache.aurora.common.quantity.Time;

/**
 * Convenience class to export statistics about the JVM.
 */
public class JvmStats {

  // Conversion factor for reporting byte quantities in megabytes.
  private static final long BYTES_PER_MB = Amount.of(1L, Data.MB).as(Data.BYTES);
  // Conversion factor for reporting nanosecond CPU time in seconds.
  private static final double SECS_PER_NANO =
      ((double) 1) / Amount.of(1L, Time.SECONDS).as(Time.NANOSECONDS);

  private JvmStats() {
    // Utility class.
  }

  /**
   * Exports stats related to the JVM and runtime environment.
   *
   * The exported stat names form an external monitoring interface; do not
   * rename them. Stats backed by com.sun.management beans are only exported
   * when the platform MXBeans actually implement those interfaces.
   */
  public static void export() {
    final OperatingSystemMXBean osMbean = ManagementFactory.getOperatingSystemMXBean();

    // Sun/Oracle-specific OS stats: physical memory, swap, process CPU time.
    if (osMbean instanceof com.sun.management.OperatingSystemMXBean) {
      final com.sun.management.OperatingSystemMXBean sunOsMbean =
          (com.sun.management.OperatingSystemMXBean) osMbean;

      Stats.exportAll(
          ImmutableList.<Stat<? extends Number>>builder()
          .add(new StatImpl<Long>("system_free_physical_memory_mb") {
            @Override public Long read() {
              return sunOsMbean.getFreePhysicalMemorySize() / BYTES_PER_MB;
            }
          })
          .add(new StatImpl<Long>("system_free_swap_mb") {
            @Override public Long read() {
              return sunOsMbean.getFreeSwapSpaceSize() / BYTES_PER_MB;
            }
          })
          .add(
              Rate.of(
                  new StatImpl<Long>("process_cpu_time_nanos") {
                    @Override public Long read() {
                      return sunOsMbean.getProcessCpuTime();
                    }
                  }).withName("process_cpu_cores_utilized").withScaleFactor(SECS_PER_NANO).build())
          .build());
    }

    // Unix-specific OS stats: file descriptor counts.
    if (osMbean instanceof com.sun.management.UnixOperatingSystemMXBean) {
      final com.sun.management.UnixOperatingSystemMXBean unixOsMbean =
          (com.sun.management.UnixOperatingSystemMXBean) osMbean;

      Stats.exportAll(ImmutableList.<Stat<? extends Number>>builder()
          .add(new StatImpl<Long>("process_max_fd_count") {
            @Override public Long read() { return unixOsMbean.getMaxFileDescriptorCount(); }
          }).add(new StatImpl<Long>("process_open_fd_count") {
            @Override public Long read() { return unixOsMbean.getOpenFileDescriptorCount(); }
          }).build());
    }

    final Runtime runtime = Runtime.getRuntime();
    final ClassLoadingMXBean classLoadingBean = ManagementFactory.getClassLoadingMXBean();
    final MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
    final ThreadMXBean threads = ManagementFactory.getThreadMXBean();
    final RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();

    // Portable JVM stats: time, memory, class loading, GC totals, threads.
    Stats.exportAll(ImmutableList.<Stat<? extends Number>>builder()
        .add(new StatImpl<Long>("jvm_time_ms") {
          @Override public Long read() { return System.currentTimeMillis(); }
        })
        .add(new StatImpl<Integer>("jvm_available_processors") {
          @Override public Integer read() { return runtime.availableProcessors(); }
        })
        .add(new StatImpl<Long>("jvm_memory_free_mb") {
          @Override public Long read() { return runtime.freeMemory() / BYTES_PER_MB; }
        })
        .add(new StatImpl<Long>("jvm_memory_max_mb") {
          @Override public Long read() { return runtime.maxMemory() / BYTES_PER_MB; }
        })
        .add(new StatImpl<Long>("jvm_memory_mb_total") {
          @Override public Long read() { return runtime.totalMemory() / BYTES_PER_MB; }
        })
        .add(new StatImpl<Integer>("jvm_class_loaded_count") {
          @Override public Integer read() { return classLoadingBean.getLoadedClassCount(); }
        })
        .add(new StatImpl<Long>("jvm_class_total_loaded_count") {
          @Override public Long read() { return classLoadingBean.getTotalLoadedClassCount(); }
        })
        .add(new StatImpl<Long>("jvm_class_unloaded_count") {
          @Override public Long read() { return classLoadingBean.getUnloadedClassCount(); }
        })
        .add(new StatImpl<Long>("jvm_gc_collection_time_ms") {
          @Override public Long read() {
            // Sums across all collectors; individual per-collector stats are
            // exported separately below.
            long collectionTimeMs = 0;
            for (GarbageCollectorMXBean bean : ManagementFactory.getGarbageCollectorMXBeans()) {
              collectionTimeMs += bean.getCollectionTime();
            }
            return collectionTimeMs;
          }
        })
        .add(new StatImpl<Long>("jvm_gc_collection_count") {
          @Override public Long read() {
            long collections = 0;
            for (GarbageCollectorMXBean bean : ManagementFactory.getGarbageCollectorMXBeans()) {
              collections += bean.getCollectionCount();
            }
            return collections;
          }
        })
        .add(new StatImpl<Long>("jvm_memory_heap_mb_used") {
          @Override public Long read() {
            return memoryBean.getHeapMemoryUsage().getUsed() / BYTES_PER_MB;
          }
        })
        .add(new StatImpl<Long>("jvm_memory_heap_mb_committed") {
          @Override public Long read() {
            return memoryBean.getHeapMemoryUsage().getCommitted() / BYTES_PER_MB;
          }
        })
        .add(new StatImpl<Long>("jvm_memory_heap_mb_max") {
          @Override public Long read() {
            return memoryBean.getHeapMemoryUsage().getMax() / BYTES_PER_MB;
          }
        })
        .add(new StatImpl<Long>("jvm_memory_non_heap_mb_used") {
          @Override public Long read() {
            return memoryBean.getNonHeapMemoryUsage().getUsed() / BYTES_PER_MB;
          }
        })
        .add(new StatImpl<Long>("jvm_memory_non_heap_mb_committed") {
          @Override public Long read() {
            return memoryBean.getNonHeapMemoryUsage().getCommitted() / BYTES_PER_MB;
          }
        })
        .add(new StatImpl<Long>("jvm_memory_non_heap_mb_max") {
          @Override public Long read() {
            return memoryBean.getNonHeapMemoryUsage().getMax() / BYTES_PER_MB;
          }
        })
        .add(new StatImpl<Long>("jvm_uptime_secs") {
          @Override public Long read() { return runtimeMXBean.getUptime() / 1000; }
        })
        .add(new StatImpl<Double>("system_load_avg") {
          @Override public Double read() { return osMbean.getSystemLoadAverage(); }
        })
        .add(new StatImpl<Integer>("jvm_threads_peak") {
          @Override public Integer read() { return threads.getPeakThreadCount(); }
        })
        .add(new StatImpl<Long>("jvm_threads_started") {
          @Override public Long read() { return threads.getTotalStartedThreadCount(); }
        })
        .add(new StatImpl<Integer>("jvm_threads_daemon") {
          @Override public Integer read() { return threads.getDaemonThreadCount(); }
        })
        .add(new StatImpl<Integer>("jvm_threads_active") {
          @Override public Integer read() { return threads.getThreadCount(); }
        })
        .build());

    // Export per memory pool gc time and cycle count like Ostrich
    // This is based on code in Bridcage: https://cgit.twitter.biz/birdcage/tree/ \
    // ostrich/src/main/scala/com/twitter/ostrich/stats/StatsCollection.scala
    Stats.exportAll(Iterables.transform(ManagementFactory.getGarbageCollectorMXBeans(),
        gcMXBean -> new StatImpl<Long>(
            "jvm_gc_" + Stats.normalizeName(gcMXBean.getName()) + "_collection_count") {
          @Override public Long read() {
            return gcMXBean.getCollectionCount();
          }
        }
    ));

    Stats.exportAll(Iterables.transform(ManagementFactory.getGarbageCollectorMXBeans(),
        gcMXBean -> new StatImpl<Long>(
            "jvm_gc_" + Stats.normalizeName(gcMXBean.getName()) + "_collection_time_ms") {
          @Override public Long read() {
            return gcMXBean.getCollectionTime();
          }
        }
    ));

    // String-valued stats: JVM arguments, system properties, environment.
    Stats.exportString(
        new StatImpl<String>("jvm_input_arguments") {
          @Override public String read() {
            return runtimeMXBean.getInputArguments().toString();
          }
        }
    );

    for (final String property : System.getProperties().stringPropertyNames()) {
      Stats.exportString(
          new StatImpl<String>("jvm_prop_" + Stats.normalizeName(property)) {
            @Override public String read() { return System.getProperty(property); }
          });
    }

    for (final Map.Entry<String, String> environmentVariable : System.getenv().entrySet()) {
      Stats.exportString(
          new StatImpl<String>("system_env_" + Stats.normalizeName(environmentVariable.getKey())) {
            @Override public String read() { return environmentVariable.getValue(); }
          });
    }
  }
}
package us.inal.sna;

import org.neo4j.ogm.annotation.GraphId;
import org.neo4j.ogm.annotation.NodeEntity;
import org.neo4j.ogm.annotation.Relationship;
import org.neo4j.ogm.annotation.typeconversion.DateString;

import java.util.Date;
import java.util.Set;

/**
 * Neo4j OGM node entity representing an employee / LMS member.
 *
 * NOTE(review): field and accessor names are the OGM mapping contract, so
 * none may be renamed without a data migration. Several names carry typos
 * ("birtDate", "takdirettis") that are therefore preserved as-is.
 */
@NodeEntity
public class Employee {

    // Outgoing "TAKDIR_ETTI" (appreciation given) relationships.
    @Relationship(type = "TAKDIR_ETTI")
    private Set<Takdir> takdirettis;

    // Incoming "TAKDIR_ALDI" (appreciation received) relationships.
    @Relationship(type="TAKDIR_ALDI", direction=Relationship.INCOMING)
    private Set<Takdir> takdiraldis;

    // Internal graph id assigned by Neo4j; not a business identifier.
    @GraphId
    private Long id;

    private Integer lmsMemberID;
    private Integer gender;
    private Integer uID;
    private Boolean isCompleteProfile;
    private String fullName;
    private String surname;
    private String name;
    private String password;
    private String avatar;
    private String aciklama;
    private String activationCode;
    private String activationShortCode;
    private String lms_secretCode;
    private String email;
    private Integer userType;
    private Boolean isRegisterLMS;
    private Integer isActive;
    private Boolean isFirstLoginOK;

    // Stored as an ISO-like string with milliseconds.
    @DateString("yyyy-MM-dd\'T\'HH:mm:ss.SSS")
    private Date lmsRegisterDate;

    // Stored without milliseconds; note the different pattern from above.
    @DateString("yyyy-MM-dd\'T\'HH:mm:ss")
    private Date birtDate;

    public Integer getuID() {
        return uID;
    }

    public void setuID(Integer uID) {
        this.uID = uID;
    }

    public String getAvatar() {
        return avatar;
    }

    public void setAvatar(String avatar) {
        this.avatar = avatar;
    }

    public String getAciklama() {
        return aciklama;
    }

    public void setAciklama(String aciklama) {
        this.aciklama = aciklama;
    }

    public String getActivationShortCode() {
        return activationShortCode;
    }

    public void setActivationShortCode(String activationShortCode) {
        this.activationShortCode = activationShortCode;
    }

    public String getLms_secretCode() {
        return lms_secretCode;
    }

    public void setLms_secretCode(String lms_secretCode) {
        this.lms_secretCode = lms_secretCode;
    }

    public Date getLmsRegisterDate() {
        return lmsRegisterDate;
    }

    public void setLmsRegisterDate(Date lmsRegisterDate) {
        this.lmsRegisterDate = lmsRegisterDate;
    }

    public Date getBirtDate() {
        return birtDate;
    }

    // NOTE(review): asymmetric bean pair — getter is getBirtDate() but the
    // setter is setBirthDate(); frameworks matching accessors by name may
    // not see these as one property. Confirm before relying on bean mapping.
    public void setBirthDate(Date birtDate) {
        this.birtDate = birtDate;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Integer getLmsMemberID() {
        return lmsMemberID;
    }

    public void setLmsMemberID(Integer lmsMemberID) {
        this.lmsMemberID = lmsMemberID;
    }

    public Integer getGender() {
        return gender;
    }

    public void setGender(Integer gender) {
        this.gender = gender;
    }

    public Boolean getIsCompleteProfile() {
        return isCompleteProfile;
    }

    public void setIsCompleteProfile(Boolean isCompleteProfile) {
        this.isCompleteProfile = isCompleteProfile;
    }

    public String getFullName() {
        return fullName;
    }

    public void setFullName(String fullName) {
        this.fullName = fullName;
    }

    public String getSurname() {
        return surname;
    }

    public void setSurname(String surname) {
        this.surname = surname;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getActivationCode() {
        return activationCode;
    }

    public void setActivationCode(String activationCode) {
        this.activationCode = activationCode;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public Integer getUserType() {
        return userType;
    }

    public void setUserType(Integer userType) {
        this.userType = userType;
    }

    public Boolean getIsRegisterLMS() {
        return isRegisterLMS;
    }

    public void setIsRegisterLMS(Boolean isRegisterLMS) {
        this.isRegisterLMS = isRegisterLMS;
    }

    public Integer getIsActive() {
        return isActive;
    }

    public void setIsActive(Integer isActive) {
        this.isActive = isActive;
    }

    public Boolean getIsFirstLoginOK() {
        return isFirstLoginOK;
    }

    public void setIsFirstLoginOK(Boolean isFirstLoginOK) {
        this.isFirstLoginOK = isFirstLoginOK;
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.externalSystem.service.project.manage; import com.intellij.execution.ProgramRunnerUtil; import com.intellij.execution.RunManager; import com.intellij.execution.RunnerAndConfigurationSettings; import com.intellij.execution.executors.DefaultRunExecutor; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.externalSystem.ExternalSystemManager; import com.intellij.openapi.externalSystem.ExternalSystemUiAware; import com.intellij.openapi.externalSystem.action.ExternalSystemAction; import com.intellij.openapi.externalSystem.action.ExternalSystemActionUtil; import com.intellij.openapi.externalSystem.model.DataNode; import com.intellij.openapi.externalSystem.model.ProjectKeys; import com.intellij.openapi.externalSystem.model.ProjectSystemId; import com.intellij.openapi.externalSystem.model.execution.ExternalTaskExecutionInfo; import com.intellij.openapi.externalSystem.model.project.ModuleData; import com.intellij.openapi.externalSystem.model.task.TaskData; import com.intellij.openapi.externalSystem.service.execution.AbstractExternalSystemTaskConfigurationType; import com.intellij.openapi.externalSystem.service.execution.ExternalSystemRunConfiguration; import 
com.intellij.openapi.externalSystem.service.ui.SelectExternalTaskDialog; import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; import com.intellij.openapi.externalSystem.util.ExternalSystemBundle; import com.intellij.openapi.externalSystem.util.ExternalSystemUiUtil; import com.intellij.openapi.externalSystem.util.ExternalSystemUtil; import com.intellij.openapi.keymap.KeymapExtension; import com.intellij.openapi.keymap.KeymapGroup; import com.intellij.openapi.keymap.KeymapGroupFactory; import com.intellij.openapi.keymap.impl.ui.*; import com.intellij.openapi.options.ex.Settings; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Condition; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import gnu.trove.THashSet; import icons.ExternalSystemIcons; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.event.MouseEvent; import java.util.Collection; import java.util.Map; import java.util.Set; /** * @author Vladislav.Soroka */ public class ExternalSystemKeymapExtension implements KeymapExtension { public interface ActionsProvider { ExtensionPointName<ActionsProvider> EP_NAME = ExtensionPointName.create("com.intellij.externalSystemKeymapProvider"); KeymapGroup createGroup(Condition<AnAction> condition, final Project project); } @Override public KeymapGroup createGroup(Condition<AnAction> condition, final Project project) { KeymapGroup result = KeymapGroupFactory.getInstance().createGroup( ExternalSystemBundle.message("external.system.keymap.group"), ExternalSystemIcons.TaskGroup); AnAction[] externalSystemActions = ActionsTreeUtil.getActions("ExternalSystem.Actions"); for (AnAction action : externalSystemActions) { ActionsTreeUtil.addAction(result, action, condition); } if (project == null) return result; MultiMap<ProjectSystemId, String> projectToActionsMapping = MultiMap.create(); for (ExternalSystemManager<?, 
?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) { projectToActionsMapping.putValues(manager.getSystemId(), ContainerUtil.emptyList()); } ActionManager actionManager = ActionManager.getInstance(); if (actionManager != null) { for (String eachId : actionManager.getActionIds(getActionPrefix(project, null))) { AnAction eachAction = actionManager.getAction(eachId); if (!(eachAction instanceof MyExternalSystemAction)) continue; if (condition != null && !condition.value(actionManager.getActionOrStub(eachId))) continue; MyExternalSystemAction taskAction = (MyExternalSystemAction)eachAction; projectToActionsMapping.putValue(taskAction.getSystemId(), eachId); } } Map<ProjectSystemId, KeymapGroup> keymapGroupMap = ContainerUtil.newHashMap(); for (ProjectSystemId systemId : projectToActionsMapping.keySet()) { if (!keymapGroupMap.containsKey(systemId)) { final Icon projectIcon = ExternalSystemUiUtil.getUiAware(systemId).getProjectIcon(); KeymapGroup group = KeymapGroupFactory.getInstance().createGroup(systemId.getReadableName(), projectIcon); keymapGroupMap.put(systemId, group); } } for (Map.Entry<ProjectSystemId, Collection<String>> each : projectToActionsMapping.entrySet()) { Collection<String> tasks = each.getValue(); final ProjectSystemId systemId = each.getKey(); final KeymapGroup systemGroup = keymapGroupMap.get(systemId); if (systemGroup == null) continue; for (String actionId : tasks) { systemGroup.addActionId(actionId); } if (systemGroup instanceof Group) { Icon icon = AllIcons.General.Add; ((Group)systemGroup).addHyperlink(new Hyperlink(icon, "Choose a task to assign a shortcut") { @Override public void onClick(MouseEvent e) { SelectExternalTaskDialog dialog = new SelectExternalTaskDialog(systemId, project); if (dialog.showAndGet() && dialog.getResult() != null) { TaskData taskData = dialog.getResult().second; String ownerModuleName = dialog.getResult().first; ExternalSystemTaskAction externalSystemAction = 
(ExternalSystemTaskAction)getOrRegisterAction(project, ownerModuleName, taskData); ApplicationManager.getApplication().getMessageBus().syncPublisher(KeymapListener.CHANGE_TOPIC).processCurrentKeymapChanged(); Settings allSettings = Settings.KEY.getData(DataManager.getInstance().getDataContext(e.getComponent())); KeymapPanel keymapPanel = allSettings != null ? allSettings.find(KeymapPanel.class) : null; if (keymapPanel != null) { // clear actions filter keymapPanel.showOption(""); keymapPanel.selectAction(externalSystemAction.myId); } } } }); } } for (KeymapGroup keymapGroup : keymapGroupMap.values()) { if (isGroupFiltered(condition, keymapGroup)) { result.addGroup(keymapGroup); } } for (ActionsProvider extension : ActionsProvider.EP_NAME.getExtensions()) { KeymapGroup keymapGroup = extension.createGroup(condition, project); if (isGroupFiltered(condition, keymapGroup)) { result.addGroup(keymapGroup); } } return result; } public static void updateActions(Project project, Collection<? extends DataNode<TaskData>> taskData) { clearActions(project, taskData); createActions(project, taskData); } public static ExternalSystemAction getOrRegisterAction(Project project, String group, TaskData taskData) { ExternalSystemTaskAction action = new ExternalSystemTaskAction(project, group, taskData); ActionManager manager = ActionManager.getInstance(); AnAction anAction = manager.getAction(action.getId()); if (anAction instanceof ExternalSystemTaskAction && action.equals(anAction)) { return (ExternalSystemAction)anAction; } manager.replaceAction(action.getId(), action); return action; } private static boolean isGroupFiltered(Condition<? 
super AnAction> condition, KeymapGroup keymapGroup) { final EmptyAction emptyAction = new EmptyAction(); if (condition != null && !condition.value(emptyAction) && keymapGroup instanceof Group) { final Group group = (Group)keymapGroup; if (group.getSize() <= 1 && !condition.value(new EmptyAction(group.getName(), null, null))) { return false; } } return true; } private static void createActions(Project project, Collection<? extends DataNode<TaskData>> taskNodes) { ActionManager actionManager = ActionManager.getInstance(); final ExternalSystemShortcutsManager shortcutsManager = ExternalProjectsManagerImpl.getInstance(project).getShortcutsManager(); if (actionManager != null) { for (DataNode<TaskData> each : taskNodes) { final DataNode<ModuleData> moduleData = ExternalSystemApiUtil.findParent(each, ProjectKeys.MODULE); if (moduleData == null || moduleData.isIgnored()) continue; TaskData taskData = each.getData(); ExternalSystemTaskAction eachAction = new ExternalSystemTaskAction(project, moduleData.getData().getInternalName(), taskData); if (shortcutsManager.hasShortcuts(taskData.getLinkedExternalProjectPath(), taskData.getName())) { actionManager.replaceAction(eachAction.getId(), eachAction); } else { actionManager.unregisterAction(eachAction.getId()); } } } } public static void clearActions(Project project) { ActionManager manager = ActionManager.getInstance(); if (manager != null) { for (String each : manager.getActionIds(getActionPrefix(project, null))) { manager.unregisterAction(each); } } } public static void clearActions(Project project, Collection<? 
extends DataNode<TaskData>> taskData) { ActionManager actionManager = ActionManager.getInstance(); if (actionManager != null) { Set<String> externalProjectPaths = ContainerUtil.newHashSet(); for (DataNode<TaskData> node : taskData) { externalProjectPaths.add(node.getData().getLinkedExternalProjectPath()); } for (String externalProjectPath : externalProjectPaths) { for (String eachAction : actionManager.getActionIds(getActionPrefix(project, externalProjectPath))) { AnAction action = actionManager.getAction(eachAction); if (!(action instanceof ExternalSystemRunConfigurationAction)) { actionManager.unregisterAction(eachAction); } } } } } public static String getActionPrefix(@NotNull Project project, @Nullable String path) { return ExternalProjectsManagerImpl.getInstance(project).getShortcutsManager().getActionId(path, null); } public static void updateRunConfigurationActions(Project project, ProjectSystemId systemId) { final AbstractExternalSystemTaskConfigurationType configurationType = ExternalSystemUtil.findConfigurationType(systemId); if (configurationType == null) return; ActionManager actionManager = ActionManager.getInstance(); for (String eachAction : actionManager.getActionIds(getActionPrefix(project, null))) { AnAction action = actionManager.getAction(eachAction); if (action instanceof ExternalSystemRunConfigurationAction) { actionManager.unregisterAction(eachAction); } } Set<RunnerAndConfigurationSettings> settings = new THashSet<>( RunManager.getInstance(project).getConfigurationSettingsList(configurationType)); final ExternalSystemShortcutsManager shortcutsManager = ExternalProjectsManagerImpl.getInstance(project).getShortcutsManager(); for (RunnerAndConfigurationSettings configurationSettings : settings) { ExternalSystemRunConfigurationAction runConfigurationAction = new ExternalSystemRunConfigurationAction(project, configurationSettings); String id = runConfigurationAction.getId(); if (shortcutsManager.hasShortcuts(id)) { actionManager.replaceAction(id, 
runConfigurationAction); } else { actionManager.unregisterAction(id); } } } public static ExternalSystemAction getOrRegisterAction(Project project, RunnerAndConfigurationSettings configurationSettings) { ActionManager manager = ActionManager.getInstance(); ExternalSystemRunConfigurationAction runConfigurationAction = new ExternalSystemRunConfigurationAction(project, configurationSettings); String id = runConfigurationAction.getId(); manager.replaceAction(id, runConfigurationAction); return runConfigurationAction; } private abstract static class MyExternalSystemAction extends ExternalSystemAction { public abstract String getId(); public abstract String getGroup(); public abstract ProjectSystemId getSystemId(); } private static class ExternalSystemTaskAction extends MyExternalSystemAction { private final String myId; private final String myGroup; private final TaskData myTaskData; ExternalSystemTaskAction(Project project, String group, TaskData taskData) { myGroup = group; myTaskData = taskData; myId = getActionPrefix(project, taskData.getLinkedExternalProjectPath()) + taskData.getName(); Presentation template = getTemplatePresentation(); template.setText(myTaskData.getName() + " (" + group + ")", false); template.setDescription(myTaskData.getOwner().getReadableName() + " task action"); template.setIcon(ExternalSystemIcons.Task); } @Override protected boolean isEnabled(@NotNull AnActionEvent e) { return hasProject(e); } @Override public void actionPerformed(@NotNull AnActionEvent e) { final ExternalTaskExecutionInfo taskExecutionInfo = ExternalSystemActionUtil.buildTaskInfo(myTaskData); ExternalSystemUtil.runTask( taskExecutionInfo.getSettings(), taskExecutionInfo.getExecutorId(), getProject(e), myTaskData.getOwner()); } public TaskData getTaskData() { return myTaskData; } public String toString() { return myTaskData.toString(); } @Override public String getGroup() { return myGroup; } @Override public ProjectSystemId getSystemId() { return myTaskData.getOwner(); } 
@Override public String getId() { return myId; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ExternalSystemTaskAction)) return false; ExternalSystemTaskAction action = (ExternalSystemTaskAction)o; if (myId != null ? !myId.equals(action.myId) : action.myId != null) return false; if (myGroup != null ? !myGroup.equals(action.myGroup) : action.myGroup != null) return false; if (!myTaskData.equals(action.myTaskData)) return false; return true; } @Override public int hashCode() { int result = myId != null ? myId.hashCode() : 0; result = 31 * result + (myGroup != null ? myGroup.hashCode() : 0); result = 31 * result + myTaskData.hashCode(); return result; } } private static class ExternalSystemRunConfigurationAction extends MyExternalSystemAction { private final String myId; private final String myGroup; private final RunnerAndConfigurationSettings myConfigurationSettings; private final ProjectSystemId systemId; ExternalSystemRunConfigurationAction(Project project, RunnerAndConfigurationSettings configurationSettings) { myConfigurationSettings = configurationSettings; ExternalSystemRunConfiguration runConfiguration = (ExternalSystemRunConfiguration)configurationSettings.getConfiguration(); systemId = runConfiguration.getSettings().getExternalSystemId(); ExternalSystemUiAware uiAware = ExternalSystemUiUtil.getUiAware(systemId); myGroup = uiAware.getProjectRepresentationName(runConfiguration.getSettings().getExternalProjectPath(), null); String actionIdPrefix = getActionPrefix(project, runConfiguration.getSettings().getExternalProjectPath()); myId = actionIdPrefix + configurationSettings.getName(); Presentation template = getTemplatePresentation(); template.setText(myConfigurationSettings.getName(), false); template.setIcon(runConfiguration.getIcon()); } @Override protected boolean isEnabled(@NotNull AnActionEvent e) { return hasProject(e); } @Override public void actionPerformed(@NotNull AnActionEvent e) { 
ProgramRunnerUtil.executeConfiguration(myConfigurationSettings, DefaultRunExecutor.getRunExecutorInstance()); } public String toString() { return myConfigurationSettings.toString(); } @Override public String getGroup() { return myGroup; } @Override public ProjectSystemId getSystemId() { return systemId; } @Override public String getId() { return myId; } } }
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.testsupport.runner.classpath;

import java.io.File;
import java.lang.annotation.Annotation;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.stream.Stream;

import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.impl.DefaultServiceLocator;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.DependencyRequest;
import org.eclipse.aether.resolution.DependencyResult;
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
import org.eclipse.aether.transport.http.HttpTransporterFactory;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.TestClass;

import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.AntPathMatcher;
import org.springframework.util.StringUtils;

/**
 * A custom {@link BlockJUnit4ClassRunner} that runs tests using a modified class path.
 * Entries are excluded from the class path using {@link ClassPathExclusions} and
 * overridden using {@link ClassPathOverrides} on the test class. A class loader is
 * created with the customized class path and is used both to load the test class and as
 * the thread context class loader while the test is being run.
 *
 * @author Andy Wilkinson
 */
public class ModifiedClassPathRunner extends BlockJUnit4ClassRunner {

	public ModifiedClassPathRunner(Class<?> testClass) throws InitializationError {
		super(testClass);
	}

	/**
	 * Reloads the test class through a freshly built {@link ModifiedClassPathClassLoader}
	 * so that the test runs against the customized class path. Any failure while building
	 * the loader is surfaced as an {@link IllegalStateException} (JUnit's
	 * {@code createTestClass} cannot throw checked exceptions).
	 */
	@Override
	protected TestClass createTestClass(Class<?> testClass) {
		try {
			ClassLoader classLoader = createTestClassLoader(testClass);
			return new ModifiedClassPathTestClass(classLoader, testClass.getName());
		}
		catch (Exception ex) {
			throw new IllegalStateException(ex);
		}
	}

	/**
	 * Instantiates the test with the modified class loader installed as the thread
	 * context class loader, so constructor-time lookups see the customized class path.
	 */
	@Override
	protected Object createTest() throws Exception {
		ModifiedClassPathTestClass testClass = (ModifiedClassPathTestClass) getTestClass();
		return testClass.doWithModifiedClassPathThreadContextClassLoader(
				() -> ModifiedClassPathRunner.super.createTest());
	}

	/**
	 * Builds the customized loader: the current class path, filtered and augmented per
	 * the test class's annotations, parented to this runner's loader's parent. The
	 * runner's own loader is kept aside so JUnit/Hamcrest classes stay shared (see
	 * {@link ModifiedClassPathClassLoader#loadClass}).
	 */
	private URLClassLoader createTestClassLoader(Class<?> testClass) throws Exception {
		ClassLoader classLoader = this.getClass().getClassLoader();
		return new ModifiedClassPathClassLoader(
				processUrls(extractUrls(classLoader), testClass), classLoader.getParent(),
				classLoader);
	}

	/**
	 * Collects the class path URLs, expanding any Surefire booter jar into the entries
	 * listed in its manifest {@code Class-Path} (Surefire hides the real class path
	 * behind that jar when forking).
	 */
	private URL[] extractUrls(ClassLoader classLoader) throws Exception {
		List<URL> extractedUrls = new ArrayList<>();
		doExtractUrls(classLoader).forEach((URL url) -> {
			if (isSurefireBooterJar(url)) {
				extractedUrls.addAll(extractUrlsFromManifestClassPath(url));
			}
			else {
				extractedUrls.add(url);
			}
		});
		return extractedUrls.toArray(new URL[extractedUrls.size()]);
	}

	/**
	 * Reads the raw class path either from a {@link URLClassLoader} or, failing that
	 * (e.g. the Java 9+ app loader), from the runtime MX bean's class path string.
	 */
	private Stream<URL> doExtractUrls(ClassLoader classLoader) throws Exception {
		if (classLoader instanceof URLClassLoader) {
			return Stream.of(((URLClassLoader) classLoader).getURLs());
		}
		return Stream.of(ManagementFactory.getRuntimeMXBean().getClassPath()
				.split(File.pathSeparator)).map(this::toURL);
	}

	// Converts a class path entry to a file: URL; rethrows unchecked for use in streams.
	private URL toURL(String entry) {
		try {
			return new File(entry).toURI().toURL();
		}
		catch (Exception ex) {
			throw new IllegalArgumentException(ex);
		}
	}

	// Heuristic: Surefire names its forked-boot jar "surefirebooter...jar".
	private boolean isSurefireBooterJar(URL url) {
		return url.getPath().contains("surefirebooter");
	}

	/**
	 * Expands a Surefire booter jar into the URLs listed in its manifest
	 * {@code Class-Path} attribute. Entries there are already absolute URLs.
	 */
	private List<URL> extractUrlsFromManifestClassPath(URL booterJar) {
		List<URL> urls = new ArrayList<>();
		try {
			for (String entry : getClassPath(booterJar)) {
				urls.add(new URL(entry));
			}
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
		return urls;
	}

	// Reads the space-separated Class-Path attribute from the booter jar's manifest.
	private String[] getClassPath(URL booterJar) throws Exception {
		try (JarFile jarFile = new JarFile(new File(booterJar.toURI()))) {
			return StringUtils.delimitedListToStringArray(jarFile.getManifest()
					.getMainAttributes().getValue(Attributes.Name.CLASS_PATH), " ");
		}
	}

	/**
	 * Applies the test class's customizations: {@link ClassPathOverrides} artifacts are
	 * prepended (so they win resolution order) and {@link ClassPathExclusions} matches
	 * are dropped.
	 */
	private URL[] processUrls(URL[] urls, Class<?> testClass) throws Exception {
		ClassPathEntryFilter filter = new ClassPathEntryFilter(testClass);
		List<URL> processedUrls = new ArrayList<>();
		processedUrls.addAll(getAdditionalUrls(testClass));
		for (URL url : urls) {
			if (!filter.isExcluded(url)) {
				processedUrls.add(url);
			}
		}
		return processedUrls.toArray(new URL[processedUrls.size()]);
	}

	// Resolves @ClassPathOverrides coordinates, or an empty list when absent.
	private List<URL> getAdditionalUrls(Class<?> testClass) throws Exception {
		ClassPathOverrides overrides = AnnotationUtils.findAnnotation(testClass,
				ClassPathOverrides.class);
		if (overrides == null) {
			return Collections.emptyList();
		}
		return resolveCoordinates(overrides.value());
	}

	/**
	 * Resolves Maven coordinates (with transitive dependencies) via Aether against the
	 * local repository in {@code ~/.m2/repository} and Maven Central, returning the
	 * resolved artifact files as URLs. NOTE(review): performs network access when the
	 * artifacts are not cached locally.
	 */
	private List<URL> resolveCoordinates(String[] coordinates) throws Exception {
		DefaultServiceLocator serviceLocator = MavenRepositorySystemUtils
				.newServiceLocator();
		serviceLocator.addService(RepositoryConnectorFactory.class,
				BasicRepositoryConnectorFactory.class);
		serviceLocator.addService(TransporterFactory.class,
				HttpTransporterFactory.class);
		RepositorySystem repositorySystem = serviceLocator
				.getService(RepositorySystem.class);
		DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
		LocalRepository localRepository = new LocalRepository(
				System.getProperty("user.home") + "/.m2/repository");
		session.setLocalRepositoryManager(
				repositorySystem.newLocalRepositoryManager(session, localRepository));
		CollectRequest collectRequest = new CollectRequest(null,
				Arrays.asList(new RemoteRepository.Builder("central", "default",
						"https://repo.maven.apache.org/maven2").build()));
		collectRequest.setDependencies(createDependencies(coordinates));
		DependencyRequest dependencyRequest = new DependencyRequest(collectRequest, null);
		DependencyResult result = repositorySystem.resolveDependencies(session,
				dependencyRequest);
		List<URL> resolvedArtifacts = new ArrayList<>();
		for (ArtifactResult artifact : result.getArtifactResults()) {
			resolvedArtifacts.add(artifact.getArtifact().getFile().toURI().toURL());
		}
		return resolvedArtifacts;
	}

	// Wraps each "group:artifact:version" coordinate as an Aether Dependency (no scope).
	private List<Dependency> createDependencies(String[] allCoordinates) {
		List<Dependency> dependencies = new ArrayList<>();
		for (String coordinate : allCoordinates) {
			dependencies.add(new Dependency(new DefaultArtifact(coordinate), null));
		}
		return dependencies;
	}

	/**
	 * Filter for class path entries. Always excludes {@code log4j-*.jar}, plus any
	 * Ant-style patterns supplied via {@link ClassPathExclusions}; patterns are matched
	 * against the entry's file name only, not its full path.
	 */
	private static final class ClassPathEntryFilter {

		private final List<String> exclusions;

		private final AntPathMatcher matcher = new AntPathMatcher();

		private ClassPathEntryFilter(Class<?> testClass) throws Exception {
			this.exclusions = new ArrayList<>();
			this.exclusions.add("log4j-*.jar");
			ClassPathExclusions exclusions = AnnotationUtils.findAnnotation(testClass,
					ClassPathExclusions.class);
			if (exclusions != null) {
				this.exclusions.addAll(Arrays.asList(exclusions.value()));
			}
		}

		// Only file: URLs can be excluded; everything else always stays on the path.
		private boolean isExcluded(URL url) throws Exception {
			if (!"file".equals(url.getProtocol())) {
				return false;
			}
			String name = new File(url.toURI()).getName();
			for (String exclusion : this.exclusions) {
				if (this.matcher.match(exclusion, name)) {
					return true;
				}
			}
			return false;
		}

	}

	/**
	 * Custom {@link TestClass} that uses a modified class path. The test class itself is
	 * loaded through the modified loader, and every framework method is wrapped so it
	 * executes with that loader as the thread context class loader.
	 */
	private static final class ModifiedClassPathTestClass extends TestClass {

		private final ClassLoader classLoader;

		ModifiedClassPathTestClass(ClassLoader classLoader, String testClassName)
				throws ClassNotFoundException {
			super(classLoader.loadClass(testClassName));
			this.classLoader = classLoader;
		}

		/**
		 * Looks up annotated methods using the annotation class as reloaded by the
		 * modified loader — the caller's annotation Class would not match the one
		 * attached to the reloaded test class.
		 */
		@Override
		public List<FrameworkMethod> getAnnotatedMethods(
				Class<? extends Annotation> annotationClass) {
			try {
				return getAnnotatedMethods(annotationClass.getName());
			}
			catch (ClassNotFoundException ex) {
				throw new RuntimeException(ex);
			}
		}

		@SuppressWarnings("unchecked")
		private List<FrameworkMethod> getAnnotatedMethods(String annotationClassName)
				throws ClassNotFoundException {
			Class<? extends Annotation> annotationClass = (Class<? extends Annotation>) this.classLoader
					.loadClass(annotationClassName);
			List<FrameworkMethod> methods = super.getAnnotatedMethods(annotationClass);
			return wrapFrameworkMethods(methods);
		}

		// Wraps each method so invocation happens under the modified TCCL.
		private List<FrameworkMethod> wrapFrameworkMethods(
				List<FrameworkMethod> methods) {
			List<FrameworkMethod> wrapped = new ArrayList<>(methods.size());
			for (FrameworkMethod frameworkMethod : methods) {
				wrapped.add(new ModifiedClassPathFrameworkMethod(
						frameworkMethod.getMethod()));
			}
			return wrapped;
		}

		/**
		 * Runs {@code action} with the modified loader installed as the thread context
		 * class loader, always restoring the previous loader afterwards.
		 */
		private <T, E extends Throwable> T doWithModifiedClassPathThreadContextClassLoader(
				ModifiedClassPathTcclAction<T, E> action) throws E {
			ClassLoader originalClassLoader = Thread.currentThread()
					.getContextClassLoader();
			Thread.currentThread().setContextClassLoader(this.classLoader);
			try {
				return action.perform();
			}
			finally {
				Thread.currentThread().setContextClassLoader(originalClassLoader);
			}
		}

		/**
		 * An action to be performed with the {@link ModifiedClassPathClassLoader} set as
		 * the thread context class loader.
		 */
		private interface ModifiedClassPathTcclAction<T, E extends Throwable> {

			T perform() throws E;

		}

		/**
		 * Custom {@link FrameworkMethod} that runs methods with
		 * {@link ModifiedClassPathClassLoader} as the thread context class loader.
		 */
		private final class ModifiedClassPathFrameworkMethod extends FrameworkMethod {

			private ModifiedClassPathFrameworkMethod(Method method) {
				super(method);
			}

			@Override
			public Object invokeExplosively(Object target, Object... params)
					throws Throwable {
				return doWithModifiedClassPathThreadContextClassLoader(
						() -> ModifiedClassPathFrameworkMethod.super.invokeExplosively(
								target, params));
			}

		}

	}

	/**
	 * Custom {@link URLClassLoader} that modifies the class path. JUnit and Hamcrest
	 * classes are delegated to the runner's own loader so the framework types are
	 * shared between the two class-loader worlds.
	 */
	private static final class ModifiedClassPathClassLoader extends URLClassLoader {

		private final ClassLoader junitLoader;

		ModifiedClassPathClassLoader(URL[] urls, ClassLoader parent,
				ClassLoader junitLoader) {
			super(urls, parent);
			this.junitLoader = junitLoader;
		}

		@Override
		public Class<?> loadClass(String name) throws ClassNotFoundException {
			if (name.startsWith("org.junit") || name.startsWith("org.hamcrest")) {
				return this.junitLoader.loadClass(name);
			}
			return super.loadClass(name);
		}

	}

}
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.apache.ofbiz.base.util;

import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.MalformedURLException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.apache.commons.io.FileUtils;
import org.apache.ofbiz.base.location.ComponentLocationResolver;

/**
 * File Utilities: reading/writing text files, locating files by path (including
 * OFBiz {@code component://} locations) and recursive file searches.
 */
public final class FileUtil {

    public static final String module = FileUtil.class.getName();

    private FileUtil () {}

    /**
     * {@link FilenameFilter} that accepts files with a given extension whose
     * directory path contains every string in {@code stringsToFindInPath} and whose
     * content contains every string in {@code stringsToFindInFile}. Directories are
     * accepted (so recursion can descend); hidden entries (leading '.') are rejected.
     */
    private static class SearchTextFilesFilter implements FilenameFilter {
        String fileExtension;
        Set<String> stringsToFindInFile = new HashSet<String>();
        Set<String> stringsToFindInPath = new HashSet<String>();

        public SearchTextFilesFilter(String fileExtension, Set<String> stringsToFindInPath, Set<String> stringsToFindInFile) {
            this.fileExtension = fileExtension;
            if (stringsToFindInPath != null) {
                this.stringsToFindInPath.addAll(stringsToFindInPath);
            }
            if (stringsToFindInFile != null) {
                this.stringsToFindInFile.addAll(stringsToFindInFile);
            }
        }

        @Override
        public boolean accept(File dir, String name) {
            File file = new File(dir, name);
            if (file.getName().startsWith(".")) {
                return false;
            }
            if (file.isDirectory()) {
                // accept directories so searchFiles() can recurse into them
                return true;
            }
            boolean hasAllPathStrings = true;
            // normalize to forward slashes so path patterns match on Windows too
            String fullPath = dir.getPath().replace('\\', '/');
            for (String pathString: stringsToFindInPath) {
                if (fullPath.indexOf(pathString) < 0) {
                    hasAllPathStrings = false;
                    break;
                }
            }
            if (hasAllPathStrings && name.endsWith("." + fileExtension)) {
                if (stringsToFindInFile.size() == 0) {
                    return true;
                }
                StringBuffer xmlFileBuffer = null;
                try {
                    xmlFileBuffer = FileUtil.readTextFile(file, true);
                } catch (FileNotFoundException e) {
                    Debug.logWarning("Error reading xml file [" + file + "] for file search: " + e.toString(), module);
                    return false;
                } catch (IOException e) {
                    Debug.logWarning("Error reading xml file [" + file + "] for file search: " + e.toString(), module);
                    return false;
                }
                if (UtilValidate.isNotEmpty(xmlFileBuffer)) {
                    boolean hasAllStrings = true;
                    for (String stringToFile: stringsToFindInFile) {
                        if (xmlFileBuffer.indexOf(stringToFile) < 0) {
                            hasAllStrings = false;
                            break;
                        }
                    }
                    return hasAllStrings;
                }
            } else {
                return false;
            }
            // file matched path/extension but its content was empty
            return false;
        }
    }

    public static File getFile(String path) {
        return getFile(null, path);
    }

    /**
     * Returns a {@link File} for the given path, resolving {@code component://}
     * locations first. Returns {@code null} (after logging) when the component
     * location cannot be resolved.
     */
    public static File getFile(File root, String path) {
        if (path.startsWith("component://")) {
            try {
                path = ComponentLocationResolver.getBaseLocation(path).toString();
            } catch (MalformedURLException e) {
                Debug.logError(e, module);
                return null;
            }
        }
        return new File(root, localizePath(path));
    }

    /**
     * Converts a file path to one that is compatible with the host operating system.
     *
     * @param path The file path to convert.
     * @return The converted file path.
     */
    public static String localizePath(String path) {
        // replaceAll takes a regex replacement, so a literal backslash separator
        // must itself be escaped
        String fileNameSeparator = ("\\".equals(File.separator) ? "\\" + File.separator : File.separator);
        return path.replaceAll("/+|\\\\+", fileNameSeparator);
    }

    public static void writeString(String fileName, String s) throws IOException {
        writeString(null, fileName, s);
    }

    /**
     * Writes the string (plus a platform line separator) to {@code path/name},
     * creating the directory if necessary. Uses the platform default charset
     * (via {@link FileWriter}).
     */
    public static void writeString(String path, String name, String s) throws IOException {
        Writer out = getBufferedWriter(path, name);
        try {
            out.write(s + System.getProperty("line.separator"));
        } catch (IOException e) {
            Debug.logError(e, module);
            throw e;
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException e) {
                    Debug.logError(e, module);
                }
            }
        }
    }

    /**
     * Writes a file from a string with a specified encoding.
     *
     * @param path directory to write into (created if missing); may be empty
     * @param name file name, appended to {@code path}
     * @param encoding character encoding name passed to commons-io
     * @param s content to write
     * @throws IOException if the name is empty or the write fails
     */
    public static void writeString(String path, String name, String encoding, String s) throws IOException {
        String fileName = getPatchedFileName(path, name);
        if (UtilValidate.isEmpty(fileName)) {
            throw new IOException("Cannot obtain buffered writer for an empty filename!");
        }
        try {
            FileUtils.writeStringToFile(new File(fileName), s, encoding);
        } catch (IOException e) {
            Debug.logError(e, module);
            throw e;
        }
    }

    /** Writes {@code s} to {@code outFile} using the given encoding. */
    public static void writeString(String encoding, String s, File outFile) throws IOException {
        try {
            FileUtils.writeStringToFile(outFile, s, encoding);
        } catch (IOException e) {
            Debug.logError(e, module);
            throw e;
        }
    }

    /**
     * Returns a buffered writer (platform default charset) for {@code path/name};
     * the parent directory is created if missing. Caller must close the writer.
     */
    public static Writer getBufferedWriter(String path, String name) throws IOException {
        String fileName = getPatchedFileName(path, name);
        if (UtilValidate.isEmpty(fileName)) {
            throw new IOException("Cannot obtain buffered writer for an empty filename!");
        }
        return new BufferedWriter(new FileWriter(fileName));
    }

    /**
     * Returns a buffered output stream for {@code path/name}; the parent directory
     * is created if missing. Caller must close the stream.
     */
    public static OutputStream getBufferedOutputStream(String path, String name) throws IOException {
        String fileName = getPatchedFileName(path, name);
        if (UtilValidate.isEmpty(fileName)) {
            throw new IOException("Cannot obtain buffered writer for an empty filename!");
        }
        return new BufferedOutputStream(new FileOutputStream(fileName));
    }

    /**
     * Joins {@code path} and {@code fileName} with exactly one '/', normalizing
     * backslashes and creating the directory when it does not exist.
     *
     * @throws IOException if the directory cannot be created
     */
    public static String getPatchedFileName(String path, String fileName) throws IOException {
        // make sure the export directory exists
        if (UtilValidate.isNotEmpty(path)) {
            path = path.replaceAll("\\\\", "/");
            File parentDir = new File(path);
            if (!parentDir.exists()) {
                // NOTE(review): mkdir() only creates the last segment; a missing
                // multi-level path will fail here — confirm callers pass existing parents
                if (!parentDir.mkdir()) {
                    throw new IOException("Cannot create directory for path: " + path);
                }
            }
            // build the filename with the path
            if (!path.endsWith("/")) {
                path = path + "/";
            }
            if (fileName.startsWith("/")) {
                fileName = fileName.substring(1);
            }
            fileName = path + fileName;
        }
        return fileName;
    }

    /**
     * Reads a text file (platform default charset) into a StringBuffer,
     * optionally re-appending a line separator after each line.
     *
     * @throws FileNotFoundException if the file does not exist
     * @throws IOException on read failure (logged and rethrown)
     */
    public static StringBuffer readTextFile(File file, boolean newline) throws FileNotFoundException, IOException {
        if (!file.exists()) {
            throw new FileNotFoundException();
        }
        StringBuffer buf = new StringBuffer();
        BufferedReader in = null;
        try {
            in = new BufferedReader(new FileReader(file));
            String str;
            while ((str = in.readLine()) != null) {
                buf.append(str);
                if (newline) {
                    buf.append(System.getProperty("line.separator"));
                }
            }
        } catch (IOException e) {
            Debug.logError(e, module);
            throw e;
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    Debug.logError(e, module);
                }
            }
        }
        return buf;
    }

    public static StringBuffer readTextFile(String fileName, boolean newline) throws FileNotFoundException, IOException {
        File file = new File(fileName);
        return readTextFile(file, newline);
    }

    /** Reads the whole file into a String using the given encoding. */
    public static String readString(String encoding, File inFile) throws IOException {
        String readString = "";
        try {
            readString = FileUtils.readFileToString(inFile, encoding);
        } catch (IOException e) {
            Debug.logError(e, module);
            throw e;
        }
        return readString;
    }

    /**
     * Recursively collects the files under {@code path} accepted by {@code filter}
     * into {@code fileList}; hidden directories (leading '.') are never descended.
     */
    public static void searchFiles(List<File> fileList, File path, FilenameFilter filter, boolean includeSubfolders) throws IOException {
        // Get filtered files in the current path
        File[] files = path.listFiles(filter);
        if (files == null) {
            return;
        }
        // Process each filtered entry
        for (int i = 0; i < files.length; i++) {
            // recurse if the entry is a directory
            if (files[i].isDirectory() && includeSubfolders && !files[i].getName().startsWith(".")) {
                searchFiles(fileList, files[i], filter, true);
            } else {
                // add the filtered file to the list
                fileList.add(files[i]);
            }
        }
    }

    /**
     * Finds files with extension {@code fileExt} under {@code basePath} (defaults to
     * the {@code ofbiz.home} system property) whose path contains {@code partialPath}
     * and whose content contains {@code stringToFind}; null criteria are ignored.
     */
    public static List<File> findFiles(String fileExt, String basePath, String partialPath, String stringToFind) throws IOException {
        if (basePath == null) {
            basePath = System.getProperty("ofbiz.home");
        }

        Set<String> stringsToFindInPath = new HashSet<String>();
        Set<String> stringsToFindInFile = new HashSet<String>();

        if (partialPath != null) {
            stringsToFindInPath.add(partialPath);
        }
        if (stringToFind != null) {
            stringsToFindInFile.add(stringToFind);
        }

        List<File> fileList = new LinkedList<File>();
        FileUtil.searchFiles(fileList, new File(basePath), new SearchTextFilesFilter(fileExt, stringsToFindInPath, stringsToFindInFile), true);

        return fileList;
    }

    /**
     * Finds XML files under {@code basePath} (defaults to {@code ofbiz.home}) whose
     * root element or referenced XSD/DTD matches the given names; null criteria are
     * ignored.
     */
    public static List<File> findXmlFiles(String basePath, String partialPath, String rootElementName, String xsdOrDtdName) throws IOException {
        if (basePath == null) {
            basePath = System.getProperty("ofbiz.home");
        }

        Set<String> stringsToFindInPath = new HashSet<String>();
        Set<String> stringsToFindInFile = new HashSet<String>();

        if (partialPath != null) {
            stringsToFindInPath.add(partialPath);
        }
        if (rootElementName != null) {
            stringsToFindInFile.add("<" + rootElementName + " ");
        }
        if (xsdOrDtdName != null) {
            stringsToFindInFile.add(xsdOrDtdName);
        }
        List<File> fileList = new LinkedList<File>();
        FileUtil.searchFiles(fileList, new File(basePath), new SearchTextFilesFilter("xml", stringsToFindInPath, stringsToFindInFile), true);
        return fileList;
    }

    /**
     * Search for the specified <code>searchString</code> in the given
     * {@link Reader}.
     *
     * <p>FIXED: the previous implementation reset its match counter to zero on any
     * mismatch without reconsidering the current character or backtracking, so
     * searches with self-overlapping prefixes (e.g. "aab" inside "aaab") were
     * missed. This version keeps a sliding window of the last
     * {@code searchString.length() - 1} characters across buffer reads, which is
     * both correct and memory-bounded.</p>
     *
     * @param reader A Reader in which the String will be searched.
     * @param searchString The String to search for
     * @return <code>TRUE</code> if the <code>searchString</code> is found;
     *         <code>FALSE</code> otherwise.
     * @throws IOException
     */
    public static boolean containsString(Reader reader, final String searchString) throws IOException {
        int searchLength = searchString.length();
        if (searchLength == 0) {
            // every stream contains the empty string
            return true;
        }
        StringBuilder window = new StringBuilder();
        char[] buffer = new char[1024];
        int numCharsRead;
        while ((numCharsRead = reader.read(buffer)) > 0) {
            window.append(buffer, 0, numCharsRead);
            if (window.indexOf(searchString) >= 0) {
                return true;
            }
            // keep only the trailing (searchLength - 1) chars: the longest suffix
            // that could still be the start of a match spanning the next read
            if (window.length() > searchLength - 1) {
                window.delete(0, window.length() - (searchLength - 1));
            }
        }
        return false;
    }

    /**
     * Search for the specified <code>searchString</code> in the given
     * filename. If the specified file doesn't exist, <code>FALSE</code>
     * returns.
     *
     * @param fileName A full path to a file in which the String will be searched.
     * @param searchString The String to search for
     * @return <code>TRUE</code> if the <code>searchString</code> is found;
     *         <code>FALSE</code> otherwise.
     * @throws IOException
     */
    public static boolean containsString(final String fileName, final String searchString) throws IOException {
        File inFile = new File(fileName);
        if (inFile.exists()) {
            BufferedReader in = new BufferedReader(new FileReader(inFile));
            try {
                return containsString(in, searchString);
            } finally {
                if (in != null) in.close();
            }
        } else {
            return false;
        }
    }

    /**
     * Check if the specified <code>fileName</code> exists and is a file (not a directory)
     * If the specified file doesn't exist or is a directory <code>FALSE</code> returns.
     *
     * @param fileName A full path to a file in which the String will be searched.
     * @return <code>TRUE</code> if the <code>fileName</code> exists and is a file (not a directory)
     *         <code>FALSE</code> otherwise.
     */
    public static boolean isFile(String fileName) {
        File f = new File(fileName);
        return f.isFile();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.karaf.tooling.features;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;

import javax.xml.bind.JAXBException;
import javax.xml.stream.XMLStreamException;

import org.apache.felix.utils.properties.Properties;
import org.apache.karaf.features.BundleInfo;
import org.apache.karaf.features.Dependency;
import org.apache.karaf.features.FeaturesService;
import org.apache.karaf.features.Repository;
import org.apache.karaf.features.internal.model.Bundle;
import org.apache.karaf.features.internal.model.Feature;
import org.apache.karaf.features.internal.model.Features;
import org.apache.karaf.features.internal.model.JaxbUtil;
import org.apache.karaf.kar.internal.Kar;
import org.apache.karaf.tooling.utils.MojoSupport;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;

/**
 * Installs kar dependencies into a server-under-construction in target/assembly
 *
 * NOTE: the javadoc tags below (@goal, @phase, @parameter, ...) are live Maven
 * plugin metadata read by the plugin tooling — do not remove or alter them.
 *
 * @goal install-kars
 * @phase process-resources
 * @requiresDependencyResolution runtime
 * @inheritByDefault true
 * @description Install kar dependencies
 */
public class InstallKarsMojo extends MojoSupport {

    /**
     * Base directory used to copy the resources during the build (working directory).
     *
     * @parameter default-value="${project.build.directory}/assembly"
     * @required
     */
    protected String workDirectory;

    /**
     * Features configuration file (etc/org.apache.karaf.features.cfg).
     *
     * @parameter default-value="${project.build.directory}/assembly/etc/org.apache.karaf.features.cfg"
     * @required
     */
    protected File featuresCfgFile;

    /**
     * startup.properties file.
     *
     * @parameter default-value="${project.build.directory}/assembly/etc/startup.properties"
     * @required
     */
    protected File startupPropertiesFile;

    /**
     * default start level for bundles in features that don't specify it.
     *
     * @parameter
     */
    protected int defaultStartLevel = 30;

    /**
     * if false, unpack to system and add bundles to startup.properties
     * if true, unpack to system and add feature to features config
     */
    // NOTE(review): this flag is re-assigned per artifact inside execute()
    // based on the artifact's scope; it is mutable shared state read by the
    // inner OfflineFeaturesService.
    protected boolean dontAddToStartup;

    /**
     * Directory used during build to construction the Karaf system repository.
     *
     * @parameter default-value="${project.build.directory}/assembly/system"
     * @required
     */
    protected File systemDirectory;

    /**
     * List of features from runtime-scope features xml and kars to be installed into system and listed in startup.properties.
     *
     * @parameter
     */
    private List<String> startupFeatures;

    /**
     * List of features from runtime-scope features xml and kars to be installed into system repo and listed in features service boot features.
     *
     * @parameter
     */
    private List<String> bootFeatures;

    /**
     * List of features from runtime-scope features xml and kars to be installed into system repo and not mentioned elsewhere.
     *
     * @parameter
     */
    private List<String> installedFeatures;

    /**
     * When a feature depends on another feature, try to find it in another referenced feature-file and install that one
     * too.
     *
     * @parameter
     */
    private boolean addTransitiveFeatures = true;

    // URI of the system repository directory (set in execute()).
    private URI system;
    // In-memory copy of etc/startup.properties, written back at the end of execute().
    private Properties startupProperties = new Properties();
    // All features seen in any processed repository; used to resolve transitive dependencies.
    private Set<Feature> featureSet = new HashSet<Feature>();
    // Feature dependencies not yet matched to a known feature.
    private List<Dependency> missingDependencies = new ArrayList<Dependency>();

    // an access layer for available Aether implementation
    protected DependencyHelper dependencyHelper;

    /**
     * list of features to install into local repo.
     */
    private List<Feature> localRepoFeatures = new ArrayList<Feature>();

    /**
     * Entry point: unpacks kar dependencies and feature repositories into the
     * assembly's system directory, then materializes every bundle referenced by
     * startup.properties or by the selected features.
     *
     * @throws MojoExecutionException if any feature repository could not be installed
     * @throws MojoFailureException   if startup.properties cannot be read or written
     */
    @SuppressWarnings("deprecation")
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        this.dependencyHelper = DependencyHelperFactory.createDependencyHelper(this.container, this.project, this.mavenSession, getLog());
        systemDirectory.mkdirs();
        system = systemDirectory.toURI();
        // Load an existing startup.properties, or prepare a fresh one with a header.
        if (startupPropertiesFile.exists()) {
            try {
                InputStream in = new FileInputStream(startupPropertiesFile);
                try {
                    startupProperties.load(in);
                } finally {
                    in.close();
                }
            } catch (IOException e) {
                throw new MojoFailureException("Could not open existing startup.properties file at " + startupPropertiesFile, e);
            }
        } else {
            startupProperties.setHeader(Collections.singletonList("#Bundles to be started on startup, with startlevel"));
            if (!startupPropertiesFile.getParentFile().exists()) {
                startupPropertiesFile.getParentFile().mkdirs();
            }
        }
        FeaturesService featuresService = new OfflineFeaturesService();

        Collection<Artifact> dependencies = project.getDependencyArtifacts();
        StringBuilder buf = new StringBuilder();
        for (Artifact artifact : dependencies) {
            // runtime-scope artifacts go to the features config; others to startup.properties.
            dontAddToStartup = "runtime".equals(artifact.getScope());
            if ("kar".equals(artifact.getType()) && acceptScope(artifact)) {
                File file = artifact.getFile();
                try {
                    Kar kar = new Kar(file.toURI());
                    kar.extract(new File(system.getPath()), new File(workDirectory));
                    // Re-register each repository the kar ships so it is processed fresh.
                    for (URI repoUri : kar.getFeatureRepos()) {
                        featuresService.removeRepository(repoUri);
                        featuresService.addRepository(repoUri);
                    }
                } catch (Exception e) {
                    // A broken kar aborts the build immediately (the commented-out
                    // alternative accumulated errors instead).
                    throw new RuntimeException("Could not install kar: " + artifact.toString() + "\n", e);
                    //buf.append("Could not install kar: ").append(artifact.toString()).append("\n");
                    //buf.append(e.getMessage()).append("\n\n");
                }
            }
            if ("features".equals(artifact.getClassifier()) && acceptScope(artifact)) {
                String uri = this.dependencyHelper.artifactToMvn(artifact);

                File source = artifact.getFile();
                DefaultRepositoryLayout layout = new DefaultRepositoryLayout();

                //remove timestamp version
                artifact = factory.createArtifactWithClassifier(artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), artifact.getType(), artifact.getClassifier());
                File target = new File(system.resolve(layout.pathOf(artifact)));

                if (!target.exists()) {
                    target.getParentFile().mkdirs();
                    try {
                        copy(source, target);
                    } catch (RuntimeException e) {
                        getLog().error("Could not copy features " + uri + " from source file " + source, e);
                    }

                    // for snapshot, generate the repository metadata in order to avoid override of snapshot from remote repositories
                    if (artifact.isSnapshot()) {
                        getLog().debug("Feature " + uri + " is a SNAPSHOT, generate the maven-metadata-local.xml file");
                        File metadataTarget = new File(target.getParentFile(), "maven-metadata-local.xml");
                        try {
                            MavenUtil.generateMavenMetadata(artifact, metadataTarget);
                        } catch (Exception e) {
                            getLog().warn("Could not create maven-metadata-local.xml", e);
                            getLog().warn("It means that this SNAPSHOT could be overwritten by an older one present on remote repositories");
                        }
                    }
                }
                try {
                    featuresService.addRepository(URI.create(uri));
                } catch (Exception e) {
                    buf.append("Could not install feature: ").append(artifact.toString()).append("\n");
                    buf.append(e.getMessage()).append("\n\n");
                }
            }
        }

        // install bundles listed in startup properties that weren't in kars into the system dir
        Set<?> keySet = startupProperties.keySet();
        for (Object keyObject : keySet) {
            String key = (String) keyObject;
            String path = this.dependencyHelper.pathFromMaven(key);
            File target = new File(system.resolve(path));
            if (!target.exists()) {
                install(key, target);
            }
        }

        // install bundles listed in install features not in system
        for (Feature feature : localRepoFeatures) {
            for (Bundle bundle : feature.getBundle()) {
                if (!bundle.isDependency()) {
                    String key = bundle.getLocation();
                    // remove wrap: protocol to resolve from maven
                    if (key.startsWith("wrap:")) {
                        key = key.substring(5);
                    }
                    String path = this.dependencyHelper.pathFromMaven(key);
                    File test = new File(system.resolve(path));
                    if (!test.exists()) {
                        File target = new File(system.resolve(path));
                        if (!target.exists()) {
                            install(key, target);
                            Artifact artifact = this.dependencyHelper.mvnToArtifact(key);
                            if (artifact.isSnapshot()) {
                                // generate maven-metadata-local.xml for the artifact
                                File metadataSource = new File(this.dependencyHelper.resolveById(key, getLog()).getParentFile(), "maven-metadata-local.xml");
                                File metadataTarget = new File(target.getParentFile(), "maven-metadata-local.xml");
                                metadataTarget.getParentFile().mkdirs();
                                try {
                                    if (!metadataSource.exists()) {
                                        // the maven-metadata-local.xml doesn't exist in the local repo, generate one
                                        MavenUtil.generateMavenMetadata(artifact, metadataTarget);
                                    } else {
                                        // copy the metadata to the target
                                        copy(metadataSource, metadataTarget);
                                    }
                                } catch (IOException ioException) {
                                    getLog().warn(ioException);
                                    getLog().warn("Unable to copy the maven-metadata-local.xml, it means that this SNAPSHOT will be overwritten by a remote one (if exist)");
                                }
                            }
                        }
                    }
                }
            }
        }

        // Persist the (possibly updated) startup.properties.
        try {
            OutputStream out = new FileOutputStream(startupPropertiesFile);
            try {
                startupProperties.save(out);
            } finally {
                out.close();
            }
        } catch (IOException e) {
            throw new MojoFailureException("Could not write startup.properties file at " + startupPropertiesFile, e);
        }
        if (buf.length() > 0) {
            throw new MojoExecutionException("Could not unpack all dependencies:\n" + buf.toString());
        }
    }

    /**
     * Resolves the artifact identified by {@code key} and copies it to {@code target}
     * inside the system repository.
     */
    private void install(String key, File target) throws MojoFailureException {
        File source = this.dependencyHelper.resolveById(key, getLog());
        target.getParentFile().mkdirs();
        copy(source, target);
    }

    // Only compile- and runtime-scope dependencies are installed.
    private boolean acceptScope(Artifact artifact) {
        return "compile".equals(artifact.getScope()) || "runtime".equals(artifact.getScope());
    }

    /**
     * FeaturesService implementation that works against the assembly on disk
     * (no running OSGi framework). Only addRepository / installFeature do real
     * work; the remaining interface methods are no-op stubs.
     */
    private class OfflineFeaturesService implements FeaturesService {
        private static final String FEATURES_REPOSITORIES = "featuresRepositories";
        private static final String FEATURES_BOOT = "featuresBoot";

        @Override
        public void validateRepository(URI uri) throws Exception {
        }

        /**
         * Registers a feature repository: either records it (and its boot
         * features) in the features config file, or — when dontAddToStartup is
         * false — installs every feature it contains into startup.properties.
         */
        @Override
        public void addRepository(URI uri) throws Exception {
            if (dontAddToStartup) {
                getLog().info("Adding feature repository to system: " + uri);
                // NOTE(review): if featuresCfgFile does not exist, the repository
                // is silently skipped — confirm this is intended.
                if (featuresCfgFile.exists()) {
                    Properties properties = new Properties();
                    InputStream in = new FileInputStream(featuresCfgFile);
                    try {
                        properties.load(in);
                    } finally {
                        in.close();
                    }
                    // Append this repo URI to featuresRepositories if not already listed.
                    String existingFeatureRepos = retrieveProperty(properties, FEATURES_REPOSITORIES);
                    if (!existingFeatureRepos.contains(uri.toString())) {
                        existingFeatureRepos = existingFeatureRepos + uri.toString();
                        properties.put(FEATURES_REPOSITORIES, existingFeatureRepos);
                    }
                    Features repo = readFeatures(uri);
                    // Copy referenced inner repositories into the system repo as well.
                    for (String innerRepository : repo.getRepository()) {
                        String innerRepositoryPath = dependencyHelper.pathFromMaven(innerRepository);
                        File innerRepositoryTargetInSystemRepository = new File(system.resolve(innerRepositoryPath));
                        if (!innerRepositoryTargetInSystemRepository.exists()) {
                            File innerRepositorySourceFile = dependencyHelper.resolveById(innerRepository, getLog());
                            innerRepositoryTargetInSystemRepository.getParentFile().mkdirs();
                            copy(innerRepositorySourceFile, innerRepositoryTargetInSystemRepository);
                            // add metadata for snapshot
                            Artifact innerRepositoryArtifact = dependencyHelper.mvnToArtifact(innerRepository);
                            if (innerRepositoryArtifact.isSnapshot()) {
                                getLog().debug("Feature repository " + innerRepository + " is a SNAPSHOT, generate the maven-metadata-local.xml file");
                                File metadataTarget = new File(innerRepositoryTargetInSystemRepository.getParentFile(), "maven-metadata-local.xml");
                                try {
                                    MavenUtil.generateMavenMetadata(innerRepositoryArtifact, metadataTarget);
                                } catch (Exception e) {
                                    getLog().warn("Could not create maven-metadata-local.xml", e);
                                    getLog().warn("It means that this SNAPSHOT could be overwritten by an older one present on remote repositories");
                                }
                            }
                        }
                    }
                    // Route each feature to startup / boot / installed handling.
                    for (Feature feature : repo.getFeature()) {
                        featureSet.add(feature);
                        if (startupFeatures != null && startupFeatures.contains(feature.getName())) {
                            installFeature(feature, null);
                        } else if (bootFeatures != null && bootFeatures.contains(feature.getName())) {
                            localRepoFeatures.add(feature);
                            missingDependencies.addAll(feature.getDependencies());
                            String existingBootFeatures = retrieveProperty(properties, FEATURES_BOOT);
                            if (!existingBootFeatures.contains(feature.getName())) {
                                existingBootFeatures = existingBootFeatures + feature.getName();
                                properties.put(FEATURES_BOOT, existingBootFeatures);
                            }
                        } else if (installedFeatures != null && installedFeatures.contains(feature.getName())) {
                            localRepoFeatures.add(feature);
                            missingDependencies.addAll(feature.getDependencies());
                        }
                    }
                    if (addTransitiveFeatures) {
                        addMissingDependenciesToRepo();
                    }
                    FileOutputStream out = new FileOutputStream(featuresCfgFile);
                    try {
                        properties.save(out);
                    } finally {
                        out.close();
                    }
                }
            } else {
                getLog().info("Installing feature " + uri + " to system and startup.properties");
                Features features = readFeatures(uri);
                for (Feature feature : features.getFeature()) {
                    installFeature(feature, null);
                }
            }
        }

        /**
         * Drains missingDependencies: each dependency that resolves to a known
         * feature is added to localRepoFeatures, and that feature's own
         * dependencies are queued via the same iterator.
         */
        private void addMissingDependenciesToRepo() {
            for (ListIterator<Dependency> iterator = missingDependencies.listIterator(); iterator.hasNext(); ) {
                Dependency dependency = iterator.next();
                Feature depFeature = lookupFeature(dependency);
                if (depFeature == null) {
                    continue;
                }
                localRepoFeatures.add(depFeature);
                iterator.remove();
                addAllMissingDependencies(iterator, depFeature);
            }
        }

        // Queues depFeature's dependencies onto the in-progress iterator,
        // skipping ones already pending.
        private void addAllMissingDependencies(ListIterator<Dependency> iterator, Feature depFeature) {
            for (Dependency dependency : depFeature.getDependencies()) {
                if (!missingDependencies.contains(dependency)) {
                    iterator.add(dependency);
                }
            }
        }

        @Override
        public void addRepository(URI uri, boolean install) throws Exception {
        }

        // Returns the existing property value with a trailing comma (ready for
        // appending), or "" when absent/empty.
        private String retrieveProperty(Properties properties, String key) {
            String val = properties.getProperty(key);
            return val != null && val.length() > 0 ? val + "," : "";
        }

        /**
         * Unmarshals a features XML, resolving mvn: URIs against the system repo
         * and anything else as a plain file URI.
         */
        private Features readFeatures(URI uri) throws MojoExecutionException, XMLStreamException, JAXBException, IOException {
            File repoFile;
            if (uri.toString().startsWith("mvn:")) {
                URI featuresPath = system.resolve(dependencyHelper.pathFromMaven(uri.toString()));
                repoFile = new File(featuresPath);
            } else {
                repoFile = new File(uri);
            }
            InputStream in = new FileInputStream(repoFile);
            Features features;
            try {
                features = JaxbUtil.unmarshal(in, false);
            } finally {
                in.close();
            }
            return features;
        }

        @Override
        public void removeRepository(URI uri) {
        }

        @Override
        public void removeRepository(URI uri, boolean install) {
        }

        @Override
        public void restoreRepository(URI uri) throws Exception {
        }

        @Override
        public Repository[] listRepositories() {
            return new Repository[0];
        }

        @Override
        public void installFeature(String name) throws Exception {
        }

        @Override
        public void installFeature(String name, EnumSet<Option> options) throws Exception {
        }

        @Override
        public void installFeature(String name, String version) throws Exception {
        }

        @Override
        public void installFeature(String name, String version, EnumSet<Option> options) throws Exception {
        }

        /**
         * Records each of the feature's bundles in startup.properties with its
         * start level (defaultStartLevel when the bundle specifies 0). The first
         * bundle written for a feature carries a "# feature: ... version: ..."
         * comment header.
         */
        @Override
        public void installFeature(org.apache.karaf.features.Feature feature, EnumSet<Option> options) throws Exception {
            List<String> comment = Arrays.asList(new String[]{"", "# feature: " + feature.getName() + " version: " + feature.getVersion()});
            for (BundleInfo bundle : feature.getBundles()) {
                String location = bundle.getLocation();
                String startLevel = Integer.toString(bundle.getStartLevel() == 0 ? defaultStartLevel : bundle.getStartLevel());
                if (startupProperties.containsKey(location)) {
                    int oldStartLevel = Integer.decode((String) startupProperties.get(location));
                    // NOTE(review): this compares against the raw bundle level
                    // (0 = unspecified) rather than the effective startLevel
                    // computed above — confirm intended.
                    if (oldStartLevel > bundle.getStartLevel()) {
                        startupProperties.put(location, startLevel);
                    }
                } else {
                    if (comment == null) {
                        startupProperties.put(location, startLevel);
                    } else {
                        startupProperties.put(location, comment, startLevel);
                        comment = null;
                    }
                }
            }
        }

        // Finds a feature in featureSet satisfying the dependency, or null.
        private Feature lookupFeature(Dependency dependency) {
            for (Feature feature : featureSet) {
                if (featureSatisfiesDependency(feature, dependency)) {
                    return feature;
                }
            }
            return null;
        }

        // Matches by feature name only; version ranges are not considered here.
        private boolean featureSatisfiesDependency(Feature feature, Dependency dependency) {
            if (!feature.getName().equals(dependency.getName())) {
                return false;
            }
            return true;
        }

        @Override
        public void installFeatures(Set<org.apache.karaf.features.Feature> features, EnumSet<Option> options) throws Exception {
        }

        @Override
        public void uninstallFeature(String name) throws Exception {
        }

        @Override
        public void uninstallFeature(String name, EnumSet<Option> options) {
        }

        @Override
        public void uninstallFeature(String name, String version) throws Exception {
        }

        @Override
        public void uninstallFeature(String name, String version, EnumSet<Option> options) {
        }

        @Override
        public Feature[] listFeatures() throws Exception {
            return new Feature[0];
        }

        @Override
        public Feature[] listInstalledFeatures() {
            return new Feature[0];
        }

        @Override
        public boolean isInstalled(org.apache.karaf.features.Feature f) {
            return false;
        }

        @Override
        public org.apache.karaf.features.Feature getFeature(String name, String version) throws Exception {
            return null;
        }

        @Override
        public org.apache.karaf.features.Feature getFeature(String name) throws Exception {
            return null;
        }

        @Override
        public Repository getRepository(String repoName) {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        public void refreshRepository(URI uri) throws Exception {
            // TODO Auto-generated method stub
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ftpserver.config.spring;

import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.ftpserver.ConnectionConfigFactory;
import org.apache.ftpserver.FtpServer;
import org.apache.ftpserver.FtpServerConfigurationException;
import org.apache.ftpserver.FtpServerFactory;
import org.apache.ftpserver.message.MessageResource;
import org.apache.ftpserver.message.MessageResourceFactory;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.util.StringUtils;
import org.w3c.dom.Element;

/**
 * Parses the FtpServer "server" element into a Spring bean graph
 *
 * @author <a href="http://mina.apache.org">Apache MINA Project</a>
 */
public class ServerBeanDefinitionParser extends
        AbstractSingleBeanDefinitionParser {

    /**
     * {@inheritDoc}
     */
    // Returns null deliberately: the actual bean is produced through the
    // FtpServerFactory registered in doParse() (factory-bean/factory-method),
    // so no bean class is declared here.
    protected Class<? extends FtpServer> getBeanClass(final Element element) {
        return null;
    }

    /**
     * {@inheritDoc}
     */
    // Builds an FtpServerFactory bean definition from the child elements,
    // applies connection-limit attributes, registers the factory under a
    // generated name, and wires the parsed bean to call factory.createServer().
    @Override
    protected void doParse(final Element element,
            final ParserContext parserContext,
            final BeanDefinitionBuilder builder) {

        BeanDefinitionBuilder factoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(FtpServerFactory.class);

        List<Element> childs = SpringUtil.getChildElements(element);
        // Each recognized child element maps to one FtpServerFactory property;
        // unknown elements are a configuration error.
        for (Element childElm : childs) {
            String childName = childElm.getLocalName();

            if ("listeners".equals(childName)) {
                Map listeners = parseListeners(childElm, parserContext, builder);

                if (listeners.size() > 0) {
                    factoryBuilder.addPropertyValue("listeners", listeners);
                }
            } else if ("ftplets".equals(childName)) {
                Map ftplets = parseFtplets(childElm, parserContext, builder);
                factoryBuilder.addPropertyValue("ftplets", ftplets);
            } else if ("file-user-manager".equals(childName)
                    || "db-user-manager".equals(childName)) {
                // Namespace-specific user manager elements are parsed by their
                // own registered parsers.
                Object userManager = parserContext.getDelegate()
                        .parseCustomElement(childElm, builder.getBeanDefinition());
                factoryBuilder.addPropertyValue("userManager", userManager);
            } else if ("user-manager".equals(childName)) {
                // Generic Spring bean/ref child.
                factoryBuilder.addPropertyValue("userManager", SpringUtil
                        .parseSpringChildElement(childElm, parserContext, builder));
            } else if ("native-filesystem".equals(childName)) {
                Object fileSystem = parserContext.getDelegate()
                        .parseCustomElement(childElm, builder.getBeanDefinition());
                factoryBuilder.addPropertyValue("fileSystem", fileSystem);
            } else if ("filesystem".equals(childName)) {
                factoryBuilder.addPropertyValue("fileSystem", SpringUtil
                        .parseSpringChildElement(childElm, parserContext, builder));
            } else if ("commands".equals(childName)) {
                Object commandFactory = parserContext.getDelegate()
                        .parseCustomElement(childElm, builder.getBeanDefinition());
                factoryBuilder.addPropertyValue("commandFactory", commandFactory);
            } else if ("messages".equals(childName)) {
                MessageResource mr = parseMessageResource(childElm, parserContext, builder);
                factoryBuilder.addPropertyValue("messageResource", mr);
            } else {
                throw new FtpServerConfigurationException(
                        "Unknown configuration name: " + childName);
            }
        }

        // Configure login limits
        ConnectionConfigFactory connectionConfig = new ConnectionConfigFactory();

        if (StringUtils.hasText(element.getAttribute("max-logins"))) {
            connectionConfig.setMaxLogins(SpringUtil.parseInt(element, "max-logins"));
        }
        if (StringUtils.hasText(element.getAttribute("max-threads"))) {
            connectionConfig.setMaxThreads(SpringUtil.parseInt(element, "max-threads"));
        }
        if (StringUtils.hasText(element.getAttribute("max-anon-logins"))) {
            connectionConfig.setMaxAnonymousLogins(SpringUtil.parseInt(element, "max-anon-logins"));
        }
        if (StringUtils.hasText(element.getAttribute("anon-enabled"))) {
            connectionConfig.setAnonymousLoginEnabled(SpringUtil.parseBoolean(
                    element, "anon-enabled", true));
        }
        if (StringUtils.hasText(element.getAttribute("max-login-failures"))) {
            connectionConfig.setMaxLoginFailures(SpringUtil.parseInt(element, "max-login-failures"));
        }
        if (StringUtils.hasText(element.getAttribute("login-failure-delay"))) {
            connectionConfig.setLoginFailureDelay(SpringUtil.parseInt(element, "login-failure-delay"));
        }

        factoryBuilder.addPropertyValue("connectionConfig", connectionConfig.createConnectionConfig());

        BeanDefinition factoryDefinition = factoryBuilder.getBeanDefinition();
        String factoryName = parserContext.getReaderContext().generateBeanName(factoryDefinition);
        BeanDefinitionHolder factoryHolder = new BeanDefinitionHolder(factoryDefinition, factoryName);
        registerBeanDefinition(factoryHolder, parserContext.getRegistry());

        // set the factory on the listener bean
        builder.getRawBeanDefinition().setFactoryBeanName(factoryName);
        builder.getRawBeanDefinition().setFactoryMethodName("createServer");
    }

    /**
     * Parse the "messages" element
     */
    // "languages" is a whitespace/comma separated list; "directory" points at
    // custom message files.
    private MessageResource parseMessageResource(final Element childElm,
            final ParserContext parserContext,
            final BeanDefinitionBuilder builder) {
        MessageResourceFactory mr = new MessageResourceFactory();

        if (StringUtils.hasText(childElm.getAttribute("languages"))) {
            String langString = childElm.getAttribute("languages");

            String[] languages = langString.split("[\\s,]+");
            mr.setLanguages(Arrays.asList(languages));
        }

        if (StringUtils.hasText(childElm.getAttribute("directory"))) {
            mr.setCustomMessageDirectory(new File(childElm
                    .getAttribute("directory")));
        }

        return mr.createMessageResource();
    }

    /**
     * Parse the "ftplets" element
     */
    // Supports either a nested beans:map element or a list of ftplet children
    // keyed by their "name" attribute.
    private Map parseFtplets(final Element childElm,
            final ParserContext parserContext,
            final BeanDefinitionBuilder builder) {

        List<Element> childs = SpringUtil.getChildElements(childElm);
        if (childs.size() > 0 && childs.get(0).getLocalName().equals("map")) {
            // using a beans:map element
            return (Map) parserContext.getDelegate().parseMapElement(
                    childs.get(0), builder.getBeanDefinition());
        } else {
            ManagedMap ftplets = new ManagedMap();
            for (Element ftpletElm : childs) {
                ftplets.put(ftpletElm.getAttribute("name"), SpringUtil
                        .parseSpringChildElement(ftpletElm, parserContext, builder));
            }

            return ftplets;
        }
    }

    /**
     * Parse listeners elements
     */
    // Each child is either a namespace "nio-listener" (custom parser) or a
    // generic "listener" Spring bean; keyed by its "name" attribute.
    @SuppressWarnings("unchecked")
    private Map parseListeners(final Element listenersElm,
            final ParserContext parserContext,
            final BeanDefinitionBuilder builder) {
        ManagedMap listeners = new ManagedMap();

        List<Element> childs = SpringUtil.getChildElements(listenersElm);

        for (Element listenerElm : childs) {
            Object listener = null;
            String ln = listenerElm.getLocalName();
            if ("nio-listener".equals(ln)) {
                listener = parserContext.getDelegate().parseCustomElement(
                        listenerElm, builder.getBeanDefinition());
            } else if ("listener".equals(ln)) {
                listener = SpringUtil.parseSpringChildElement(listenerElm,
                        parserContext, builder);
            } else {
                throw new FtpServerConfigurationException(
                        "Unknown listener element " + ln);
            }

            String name = listenerElm.getAttribute("name");

            listeners.put(name, listener);
        }

        return listeners;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.physical.rowSet.impl;

import java.util.List;

import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.physical.rowSet.impl.ColumnState.BaseMapColumnState;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.record.metadata.TupleSchema;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.vector.UInt4Vector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.AbstractMapVector;
import org.apache.drill.exec.vector.complex.MapVector;
import org.apache.drill.exec.vector.complex.RepeatedMapVector;

/**
 * Builds the harvest vector container that includes only the columns that
 * are included in the harvest schema version. That is, it excludes columns
 * added while writing an overflow row.
 * <p>
 * Because a Drill row is actually a hierarchy, walks the internal hierarchy
 * and builds a corresponding output hierarchy.
 * <ul>
 * <li>The root node is the row itself (vector container),</li>
 * <li>Internal nodes are maps (structures),</li>
 * <li>Leaf notes are primitive vectors (which may be arrays).</li>
 * </ul>
 * The basic algorithm is to identify the version of the output schema,
 * then add any new columns added up to that version. This object maintains
 * the output container across batches, meaning that updates are incremental:
 * we need only add columns that are new since the last update. And, those new
 * columns will always appear directly after all existing columns in the row
 * or in a map.
 * <p>
 * As special case occurs when columns are added in the overflow row. These
 * columns <i>do not</i> appear in the output container for the main part
 * of the batch; instead they appear in the <i>next</i> output container
 * that includes the overflow row.
 * <p>
 * Since the container here may contain a subset of the internal columns, an
 * interesting case occurs for maps. The maps in the output container are
 * <b>not</b> the same as those used internally. Since a map column can contain
 * either one list of columns or another, the internal and external maps must
 * differ. The set of child vectors (except for child maps) are shared.
 */
public class VectorContainerBuilder {

  /**
   * Drill vector containers and maps are both tuples, but they irritatingly
   * have completely different APIs for working with their child vectors.
   * This class acts as a proxy to wrap the two APIs to provide a common
   * view for the use of the container builder.
   */
  public static abstract class TupleProxy {
    protected TupleMetadata schema;

    public TupleProxy(TupleMetadata schema) {
      this.schema = schema;
    }

    // Number of child vectors currently in the underlying tuple.
    protected abstract int size();

    // Child vector at the given ordinal position.
    protected abstract ValueVector vector(int index);

    // Appends a vector to the underlying tuple.
    protected abstract void add(ValueVector vector);

    // Wraps the map child at the given index in its own proxy.
    protected TupleProxy mapProxy(int index) {
      return new MapProxy(
          schema.metadata(index).mapSchema(),
          (AbstractMapVector) vector(index));
    }
  }

  /**
   * Proxy wrapper class for a vector container.
   */
  protected static class ContainerProxy extends TupleProxy {

    private VectorContainer container;

    protected ContainerProxy(TupleMetadata schema, VectorContainer container) {
      super(schema);
      this.container = container;
    }

    @Override
    protected int size() {
      return container.getNumberOfColumns();
    }

    @Override
    protected ValueVector vector(int index) {
      return container.getValueVector(index).getValueVector();
    }

    @Override
    protected void add(ValueVector vector) {
      container.add(vector);
    }
  }

  /**
   * Proxy wrapper for a map container.
   */
  protected static class MapProxy extends TupleProxy {

    private AbstractMapVector mapVector;

    protected MapProxy(TupleMetadata schema, AbstractMapVector mapVector) {
      super(schema);
      this.mapVector = mapVector;
    }

    @Override
    protected int size() {
      return mapVector.size();
    }

    @Override
    protected ValueVector vector(int index) {
      return mapVector.getChildByOrdinal(index);
    }

    @Override
    protected void add(ValueVector vector) {
      mapVector.putChild(vector.getField().getName(), vector);
    }
  }

  private final ResultSetLoaderImpl resultSetLoader;
  // Schema version reflected by the current output container; -1 until the
  // first update().
  private int outputSchemaVersion = -1;
  // External (harvest) schema, kept in sync with the output container.
  private TupleMetadata schema;
  private VectorContainer container;

  public VectorContainerBuilder(ResultSetLoaderImpl rsLoader) {
    this.resultSetLoader = rsLoader;
    container = new VectorContainer(rsLoader.allocator);
    schema = new TupleSchema();
  }

  /**
   * Brings the output container up to the given schema version, adding any
   * columns introduced since the previous update. No-op when the container is
   * already at or past the target version.
   */
  public void update(int targetVersion) {
    if (outputSchemaVersion >= targetVersion) {
      return;
    }
    outputSchemaVersion = targetVersion;
    updateTuple(resultSetLoader.rootState(), new ContainerProxy(schema, container));
    container.buildSchema(SelectionVectorMode.NONE);
  }

  public VectorContainer container() { return container; }

  public int outputSchemaVersion() { return outputSchemaVersion; }

  public BufferAllocator allocator() {
    return resultSetLoader.allocator();
  }

  /**
   * Incrementally reconciles one tuple level (row or map): recurses into
   * existing maps to pick up their new children, then appends columns added
   * since the last update, stopping at the output schema version cutoff.
   */
  private void updateTuple(TupleState sourceModel, TupleProxy destProxy) {
    int prevCount = destProxy.size();
    List<ColumnState> cols = sourceModel.columns();
    int currentCount = cols.size();

    // Scan any existing maps for column additions
    // NOTE(review): the loop indexes the source column list by the
    // destination tuple's size; this assumes unprojected columns do not
    // perturb the index correspondence — confirm.
    for (int i = 0; i < prevCount; i++) {
      ColumnState colState = cols.get(i);
      if (! colState.schema().isProjected()) {
        continue;
      }
      if (colState.schema().isMap()) {
        updateTuple((TupleState) ((BaseMapColumnState) colState).mapState(), destProxy.mapProxy(i));
      }
    }

    // Add new columns, which may be maps
    for (int i = prevCount; i < currentCount; i++) {
      ColumnState colState = cols.get(i);
      if (! colState.schema().isProjected()) {
        continue;
      }

      // If the column was added after the output schema version cutoff,
      // skip that column for now.
      if (colState.addVersion > outputSchemaVersion) {
        break;
      }
      if (colState.schema().isMap()) {
        buildMap(destProxy, (BaseMapColumnState) colState);
      } else {
        destProxy.add(colState.vector());
        destProxy.schema.addColumn(colState.schema());
        assert destProxy.size() == destProxy.schema.size();
      }
    }
  }

  /**
   * Creates the external map vector mirroring an internal map column, adds it
   * to the parent tuple, and recursively populates its children.
   */
  @SuppressWarnings("resource")
  private void buildMap(TupleProxy parentTuple, BaseMapColumnState colModel) {

    // Creating the map vector will create its contained vectors if we
    // give it a materialized field with children. So, instead pass a clone
    // without children so we can add them.
    ColumnMetadata mapColSchema = colModel.schema().cloneEmpty();

    // Don't get the map vector from the vector cache. Map vectors may
    // have content that varies from batch to batch. Only the leaf
    // vectors can be cached.
    AbstractMapVector mapVector;
    if (mapColSchema.isArray()) {

      // A repeated map shares an offset vector with the internal
      // repeated map.
      UInt4Vector offsets = (UInt4Vector) colModel.vector();
      mapVector = new RepeatedMapVector(mapColSchema.schema(), offsets, null);
    } else {
      mapVector = new MapVector(mapColSchema.schema(), allocator(), null);
    }

    // Add the map vector and schema to the parent tuple
    parentTuple.add(mapVector);
    int index = parentTuple.schema.addColumn(mapColSchema);
    // BUG FIX: the original assertion compared parentTuple.size() to itself
    // (always true). The intent — matching the parallel assertion in
    // updateTuple() — is that vector count and schema column count stay in
    // lock step after the addition.
    assert parentTuple.size() == parentTuple.schema.size();

    // Update the tuple, which will add the new columns in the map
    updateTuple(colModel.mapState(), parentTuple.mapProxy(index));
  }

  public TupleMetadata schema() { return schema; }
}
/*
 * Copyright (c) 2013 Etsy
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.deeal.exchange.view;

import android.content.Context;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseArray;
import android.view.View;
import android.view.ViewGroup;

import com.deeal.exchange.R;

import java.util.Arrays;

/**
 * A staggered grid view which supports multiple columns with rows of varying sizes.
 * <p/>
 * Builds multiple columns on top of {@link ExtendableListView}.
 * <p/>
 * Partly inspired by - https://github.com/huewu/PinterestLikeAdapterView
 * <p/>
 * Layout model: items flow into whichever column currently has the least content.
 * The view keeps per-column top/bottom boundaries ({@link #mColumnTops} /
 * {@link #mColumnBottoms}) in sync with the children as they are laid out,
 * offset (recycled), or detached. Note the naming convention used throughout:
 * "highest" means smallest y value (closest to the top of the screen),
 * "lowest" means largest y value.
 */
public class StaggeredGridView extends ExtendableListView {

    private static final String TAG = "StaggeredGridView";
    private static final boolean DBG = false;

    private static final int DEFAULT_COLUMNS_PORTRAIT = 2;
    private static final int DEFAULT_COLUMNS_LANDSCAPE = 3;

    // Active column count for the current orientation; 0 means "not yet determined"
    // (resolved in onMeasure / onSizeChanged).
    private int mColumnCount;
    private int mItemMargin;
    private int mColumnWidth;
    // Set when restoring instance state so the next layout pass reuses the
    // saved column tops instead of resetting the bottoms (see preLayoutChildren()).
    private boolean mNeedSync;

    private int mColumnCountPortrait = DEFAULT_COLUMNS_PORTRAIT;
    private int mColumnCountLandscape = DEFAULT_COLUMNS_LANDSCAPE;

    /**
     * A key-value collection where the key is the position and the
     * {@link GridItemRecord} holds some info about that position
     * so we can maintain its position - and reorg on orientation change.
     */
    private SparseArray<GridItemRecord> mPositionData;

    // Extra padding applied to grid rows only (headers/footers span the full width).
    private int mGridPaddingLeft;
    private int mGridPaddingRight;
    private int mGridPaddingTop;
    private int mGridPaddingBottom;

    /***
     * Our grid item state record with {@link Parcelable} implementation
     * so we can persist them across the SGV lifecycle.
     */
    static class GridItemRecord implements Parcelable {

        // Column index this position was placed in.
        int column;
        // Item height expressed as a fraction of the column width, so the
        // height can be re-derived after the column width changes on rotation.
        double heightRatio;
        boolean isHeaderFooter;

        GridItemRecord() { }

        /**
         * Constructor called from {@link #CREATOR}.
         */
        private GridItemRecord(Parcel in) {
            column = in.readInt();
            heightRatio = in.readDouble();
            isHeaderFooter = in.readByte() == 1;
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public void writeToParcel(Parcel out, int flags) {
            out.writeInt(column);
            out.writeDouble(heightRatio);
            out.writeByte((byte) (isHeaderFooter ? 1 : 0));
        }

        @Override
        public String toString() {
            return "GridItemRecord.ListSavedState{"
                    + Integer.toHexString(System.identityHashCode(this))
                    + " column:" + column
                    + " heightRatio:" + heightRatio
                    + " isHeaderFooter:" + isHeaderFooter
                    + "}";
        }

        public static final Creator<GridItemRecord> CREATOR
                = new Creator<GridItemRecord>() {
            public GridItemRecord createFromParcel(Parcel in) {
                return new GridItemRecord(in);
            }

            public GridItemRecord[] newArray(int size) {
                return new GridItemRecord[size];
            }
        };
    }

    /**
     * The location of the top of each top item added in each column.
     */
    private int[] mColumnTops;

    /**
     * The location of the bottom of each bottom item added in each column.
     */
    private int[] mColumnBottoms;

    /**
     * The left location to put items for each column.
     */
    private int[] mColumnLefts;

    /***
     * Tells us the distance we've offset from the top.
     * Can be slightly off on orientation change - TESTING
     */
    private int mDistanceToTop;

    public StaggeredGridView(final Context context) {
        this(context, null);
    }

    public StaggeredGridView(final Context context, final AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public StaggeredGridView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);

        if (attrs != null) {
            // get the number of columns in portrait and landscape
            TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.StaggeredGridView, defStyle, 0);

            // column_count, if positive, overrides both orientation-specific counts
            mColumnCount = typedArray.getInteger(
                    R.styleable.StaggeredGridView_column_count, 0);

            if (mColumnCount > 0) {
                mColumnCountPortrait = mColumnCount;
                mColumnCountLandscape = mColumnCount;
            }
            else {
                mColumnCountPortrait = typedArray.getInteger(
                        R.styleable.StaggeredGridView_column_count_portrait,
                        DEFAULT_COLUMNS_PORTRAIT);
                mColumnCountLandscape = typedArray.getInteger(
                        R.styleable.StaggeredGridView_column_count_landscape,
                        DEFAULT_COLUMNS_LANDSCAPE);
            }

            mItemMargin = typedArray.getDimensionPixelSize(
                    R.styleable.StaggeredGridView_item_margin, 0);
            mGridPaddingLeft = typedArray.getDimensionPixelSize(
                    R.styleable.StaggeredGridView_grid_paddingLeft, 0);
            mGridPaddingRight = typedArray.getDimensionPixelSize(
                    R.styleable.StaggeredGridView_grid_paddingRight, 0);
            mGridPaddingTop = typedArray.getDimensionPixelSize(
                    R.styleable.StaggeredGridView_grid_paddingTop, 0);
            mGridPaddingBottom = typedArray.getDimensionPixelSize(
                    R.styleable.StaggeredGridView_grid_paddingBottom, 0);

            typedArray.recycle();
        }

        mColumnCount = 0; // determined onMeasure
        // Creating these empty arrays to avoid saving null states
        mColumnTops = new int[0];
        mColumnBottoms = new int[0];
        mColumnLefts = new int[0];
        mPositionData = new SparseArray<GridItemRecord>();
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // PROPERTIES
    //

    // Grid padding is applied to the list item rows but not the header and footer

    public int getRowPaddingLeft() {
        return getListPaddingLeft() + mGridPaddingLeft;
    }

    public int getRowPaddingRight() {
        return getListPaddingRight() + mGridPaddingRight;
    }

    public int getRowPaddingTop() {
        return getListPaddingTop() + mGridPaddingTop;
    }

    public int getRowPaddingBottom() {
        return getListPaddingBottom() + mGridPaddingBottom;
    }

    public void setGridPadding(int left, int top, int right, int bottom) {
        mGridPaddingLeft = left;
        mGridPaddingTop = top;
        mGridPaddingRight = right;
        mGridPaddingBottom = bottom;
    }

    public void setColumnCountPortrait(int columnCountPortrait) {
        mColumnCountPortrait = columnCountPortrait;
        // force a rebuild of the columns as if the size changed
        onSizeChanged(getWidth(), getHeight());
        requestLayoutChildren();
    }

    public void setColumnCountLandscape(int columnCountLandscape) {
        mColumnCountLandscape = columnCountLandscape;
        onSizeChanged(getWidth(), getHeight());
        requestLayoutChildren();
    }

    public void setColumnCount(int columnCount) {
        mColumnCountPortrait = columnCount;
        mColumnCountLandscape = columnCount;
        // mColumnCount set onSizeChanged();
        onSizeChanged(getWidth(), getHeight());
        requestLayoutChildren();
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // MEASUREMENT
    //

    private boolean isLandscape() {
        return getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
    }

    @Override
    protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);

        if (mColumnCount <= 0) {
            // first measurement pass: pick the count for the current orientation
            boolean isLandscape = isLandscape();
            mColumnCount = isLandscape ? mColumnCountLandscape : mColumnCountPortrait;
        }

        // our column width is the width of the listview
        // minus it's padding
        // minus the total items margin
        // divided by the number of columns
        mColumnWidth = calculateColumnWidth(getMeasuredWidth());

        // (re)allocate the per-column bookkeeping arrays if the count changed
        if (mColumnTops == null || mColumnTops.length != mColumnCount) {
            mColumnTops = new int[mColumnCount];
            initColumnTops();
        }
        if (mColumnBottoms == null || mColumnBottoms.length != mColumnCount) {
            mColumnBottoms = new int[mColumnCount];
            initColumnBottoms();
        }
        if (mColumnLefts == null || mColumnLefts.length != mColumnCount) {
            mColumnLefts = new int[mColumnCount];
            initColumnLefts();
        }
    }

    @Override
    protected void onMeasureChild(final View child, final LayoutParams layoutParams) {
        final int viewType = layoutParams.viewType;
        final int position = layoutParams.position;

        if (viewType == ITEM_VIEW_TYPE_HEADER_OR_FOOTER ||
                viewType == ITEM_VIEW_TYPE_IGNORE) {
            // for headers and weird ignored views
            super.onMeasureChild(child, layoutParams);
        }
        else {
            if (DBG) Log.d(TAG, "onMeasureChild BEFORE position:" + position +
                    " h:" + getMeasuredHeight());
            // measure it to the width of our column.
            int childWidthSpec = MeasureSpec.makeMeasureSpec(mColumnWidth, MeasureSpec.EXACTLY);
            int childHeightSpec;
            if (layoutParams.height > 0) {
                childHeightSpec = MeasureSpec.makeMeasureSpec(layoutParams.height, MeasureSpec.EXACTLY);
            }
            else {
                childHeightSpec = MeasureSpec.makeMeasureSpec(LayoutParams.WRAP_CONTENT, MeasureSpec.UNSPECIFIED);
            }
            child.measure(childWidthSpec, childHeightSpec);
        }

        // record height/column-width ratio so the height can be rebuilt after rotation
        final int childHeight = getChildHeight(child);
        setPositionHeightRatio(position, childHeight);

        if (DBG) Log.d(TAG, "onMeasureChild AFTER position:" + position +
                " h:" + childHeight);
    }

    public int getColumnWidth() {
        return mColumnWidth;
    }

    /**
     * Clears all cached layout/position state and scrolls back to position 0.
     */
    public void resetToTop() {
        if (mColumnCount > 0) {

            if (mColumnTops == null) {
                mColumnTops = new int[mColumnCount];
            }
            if (mColumnBottoms == null) {
                mColumnBottoms = new int[mColumnCount];
            }
            initColumnTopsAndBottoms();

            mPositionData.clear();
            mNeedSync = false;
            mDistanceToTop = 0;
            setSelection(0);
        }
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // POSITIONING
    //

    @Override
    protected void onChildCreated(final int position, final boolean flowDown) {
        super.onChildCreated(position, flowDown);
        if (!isHeaderOrFooter(position)) {
            // do we already have a column for this position?
            final int column = getChildColumn(position, flowDown);
            setPositionColumn(position, column);
            if (DBG) Log.d(TAG, "onChildCreated position:" + position +
                    " is in column:" + column);
        }
        else {
            setPositionIsHeaderFooter(position);
        }
    }

    private void requestLayoutChildren() {
        final int count = getChildCount();
        for (int i = 0; i < count; i++) {
            final View v = getChildAt(i);
            if (v != null) v.requestLayout();
        }
    }

    @Override
    protected void layoutChildren() {
        preLayoutChildren();
        super.layoutChildren();
    }

    private void preLayoutChildren() {
        // on a major re-layout reset for our next layout pass
        if (!mNeedSync) {
            Arrays.fill(mColumnBottoms, 0);
        }
        else {
            mNeedSync = false;
        }
        // copy the tops into the bottom
        // since we're going to redo a layout pass that will draw down from
        // the top
        System.arraycopy(mColumnTops, 0, mColumnBottoms, 0, mColumnCount);
    }

    // NOTE : Views will either be layout out via onLayoutChild
    // OR
    // Views will be offset if they are active but offscreen so that we can recycle!
    // Both onLayoutChild() and onOffsetChild are called after we measure our view
    // see ExtensibleListView.setupChild();

    @Override
    protected void onLayoutChild(final View child,
                                 final int position,
                                 final boolean flowDown,
                                 final int childrenLeft, final int childTop,
                                 final int childRight, final int childBottom) {
        if (isHeaderOrFooter(position)) {
            layoutGridHeaderFooter(child, position, flowDown, childrenLeft, childTop, childRight, childBottom);
        }
        else {
            layoutGridChild(child, position, flowDown, childrenLeft, childRight);
        }
    }

    private void layoutGridHeaderFooter(final View child, final int position, final boolean flowDown, final int childrenLeft, final int childTop, final int childRight, final int childBottom) {
        // offset the top and bottom of all our columns
        // if it's the footer we want it below the lowest child bottom
        int gridChildTop;
        int gridChildBottom;

        if (flowDown) {
            gridChildTop = getLowestPositionedBottom();
            gridChildBottom = gridChildTop + getChildHeight(child);
        }
        else {
            gridChildBottom = getHighestPositionedTop();
            gridChildTop = gridChildBottom - getChildHeight(child);
        }

        // a header/footer spans the full width, so every column boundary moves
        for (int i = 0; i < mColumnCount; i++) {
            updateColumnTopIfNeeded(i, gridChildTop);
            updateColumnBottomIfNeeded(i, gridChildBottom);
        }

        super.onLayoutChild(child, position, flowDown, childrenLeft, gridChildTop, childRight, gridChildBottom);
    }

    private void layoutGridChild(final View child, final int position,
                                 final boolean flowDown,
                                 final int childrenLeft, final int childRight) {
        // stash the bottom and the top if it's higher positioned
        int column = getPositionColumn(position);

        int gridChildTop;
        int gridChildBottom;

        int childTopMargin = getChildTopMargin(position);
        int childBottomMargin = getChildBottomMargin();
        int verticalMargins = childTopMargin + childBottomMargin;

        if (flowDown) {
            gridChildTop = mColumnBottoms[column]; // the next items top is the last items bottom
            gridChildBottom = gridChildTop + (getChildHeight(child) + verticalMargins);
        }
        else {
            gridChildBottom = mColumnTops[column]; // the bottom of the next column up is our top
            gridChildTop = gridChildBottom - (getChildHeight(child) + verticalMargins);
        }

        if (DBG) Log.d(TAG, "onLayoutChild position:" + position +
                " column:" + column +
                " gridChildTop:" + gridChildTop +
                " gridChildBottom:" + gridChildBottom);

        // we also know the column of this view so let's stash it in the
        // view's layout params
        GridLayoutParams layoutParams = (GridLayoutParams) child.getLayoutParams();
        layoutParams.column = column;

        updateColumnBottomIfNeeded(column, gridChildBottom);
        updateColumnTopIfNeeded(column, gridChildTop);

        // subtract the margins before layout
        gridChildTop += childTopMargin;
        gridChildBottom -= childBottomMargin;

        child.layout(childrenLeft, gridChildTop, childRight, gridChildBottom);
    }

    @Override
    protected void onOffsetChild(final View child, final int position,
                                 final boolean flowDown, final int childrenLeft, final int childTop) {
        // if the child is recycled and is just offset
        // we still want to add its deets into our store
        if (isHeaderOrFooter(position)) {
            offsetGridHeaderFooter(child, position, flowDown, childrenLeft, childTop);
        }
        else {
            offsetGridChild(child, position, flowDown, childrenLeft, childTop);
        }
    }

    private void offsetGridHeaderFooter(final View child, final int position, final boolean flowDown, final int childrenLeft, final int childTop) {
        // offset the top and bottom of all our columns
        // if it's the footer we want it below the lowest child bottom
        int gridChildTop;
        int gridChildBottom;

        if (flowDown) {
            gridChildTop = getLowestPositionedBottom();
            gridChildBottom = gridChildTop + getChildHeight(child);
        }
        else {
            gridChildBottom = getHighestPositionedTop();
            gridChildTop = gridChildBottom - getChildHeight(child);
        }

        for (int i = 0; i < mColumnCount; i++) {
            updateColumnTopIfNeeded(i, gridChildTop);
            updateColumnBottomIfNeeded(i, gridChildBottom);
        }

        super.onOffsetChild(child, position, flowDown, childrenLeft, gridChildTop);
    }

    private void offsetGridChild(final View child, final int position, final boolean flowDown, final int childrenLeft, final int childTop) {
        // stash the bottom and the top if it's higher positioned
        int column = getPositionColumn(position);

        int gridChildTop;
        int gridChildBottom;

        int childTopMargin = getChildTopMargin(position);
        int childBottomMargin = getChildBottomMargin();
        int verticalMargins = childTopMargin + childBottomMargin;

        if (flowDown) {
            gridChildTop = mColumnBottoms[column]; // the next items top is the last items bottom
            gridChildBottom = gridChildTop + (getChildHeight(child) + verticalMargins);
        }
        else {
            gridChildBottom = mColumnTops[column]; // the bottom of the next column up is our top
            gridChildTop = gridChildBottom - (getChildHeight(child) + verticalMargins);
        }

        if (DBG) Log.d(TAG, "onOffsetChild position:" + position +
                " column:" + column +
                " childTop:" + childTop +
                " gridChildTop:" + gridChildTop +
                " gridChildBottom:" + gridChildBottom);

        // we also know the column of this view so let's stash it in the
        // view's layout params
        GridLayoutParams layoutParams = (GridLayoutParams) child.getLayoutParams();
        layoutParams.column = column;

        updateColumnBottomIfNeeded(column, gridChildBottom);
        updateColumnTopIfNeeded(column, gridChildTop);

        super.onOffsetChild(child, position, flowDown, childrenLeft, gridChildTop + childTopMargin);
    }

    private int getChildHeight(final View child) {
        return child.getMeasuredHeight();
    }

    private int getChildTopMargin(final int position) {
        // only the first grid row gets a top margin (rows below inherit the
        // bottom margin of the row above)
        boolean isFirstRow = position < (getHeaderViewsCount() + mColumnCount);
        return isFirstRow ? mItemMargin : 0;
    }

    private int getChildBottomMargin() {
        return mItemMargin;
    }

    @Override
    protected LayoutParams generateChildLayoutParams(final View child) {
        GridLayoutParams layoutParams = null;

        final ViewGroup.LayoutParams childParams = child.getLayoutParams();
        if (childParams != null) {
            if (childParams instanceof GridLayoutParams) {
                layoutParams = (GridLayoutParams) childParams;
            }
            else {
                layoutParams = new GridLayoutParams(childParams);
            }
        }
        if (layoutParams == null) {
            layoutParams = new GridLayoutParams(
                    mColumnWidth,
                    ViewGroup.LayoutParams.WRAP_CONTENT);
        }

        return layoutParams;
    }

    private void updateColumnTopIfNeeded(int column, int childTop) {
        if (childTop < mColumnTops[column]) {
            mColumnTops[column] = childTop;
        }
    }

    private void updateColumnBottomIfNeeded(int column, int childBottom) {
        if (childBottom > mColumnBottoms[column]) {
            mColumnBottoms[column] = childBottom;
        }
    }

    @Override
    protected int getChildLeft(final int position) {
        if (isHeaderOrFooter(position)) {
            return super.getChildLeft(position);
        }
        else {
            final int column = getPositionColumn(position);
            return mColumnLefts[column];
        }
    }

    @Override
    protected int getChildTop(final int position) {
        if (isHeaderOrFooter(position)) {
            return super.getChildTop(position);
        }
        else {
            final int column = getPositionColumn(position);
            if (column == -1) {
                // no record yet for this position: stack onto the shortest column
                return getHighestPositionedBottom();
            }
            return mColumnBottoms[column];
        }
    }

    /**
     * Get the top for the next child down in our view
     * (maybe a column across) so we can fill down.
     */
    @Override
    protected int getNextChildDownsTop(final int position) {
        if (isHeaderOrFooter(position)) {
            return super.getNextChildDownsTop(position);
        }
        else {
            return getHighestPositionedBottom();
        }
    }

    @Override
    protected int getChildBottom(final int position) {
        if (isHeaderOrFooter(position)) {
            return super.getChildBottom(position);
        }
        else {
            final int column = getPositionColumn(position);
            if (column == -1) {
                return getLowestPositionedTop();
            }
            return mColumnTops[column];
        }
    }

    /**
     * Get the bottom for the next child up in our view
     * (maybe a column across) so we can fill up.
     */
    @Override
    protected int getNextChildUpsBottom(final int position) {
        if (isHeaderOrFooter(position)) {
            return super.getNextChildUpsBottom(position);
        }
        else {
            return getLowestPositionedTop();
        }
    }

    @Override
    protected int getLastChildBottom() {
        final int lastPosition = mFirstPosition + (getChildCount() - 1);
        if (isHeaderOrFooter(lastPosition)) {
            return super.getLastChildBottom();
        }
        return getHighestPositionedBottom();
    }

    @Override
    protected int getFirstChildTop() {
        if (isHeaderOrFooter(mFirstPosition)) {
            return super.getFirstChildTop();
        }
        return getLowestPositionedTop();
    }

    @Override
    protected int getHighestChildTop() {
        if (isHeaderOrFooter(mFirstPosition)) {
            return super.getHighestChildTop();
        }
        return getHighestPositionedTop();
    }

    @Override
    protected int getLowestChildBottom() {
        final int lastPosition = mFirstPosition + (getChildCount() - 1);
        if (isHeaderOrFooter(lastPosition)) {
            return super.getLowestChildBottom();
        }
        return getLowestPositionedBottom();
    }

    @Override
    protected void offsetChildrenTopAndBottom(final int offset) {
        super.offsetChildrenTopAndBottom(offset);
        // keep the column boundary caches in step with the actual child offsets
        offsetAllColumnsTopAndBottom(offset);
        offsetDistanceToTop(offset);
    }

    /**
     * Offsets only the children belonging to {@code column} (used when
     * re-aligning a single column, e.g. in {@link #alignTops()}).
     */
    protected void offsetChildrenTopAndBottom(final int offset, final int column) {
        if (DBG) Log.d(TAG, "offsetChildrenTopAndBottom: " + offset + " column:" + column);
        final int count = getChildCount();
        for (int i = 0; i < count; i++) {
            final View v = getChildAt(i);
            if (v != null &&
                    v.getLayoutParams() != null &&
                    v.getLayoutParams() instanceof GridLayoutParams) {
                GridLayoutParams lp = (GridLayoutParams) v.getLayoutParams();
                if (lp.column == column) {
                    v.offsetTopAndBottom(offset);
                }
            }
        }
        offsetColumnTopAndBottom(offset, column);
    }

    private void offsetDistanceToTop(final int offset) {
        mDistanceToTop += offset;
        if (DBG) Log.d(TAG, "offset mDistanceToTop:" + mDistanceToTop);
    }

    public int getDistanceToTop() {
        return mDistanceToTop;
    }

    private void offsetAllColumnsTopAndBottom(final int offset) {
        if (offset != 0) {
            for (int i = 0; i < mColumnCount; i++) {
                offsetColumnTopAndBottom(offset, i);
            }
        }
    }

    private void offsetColumnTopAndBottom(final int offset, final int column) {
        if (offset != 0) {
            mColumnTops[column] += offset;
            mColumnBottoms[column] += offset;
        }
    }

    @Override
    protected void adjustViewsAfterFillGap(final boolean down) {
        super.adjustViewsAfterFillGap(down);
        // fix vertical gaps when hitting the top after a rotate
        // only when scrolling back up!
        if (!down) {
            alignTops();
        }
    }

    private void alignTops() {
        if (mFirstPosition == getHeaderViewsCount()) {
            // we're showing all the views before the header views
            int[] nonHeaderTops = getHighestNonHeaderTops();
            // we should now have our non header tops
            // align them
            boolean isAligned = true;
            int highestColumn = -1;
            int highestTop = Integer.MAX_VALUE;
            for (int i = 0; i < nonHeaderTops.length; i++) {
                // are they all aligned
                if (isAligned && i > 0 && nonHeaderTops[i] != highestTop) {
                    isAligned = false; // not all the tops are aligned
                }
                // what's the highest
                if (nonHeaderTops[i] < highestTop) {
                    highestTop = nonHeaderTops[i];
                    highestColumn = i;
                }
            }

            // skip the rest.
            if (isAligned) return;

            // we've got the highest column - lets align the others
            for (int i = 0; i < nonHeaderTops.length; i++) {
                if (i != highestColumn) {
                    // there's a gap in this column
                    int offset = highestTop - nonHeaderTops[i];
                    offsetChildrenTopAndBottom(offset, i);
                }
            }
            invalidate();
        }
    }

    private int[] getHighestNonHeaderTops() {
        int[] nonHeaderTops = new int[mColumnCount];
        int childCount = getChildCount();
        if (childCount > 0) {
            for (int i = 0; i < childCount; i++) {
                View child = getChildAt(i);
                if (child != null &&
                        child.getLayoutParams() != null &&
                        child.getLayoutParams() instanceof GridLayoutParams) {
                    // is this child's top the highest non
                    GridLayoutParams lp = (GridLayoutParams) child.getLayoutParams();
                    // is it a child that isn't a header
                    if (lp.viewType != ITEM_VIEW_TYPE_HEADER_OR_FOOTER &&
                            child.getTop() < nonHeaderTops[lp.column]) {
                        nonHeaderTops[lp.column] = child.getTop();
                    }
                }
            }
        }
        return nonHeaderTops;
    }

    @Override
    protected void onChildrenDetached(final int start, final int count) {
        super.onChildrenDetached(start, count);
        // go through our remaining views and sync the top and bottom stash.

        // Repair the top and bottom column boundaries from the views we still have
        Arrays.fill(mColumnTops, Integer.MAX_VALUE);
        Arrays.fill(mColumnBottoms, 0);

        for (int i = 0; i < getChildCount(); i++) {
            final View child = getChildAt(i);
            if (child != null) {
                final LayoutParams childParams = (LayoutParams) child.getLayoutParams();
                if (childParams.viewType != ITEM_VIEW_TYPE_HEADER_OR_FOOTER &&
                        childParams instanceof GridLayoutParams) {
                    GridLayoutParams layoutParams = (GridLayoutParams) childParams;
                    int column = layoutParams.column;
                    int position = layoutParams.position;
                    final int childTop = child.getTop();
                    if (childTop < mColumnTops[column]) {
                        // re-add the margins stripped off before child.layout()
                        mColumnTops[column] = childTop - getChildTopMargin(position);
                    }
                    final int childBottom = child.getBottom();
                    if (childBottom > mColumnBottoms[column]) {
                        mColumnBottoms[column] = childBottom + getChildBottomMargin();
                    }
                }
                else {
                    // the header and footer here
                    final int childTop = child.getTop();
                    final int childBottom = child.getBottom();

                    for (int col = 0; col < mColumnCount; col++) {
                        if (childTop < mColumnTops[col]) {
                            mColumnTops[col] = childTop;
                        }
                        if (childBottom > mColumnBottoms[col]) {
                            mColumnBottoms[col] = childBottom;
                        }
                    }
                }
            }
        }
    }

    @Override
    protected boolean hasSpaceUp() {
        int end = mClipToPadding ? getRowPaddingTop() : 0;
        return getLowestPositionedTop() > end;
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // SYNCING ACROSS ROTATION
    //

    @Override
    protected void onSizeChanged(final int w, final int h, final int oldw, final int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        onSizeChanged(w, h);
    }

    @Override
    protected void onSizeChanged(int w, int h) {
        super.onSizeChanged(w, h);
        boolean isLandscape = isLandscape();
        int newColumnCount = isLandscape ? mColumnCountLandscape : mColumnCountPortrait;
        if (mColumnCount != newColumnCount) {
            mColumnCount = newColumnCount;

            mColumnWidth = calculateColumnWidth(w);

            mColumnTops = new int[mColumnCount];
            mColumnBottoms = new int[mColumnCount];
            mColumnLefts = new int[mColumnCount];

            mDistanceToTop = 0;

            // rebuild the columns
            initColumnTopsAndBottoms();
            initColumnLefts();

            // if we have data
            if (getCount() > 0 && mPositionData.size() > 0) {
                onColumnSync();
            }

            requestLayout();
        }
    }

    private int calculateColumnWidth(final int gridWidth) {
        final int listPadding = getRowPaddingLeft() + getRowPaddingRight();
        return (gridWidth - listPadding - mItemMargin * (mColumnCount + 1)) / mColumnCount;
    }

    private int calculateColumnLeft(final int colIndex) {
        return getRowPaddingLeft() + mItemMargin + ((mItemMargin + mColumnWidth) * colIndex);
    }

    /***
     * Our mColumnTops and mColumnBottoms need to be re-built up to the
     * mSyncPosition - the following layout request will then
     * layout the that position and then fillUp and fillDown appropriately.
     */
    private void onColumnSync() {
        // re-calc tops for new column count!
        int syncPosition = Math.min(mSyncPosition, getCount() - 1);

        // snapshot the saved height ratios before clearing the records
        SparseArray<Double> positionHeightRatios = new SparseArray<Double>(syncPosition);
        for (int pos = 0; pos < syncPosition; pos++) {
            // check for weirdness
            final GridItemRecord rec = mPositionData.get(pos);
            if (rec == null) break;

            Log.d(TAG, "onColumnSync:" + pos + " ratio:" + rec.heightRatio);
            positionHeightRatios.append(pos, rec.heightRatio);
        }

        mPositionData.clear();

        // re-calc our relative position while at the same time
        // rebuilding our GridItemRecord collection

        if (DBG) Log.d(TAG, "onColumnSync column width:" + mColumnWidth);

        for (int pos = 0; pos < syncPosition; pos++) {
            //Check for weirdness again
            final Double heightRatio = positionHeightRatios.get(pos);
            if (heightRatio == null) {
                break;
            }

            final GridItemRecord rec = getOrCreateRecord(pos);
            // rebuild the pixel height from the ratio and the NEW column width
            final int height = (int) (mColumnWidth * heightRatio);
            rec.heightRatio = heightRatio;

            int top;
            int bottom;
            // check for headers
            if (isHeaderOrFooter(pos)) {
                // the next top is the bottom for that column
                top = getLowestPositionedBottom();
                bottom = top + height;
                for (int i = 0; i < mColumnCount; i++) {
                    mColumnTops[i] = top;
                    mColumnBottoms[i] = bottom;
                }
            }
            else {
                // what's the next column down ?
                final int column = getHighestPositionedBottomColumn();
                // the next top is the bottom for that column
                top = mColumnBottoms[column];
                bottom = top + height + getChildTopMargin(pos) + getChildBottomMargin();

                mColumnTops[column] = top;
                mColumnBottoms[column] = bottom;

                rec.column = column;
            }

            if (DBG) Log.d(TAG, "onColumnSync position:" + pos +
                    " top:" + top +
                    " bottom:" + bottom +
                    " height:" + height +
                    " heightRatio:" + heightRatio);
        }

        // our sync position will be displayed in this column
        final int syncColumn = getHighestPositionedBottomColumn();
        setPositionColumn(syncPosition, syncColumn);

        // we want to offset from height of the sync position
        // minus the offset
        int syncToBottom = mColumnBottoms[syncColumn];
        int offset = -syncToBottom + mSpecificTop;
        // offset all columns by
        offsetAllColumnsTopAndBottom(offset);

        // sync the distance to top
        mDistanceToTop = -syncToBottom;

        // stash our bottoms in our tops - though these will be copied back to the bottoms
        System.arraycopy(mColumnBottoms, 0, mColumnTops, 0, mColumnCount);
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // GridItemRecord UTILS
    //

    private void setPositionColumn(final int position, final int column) {
        GridItemRecord rec = getOrCreateRecord(position);
        rec.column = column;
    }

    private void setPositionHeightRatio(final int position, final int height) {
        GridItemRecord rec = getOrCreateRecord(position);
        rec.heightRatio = (double) height / (double) mColumnWidth;
        if (DBG) Log.d(TAG, "position:" + position +
                " width:" + mColumnWidth +
                " height:" + height +
                " heightRatio:" + rec.heightRatio);
    }

    private void setPositionIsHeaderFooter(final int position) {
        GridItemRecord rec = getOrCreateRecord(position);
        rec.isHeaderFooter = true;
    }

    private GridItemRecord getOrCreateRecord(final int position) {
        GridItemRecord rec = mPositionData.get(position, null);
        if (rec == null) {
            rec = new GridItemRecord();
            mPositionData.append(position, rec);
        }
        return rec;
    }

    /** Returns the recorded column for {@code position}, or -1 if unknown. */
    private int getPositionColumn(final int position) {
        GridItemRecord rec = mPositionData.get(position, null);
        return rec != null ? rec.column : -1;
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // HELPERS
    //

    private boolean isHeaderOrFooter(final int position) {
        final int viewType = mAdapter.getItemViewType(position);
        return viewType == ITEM_VIEW_TYPE_HEADER_OR_FOOTER;
    }

    private int getChildColumn(final int position, final boolean flowDown) {

        // do we already have a column for this child position?
        int column = getPositionColumn(position);
        // we don't have the column or it no longer fits in our grid
        final int columnCount = mColumnCount;
        if (column < 0 || column >= columnCount) {
            // if we're going down -
            // get the highest positioned (lowest value)
            // column bottom
            if (flowDown) {
                column = getHighestPositionedBottomColumn();
            }
            else {
                column = getLowestPositionedTopColumn();
            }
        }
        return column;
    }

    private void initColumnTopsAndBottoms() {
        initColumnTops();
        initColumnBottoms();
    }

    private void initColumnTops() {
        Arrays.fill(mColumnTops, getPaddingTop() + mGridPaddingTop);
    }

    private void initColumnBottoms() {
        Arrays.fill(mColumnBottoms, getPaddingTop() + mGridPaddingTop);
    }

    private void initColumnLefts() {
        for (int i = 0; i < mColumnCount; i++) {
            mColumnLefts[i] = calculateColumnLeft(i);
        }
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // BOTTOM
    //

    private int getHighestPositionedBottom() {
        final int column = getHighestPositionedBottomColumn();
        return mColumnBottoms[column];
    }

    private int getHighestPositionedBottomColumn() {
        int columnFound = 0;
        int highestPositionedBottom = Integer.MAX_VALUE;
        // the highest positioned bottom is the one with the lowest value :D
        for (int i = 0; i < mColumnCount; i++) {
            int bottom = mColumnBottoms[i];
            if (bottom < highestPositionedBottom) {
                highestPositionedBottom = bottom;
                columnFound = i;
            }
        }
        return columnFound;
    }

    private int getLowestPositionedBottom() {
        final int column = getLowestPositionedBottomColumn();
        return mColumnBottoms[column];
    }

    private int getLowestPositionedBottomColumn() {
        int columnFound = 0;
        int lowestPositionedBottom = Integer.MIN_VALUE;
        // the lowest positioned bottom is the one with the highest value :D
        for (int i = 0; i < mColumnCount; i++) {
            int bottom = mColumnBottoms[i];
            if (bottom > lowestPositionedBottom) {
                lowestPositionedBottom = bottom;
                columnFound = i;
            }
        }
        return columnFound;
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // TOP
    //

    private int getLowestPositionedTop() {
        final int column = getLowestPositionedTopColumn();
        return mColumnTops[column];
    }

    private int getLowestPositionedTopColumn() {
        int columnFound = 0;
        // we'll go backwards through since the right most
        // will likely be the lowest positioned Top
        int lowestPositionedTop = Integer.MIN_VALUE;
        // the lowest positioned top is the one with the highest value :D
        for (int i = 0; i < mColumnCount; i++) {
            int top = mColumnTops[i];
            if (top > lowestPositionedTop) {
                lowestPositionedTop = top;
                columnFound = i;
            }
        }
        return columnFound;
    }

    private int getHighestPositionedTop() {
        final int column = getHighestPositionedTopColumn();
        return mColumnTops[column];
    }

    private int getHighestPositionedTopColumn() {
        int columnFound = 0;
        int highestPositionedTop = Integer.MAX_VALUE;
        // the highest positioned top is the one with the lowest value :D
        for (int i = 0; i < mColumnCount; i++) {
            int top = mColumnTops[i];
            if (top < highestPositionedTop) {
                highestPositionedTop = top;
                columnFound = i;
            }
        }
        return columnFound;
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // LAYOUT PARAMS
    //

    /**
     * Extended LayoutParams to column position and anything else we may been for the grid
     */
    public static class GridLayoutParams extends LayoutParams {

        // The column the view is displayed in
        int column;

        public GridLayoutParams(Context c, AttributeSet attrs) {
            super(c, attrs);
            enforceStaggeredLayout();
        }

        public GridLayoutParams(int w, int h) {
            super(w, h);
            enforceStaggeredLayout();
        }

        public GridLayoutParams(int w, int h, int viewType) {
            super(w, h);
            enforceStaggeredLayout();
        }

        public GridLayoutParams(ViewGroup.LayoutParams source) {
            super(source);
            enforceStaggeredLayout();
        }

        /**
         * Here we're making sure that all grid view items
         * are width MATCH_PARENT and height WRAP_CONTENT.
         * That's what this grid is designed for
         */
        private void enforceStaggeredLayout() {
            if (width != MATCH_PARENT) {
                width = MATCH_PARENT;
            }
            if (height == MATCH_PARENT) {
                height = WRAP_CONTENT;
            }
        }
    }

    // //////////////////////////////////////////////////////////////////////////////////////////
    // SAVED STATE

    public static class GridListSavedState extends ListSavedState {

        int columnCount;
        int[] columnTops;
        // NOTE(review): raw SparseArray — presumably holds GridItemRecord values; verify callers.
        SparseArray positionData;

        public GridListSavedState(Parcelable superState) {
            super(superState);
        }

        /**
         * Constructor called from {@link #CREATOR}
         */
        public GridListSavedState(Parcel in) {
            super(in);
            columnCount = in.readInt();
            columnTops = new int[columnCount >= 0 ? columnCount : 0];
            in.readIntArray(columnTops);
            positionData = in.readSparseArray(GridItemRecord.class.getClassLoader());
        }

        @Override
        public void writeToParcel(Parcel out, int flags) {
            super.writeToParcel(out, flags);
            out.writeInt(columnCount);
            out.writeIntArray(columnTops);
            out.writeSparseArray(positionData);
        }

        @Override
        public String toString() {
            return "StaggeredGridView.GridListSavedState{"
                    + Integer.toHexString(System.identityHashCode(this)) + "}";
        }

        public static final Creator<GridListSavedState> CREATOR
                = new Creator<GridListSavedState>() {
            public GridListSavedState createFromParcel(Parcel in) {
                return new GridListSavedState(in);
            }

            public GridListSavedState[] newArray(int size) {
                return new GridListSavedState[size];
            }
        };
    }

    @Override
    public Parcelable onSaveInstanceState() {
        ListSavedState listState = (ListSavedState) super.onSaveInstanceState();
        GridListSavedState ss = new GridListSavedState(listState.getSuperState());

        // from the list state
        ss.selectedId = listState.selectedId;
        ss.firstId = listState.firstId;
        ss.viewTop = listState.viewTop;
        ss.position = listState.position;
        ss.height = listState.height;

        // our state
        boolean haveChildren = getChildCount() > 0 && getCount() > 0;

        if (haveChildren && mFirstPosition > 0) {
            ss.columnCount = mColumnCount;
            ss.columnTops = mColumnTops;
            ss.positionData = mPositionData;
        }
        else {
            ss.columnCount = mColumnCount >= 0 ? mColumnCount : 0;
            ss.columnTops = new int[ss.columnCount];
            ss.positionData = new SparseArray<Object>();
        }

        return ss;
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        GridListSavedState ss = (GridListSavedState) state;
        mColumnCount = ss.columnCount;
        mColumnTops = ss.columnTops;
        mColumnBottoms = new int[mColumnCount];
        mPositionData = ss.positionData;
        // force the next layout pass to sync from the restored column tops
        mNeedSync = true;
        super.onRestoreInstanceState(ss);
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;

/**
 * RequestSpotFleetRequest Marshaller
 *
 * <p>Flattens a {@link RequestSpotFleetRequest} into EC2 Query-protocol request parameters
 * (dotted, 1-based indexed keys, e.g. {@code SpotFleetRequestConfig.LaunchSpecifications.1.ImageId}).
 * This class is CODE-GENERATED (see the {@code @Generated} annotation) — do not hand-edit the
 * parameter names or ordering; regenerate instead.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RequestSpotFleetRequestMarshaller implements Marshaller<Request<RequestSpotFleetRequest>, RequestSpotFleetRequest> {

    /**
     * Marshals the given request into a POST {@link Request} for the EC2
     * {@code RequestSpotFleet} action (API version 2016-11-15).
     *
     * @param requestSpotFleetRequest the model object to marshal; must not be null
     * @return the wire-ready request
     * @throws SdkClientException if {@code requestSpotFleetRequest} is null
     */
    public Request<RequestSpotFleetRequest> marshall(RequestSpotFleetRequest requestSpotFleetRequest) {

        if (requestSpotFleetRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        Request<RequestSpotFleetRequest> request = new DefaultRequest<RequestSpotFleetRequest>(requestSpotFleetRequest, "AmazonEC2");
        request.addParameter("Action", "RequestSpotFleet");
        request.addParameter("Version", "2016-11-15");
        request.setHttpMethod(HttpMethodName.POST);

        SpotFleetRequestConfigData spotFleetRequestConfig = requestSpotFleetRequest.getSpotFleetRequestConfig();
        if (spotFleetRequestConfig != null) {

            // Top-level fleet configuration scalars. Only non-null members are emitted.
            if (spotFleetRequestConfig.getClientToken() != null) {
                request.addParameter("SpotFleetRequestConfig.ClientToken", StringUtils.fromString(spotFleetRequestConfig.getClientToken()));
            }

            if (spotFleetRequestConfig.getSpotPrice() != null) {
                request.addParameter("SpotFleetRequestConfig.SpotPrice", StringUtils.fromString(spotFleetRequestConfig.getSpotPrice()));
            }

            if (spotFleetRequestConfig.getTargetCapacity() != null) {
                request.addParameter("SpotFleetRequestConfig.TargetCapacity", StringUtils.fromInteger(spotFleetRequestConfig.getTargetCapacity()));
            }

            if (spotFleetRequestConfig.getValidFrom() != null) {
                request.addParameter("SpotFleetRequestConfig.ValidFrom", StringUtils.fromDate(spotFleetRequestConfig.getValidFrom()));
            }

            if (spotFleetRequestConfig.getValidUntil() != null) {
                request.addParameter("SpotFleetRequestConfig.ValidUntil", StringUtils.fromDate(spotFleetRequestConfig.getValidUntil()));
            }

            if (spotFleetRequestConfig.getTerminateInstancesWithExpiration() != null) {
                request.addParameter("SpotFleetRequestConfig.TerminateInstancesWithExpiration",
                        StringUtils.fromBoolean(spotFleetRequestConfig.getTerminateInstancesWithExpiration()));
            }

            if (spotFleetRequestConfig.getIamFleetRole() != null) {
                request.addParameter("SpotFleetRequestConfig.IamFleetRole", StringUtils.fromString(spotFleetRequestConfig.getIamFleetRole()));
            }

            // Launch specifications (1-based list index). Auto-constructed empty lists are
            // skipped so that an unset member emits no parameters at all.
            com.amazonaws.internal.SdkInternalList<SpotFleetLaunchSpecification> spotFleetRequestConfigDataLaunchSpecificationsList = (com.amazonaws.internal.SdkInternalList<SpotFleetLaunchSpecification>) spotFleetRequestConfig
                    .getLaunchSpecifications();
            if (!spotFleetRequestConfigDataLaunchSpecificationsList.isEmpty() || !spotFleetRequestConfigDataLaunchSpecificationsList.isAutoConstruct()) {
                int launchSpecificationsListIndex = 1;

                for (SpotFleetLaunchSpecification spotFleetRequestConfigDataLaunchSpecificationsListValue : spotFleetRequestConfigDataLaunchSpecificationsList) {

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getImageId() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".ImageId",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getImageId()));
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getKeyName() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".KeyName",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getKeyName()));
                    }

                    // Security groups → GroupSet.N.{GroupName,GroupId}
                    com.amazonaws.internal.SdkInternalList<GroupIdentifier> spotFleetLaunchSpecificationSecurityGroupsList = (com.amazonaws.internal.SdkInternalList<GroupIdentifier>) spotFleetRequestConfigDataLaunchSpecificationsListValue
                            .getSecurityGroups();
                    if (!spotFleetLaunchSpecificationSecurityGroupsList.isEmpty() || !spotFleetLaunchSpecificationSecurityGroupsList.isAutoConstruct()) {
                        int securityGroupsListIndex = 1;

                        for (GroupIdentifier spotFleetLaunchSpecificationSecurityGroupsListValue : spotFleetLaunchSpecificationSecurityGroupsList) {

                            if (spotFleetLaunchSpecificationSecurityGroupsListValue.getGroupName() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".GroupSet."
                                        + securityGroupsListIndex + ".GroupName",
                                        StringUtils.fromString(spotFleetLaunchSpecificationSecurityGroupsListValue.getGroupName()));
                            }

                            if (spotFleetLaunchSpecificationSecurityGroupsListValue.getGroupId() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".GroupSet."
                                        + securityGroupsListIndex + ".GroupId",
                                        StringUtils.fromString(spotFleetLaunchSpecificationSecurityGroupsListValue.getGroupId()));
                            }
                            securityGroupsListIndex++;
                        }
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getUserData() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".UserData",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getUserData()));
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getAddressingType() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".AddressingType",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getAddressingType()));
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getInstanceType() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".InstanceType",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getInstanceType()));
                    }

                    // Nested placement structure.
                    SpotPlacement placement = spotFleetRequestConfigDataLaunchSpecificationsListValue.getPlacement();
                    if (placement != null) {

                        if (placement.getAvailabilityZone() != null) {
                            request.addParameter(
                                    "SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".Placement.AvailabilityZone",
                                    StringUtils.fromString(placement.getAvailabilityZone()));
                        }

                        if (placement.getGroupName() != null) {
                            request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".Placement.GroupName",
                                    StringUtils.fromString(placement.getGroupName()));
                        }

                        if (placement.getTenancy() != null) {
                            request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".Placement.Tenancy",
                                    StringUtils.fromString(placement.getTenancy()));
                        }
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getKernelId() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".KernelId",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getKernelId()));
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getRamdiskId() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".RamdiskId",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getRamdiskId()));
                    }

                    // Block device mappings → BlockDeviceMapping.N.{...} plus nested Ebs structure.
                    com.amazonaws.internal.SdkInternalList<BlockDeviceMapping> spotFleetLaunchSpecificationBlockDeviceMappingsList = (com.amazonaws.internal.SdkInternalList<BlockDeviceMapping>) spotFleetRequestConfigDataLaunchSpecificationsListValue
                            .getBlockDeviceMappings();
                    if (!spotFleetLaunchSpecificationBlockDeviceMappingsList.isEmpty() || !spotFleetLaunchSpecificationBlockDeviceMappingsList.isAutoConstruct()) {
                        int blockDeviceMappingsListIndex = 1;

                        for (BlockDeviceMapping spotFleetLaunchSpecificationBlockDeviceMappingsListValue : spotFleetLaunchSpecificationBlockDeviceMappingsList) {

                            if (spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getVirtualName() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                        + blockDeviceMappingsListIndex + ".VirtualName",
                                        StringUtils.fromString(spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getVirtualName()));
                            }

                            if (spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getDeviceName() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                        + blockDeviceMappingsListIndex + ".DeviceName",
                                        StringUtils.fromString(spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getDeviceName()));
                            }

                            EbsBlockDevice ebs = spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getEbs();
                            if (ebs != null) {

                                if (ebs.getSnapshotId() != null) {
                                    request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                            + blockDeviceMappingsListIndex + ".Ebs.SnapshotId", StringUtils.fromString(ebs.getSnapshotId()));
                                }

                                if (ebs.getVolumeSize() != null) {
                                    request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                            + blockDeviceMappingsListIndex + ".Ebs.VolumeSize", StringUtils.fromInteger(ebs.getVolumeSize()));
                                }

                                if (ebs.getDeleteOnTermination() != null) {
                                    request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                            + blockDeviceMappingsListIndex + ".Ebs.DeleteOnTermination", StringUtils.fromBoolean(ebs.getDeleteOnTermination()));
                                }

                                if (ebs.getVolumeType() != null) {
                                    request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                            + blockDeviceMappingsListIndex + ".Ebs.VolumeType", StringUtils.fromString(ebs.getVolumeType()));
                                }

                                if (ebs.getIops() != null) {
                                    request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                            + blockDeviceMappingsListIndex + ".Ebs.Iops", StringUtils.fromInteger(ebs.getIops()));
                                }

                                if (ebs.getEncrypted() != null) {
                                    request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                            + blockDeviceMappingsListIndex + ".Ebs.Encrypted", StringUtils.fromBoolean(ebs.getEncrypted()));
                                }
                            }

                            if (spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getNoDevice() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".BlockDeviceMapping."
                                        + blockDeviceMappingsListIndex + ".NoDevice",
                                        StringUtils.fromString(spotFleetLaunchSpecificationBlockDeviceMappingsListValue.getNoDevice()));
                            }
                            blockDeviceMappingsListIndex++;
                        }
                    }

                    SpotFleetMonitoring monitoring = spotFleetRequestConfigDataLaunchSpecificationsListValue.getMonitoring();
                    if (monitoring != null) {

                        if (monitoring.getEnabled() != null) {
                            request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".Monitoring.Enabled",
                                    StringUtils.fromBoolean(monitoring.getEnabled()));
                        }
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getSubnetId() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".SubnetId",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getSubnetId()));
                    }

                    // Network interfaces → NetworkInterfaceSet.N with several nested lists.
                    com.amazonaws.internal.SdkInternalList<InstanceNetworkInterfaceSpecification> spotFleetLaunchSpecificationNetworkInterfacesList = (com.amazonaws.internal.SdkInternalList<InstanceNetworkInterfaceSpecification>) spotFleetRequestConfigDataLaunchSpecificationsListValue
                            .getNetworkInterfaces();
                    if (!spotFleetLaunchSpecificationNetworkInterfacesList.isEmpty() || !spotFleetLaunchSpecificationNetworkInterfacesList.isAutoConstruct()) {
                        int networkInterfacesListIndex = 1;

                        for (InstanceNetworkInterfaceSpecification spotFleetLaunchSpecificationNetworkInterfacesListValue : spotFleetLaunchSpecificationNetworkInterfacesList) {

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getNetworkInterfaceId() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".NetworkInterfaceId",
                                        StringUtils.fromString(spotFleetLaunchSpecificationNetworkInterfacesListValue.getNetworkInterfaceId()));
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getDeviceIndex() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".DeviceIndex",
                                        StringUtils.fromInteger(spotFleetLaunchSpecificationNetworkInterfacesListValue.getDeviceIndex()));
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getSubnetId() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".SubnetId",
                                        StringUtils.fromString(spotFleetLaunchSpecificationNetworkInterfacesListValue.getSubnetId()));
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getDescription() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".Description",
                                        StringUtils.fromString(spotFleetLaunchSpecificationNetworkInterfacesListValue.getDescription()));
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getPrivateIpAddress() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".PrivateIpAddress",
                                        StringUtils.fromString(spotFleetLaunchSpecificationNetworkInterfacesListValue.getPrivateIpAddress()));
                            }

                            // Security group ids — flat string list, key ends with the index itself.
                            com.amazonaws.internal.SdkInternalList<String> instanceNetworkInterfaceSpecificationGroupsList = (com.amazonaws.internal.SdkInternalList<String>) spotFleetLaunchSpecificationNetworkInterfacesListValue
                                    .getGroups();
                            if (!instanceNetworkInterfaceSpecificationGroupsList.isEmpty() || !instanceNetworkInterfaceSpecificationGroupsList.isAutoConstruct()) {
                                int groupsListIndex = 1;

                                for (String instanceNetworkInterfaceSpecificationGroupsListValue : instanceNetworkInterfaceSpecificationGroupsList) {
                                    if (instanceNetworkInterfaceSpecificationGroupsListValue != null) {
                                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex
                                                + ".NetworkInterfaceSet." + networkInterfacesListIndex + ".SecurityGroupId." + groupsListIndex,
                                                StringUtils.fromString(instanceNetworkInterfaceSpecificationGroupsListValue));
                                    }
                                    groupsListIndex++;
                                }
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getDeleteOnTermination() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".DeleteOnTermination",
                                        StringUtils.fromBoolean(spotFleetLaunchSpecificationNetworkInterfacesListValue.getDeleteOnTermination()));
                            }

                            com.amazonaws.internal.SdkInternalList<PrivateIpAddressSpecification> instanceNetworkInterfaceSpecificationPrivateIpAddressesList = (com.amazonaws.internal.SdkInternalList<PrivateIpAddressSpecification>) spotFleetLaunchSpecificationNetworkInterfacesListValue
                                    .getPrivateIpAddresses();
                            if (!instanceNetworkInterfaceSpecificationPrivateIpAddressesList.isEmpty()
                                    || !instanceNetworkInterfaceSpecificationPrivateIpAddressesList.isAutoConstruct()) {
                                int privateIpAddressesListIndex = 1;

                                for (PrivateIpAddressSpecification instanceNetworkInterfaceSpecificationPrivateIpAddressesListValue : instanceNetworkInterfaceSpecificationPrivateIpAddressesList) {

                                    if (instanceNetworkInterfaceSpecificationPrivateIpAddressesListValue.getPrivateIpAddress() != null) {
                                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex
                                                + ".NetworkInterfaceSet." + networkInterfacesListIndex + ".PrivateIpAddresses." + privateIpAddressesListIndex
                                                + ".PrivateIpAddress",
                                                StringUtils.fromString(instanceNetworkInterfaceSpecificationPrivateIpAddressesListValue.getPrivateIpAddress()));
                                    }

                                    if (instanceNetworkInterfaceSpecificationPrivateIpAddressesListValue.getPrimary() != null) {
                                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex
                                                + ".NetworkInterfaceSet." + networkInterfacesListIndex + ".PrivateIpAddresses." + privateIpAddressesListIndex
                                                + ".Primary",
                                                StringUtils.fromBoolean(instanceNetworkInterfaceSpecificationPrivateIpAddressesListValue.getPrimary()));
                                    }
                                    privateIpAddressesListIndex++;
                                }
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getSecondaryPrivateIpAddressCount() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".SecondaryPrivateIpAddressCount",
                                        StringUtils.fromInteger(spotFleetLaunchSpecificationNetworkInterfacesListValue.getSecondaryPrivateIpAddressCount()));
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getAssociatePublicIpAddress() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".AssociatePublicIpAddress",
                                        StringUtils.fromBoolean(spotFleetLaunchSpecificationNetworkInterfacesListValue.getAssociatePublicIpAddress()));
                            }

                            com.amazonaws.internal.SdkInternalList<InstanceIpv6Address> instanceNetworkInterfaceSpecificationIpv6AddressesList = (com.amazonaws.internal.SdkInternalList<InstanceIpv6Address>) spotFleetLaunchSpecificationNetworkInterfacesListValue
                                    .getIpv6Addresses();
                            if (!instanceNetworkInterfaceSpecificationIpv6AddressesList.isEmpty()
                                    || !instanceNetworkInterfaceSpecificationIpv6AddressesList.isAutoConstruct()) {
                                int ipv6AddressesListIndex = 1;

                                for (InstanceIpv6Address instanceNetworkInterfaceSpecificationIpv6AddressesListValue : instanceNetworkInterfaceSpecificationIpv6AddressesList) {

                                    if (instanceNetworkInterfaceSpecificationIpv6AddressesListValue.getIpv6Address() != null) {
                                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex
                                                + ".NetworkInterfaceSet." + networkInterfacesListIndex + ".Ipv6Addresses." + ipv6AddressesListIndex
                                                + ".Ipv6Address",
                                                StringUtils.fromString(instanceNetworkInterfaceSpecificationIpv6AddressesListValue.getIpv6Address()));
                                    }
                                    ipv6AddressesListIndex++;
                                }
                            }

                            if (spotFleetLaunchSpecificationNetworkInterfacesListValue.getIpv6AddressCount() != null) {
                                request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".NetworkInterfaceSet."
                                        + networkInterfacesListIndex + ".Ipv6AddressCount",
                                        StringUtils.fromInteger(spotFleetLaunchSpecificationNetworkInterfacesListValue.getIpv6AddressCount()));
                            }
                            networkInterfacesListIndex++;
                        }
                    }

                    IamInstanceProfileSpecification iamInstanceProfile = spotFleetRequestConfigDataLaunchSpecificationsListValue.getIamInstanceProfile();
                    if (iamInstanceProfile != null) {

                        if (iamInstanceProfile.getArn() != null) {
                            request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".IamInstanceProfile.Arn",
                                    StringUtils.fromString(iamInstanceProfile.getArn()));
                        }

                        if (iamInstanceProfile.getName() != null) {
                            request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".IamInstanceProfile.Name",
                                    StringUtils.fromString(iamInstanceProfile.getName()));
                        }
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getEbsOptimized() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".EbsOptimized",
                                StringUtils.fromBoolean(spotFleetRequestConfigDataLaunchSpecificationsListValue.getEbsOptimized()));
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getWeightedCapacity() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".WeightedCapacity",
                                StringUtils.fromDouble(spotFleetRequestConfigDataLaunchSpecificationsListValue.getWeightedCapacity()));
                    }

                    if (spotFleetRequestConfigDataLaunchSpecificationsListValue.getSpotPrice() != null) {
                        request.addParameter("SpotFleetRequestConfig.LaunchSpecifications." + launchSpecificationsListIndex + ".SpotPrice",
                                StringUtils.fromString(spotFleetRequestConfigDataLaunchSpecificationsListValue.getSpotPrice()));
                    }
                    launchSpecificationsListIndex++;
                }
            }

            // Remaining top-level fleet configuration scalars.
            if (spotFleetRequestConfig.getExcessCapacityTerminationPolicy() != null) {
                request.addParameter("SpotFleetRequestConfig.ExcessCapacityTerminationPolicy",
                        StringUtils.fromString(spotFleetRequestConfig.getExcessCapacityTerminationPolicy()));
            }

            if (spotFleetRequestConfig.getAllocationStrategy() != null) {
                request.addParameter("SpotFleetRequestConfig.AllocationStrategy", StringUtils.fromString(spotFleetRequestConfig.getAllocationStrategy()));
            }

            if (spotFleetRequestConfig.getFulfilledCapacity() != null) {
                request.addParameter("SpotFleetRequestConfig.FulfilledCapacity", StringUtils.fromDouble(spotFleetRequestConfig.getFulfilledCapacity()));
            }

            if (spotFleetRequestConfig.getType() != null) {
                request.addParameter("SpotFleetRequestConfig.Type", StringUtils.fromString(spotFleetRequestConfig.getType()));
            }

            if (spotFleetRequestConfig.getReplaceUnhealthyInstances() != null) {
                request.addParameter("SpotFleetRequestConfig.ReplaceUnhealthyInstances",
                        StringUtils.fromBoolean(spotFleetRequestConfig.getReplaceUnhealthyInstances()));
            }
        }

        return request;
    }

}
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package libcore.javax.net.ssl;

import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import javax.net.ssl.SSLSessionContext;
import junit.framework.TestCase;

/**
 * Exercises {@link SSLSessionContext}: session id enumeration, session lookup,
 * session cache sizing, and session timeouts, for both the client-side and
 * server-side contexts of a {@link TestSSLContext} pair.
 *
 * <p>Review note: fixed raw-type usages (unparameterized {@code LinkedList} and
 * {@code Enumeration}) so the file compiles without unchecked warnings; behavior
 * is unchanged.
 */
public class SSLSessionContextTest extends TestCase {

    /**
     * Asserts that the client context's client cache and the server context's
     * server cache each hold {@code expected} sessions, and that the "wrong side"
     * caches (server's client cache, client's server cache) are empty.
     */
    public static final void assertSSLSessionContextSize(int expected, TestSSLContext c) {
        assertSSLSessionContextSize(expected,
                                    c.clientContext.getClientSessionContext(),
                                    c.serverContext.getServerSessionContext());
        assertSSLSessionContextSize(0,
                                    c.serverContext.getClientSessionContext(),
                                    c.clientContext.getServerSessionContext());
    }

    public static final void assertSSLSessionContextSize(int expected,
                                                         SSLSessionContext client,
                                                         SSLSessionContext server) {
        assertSSLSessionContextSize(expected, client, false);
        assertSSLSessionContextSize(expected, server, true);
    }

    /**
     * Asserts the number of cached session ids. A server that uses session
     * tickets (stateless resumption) caches nothing, so its expected size is 0.
     */
    public static final void assertSSLSessionContextSize(int expected, SSLSessionContext s, boolean server) {
        int size = Collections.list(s.getIds()).size();
        if (server && TestSSLContext.sslServerSocketSupportsSessionTickets()) {
            assertEquals(0, size);
        } else {
            assertEquals(expected, size);
        }
    }

    public void test_SSLSessionContext_getIds() {
        TestSSLContext c = TestSSLContext.create();
        assertSSLSessionContextSize(0, c);

        TestSSLSocketPair s = TestSSLSocketPair.create();
        assertSSLSessionContextSize(1, s.c);
        Enumeration<byte[]> clientIds = s.c.clientContext.getClientSessionContext().getIds();
        Enumeration<byte[]> serverIds = s.c.serverContext.getServerSessionContext().getIds();
        byte[] clientId = clientIds.nextElement();
        // TLS session ids are 32 bytes.
        assertEquals(32, clientId.length);
        if (TestSSLContext.sslServerSocketSupportsSessionTickets()) {
            assertFalse(serverIds.hasMoreElements());
        } else {
            byte[] serverId = serverIds.nextElement();
            assertEquals(32, serverId.length);
            assertTrue(Arrays.equals(clientId, serverId));
        }
    }

    public void test_SSLSessionContext_getSession() {
        TestSSLContext c = TestSSLContext.create();
        // Null id is rejected; unknown ids simply return null.
        try {
            c.clientContext.getClientSessionContext().getSession(null);
            fail();
        } catch (NullPointerException expected) {
        }
        assertNull(c.clientContext.getClientSessionContext().getSession(new byte[0]));
        assertNull(c.clientContext.getClientSessionContext().getSession(new byte[1]));
        try {
            c.serverContext.getServerSessionContext().getSession(null);
            fail();
        } catch (NullPointerException expected) {
        }
        assertNull(c.serverContext.getServerSessionContext().getSession(new byte[0]));
        assertNull(c.serverContext.getServerSessionContext().getSession(new byte[1]));

        // After one handshake, each cached id must round-trip through getSession.
        TestSSLSocketPair s = TestSSLSocketPair.create();
        SSLSessionContext client = s.c.clientContext.getClientSessionContext();
        SSLSessionContext server = s.c.serverContext.getServerSessionContext();
        byte[] clientId = client.getIds().nextElement();
        assertNotNull(client.getSession(clientId));
        assertTrue(Arrays.equals(clientId, client.getSession(clientId).getId()));
        if (TestSSLContext.sslServerSocketSupportsSessionTickets()) {
            assertFalse(server.getIds().hasMoreElements());
        } else {
            byte[] serverId = server.getIds().nextElement();
            assertNotNull(server.getSession(serverId));
            assertTrue(Arrays.equals(serverId, server.getSession(serverId).getId()));
        }
    }

    public void test_SSLSessionContext_getSessionCacheSize() {
        TestSSLContext c = TestSSLContext.create();
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_CLIENT_SSL_SESSION_CACHE_SIZE,
                     c.clientContext.getClientSessionContext().getSessionCacheSize());
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SERVER_SSL_SESSION_CACHE_SIZE,
                     c.serverContext.getServerSessionContext().getSessionCacheSize());

        // Connecting must not change the configured cache sizes.
        TestSSLSocketPair s = TestSSLSocketPair.create();
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_CLIENT_SSL_SESSION_CACHE_SIZE,
                     s.c.clientContext.getClientSessionContext().getSessionCacheSize());
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SERVER_SSL_SESSION_CACHE_SIZE,
                     s.c.serverContext.getServerSessionContext().getSessionCacheSize());
    }

    public void test_SSLSessionContext_setSessionCacheSize_noConnect() {
        TestSSLContext c = TestSSLContext.create();
        assertNoConnectSetSessionCacheSizeBehavior(
                TestSSLContext.EXPECTED_DEFAULT_CLIENT_SSL_SESSION_CACHE_SIZE,
                c.clientContext.getClientSessionContext());
        assertNoConnectSetSessionCacheSizeBehavior(
                TestSSLContext.EXPECTED_DEFAULT_SERVER_SSL_SESSION_CACHE_SIZE,
                c.serverContext.getServerSessionContext());
    }

    /** Negative sizes are rejected; a positive size takes effect immediately. */
    private static void assertNoConnectSetSessionCacheSizeBehavior(int expectedDefault,
                                                                   SSLSessionContext s) {
        try {
            s.setSessionCacheSize(-1);
            fail();
        } catch (IllegalArgumentException expected) {
        }
        assertEquals(expectedDefault, s.getSessionCacheSize());
        s.setSessionCacheSize(1);
        assertEquals(1, s.getSessionCacheSize());
    }

    public void test_SSLSessionContext_setSessionCacheSize_oneConnect() {
        TestSSLSocketPair s = TestSSLSocketPair.create();
        SSLSessionContext client = s.c.clientContext.getClientSessionContext();
        SSLSessionContext server = s.c.serverContext.getServerSessionContext();
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_CLIENT_SSL_SESSION_CACHE_SIZE,
                     client.getSessionCacheSize());
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SERVER_SSL_SESSION_CACHE_SIZE,
                     server.getSessionCacheSize());
        assertSSLSessionContextSize(1, s.c);
    }

    public void test_SSLSessionContext_setSessionCacheSize_dynamic() {
        TestSSLContext c = TestSSLContext.create();
        SSLSessionContext client = c.clientContext.getClientSessionContext();
        SSLSessionContext server = c.serverContext.getServerSessionContext();

        String[] supportedCipherSuites = c.serverSocket.getSupportedCipherSuites();
        c.serverSocket.setEnabledCipherSuites(supportedCipherSuites);
        LinkedList<String> uniqueCipherSuites =
                new LinkedList<String>(Arrays.asList(supportedCipherSuites));
        // only use RSA cipher suites which will work with our TrustProvider
        Iterator<String> i = uniqueCipherSuites.iterator();
        while (i.hasNext()) {
            String cipherSuite = i.next();

            // Certificate key length too long for export ciphers
            if (cipherSuite.startsWith("SSL_RSA_EXPORT_")) {
                i.remove();
                continue;
            }

            if (cipherSuite.startsWith("SSL_RSA_")) {
                continue;
            }
            if (cipherSuite.startsWith("TLS_RSA_")) {
                continue;
            }
            if (cipherSuite.startsWith("TLS_DHE_RSA_")) {
                continue;
            }
            if (cipherSuite.startsWith("SSL_DHE_RSA_")) {
                continue;
            }
            i.remove();
        }

        /*
         * having more than 3 uniqueCipherSuites is a test
         * requirement, not a requirement of the interface or
         * implementation. It simply allows us to make sure that we
         * will not get a cached session ID since we'll have to
         * renegotiate a new session due to the new cipher suite
         * requirement. even this test only really needs three if it
         * reused the unique cipher suites every time it resets the
         * session cache.
         */
        assertTrue(uniqueCipherSuites.size() >= 3);
        String cipherSuite1 = uniqueCipherSuites.get(0);
        String cipherSuite2 = uniqueCipherSuites.get(1);
        String cipherSuite3 = uniqueCipherSuites.get(2);

        // Three distinct cipher suites force three distinct sessions.
        TestSSLSocketPair.connect(c, new String[] { cipherSuite1 }, null);
        assertSSLSessionContextSize(1, c);
        TestSSLSocketPair.connect(c, new String[] { cipherSuite2 }, null);
        assertSSLSessionContextSize(2, c);
        TestSSLSocketPair.connect(c, new String[] { cipherSuite3 }, null);
        assertSSLSessionContextSize(3, c);

        // Shrinking the cache evicts down to the new limit.
        client.setSessionCacheSize(1);
        server.setSessionCacheSize(1);
        assertEquals(1, client.getSessionCacheSize());
        assertEquals(1, server.getSessionCacheSize());
        assertSSLSessionContextSize(1, c);
        TestSSLSocketPair.connect(c, new String[] { cipherSuite1 }, null);
        assertSSLSessionContextSize(1, c);

        // Growing the cache allows more sessions, but never beyond the limit.
        client.setSessionCacheSize(2);
        server.setSessionCacheSize(2);
        TestSSLSocketPair.connect(c, new String[] { cipherSuite2 }, null);
        assertSSLSessionContextSize(2, c);
        TestSSLSocketPair.connect(c, new String[] { cipherSuite3 }, null);
        assertSSLSessionContextSize(2, c);
    }

    public void test_SSLSessionContext_getSessionTimeout() {
        TestSSLContext c = TestSSLContext.create();
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT,
                     c.clientContext.getClientSessionContext().getSessionTimeout());
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT,
                     c.serverContext.getServerSessionContext().getSessionTimeout());

        TestSSLSocketPair s = TestSSLSocketPair.create();
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT,
                     s.c.clientContext.getClientSessionContext().getSessionTimeout());
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT,
                     s.c.serverContext.getServerSessionContext().getSessionTimeout());
    }

    public void test_SSLSessionContext_setSessionTimeout() throws Exception {
        TestSSLContext c = TestSSLContext.create();
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT,
                     c.clientContext.getClientSessionContext().getSessionTimeout());
        assertEquals(TestSSLContext.EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT,
                     c.serverContext.getServerSessionContext().getSessionTimeout());

        // 0 means sessions never time out.
        c.clientContext.getClientSessionContext().setSessionTimeout(0);
        c.serverContext.getServerSessionContext().setSessionTimeout(0);
        assertEquals(0, c.clientContext.getClientSessionContext().getSessionTimeout());
        assertEquals(0, c.serverContext.getServerSessionContext().getSessionTimeout());

        try {
            c.clientContext.getClientSessionContext().setSessionTimeout(-1);
            fail();
        } catch (IllegalArgumentException expected) {
        }
        try {
            c.serverContext.getServerSessionContext().setSessionTimeout(-1);
            fail();
        } catch (IllegalArgumentException expected) {
        }

        // Let a cached session age past a 1-second timeout; it must be purged.
        TestSSLSocketPair s = TestSSLSocketPair.create();
        assertSSLSessionContextSize(1, s.c);
        Thread.sleep(1 * 1000);
        s.c.clientContext.getClientSessionContext().setSessionTimeout(1);
        s.c.serverContext.getServerSessionContext().setSessionTimeout(1);
        assertSSLSessionContextSize(0, s.c);
    }
}
/*
 * Copyright 1999-2002 Carnegie Mellon University.
 * Portions Copyright 2002 Sun Microsystems, Inc.
 * Portions Copyright 2002 Mitsubishi Electric Research Laboratories.
 * All Rights Reserved.  Use is subject to license terms.
 *
 * See the file "license.terms" for information on usage and
 * redistribution of this file, and for a DISCLAIMER OF ALL
 * WARRANTIES.
 *
 */
package edu.cmu.sphinx.linguist.dflat;

import edu.cmu.sphinx.decoder.scorer.ScoreProvider;
import edu.cmu.sphinx.frontend.Data;
import edu.cmu.sphinx.linguist.*;
import edu.cmu.sphinx.linguist.acoustic.*;
import edu.cmu.sphinx.linguist.dictionary.Pronunciation;
import edu.cmu.sphinx.linguist.dictionary.Word;
import edu.cmu.sphinx.linguist.language.grammar.Grammar;
import edu.cmu.sphinx.linguist.language.grammar.GrammarArc;
import edu.cmu.sphinx.linguist.language.grammar.GrammarNode;
import edu.cmu.sphinx.util.LogMath;
import edu.cmu.sphinx.util.Timer;
import edu.cmu.sphinx.util.TimerPool;
import edu.cmu.sphinx.util.props.*;

import java.io.IOException;
import java.util.*;
import java.util.logging.Logger;

/**
 * A simple form of the linguist. It makes the following simplifying assumptions:
 * 1) Zero or one word per grammar node
 * 2) No fan-in allowed ever
 * 3) No composites (yet)
 * 4) Only Unit, HMMState, and pronunciation states (and the initial/final grammar state) are in
 *    the graph (no word, alternative or grammar states attached)
 * 5) Only valid transitions (matching contexts) are allowed
 * 6) No tree organization of units
 * 7) Branching grammar states are allowed
 * <p/>
 * This is a dynamic version of the flat linguist that is more efficient in terms of startup time
 * and overall footprint.
 * <p/>
 * Note that all probabilities are maintained in the log math domain.
 */
public class DynamicFlatLinguist implements Linguist, Configurable {

    /** The property used to define the grammar to use when building the search graph */
    @S4Component(type = Grammar.class)
    public final static String GRAMMAR = "grammar";

    /** The property used to define the unit manager to use when building the search graph */
    @S4Component(type = UnitManager.class)
    public final static String UNIT_MANAGER = "unitManager";

    /** The property used to define the acoustic model to use when building the search graph */
    @S4Component(type = AcousticModel.class)
    public final static String ACOUSTIC_MODEL = "acousticModel";

    /** The property that specifies whether to add a branch for detecting out-of-grammar utterances. */
    @S4Boolean(defaultValue = false)
    public final static String ADD_OUT_OF_GRAMMAR_BRANCH = "addOutOfGrammarBranch";

    /** The property for the probability of entering the out-of-grammar branch. */
    @S4Double(defaultValue = 1.0)
    public final static String OUT_OF_GRAMMAR_PROBABILITY = "outOfGrammarProbability";

    /** The property for the probability of inserting a CI phone in the out-of-grammar ci phone loop */
    @S4Double(defaultValue = 1.0)
    public static final String PHONE_INSERTION_PROBABILITY = "phoneInsertionProbability";

    /** The property for the acoustic model to use to build the phone loop that detects out of grammar utterances. */
    @S4Component(type = AcousticModel.class)
    public final static String PHONE_LOOP_ACOUSTIC_MODEL = "phoneLoopAcousticModel";

    /** The property that defines the name of the logmath to be used by this search manager. */
    @S4Component(type = LogMath.class)
    public final static String PROP_LOG_MATH = "logMath";

    private final static float logOne = LogMath.getLogOne();

    // ----------------------------------
    // Subcomponents that are configured
    // by the property sheet
    // -----------------------------------
    private Grammar grammar;
    private AcousticModel acousticModel;
    private AcousticModel phoneLoopAcousticModel;
    private LogMath logMath;
    private UnitManager unitManager;

    // ------------------------------------
    // Data that is configured by the
    // property sheet
    // ------------------------------------
    private float logWordInsertionProbability;
    private float logSilenceInsertionProbability;
    private float logUnitInsertionProbability;
    private float logFillerInsertionProbability;
    private float languageWeight;
    private float logOutOfGrammarBranchProbability;
    private float logPhoneInsertionProbability;
    private boolean addOutOfGrammarBranch;

    // ------------------------------------
    // Data used for building and maintaining
    // the search graph
    // -------------------------------------
    private SearchGraph searchGraph;
    private Logger logger;
    private HMMPool hmmPool;
    SearchStateArc outOfGrammarGraph;
    private GrammarNode initialGrammarState;

    // this map is used to manage the set of follow on units for a
    // particular grammar node. It is used to select the set of
    // possible right contexts as we leave a node
    private Map<GrammarNode, int[]> nodeToNextUnitArrayMap;

    // this map is used to manage the set of possible entry units for
    // a grammar node. It is used to filter paths so that we only
    // branch to grammar nodes that match the current right context.
    private Map<GrammarNode, Set<Unit>> nodeToUnitSetMap;

    // an empty arc (just waiting for Noah, I guess)
    private final SearchStateArc[] EMPTY_ARCS = new SearchStateArc[0];

    public DynamicFlatLinguist(AcousticModel acousticModel, Grammar grammar,
            UnitManager unitManager, LogMath logMath, double wordInsertionProbability,
            double silenceInsertionProbability, double unitInsertionProbability,
            double fillerInsertionProbability, float languageWeight,
            boolean addOutOfGrammarBranch, double outOfGrammarBranchProbability,
            double phoneInsertionProbability, AcousticModel phoneLoopAcousticModel) {
        this.logger = Logger.getLogger(getClass().getName());
        this.acousticModel = acousticModel;
        this.logMath = logMath;
        this.grammar = grammar;
        this.unitManager = unitManager;
        this.logWordInsertionProbability = logMath.linearToLog(wordInsertionProbability);
        this.logSilenceInsertionProbability = logMath.linearToLog(silenceInsertionProbability);
        this.logUnitInsertionProbability = logMath.linearToLog(unitInsertionProbability);
        this.logFillerInsertionProbability = logMath.linearToLog(fillerInsertionProbability);
        this.languageWeight = languageWeight;
        this.addOutOfGrammarBranch = addOutOfGrammarBranch;
        this.logOutOfGrammarBranchProbability = logMath.linearToLog(outOfGrammarBranchProbability);
        // BUG FIX: this previously read the (still default-zero) field
        // logPhoneInsertionProbability instead of the constructor parameter,
        // silently discarding the configured phone insertion probability.
        // Use the parameter, matching the newProperties() configuration path.
        this.logPhoneInsertionProbability = logMath.linearToLog(phoneInsertionProbability);
        if (addOutOfGrammarBranch) {
            this.phoneLoopAcousticModel = phoneLoopAcousticModel;
        }
    }

    public DynamicFlatLinguist() {
    }

    /*
     * (non-Javadoc)
     *
     * @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
     */
    @Override
    public void newProperties(PropertySheet ps) throws PropertyException {
        // hookup to all of the components
        logger = ps.getLogger();
        acousticModel = (AcousticModel) ps.getComponent(ACOUSTIC_MODEL);
        logMath = (LogMath) ps.getComponent(PROP_LOG_MATH);
        grammar = (Grammar) ps.getComponent(GRAMMAR);
        unitManager = (UnitManager) ps.getComponent(UNIT_MANAGER);

        // get the rest of the configuration data
        logWordInsertionProbability = logMath.linearToLog(ps.getDouble(PROP_WORD_INSERTION_PROBABILITY));
        logSilenceInsertionProbability = logMath.linearToLog(ps.getDouble(PROP_SILENCE_INSERTION_PROBABILITY));
        logUnitInsertionProbability = logMath.linearToLog(ps.getDouble(PROP_UNIT_INSERTION_PROBABILITY));
        logFillerInsertionProbability = logMath.linearToLog(ps.getDouble(PROP_FILLER_INSERTION_PROBABILITY));
        languageWeight = ps.getFloat(Linguist.PROP_LANGUAGE_WEIGHT);
        addOutOfGrammarBranch = ps.getBoolean(ADD_OUT_OF_GRAMMAR_BRANCH);
        logOutOfGrammarBranchProbability = logMath.linearToLog(ps.getDouble(OUT_OF_GRAMMAR_PROBABILITY));
        logPhoneInsertionProbability = logMath.linearToLog(ps.getDouble(PHONE_INSERTION_PROBABILITY));
        if (addOutOfGrammarBranch) {
            phoneLoopAcousticModel = (AcousticModel) ps.getComponent(PHONE_LOOP_ACOUSTIC_MODEL);
        }
    }

    /**
     * Returns the search graph
     *
     * @return the search graph
     */
    @Override
    public SearchGraph getSearchGraph() {
        return searchGraph;
    }

    /**
     * Sets up the acoustic model.
     *
     * @param ps the PropertySheet from which to obtain the acoustic model
     * @throws edu.cmu.sphinx.util.props.PropertyException
     */
    protected void setupAcousticModel(PropertySheet ps) throws PropertyException {
        acousticModel = (AcousticModel) ps.getComponent(ACOUSTIC_MODEL);
    }

    @Override
    public void allocate() throws IOException {
        logger.info("Allocating DFLAT");
        allocateAcousticModel();
        grammar.allocate();
        hmmPool = new HMMPool(acousticModel, logger, unitManager);
        nodeToNextUnitArrayMap = new HashMap<GrammarNode, int[]>();
        nodeToUnitSetMap = new HashMap<GrammarNode, Set<Unit>>();
        Timer timer = TimerPool.getTimer(this, "compileGrammar");
        timer.start();
        compileGrammar();
        timer.stop();
        logger.info("Done allocating DFLAT");
    }

    /** Allocates the acoustic model.
     * @throws java.io.IOException */
    protected void allocateAcousticModel() throws IOException {
        acousticModel.allocate();
        if (addOutOfGrammarBranch) {
            phoneLoopAcousticModel.allocate();
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see edu.cmu.sphinx.linguist.Linguist#deallocate()
     */
    @Override
    public void deallocate() {
        if (acousticModel != null) {
            acousticModel.deallocate();
        }
        grammar.deallocate();
    }

    /** Called before a recognition */
    @Override
    public void startRecognition() {
        if (grammarHasChanged()) {
            compileGrammar();
        }
    }

    /** Called after a recognition */
    @Override
    public void stopRecognition() {
    }

    /**
     * Returns the LogMath used.
     *
     * @return the logMath used
     */
    public LogMath getLogMath() {
        return logMath;
    }

    /**
     * Returns the log silence insertion probability.
     *
     * @return the log silence insertion probability.
     */
    public float getLogSilenceInsertionProbability() {
        return logSilenceInsertionProbability;
    }

    /**
     * Determines if the underlying grammar has changed since we last compiled the search graph
     *
     * @return true if the grammar has changed
     */
    private boolean grammarHasChanged() {
        return initialGrammarState == null || initialGrammarState != grammar.getInitialNode();
    }

    private void compileGrammar() {
        initialGrammarState = grammar.getInitialNode();

        for (GrammarNode node : grammar.getGrammarNodes()) {
            initUnitMaps(node);
        }
        searchGraph = new DynamicFlatSearchGraph();
    }

    /**
     * Initializes the unit maps for this linguist. There are two unit maps: (a) nodeToNextUnitArrayMap contains an
     * array of unit ids for all possible units that immediately follow the given grammar node. This is used to
     * determine the set of exit contexts for words within a grammar node. (b) nodeToUnitSetMap contains the set of
     * possible entry units for a given grammar node. This is typically used to determine if a path with a given right
     * context should branch into a particular grammar node
     *
     * @param node the units maps will be created for this node.
     */
    private void initUnitMaps(GrammarNode node) {

        // collect the set of next units for this node
        if (nodeToNextUnitArrayMap.get(node) == null) {
            Set<GrammarNode> visitedNodes = new HashSet<GrammarNode>();
            Set<Unit> unitSet = new HashSet<Unit>();

            GrammarArc[] arcs = node.getSuccessors();
            for (GrammarArc arc : arcs) {
                GrammarNode nextNode = arc.getGrammarNode();
                collectNextUnits(nextNode, visitedNodes, unitSet);
            }
            int[] nextUnits = new int[unitSet.size()];
            int index = 0;
            for (Unit unit : unitSet) {
                nextUnits[index++] = unit.getBaseID();
            }
            nodeToNextUnitArrayMap.put(node, nextUnits);
        }

        // collect the set of entry units for this node
        if (nodeToUnitSetMap.get(node) == null) {
            Set<GrammarNode> visitedNodes = new HashSet<GrammarNode>();
            Set<Unit> unitSet = new HashSet<Unit>();
            collectNextUnits(node, visitedNodes, unitSet);
            nodeToUnitSetMap.put(node, unitSet);
        }
    }

    /**
     * For the given grammar node, collect the set of possible next units.
     *
     * @param thisNode     the grammar node
     * @param visitedNodes the set of visited grammar nodes, used to ensure that we don't attempt to expand a particular
     *                     grammar node more than once (which could lead to a death spiral)
     * @param unitSet      the entry units are collected here.
     */
    private void collectNextUnits(GrammarNode thisNode, Set<GrammarNode> visitedNodes, Set<Unit> unitSet) {
        if (visitedNodes.contains(thisNode)) {
            return;
        }

        visitedNodes.add(thisNode);
        if (thisNode.isFinalNode()) {
            unitSet.add(UnitManager.SILENCE);
        } else if (!thisNode.isEmpty()) {
            Word word = thisNode.getWord();
            Pronunciation[] pronunciations = word.getPronunciations();
            for (Pronunciation pronunciation : pronunciations) {
                unitSet.add(pronunciation.getUnits()[0]);
            }
        } else {
            GrammarArc[] arcs = thisNode.getSuccessors();
            for (GrammarArc arc : arcs) {
                GrammarNode nextNode = arc.getGrammarNode();
                collectNextUnits(nextNode, visitedNodes, unitSet);
            }
        }
    }

    final Map<SearchState, SearchStateArc[]> successorCache = new HashMap<SearchState, SearchStateArc[]>();

    /** The base search state for this dynamic flat linguist. */
    abstract class FlatSearchState implements SearchState, SearchStateArc {

        final static int ANY = 0;

        /**
         * Gets the set of successors for this state
         *
         * @return the set of successors
         */
        @Override
        public abstract SearchStateArc[] getSuccessors();

        /**
         * Returns a unique string representation of the state. This string is suitable (and typically used) for a
         * label for a GDL node
         *
         * @return the signature
         */
        @Override
        public abstract String getSignature();

        /**
         * Returns the order of this state type among all of the search states
         *
         * @return the order
         */
        @Override
        public abstract int getOrder();

        /**
         * Determines if this state is an emitting state
         *
         * @return true if this is an emitting state
         */
        @Override
        public boolean isEmitting() {
            return false;
        }

        /**
         * Determines if this is a final state
         *
         * @return true if this is a final state
         */
        @Override
        public boolean isFinal() {
            return false;
        }

        /**
         * Returns a lex state associated with the search state (not applicable to this linguist)
         *
         * @return the lex state (null for this linguist)
         */
        @Override
        public Object getLexState() {
            return null;
        }

        /**
         * Returns a well formatted string representation of this state
         *
         * @return the formatted string
         */
        @Override
        public String toPrettyString() {
            return toString();
        }

        /**
         * Returns a string representation of this object
         *
         * @return a string representation
         */
        @Override
        public String toString() {
            return getSignature();
        }

        /**
         * Returns the word history for this state (not applicable to this linguist)
         *
         * @return the word history (null for this linguist)
         */
        @Override
        public WordSequence getWordHistory() {
            return null;
        }

        /**
         * Gets a successor to this search state
         *
         * @return the successor state
         */
        @Override
        public SearchState getState() {
            return this;
        }

        /**
         * Gets the composite probability of entering this state
         *
         * @return the log probability
         */
        @Override
        public float getProbability() {
            return getLanguageProbability() + getInsertionProbability();
        }

        /**
         * Gets the language probability of entering this state
         *
         * @return the log probability
         */
        @Override
        public float getLanguageProbability() {
            return logOne;
        }

        /**
         * Gets the insertion probability of entering this state
         *
         * @return the log probability
         */
        @Override
        public float getInsertionProbability() {
            return logOne;
        }

        /**
         * Get the arcs from the cache if they exist
         *
         * @return the cached arcs or null
         */
        SearchStateArc[] getCachedSuccessors() {
            return successorCache.get(this);
        }

        /**
         * Places the set of successor arcs in the cache
         *
         * @param successors the set of arcs to be cached for this state
         */
        void cacheSuccessors(SearchStateArc[] successors) {
            successorCache.put(this, successors);
        }
    }

    /**
     * Represents a grammar node in the search graph. A grammar state needs to keep track of the associated grammar node
     * as well as the left context and next base unit.
     */
    class GrammarState extends FlatSearchState {

        private final GrammarNode node;
        private final int lc;
        private final int nextBaseID;
        private final float languageProbability;

        /**
         * Creates a grammar state for the given node with a silence Lc
         *
         * @param node the grammar node
         */
        GrammarState(GrammarNode node) {
            this(node, logOne, UnitManager.SILENCE.getBaseID());
        }

        /**
         * Creates a grammar state for the given node and left context. The path will connect to any possible next base
         *
         * @param node                the grammar node
         * @param languageProbability the probability of transitioning to this word
         * @param lc                  the left context for this path
         */
        GrammarState(GrammarNode node, float languageProbability, int lc) {
            this(node, languageProbability, lc, ANY);
        }

        /**
         * Creates a grammar state for the given node and left context and next base ID.
         *
         * @param node                the grammar node
         * @param languageProbability the probability of transitioning to this word
         * @param lc                  the left context for this path
         * @param nextBaseID          the next base ID
         */
        GrammarState(GrammarNode node, float languageProbability, int lc, int nextBaseID) {
            this.lc = lc;
            this.nextBaseID = nextBaseID;
            this.node = node;
            this.languageProbability = languageProbability;
        }

        /**
         * Gets the language probability of entering this state
         *
         * @return the log probability
         */
        @Override
        public float getLanguageProbability() {
            return languageProbability * languageWeight;
        }

        /**
         * Generate a hashcode for an object. Equality for a grammar state includes the grammar node, the lc and the
         * next base ID
         *
         * @return the hashcode
         */
        @Override
        public int hashCode() {
            return node.hashCode() * 17 + lc * 7 + nextBaseID;
        }

        /**
         * Determines if the given object is equal to this object
         *
         * @param o the object to test
         * @return <code>true</code> if the object is equal to this
         */
        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            } else if (o instanceof GrammarState) {
                GrammarState other = (GrammarState) o;
                return other.node == node && lc == other.lc && nextBaseID == other.nextBaseID;
            } else {
                return false;
            }
        }

        /**
         * Determines if this is a final state in the search graph
         *
         * @return true if this is a final state in the search graph
         */
        @Override
        public boolean isFinal() {
            return node.isFinalNode();
        }

        /**
         * Gets the set of successors for this state
         *
         * @return the set of successors
         */
        @Override
        public SearchStateArc[] getSuccessors() {
            SearchStateArc[] arcs = getCachedSuccessors();
            if (arcs != null) {
                return arcs;
            }

            if (isFinal()) {
                arcs = EMPTY_ARCS;
            } else if (node.isEmpty()) {
                arcs = getNextGrammarStates(lc, nextBaseID);
            } else {
                Word word = node.getWord();
                Pronunciation[] pronunciations = word.getPronunciations();

                // This can potentially speedup computation
                // pronunciations = filter(pronunciations, nextBaseID);

                SearchStateArc[] nextArcs = new SearchStateArc[pronunciations.length];
                for (int i = 0; i < pronunciations.length; i++) {
                    nextArcs[i] = new PronunciationState(this, pronunciations[i]);
                }
                arcs = nextArcs;
            }

            cacheSuccessors(arcs);
            return arcs;
        }

        /**
         * Gets the set of arcs to the next set of grammar states that match the given nextBaseID
         *
         * @param lc         the current left context
         * @param nextBaseID the desired next base ID
         */
        SearchStateArc[] getNextGrammarStates(int lc, int nextBaseID) {
            GrammarArc[] nextNodes = node.getSuccessors();
            nextNodes = filter(nextNodes, nextBaseID);
            SearchStateArc[] nextArcs = new SearchStateArc[nextNodes.length];
            for (int i = 0; i < nextNodes.length; i++) {
                GrammarArc arc = nextNodes[i];
                nextArcs[i] = new GrammarState(arc.getGrammarNode(), arc.getProbability(), lc, nextBaseID);
            }
            return nextArcs;
        }

        /**
         * Returns a unique string representation of the state. This string is suitable (and typically used) for a
         * label for a GDL node
         *
         * @return the signature
         */
        @Override
        public String getSignature() {
            return "GS " + node + "-lc-" + hmmPool.getUnit(lc) + "-rc-" + hmmPool.getUnit(nextBaseID);
        }

        /**
         * Returns the order of this state type among all of the search states
         *
         * @return the order
         */
        @Override
        public int getOrder() {
            return 1;
        }

        /**
         * Given a set of arcs and the ID of the desired next unit, return the set of arcs containing only those that
         * transition to the next unit
         *
         * @param arcs     the set of arcs to filter
         * @param nextBase the ID of the desired next unit
         */
        GrammarArc[] filter(GrammarArc[] arcs, int nextBase) {
            if (nextBase != ANY) {
                List<GrammarArc> list = new ArrayList<GrammarArc>();
                for (GrammarArc arc : arcs) {
                    GrammarNode node = arc.getGrammarNode();
                    if (hasEntryContext(node, nextBase)) {
                        list.add(arc);
                    }
                }
                arcs = list.toArray(new GrammarArc[list.size()]);
            }
            return arcs;
        }

        /**
         * Determines if the given node starts with the specified unit
         *
         * @param node   the grammar node
         * @param unitID the id of the unit
         */
        private boolean hasEntryContext(GrammarNode node, int unitID) {
            Set<Unit> unitSet = nodeToUnitSetMap.get(node);
            return unitSet.contains(hmmPool.getUnit(unitID));
        }

        /**
         * Retain only the pronunciations that start with the unit indicated by nextBase. This method can be used
         * instead of filter to reduce search space. It's not used by default but could potentially lead to a decoding
         * speedup.
         *
         * @param pronunciations the set of pronunciations to filter
         * @param nextBase       the ID of the desired initial unit
         */
        Pronunciation[] filter(Pronunciation[] pronunciations, int nextBase) {
            if (nextBase == ANY) {
                return pronunciations;
            }
            ArrayList<Pronunciation> filteredPronunciation = new ArrayList<Pronunciation>(
                    pronunciations.length);
            for (Pronunciation pronunciation : pronunciations) {
                if (pronunciation.getUnits()[0].getBaseID() == nextBase) {
                    filteredPronunciation.add(pronunciation);
                }
            }
            return filteredPronunciation
                    .toArray(new Pronunciation[filteredPronunciation.size()]);
        }

        /**
         * Gets the ID of the left context unit for this path
         *
         * @return the left context ID
         */
        int getLC() {
            return lc;
        }

        /**
         * Gets the ID of the desired next unit
         *
         * @return the ID of the next unit
         */
        int getNextBaseID() {
            return nextBaseID;
        }

        /**
         * Returns the set of IDs for all possible next units for this grammar node
         *
         * @return the set of IDs of all possible next units
         */
        int[] getNextUnits() {
            return nodeToNextUnitArrayMap.get(node);
        }

        /**
         * Returns a string representation of this object
         *
         * @return a string representation
         */
        @Override
        public String toString() {
            return node + "[" + hmmPool.getUnit(lc) + ',' + hmmPool.getUnit(nextBaseID) + ']';
        }

        /**
         * Returns the grammar node associated with this grammar state
         *
         * @return the grammar node
         */
        GrammarNode getGrammarNode() {
            return node;
        }
    }

    class InitialState extends FlatSearchState {

        private final List<SearchStateArc> nextArcs = new ArrayList<SearchStateArc>();

        /**
         * Gets the set of successors for this state
         *
         * @return the set of successors
         */
        @Override
        public SearchStateArc[] getSuccessors() {
            return nextArcs.toArray(new SearchStateArc[nextArcs.size()]);
        }

        public void addArc(SearchStateArc arc) {
            nextArcs.add(arc);
        }

        /**
         * Returns a unique string representation of the state. This string is suitable (and typically used) for a
         * label for a GDL node
         *
         * @return the signature
         */
        @Override
        public String getSignature() {
            return "initialState";
        }

        /**
         * Returns the order of this state type among all of the search states
         *
         * @return the order
         */
        @Override
        public int getOrder() {
            return 1;
        }

        /**
         * Returns a string representation of this object
         *
         * @return a string representation
         */
        @Override
        public String toString() {
            return getSignature();
        }
    }

    /** This class representations a word punctuation in the search graph */
    class PronunciationState extends FlatSearchState implements WordSearchState {

        private final GrammarState gs;
        private final Pronunciation pronunciation;

        /**
         * Creates a PronunciationState
         *
         * @param gs the associated grammar state
         * @param p  the pronunciation
         */
        PronunciationState(GrammarState gs, Pronunciation p) {
            this.gs = gs;
            this.pronunciation = p;
        }

        /**
         * Gets the insertion probability of entering this state
         *
         * @return the log probability
         */
        @Override
        public float getInsertionProbability() {
            if (pronunciation.getWord().isFiller()) {
                return logOne;
            } else {
                return logWordInsertionProbability;
            }
        }

        /**
         * Generate a hashcode for an object
         *
         * @return the hashcode
         */
        @Override
        public int hashCode() {
            return 13 * gs.hashCode() + pronunciation.hashCode();
        }

        /**
         * Determines if the given object is equal to this object
         *
         * @param o the object to test
         * @return <code>true</code> if the object is equal to this
         */
        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            } else if (o instanceof PronunciationState) {
                PronunciationState other = (PronunciationState) o;
                return other.gs.equals(gs) && other.pronunciation.equals(pronunciation);
            } else {
                return false;
            }
        }

        /**
         * Gets the successor states for this search graph
         *
         * @return the successor states
         */
        @Override
        public SearchStateArc[] getSuccessors() {
            SearchStateArc[] arcs = getCachedSuccessors();
            if (arcs == null) {
                arcs = getSuccessors(gs.getLC(), 0);
                cacheSuccessors(arcs);
            }
            return arcs;
        }

        /**
         * Gets the successor states for the unit and the given position and left context
         *
         * @param lc    the ID of the left context
         * @param index the position of the unit within the pronunciation
         * @return the set of successor arcs
         */
        SearchStateArc[] getSuccessors(int lc, int index) {
            SearchStateArc[] arcs;
            if (index == pronunciation.getUnits().length - 1) {
                if (isContextIndependentUnit(
                        pronunciation.getUnits()[index])) {
                    arcs = new SearchStateArc[1];
                    arcs[0] = new FullHMMSearchState(this, index, lc, ANY);
                } else {
                    int[] nextUnits = gs.getNextUnits();
                    arcs = new SearchStateArc[nextUnits.length];
                    for (int i = 0; i < arcs.length; i++) {
                        arcs[i] = new FullHMMSearchState(this, index, lc, nextUnits[i]);
                    }
                }
            } else {
                arcs = new SearchStateArc[1];
                arcs[0] = new FullHMMSearchState(this, index, lc);
            }
            return arcs;
        }

        /**
         * Gets the pronunciation associated with this state
         *
         * @return the pronunciation
         */
        @Override
        public Pronunciation getPronunciation() {
            return pronunciation;
        }

        /**
         * Determines if the given unit is a CI unit
         *
         * @param unit the unit to test
         * @return true if the unit is a context independent unit
         */
        private boolean isContextIndependentUnit(Unit unit) {
            return unit.isFiller();
        }

        /**
         * Returns a unique string representation of the state. This string is suitable (and typically used) for a
         * label for a GDL node
         *
         * @return the signature
         */
        @Override
        public String getSignature() {
            return "PS " + gs.getSignature() + '-' + pronunciation;
        }

        /**
         * Returns a string representation of this object
         *
         * @return a string representation
         */
        @Override
        public String toString() {
            return pronunciation.getWord().getSpelling();
        }

        /**
         * Returns the order of this state type among all of the search states
         *
         * @return the order
         */
        @Override
        public int getOrder() {
            return 2;
        }

        /**
         * Returns the grammar state associated with this state
         *
         * @return the grammar state
         */
        GrammarState getGrammarState() {
            return gs;
        }

        /**
         * Returns true if this WordSearchState indicates the start of a word. Returns false if this WordSearchState
         * indicates the end of a word.
         *
         * @return true if this WordSearchState indicates the start of a word, false if this WordSearchState indicates
         *         the end of a word
         */
        @Override
        public boolean isWordStart() {
            return true;
        }
    }

    /** Represents a unit (as an HMM) in the search graph */
    class FullHMMSearchState extends FlatSearchState implements UnitSearchState {

        private final PronunciationState pState;
        private final int index;
        private final int lc;
        private final int rc;
        private final HMM hmm;
        private final boolean isLastUnitOfWord;

        /**
         * Creates a FullHMMSearchState
         *
         * @param p     the parent PronunciationState
         * @param which the index of the unit within the pronunciation
         * @param lc    the ID of the left context
         */
        FullHMMSearchState(PronunciationState p, int which, int lc) {
            this(p, which, lc, p.getPronunciation().getUnits()[which + 1].getBaseID());
        }

        /**
         * Creates a FullHMMSearchState
         *
         * @param p     the parent PronunciationState
         * @param which the index of the unit within the pronunciation
         * @param lc    the ID of the left context
         * @param rc    the ID of the right context
         */
        FullHMMSearchState(PronunciationState p, int which, int lc, int rc) {
            this.pState = p;
            this.index = which;
            this.lc = lc;
            this.rc = rc;
            int base = p.getPronunciation().getUnits()[which].getBaseID();
            int id = hmmPool.buildID(base, lc, rc);
            hmm = hmmPool.getHMM(id, getPosition());
            isLastUnitOfWord = which == p.getPronunciation().getUnits().length - 1;
        }

        /**
         * Determines the insertion probability based upon the type of unit
         *
         * @return the insertion probability
         */
        @Override
        public float getInsertionProbability() {
            Unit unit = hmm.getBaseUnit();
            if (unit.isSilence()) {
                return logSilenceInsertionProbability;
            } else if (unit.isFiller()) {
                return logFillerInsertionProbability;
            } else {
                return logUnitInsertionProbability;
            }
        }

        /**
         * Returns a string representation of this object
         *
         * @return a string representation
         */
        @Override
        public String toString() {
            return hmm.getUnit().toString();
        }

        /**
         * Generate a hashcode for an object
         *
         * @return the hashcode
         */
        @Override
        public int hashCode() {
            return pState.getGrammarState().getGrammarNode().hashCode() * 29
                    + pState.getPronunciation().hashCode() * 19 + index * 7 + 43 * lc + rc;
        }

        /**
         * Determines if the given object is equal to this object
         *
         * @param o the object to test
         * @return <code>true</code> if the object is equal to this
         */
        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            } else if (o instanceof FullHMMSearchState) {
                FullHMMSearchState other = (FullHMMSearchState) o;
                // the definition for equal for a FullHMMState:
                // Grammar Node equal
                // Pronunciation equal
                // index equal
                // rc equal
                return pState.getGrammarState().getGrammarNode() == other.pState.getGrammarState().getGrammarNode()
                        && pState.getPronunciation() == other.pState.getPronunciation()
                        && index == other.index && lc == other.lc && rc == other.rc;
            } else {
                return false;
            }
        }

        /**
         * Returns the unit associated with this state
         *
         * @return the unit
         */
        @Override
        public Unit getUnit() {
            return hmm.getBaseUnit();
        }

        /**
         * Gets the set of successors for this state
         *
         * @return the set of successors
         */
        @Override
        public SearchStateArc[] getSuccessors() {
            SearchStateArc[] arcs = getCachedSuccessors();
            if (arcs == null) {
                arcs = new SearchStateArc[1];
                arcs[0] = new HMMStateSearchState(this, hmm.getInitialState());
                cacheSuccessors(arcs);
            }
            return arcs;
        }

        /**
         * Determines if this unit is the last unit of a word
         *
         * @return true if this unit is the last unit of a word
         */
        boolean isLastUnitOfWord() {
            return isLastUnitOfWord;
        }

        /**
         * Determines the position of the unit within the word
         *
         * @return the position of the unit within the word
         */
        HMMPosition getPosition() {
            int len = pState.getPronunciation().getUnits().length;
            if (len == 1) {
                return HMMPosition.SINGLE;
            } else if (index == 0) {
                return HMMPosition.BEGIN;
            } else if (index == len - 1) {
                return HMMPosition.END;
            } else {
                return HMMPosition.INTERNAL;
            }
        }

        /**
         * Returns the HMM for this state
         *
         * @return the HMM
         */
        HMM getHMM() {
            return hmm;
        }

        /**
         * Returns the order of this state type among all of the search states
         *
         * @return the order
         */
        @Override
        public int getOrder() {
            return 3;
        }

        /**
         * Returns a unique string representation of the state. This string is suitable (and typically used) for a
         * label for a GDL node
         *
         * @return the signature
         */
        @Override
        public String getSignature() {
            return "HSS " + pState.getGrammarState().getGrammarNode()
                    + pState.getPronunciation() + index + '-' + rc + '-' + lc;
        }

        /**
         * Returns the ID of the right context for this state
         *
         * @return the right context unit ID
         */
        int getRC() {
            return rc;
        }

        /**
         * Returns the next set of arcs after this state and all substates have been processed
         *
         * @return the next set of arcs
         */
        SearchStateArc[] getNextArcs() {
            SearchStateArc[] arcs;
            // this is the last state of the hmm
            // so check to see if we are at the end
            // of a word, if not get the next full hmm in the word
            // otherwise generate arcs to the next set of words

            // Pronunciation pronunciation = pState.getPronunciation();
            int nextLC = getHMM().getBaseUnit().getBaseID();

            if (!isLastUnitOfWord()) {
                arcs = pState.getSuccessors(nextLC, index + 1);
            } else {
                // we are at the end of the word, so we transit to the
                // next grammar nodes
                GrammarState gs = pState.getGrammarState();
                arcs = gs.getNextGrammarStates(nextLC, getRC());
            }
            return arcs;
        }
    }

    /** Represents a single hmm state in the search graph */
    class HMMStateSearchState extends FlatSearchState implements HMMSearchState, ScoreProvider {

        private final FullHMMSearchState fullHMMSearchState;
        private final HMMState hmmState;
        private final float probability;

        /**
         * Creates an HMMStateSearchState
         *
         * @param hss      the parent hmm state
         * @param hmmState which hmm state
         */
        HMMStateSearchState(FullHMMSearchState hss, HMMState hmmState) {
            this(hss, hmmState, logOne);
        }

        /**
         * Creates an HMMStateSearchState
         *
         * @param hss      the parent hmm state
         * @param hmmState which hmm state
         * @param prob     the transition probability
         */
        HMMStateSearchState(FullHMMSearchState hss, HMMState hmmState, float prob) {
            this.probability = prob;
            fullHMMSearchState = hss;
            this.hmmState = hmmState;
        }

        /**
         * Returns the acoustic probability for this state
         *
         * @return the probability
         */
        @Override
        public float getInsertionProbability() {
            return probability;
        }

        /**
         * Generate a hashcode for an object
         *
         * @return the hashcode
         */
        @Override
        public int hashCode() {
            return 7 * fullHMMSearchState.hashCode() + hmmState.hashCode();
        }

        /**
         * Determines if the given object is equal to this object
         *
         * @param o the object to test
         * @return <code>true</code> if the object is equal to this
         */
        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            } else if (o instanceof HMMStateSearchState) {
                HMMStateSearchState other = (HMMStateSearchState) o;
                return other.fullHMMSearchState.equals(fullHMMSearchState)
                        && other.hmmState.equals(hmmState);
            } else {
                return false;
            }
        }

        /**
         * Determines if this state is an emitting state
         *
         * @return true if this is an emitting state
         */
        @Override
        public boolean isEmitting() {
            return hmmState.isEmitting();
        }

        /**
         * Gets the set of successors for this state
         *
         * @return the set of successors
         */
        @Override
        public SearchStateArc[] getSuccessors() {
            SearchStateArc[] arcs = getCachedSuccessors();
            if (arcs == null) {
                if (hmmState.isExitState()) {
                    arcs = fullHMMSearchState.getNextArcs();
                } else {
                    HMMStateArc[] next = hmmState.getSuccessors();
                    arcs = new SearchStateArc[next.length];
                    for (int i = 0; i < arcs.length; i++) {
                        arcs[i] = new HMMStateSearchState(fullHMMSearchState,
                                next[i].getHMMState(), next[i].getLogProbability());
                    }
                }
                cacheSuccessors(arcs);
            }
            return arcs;
        }

        /**
         * Returns the order of this state type among all of the search states
         *
         * @return the order
         */
        @Override
        public int getOrder() {
            return isEmitting() ? 4 : 0;
        }

        /**
         * Returns a unique string representation of the state. This string is suitable (and typically used) for a
         * label for a GDL node
         *
         * @return the signature
         */
        @Override
        public String getSignature() {
            return "HSSS " + fullHMMSearchState.getSignature() + '-' + hmmState;
        }

        /**
         * Returns the hmm state for this search state
         *
         * @return the hmm state
         */
        @Override
        public HMMState getHMMState() {
            return hmmState;
        }

        @Override
        public float getScore(Data data) {
            return hmmState.getScore(data);
        }
    }

    /** The search graph that is produced by the flat linguist. */
    class DynamicFlatSearchGraph implements SearchGraph {

        /*
         * (non-Javadoc)
         *
         * @see edu.cmu.sphinx.linguist.SearchGraph#getInitialState()
         */
        @Override
        public SearchState getInitialState() {
            InitialState initialState = new InitialState();
            initialState.addArc(new GrammarState(grammar.getInitialNode()));
            // add an out-of-grammar branch if configured to do so
            if (addOutOfGrammarBranch) {
                OutOfGrammarGraph oogg = new OutOfGrammarGraph
                        (phoneLoopAcousticModel, logOutOfGrammarBranchProbability,
                                logPhoneInsertionProbability);
                initialState.addArc(oogg.getOutOfGrammarGraph());
            }
            return initialState;
        }

        /*
         * (non-Javadoc)
         *
         * @see edu.cmu.sphinx.linguist.SearchGraph#getNumStateOrder()
         */
        @Override
        public int getNumStateOrder() {
            return 5;
        }
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.concurrency;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.util.Processor;
import com.intellij.util.concurrency.AtomicFieldUpdater;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountedCompleter;

/**
 * Executes processor on array elements in range from lo (inclusive) to hi (exclusive).
 * To do this it starts executing processor on first array items and, if it takes too much time, splits the work and forks the right half.
 * The series of splits lead to linked list of forked sub tasks, each of which is a CountedCompleter of its own,
 * having this task as its parent.
 * After the first pass on the array, this task attempts to steal work from the recently forked off sub tasks,
 * by traversing the linked subtasks list, unforking each subtask and calling execAndForkSubTasks() on each recursively.
 * After that, the task completes itself.
 * The process of completing traverses task parent hierarchy, decrementing each pending count until it either
 * decrements not-zero pending count and stops or
 * reaches the top, in which case it invokes {@link java.util.concurrent.ForkJoinTask#quietlyComplete()} which causes the top level task to wake up and join successfully.
 * The exceptions from the sub tasks bubble up to the top and saved in {@link #throwable}.
 */
class ApplierCompleter<T> extends CountedCompleter<Void> {
  // When true, each chunk of work is executed under a read action.
  private final boolean runInReadAction;
  // When true, compute() runs via an "impatient reader" so it bails out
  // quickly instead of blocking when a write action is pending.
  private final boolean failFastOnAcquireReadAction;
  // Shared indicator for the whole task tree; cancelling it cancels all subtasks.
  private final ProgressIndicator progressIndicator;
  @NotNull
  private final List<? extends T> array;
  @NotNull
  private final Processor<? super T> processor;
  // Half-open range [lo, hi) of array indices this task is responsible for.
  private final int lo;
  private final int hi;
  private final ApplierCompleter<T> next; // keeps track of right-hand-side tasks
  // First failure observed in this subtree; updated via CAS (see throwableUpdater).
  volatile Throwable throwable;
  private static final AtomicFieldUpdater<ApplierCompleter, Throwable> throwableUpdater =
    AtomicFieldUpdater.forFieldOfType(ApplierCompleter.class, Throwable.class);
  // if not null, the read action has failed and this list contains unfinished subtasks
  private final Collection<ApplierCompleter<T>> failedSubTasks;

  @Override
  public boolean cancel(boolean mayInterruptIfRunning) {
    // cancel the shared indicator first so all sibling subtasks notice
    progressIndicator.cancel();
    return super.cancel(mayInterruptIfRunning);
  }

  /**
   * @param parent         completer to report completion to (null for the root task)
   * @param runInReadAction whether each chunk must run inside a read action
   * @param failFastOnAcquireReadAction whether to use the impatient-reader mode
   * @param progressIndicator indicator shared by the whole task tree
   * @param array          the elements to process
   * @param processor      applied to each element; returning false aborts the whole computation
   * @param lo             start index (inclusive)
   * @param hi             end index (exclusive)
   * @param failedSubTasks shared collection of subtasks that could not acquire the read action
   * @param next           head of the linked list of previously forked right-hand tasks
   */
  ApplierCompleter(ApplierCompleter<T> parent,
                   boolean runInReadAction,
                   boolean failFastOnAcquireReadAction,
                   @NotNull ProgressIndicator progressIndicator,
                   @NotNull List<? extends T> array,
                   @NotNull Processor<? super T> processor,
                   int lo,
                   int hi,
                   @NotNull Collection<ApplierCompleter<T>> failedSubTasks,
                   ApplierCompleter<T> next) {
    super(parent);
    this.runInReadAction = runInReadAction;
    this.failFastOnAcquireReadAction = failFastOnAcquireReadAction;
    this.progressIndicator = progressIndicator;
    this.array = array;
    this.processor = processor;
    this.lo = lo;
    this.hi = hi;
    this.failedSubTasks = failedSubTasks;
    this.next = next;
  }

  @Override
  public void compute() {
    if (failFastOnAcquireReadAction) {
      ((ApplicationImpl)ApplicationManager.getApplication()).executeByImpatientReader(()->
        wrapInReadActionAndIndicator(this::execAndForkSubTasks));
    }
    else {
      wrapInReadActionAndIndicator(this::execAndForkSubTasks);
    }
  }

  /**
   * Runs {@code process} under this task's progress indicator and, if configured,
   * inside a (non-blocking) read action. If the read action cannot be acquired
   * the task is recorded in {@link #failedSubTasks} to be re-run later by
   * {@link #completeTaskWhichFailToAcquireReadAction()}, and completed here so
   * the parent's pending count is still decremented.
   */
  private void wrapInReadActionAndIndicator(@NotNull final Runnable process) {
    Runnable toRun = runInReadAction ? () -> {
      if (!ApplicationManagerEx.getApplicationEx().tryRunReadAction(process)) {
        failedSubTasks.add(this);
        doComplete(throwable);
      }
    } : process;
    ProgressManager progressManager = ProgressManager.getInstance();
    ProgressIndicator existing = progressManager.getProgressIndicator();
    if (existing == progressIndicator) {
      // we are already wrapped in an indicator - most probably because we came here from helper which steals children tasks
      toRun.run();
    }
    else {
      progressManager.executeProcessUnderProgress(toRun, progressIndicator);
    }
  }

  /** Thrown when {@link #processor} returns false, to abort the whole computation. */
  static class ComputationAbortedException extends RuntimeException {}

  // executes tasks one by one and forks right halves if it takes too much time
  // returns the linked list of forked halves - they all need to be joined; null means all tasks have been executed, nothing was forked
  @Nullable
  private ApplierCompleter<T> execAndForkSubTasks() {
    int hi = this.hi;
    ApplierCompleter<T> right = null;
    Throwable throwable = null;
    try {
      for (int i = lo; i < hi; ++i) {
        ProgressManager.checkCanceled();
        if (hi - i >= 2) {
          int availableParallelism = JobSchedulerImpl.getJobPoolParallelism() - Math.max(0,getSurplusQueuedTaskCount());
          if (availableParallelism > 1) {
            // fork off several sub-tasks at once to reduce rampup
            for (int n=0; n<availableParallelism; n++) {
              // overflow-safe midpoint of [i, hi)
              int mid = i + hi >>> 1;
              if (mid == i || mid == hi) break;
              right = new ApplierCompleter<>(this, runInReadAction, failFastOnAcquireReadAction, progressIndicator,
                                             array, processor, mid, hi, failedSubTasks, right);
              addToPendingCount(1);
              right.fork();
              hi = mid;
            }
          }
        }
        if (!processor.process(array.get(i))) {
          throw new ComputationAbortedException();
        }
      }
      // traverse the list looking for a task available for stealing
      if (right != null) {
        // tries to unfork, execute and re-link subtasks
        ApplierCompleter<T> cur = right;
        Throwable result = right.throwable;
        while (cur != null) {
          ProgressManager.checkCanceled();
          if (cur.tryUnfork()) {
            cur.execAndForkSubTasks();
            result = moreImportant(result, cur.throwable);
          }
          cur = cur.next;
        }
        throwable = result;
      }
    }
    catch (Throwable e) {
      cancelProgress();
      throwable = e;
    }
    finally {
      // always complete, even on failure, so the parent's pending count is balanced
      doComplete(moreImportant(throwable, this.throwable));
    }
    return right;
  }

  /** Picks the more informative of two failures; never returns a PCE when a "real" exception is available. */
  private static Throwable moreImportant(Throwable throwable1, Throwable throwable2) {
    Throwable result;
    if (throwable1 == null) {
      result = throwable2;
    }
    else if (throwable2 == null) {
      result = throwable1;
    }
    else {
      // any exception wins over PCE because the latter can be induced by canceled indicator because of the former
      result = throwable1 instanceof ProcessCanceledException ? throwable2 : throwable1;
    }
    return result;
  }

  /**
   * Walks up the completer chain, merging {@code throwable} into each ancestor's
   * {@link #throwable} (CAS loop) and decrementing pending counts; the topmost
   * task with zero pending count is quietly completed so the joiner wakes up.
   */
  private void doComplete(Throwable throwable) {
    ApplierCompleter<T> a = this;
    ApplierCompleter<T> child = a;
    while (true) {
      // update parent.throwable in a thread safe way
      Throwable oldThrowable;
      Throwable newThrowable;
      do {
        oldThrowable = a.throwable;
        newThrowable = moreImportant(oldThrowable, throwable);
      }
      while (oldThrowable != newThrowable && !throwableUpdater.compareAndSet(a, oldThrowable, newThrowable));
      throwable = newThrowable;
      if (a.getPendingCount() == 0) {
        // currently avoid using onExceptionalCompletion since it leaks exceptions via ForkJoinTask.exceptionTable
        a.onCompletion(child);
        child = a;
        //noinspection unchecked
        a = (ApplierCompleter<T>)a.getCompleter();
        if (a == null) {
          // currently avoid using completeExceptionally since it leaks exceptions via ForkJoinTask.exceptionTable
          child.quietlyComplete();
          break;
        }
      }
      else if (a.decrementPendingCountUnlessZero() != 0) {
        break;
      }
    }
  }

  private void cancelProgress() {
    if (!progressIndicator.isCanceled()) {
      progressIndicator.cancel();
    }
  }

  /**
   * Re-runs, on the current thread and under a blocking read action, every
   * subtask that previously failed to acquire the read action.
   *
   * @return false if the processor aborted (returned false) for any element
   */
  boolean completeTaskWhichFailToAcquireReadAction() {
    final boolean[] result = {true};
    // these tasks could not be executed in the other thread; do them here
    for (final ApplierCompleter<T> task : failedSubTasks) {
      ProgressManager.checkCanceled();
      ApplicationManager.getApplication().runReadAction(() -> task.wrapInReadActionAndIndicator(() -> {
        for (int i = task.lo; i < task.hi; ++i) {
          ProgressManager.checkCanceled();
          if (!task.processor.process(task.array.get(i))) {
            result[0] = false;
            break;
          }
        }
      }));
    }
    return result[0];
  }

  @Override
  public String toString() {
    return "("+lo+"-"+hi+")"+(getCompleter() == null ? "" : " parent: "+getCompleter());
  }
}
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Script to fix up Windows RTTI vtables and structures //@category C++ import java.util.*; import ghidra.app.script.GhidraScript; import ghidra.program.model.address.*; import ghidra.program.model.data.*; import ghidra.program.model.listing.CircularDependencyException; import ghidra.program.model.listing.Data; import ghidra.program.model.mem.MemoryAccessException; import ghidra.program.model.mem.MemoryBlock; import ghidra.program.model.symbol.*; import ghidra.util.exception.*; public class FixUpRttiAnalysisScript extends GhidraScript { private static final String RTTI_BASE_CLASS_ARRAY_LABEL = "RTTI_Base_Class_Array"; private static final String RTTI_CLASS_HIERARCHY_DESCRIPTOR_LABEL = "RTTI_Class_Hierarchy_Descriptor"; private static final String RTTI_BASE_CLASS_DESCRIPTOR_LABEL = "RTTI_Base_Class_Descriptor"; private static final String RTTI_BASE_COMPLETE_OBJECT_LOADER_LABEL = "RTTI_Complete_Object_Locator"; private static final String RTTI_BASE_CLASS_DESCRIPTOR_DATA_NAME = "RTTIBaseClassDescriptor"; private static final String RTTI_BASE_COMPLETE_OBJECT_LOADER_DATA_NAME = "RTTICompleteObjectLocator"; private static final String RTTI_CLASS_HIERARCHY_DESCRIPTOR_DATA_NAME = "RTTIClassHierarchyDescriptor"; private static final String VFTABLE_META_PTR_LABEL = "vftable_meta_ptr"; private static final String VFTABLE_LABEL = "vftable"; SymbolTable symbolTable = null; DataTypeManager dataTypeManager = null; 
GlobalNamespace globalNamespace = null; int defaultPointerSize = 0; boolean isWindows = false; @Override public void run() throws Exception { if (currentProgram == null) { println("There is no open program"); return; } setIsWindows(); if (!isWindows) { println("This script only handles Windows programs"); return; } // TODO: check version and only run if before 9.3? symbolTable = currentProgram.getSymbolTable(); dataTypeManager = currentProgram.getDataTypeManager(); globalNamespace = (GlobalNamespace) currentProgram.getGlobalNamespace(); defaultPointerSize = currentProgram.getDefaultPointerSize(); if (defaultPointerSize != 4 && defaultPointerSize != 8) { println("This script only works on 32 or 64 bit programs"); return; } applyMissingRTTIStructures(); } /** * Method to find and apply missing RTTI structures * @throws CancelledException if cancelled * @throws Exception if error applying label or data */ private List<Symbol> applyMissingRTTIStructures() throws CancelledException, Exception { List<Symbol> completeObjectLocatorSymbols = createMissingRTTICompleteObjectLocator(); List<Symbol> baseClassDescriptorSymbols = createMissingBaseClassDescriptors(); List<Address> classHierarchyDescriptors = createMissingClassHierarchyDescriptors( baseClassDescriptorSymbols, completeObjectLocatorSymbols); createMissingBaseClassArrays(classHierarchyDescriptors); List<Symbol> vftableSymbols = createMissingVftableSymbols(completeObjectLocatorSymbols); return vftableSymbols; } /** * Method to set the global variable isWindows */ private void setIsWindows() { String compilerID = currentProgram.getCompilerSpec().getCompilerSpecID().getIdAsString().toLowerCase(); isWindows = compilerID.contains("windows"); } /** * Method to iterate over all symbols with Base Class Descriptor symbol and if * the correct data type has not already been created, do so. 
* @return List of all symbols with valid (even previously) BaseClassDescriptor structure applied * @throws CancelledException when cancelled * @throws Exception when data cannot be created */ private List<Symbol> createMissingRTTICompleteObjectLocator() throws CancelledException, Exception { List<Symbol> completeObjectLocatorSymbols = new ArrayList<Symbol>(); SymbolIterator dataSymbols = symbolTable.getSymbols(getInitializedMemory(), SymbolType.LABEL, true); while (dataSymbols.hasNext()) { monitor.checkCanceled(); Symbol symbol = dataSymbols.next(); if (!symbol.getName().contains(RTTI_BASE_COMPLETE_OBJECT_LOADER_LABEL)) { continue; } Data data = getDataAt(symbol.getAddress()); if (data != null && data.getDataType().getName().contains(RTTI_BASE_COMPLETE_OBJECT_LOADER_DATA_NAME)) { completeObjectLocatorSymbols.add(symbol); continue; } // for some reason it was named but not created so create it data = createCompleteObjectLocator(symbol.getAddress()); if (data != null && data.getDataType().getName().contains(RTTI_BASE_COMPLETE_OBJECT_LOADER_DATA_NAME)) { completeObjectLocatorSymbols.add(symbol); continue; } println("Cannot create RTTI_CompleteObjectLocator at " + symbol.getAddress()); } return completeObjectLocatorSymbols; } /** * Method to create a CompleteObjectLocator structure at the given address * @param address the address where the structure will be created * @return the created CompleteObjectLocator data or null if it couldn't be created * @throws CancelledException if cancelled * @throws Exception if error creating data */ private Data createCompleteObjectLocator(Address address) throws CancelledException, Exception { DataType completeObjLocatorDataType = dataTypeManager.getDataType(CategoryPath.ROOT, RTTI_BASE_COMPLETE_OBJECT_LOADER_DATA_NAME); if (completeObjLocatorDataType == null) { return null; } int sizeOfDt = completeObjLocatorDataType.getLength(); clearListing(address, address.add(sizeOfDt)); Data completeObjectLocator = createData(address, 
completeObjLocatorDataType); if (completeObjectLocator == null) { return null; } return completeObjectLocator; } /** * Method to iterate over all symbols with Base Class Descriptor symbol and if * the correct data type has not already been created, do so. * @return List of all symbols with valid (even previously) BaseClassDescriptor structure applied * @throws Exception when cancelled */ private List<Symbol> createMissingBaseClassDescriptors() throws Exception { List<Symbol> baseClassDescriptorSymbols = new ArrayList<Symbol>(); SymbolIterator dataSymbols = symbolTable.getSymbols(getInitializedMemory(), SymbolType.LABEL, true); while (dataSymbols.hasNext()) { monitor.checkCanceled(); Symbol symbol = dataSymbols.next(); if (!symbol.getName().contains(RTTI_BASE_CLASS_DESCRIPTOR_LABEL)) { continue; } Data data = getDataAt(symbol.getAddress()); if (data != null && data.getDataType().getName().contains(RTTI_BASE_CLASS_DESCRIPTOR_DATA_NAME)) { baseClassDescriptorSymbols.add(symbol); continue; } // for some reason it was named but not created so create it data = createBaseClassDescriptor(symbol.getAddress()); if (data != null && data.getDataType().getName().contains(RTTI_BASE_CLASS_DESCRIPTOR_DATA_NAME)) { baseClassDescriptorSymbols.add(symbol); continue; } println("Cannot create RTTI_Base_Class_Descriptor at " + symbol.getAddress()); } return baseClassDescriptorSymbols; } /** * Method to create a BaseClassDescriptor structure at the given address * @param baseClassDescriptorAddress the address where the structure will be created * @return the created BaseClassDescriptor data or null if it couldn't be created * @throws CancelledException if cancelled * @throws Exception if error creating data */ private Data createBaseClassDescriptor(Address baseClassDescriptorAddress) throws CancelledException, Exception { DataType baseClassDescriptor = dataTypeManager.getDataType(CategoryPath.ROOT, RTTI_BASE_CLASS_DESCRIPTOR_DATA_NAME); int sizeOfDt = baseClassDescriptor.getLength(); 
clearListing(baseClassDescriptorAddress, baseClassDescriptorAddress.add(sizeOfDt)); Data baseClassDescArray = createData(baseClassDescriptorAddress, baseClassDescriptor); if (baseClassDescArray == null) { return null; } return baseClassDescArray; } /** * Method to apply missing RTTI Base Class Descriptor structures and symbols * @param address address to apply the missing structure and symbol * @param numBaseClasses number of base classes in the array pointing to BaseClassDescriptors * @param classNamespace name of the class * @throws AddressOutOfBoundsException if try clear listing at address out of bounds * @throws MemoryAccessException if cannot access memory * @throws CancelledException if cancelled * @throws Exception if issue making data */ private void createBaseClassDescriptors(Address address, int numBaseClasses, Namespace classNamespace) throws CancelledException, MemoryAccessException, AddressOutOfBoundsException, Exception { for (int i = 0; i < numBaseClasses; i++) { monitor.checkCanceled(); Address baseClassDescriptorAddress = getReferencedAddress(address.add(i * 4)); Data baseClassDescriptor = getDataAt(baseClassDescriptorAddress); if (baseClassDescriptor == null || !baseClassDescriptor.getDataType() .getName() .equals( RTTI_BASE_CLASS_DESCRIPTOR_DATA_NAME)) { int num1 = getInt(baseClassDescriptorAddress.add(8)); int num2 = getInt(baseClassDescriptorAddress.add(12)); int num3 = getInt(baseClassDescriptorAddress.add(16)); int num4 = getInt(baseClassDescriptorAddress.add(20)); baseClassDescriptor = createBaseClassDescriptor(baseClassDescriptorAddress); if (baseClassDescriptor != null) { symbolTable.createLabel( baseClassDescriptorAddress, RTTI_BASE_CLASS_DESCRIPTOR_LABEL + "_at_(" + num1 + "," + num2 + "," + num3 + "," + num4 + ")", classNamespace, SourceType.ANALYSIS); } else { println( "Failed to create a baseClassDescArray structure at " + address.toString()); } } } } /** * * @param baseClassDescriptors the given list of BaseClassDescriptor symbols * 
@param completeObjectLocators the given list of CompleteObjectLocator symbols * @return list of ClassHierarchyDescriptor addresses * @throws CancelledException if cancelled * @throws MemoryAccessException if memory cannot be read * @throws InvalidInputException if issue setting return type * @throws AddressOutOfBoundsException if try clear listing at address out of bounds * @throws Exception if there is an issue creating a label */ private List<Address> createMissingClassHierarchyDescriptors(List<Symbol> baseClassDescriptors, List<Symbol> completeObjectLocators) throws CancelledException, MemoryAccessException, InvalidInputException, AddressOutOfBoundsException, Exception { List<Address> classHierarchyDescriptorAddresses = new ArrayList<Address>(); Iterator<Symbol> baseClassDescriptorIterator = baseClassDescriptors.iterator(); while (baseClassDescriptorIterator.hasNext()) { monitor.checkCanceled(); Symbol symbol = baseClassDescriptorIterator.next(); Address classHierarchyDescriptorAddress = createClassHierarchyDescriptor( symbol.getAddress().add(24), symbol.getParentNamespace()); if (classHierarchyDescriptorAddress != null && !classHierarchyDescriptorAddresses.contains(classHierarchyDescriptorAddress)) { classHierarchyDescriptorAddresses.add(classHierarchyDescriptorAddress); } } Iterator<Symbol> completeObjectLocatorIterator = completeObjectLocators.iterator(); while (completeObjectLocatorIterator.hasNext()) { monitor.checkCanceled(); Symbol symbol = completeObjectLocatorIterator.next(); Address classHierarchyDescriptorAddress = createClassHierarchyDescriptor( symbol.getAddress().add(16), symbol.getParentNamespace()); if (classHierarchyDescriptorAddress != null && !classHierarchyDescriptorAddresses.contains(classHierarchyDescriptorAddress)) { classHierarchyDescriptorAddresses.add(classHierarchyDescriptorAddress); } } return classHierarchyDescriptorAddresses; } /** * * @param address the address where the ClassHierarchyDescriptor is to be created * @param 
classNamespace the namespace of the class * @return the given class's ClassHierarchyDescriptor address * @throws CancelledException if cancelled * @throws MemoryAccessException if memory cannot be read * @throws InvalidInputException if issue setting return type * @throws Exception if issue creating label */ private Address createClassHierarchyDescriptor(Address address, Namespace classNamespace) throws CancelledException, MemoryAccessException, InvalidInputException, Exception { Address classHierarchyDescriptorAddress = getReferencedAddress(address); Data classHierarchyStructure = getDataAt(classHierarchyDescriptorAddress); if (classHierarchyStructure != null && classHierarchyStructure.getDataType() .getName() .equals( RTTI_CLASS_HIERARCHY_DESCRIPTOR_DATA_NAME)) { return classHierarchyDescriptorAddress; } Symbol classHierarchySymbol; classHierarchySymbol = symbolTable.createLabel(classHierarchyDescriptorAddress, RTTI_CLASS_HIERARCHY_DESCRIPTOR_LABEL, classNamespace, SourceType.ANALYSIS); classHierarchyStructure = createClassHierarchyStructure(classHierarchyDescriptorAddress); if (classHierarchyStructure == null) { println("Failed to create a classHierarchyDescriptor structure at " + classHierarchyDescriptorAddress.toString()); symbolTable.removeSymbolSpecial(classHierarchySymbol); return null; } return classHierarchyDescriptorAddress; } /** * Method to create a ClassHierarchyDescriptor structure at the given address * @param classHierarchyDescriptorAddress the address where the structure will be created * @return the created ClassHierarchyDescriptor data or null if it couldn't be created * @throws CancelledException if cancelled * @throws AddressOutOfBoundsException if try clear listing at address out of bounds * @throws Exception if issue creating data */ private Data createClassHierarchyStructure(Address classHierarchyDescriptorAddress) throws CancelledException, AddressOutOfBoundsException, Exception { DataType classHDatatype = 
dataTypeManager.getDataType(CategoryPath.ROOT, RTTI_CLASS_HIERARCHY_DESCRIPTOR_DATA_NAME); int sizeOfDt = classHDatatype.getLength(); clearListing(classHierarchyDescriptorAddress, classHierarchyDescriptorAddress.add(sizeOfDt)); Data classHierarchyStructure = createData(classHierarchyDescriptorAddress, classHDatatype); if (classHierarchyStructure == null) { return null; } return classHierarchyStructure; } /** * * @param classHierarchyDescriptors the given list of applied ClassHierarchyDescriptor structures * @return a list of base class array addresses * @throws CancelledException if cancelled * @throws MemoryAccessException if memory cannot be read * @throws AddressOutOfBoundsException if try clear listing at address out of bounds * @throws Exception if there is an issue creating a label */ private List<Address> createMissingBaseClassArrays(List<Address> classHierarchyDescriptors) throws CancelledException, MemoryAccessException, AddressOutOfBoundsException, Exception { List<Address> baseClassArrayAddresses = new ArrayList<Address>(); Iterator<Address> classHierarchyDescriptorIterator = classHierarchyDescriptors.iterator(); while (classHierarchyDescriptorIterator.hasNext()) { monitor.checkCanceled(); Address classHierarchyDescriptorAddress = classHierarchyDescriptorIterator.next(); Symbol classHierarchyDescriptorSymbol = symbolTable.getPrimarySymbol(classHierarchyDescriptorAddress); Namespace classNamespace = classHierarchyDescriptorSymbol.getParentNamespace(); int numBaseClasses = getInt(classHierarchyDescriptorAddress.add(8)); Address baseClassArrayAddress = getReferencedAddress(classHierarchyDescriptorAddress.add(12)); Data baseClassDescArray = getDataAt(baseClassArrayAddress); if (baseClassDescArray != null && baseClassDescArray.isArray()) { baseClassArrayAddresses.add(baseClassArrayAddress); continue; } baseClassDescArray = createBaseClassArray(baseClassArrayAddress, numBaseClasses); if (baseClassDescArray != null && baseClassDescArray.isArray()) { Symbol 
primarySymbol = symbolTable.getPrimarySymbol(baseClassArrayAddress); if (primarySymbol == null || !primarySymbol.getName().contains(RTTI_BASE_CLASS_ARRAY_LABEL)) { symbolTable.createLabel(baseClassArrayAddress, RTTI_BASE_CLASS_ARRAY_LABEL, classNamespace, SourceType.ANALYSIS); } baseClassArrayAddresses.add(baseClassArrayAddress); createBaseClassDescriptors(baseClassArrayAddress, numBaseClasses, classNamespace); continue; } println("Failed to create a baseClassDescArray structure at " + baseClassArrayAddress.toString()); } return baseClassArrayAddresses; } /** * Method to create a base class array at the given address with the given number of base class's in the array * @param baseClassArrayAddress the address where the array will be created * @param numBaseClasses the number of BaseClass's in the array * @return the created BaseClassArray data or null if cannot retrieve it * @throws CancelledException if cancelled * @throws Exception if error creating data */ private Data createBaseClassArray(Address baseClassArrayAddress, int numBaseClasses) throws CancelledException, Exception { int sizeOfDt; ArrayDataType baseClassDescArrayDT; int addressSize = baseClassArrayAddress.getSize(); if (addressSize == 32) { DataType baseClassDescriptor = dataTypeManager.getDataType(CategoryPath.ROOT, RTTI_BASE_CLASS_DESCRIPTOR_DATA_NAME); PointerDataType baseClassDescriptorPtr = new PointerDataType(baseClassDescriptor); sizeOfDt = baseClassDescriptorPtr.getLength(); baseClassDescArrayDT = new ArrayDataType(baseClassDescriptorPtr, numBaseClasses, sizeOfDt); } else if (addressSize == 64) { DataType imageBaseOffset = dataTypeManager.getDataType(CategoryPath.ROOT, "ImageBaseOffset32"); sizeOfDt = imageBaseOffset.getLength(); baseClassDescArrayDT = new ArrayDataType(imageBaseOffset, numBaseClasses, sizeOfDt); } else { return null; } clearListing(baseClassArrayAddress, baseClassArrayAddress.add(numBaseClasses * sizeOfDt)); Data baseClassDescArray = createData(baseClassArrayAddress, 
baseClassDescArrayDT); if (baseClassDescArray == null) { return null; } return baseClassDescArray; } /** * Method to create missing vftables and return a list of them * @param completeObjectLocatorSymbols the list of completeObjectLocatorSymbols * @return list of vftable symbols * @throws CancelledException if cancelled * @throws InvalidInputException if invalid input * @throws CircularDependencyException if namespace has circular dependency * @throws DuplicateNameException if try to create label with duplicate name in namespace */ private List<Symbol> createMissingVftableSymbols(List<Symbol> completeObjectLocatorSymbols) throws CancelledException, InvalidInputException, DuplicateNameException, CircularDependencyException { List<Symbol> vftables = new ArrayList<Symbol>(); Iterator<Symbol> iterator = completeObjectLocatorSymbols.iterator(); while (iterator.hasNext()) { monitor.checkCanceled(); Symbol completeObjectLocatorSymbol = iterator.next(); Address completeObjectLocatorAddress = completeObjectLocatorSymbol.getAddress(); Namespace classNamespace = completeObjectLocatorSymbol.getParentNamespace(); if (classNamespace.equals(globalNamespace)) { println("no class namespace for " + completeObjectLocatorAddress.toString()); continue; } Reference[] referencesTo = getReferencesTo(completeObjectLocatorAddress); if (referencesTo.length == 0) { println("no refs to " + completeObjectLocatorAddress.toString()); continue; } for (Reference refTo : referencesTo) { Address vftableMetaPointer = refTo.getFromAddress(); if (vftableMetaPointer == null) { println("can't retrieve meta address"); continue; } Address vftableAddress = vftableMetaPointer.add(defaultPointerSize); if (vftableAddress == null) { println("can't retrieve vftable address"); continue; } // if not created, create vftable meta pointer label if (getGivenSymbol(vftableAddress, VFTABLE_META_PTR_LABEL, classNamespace) == null) { symbolTable.createLabel(vftableMetaPointer, VFTABLE_META_PTR_LABEL, classNamespace, 
SourceType.ANALYSIS); } // if not created, create vftable label Symbol vftableSymbol = getGivenSymbol(vftableAddress, VFTABLE_LABEL, classNamespace); if (vftableSymbol == null) { vftableSymbol = symbolTable.createLabel(vftableAddress, VFTABLE_LABEL, classNamespace, SourceType.ANALYSIS); if (vftableSymbol == null) { continue; } } if (!vftables.contains(vftableSymbol)) { vftables.add(vftableSymbol); } } } return vftables; } /** * Method to retrieve the symbol with the given address, containing name (containing to account * for pdb case where sometimes has extra chars) and namespace * @param address the given address * @param name the given name * @param namespace the given namespace * @return the symbol with the given address, containing name, with given namespace * @throws CancelledException if cancelled */ private Symbol getGivenSymbol(Address address, String name, Namespace namespace) throws CancelledException { SymbolIterator symbols = symbolTable.getSymbolsAsIterator(address); for (Symbol sym : symbols) { monitor.checkCanceled(); if (sym.getName().contains(name) && sym.getParentNamespace().equals(namespace)) { return sym; } } return null; } /** * Method to return referenced address at the given address * @param address the address to look for a referenced address at * @return the first referenced address from the given address * @throws MemoryAccessException if memory cannot be read */ private Address getReferencedAddress(Address address) throws MemoryAccessException { //TODO: switch to this then test then just rewrite the call and get rid of this method // MSDataTypeUtils.getReferencedAddress(currentProgram, address); // this will work whether there is a created reference or not int addressSize = address.getSize(); if (addressSize == 32) { long offset = getInt(address); return address.getNewAddress(offset); } // this currently will workn only if there is a created reference // TODO: get ibo bytes and figure out what the ibo ref address would be if (addressSize 
== 64) { Reference refs[] = getReferencesFrom(address); if (refs.length == 0) { return null; } return refs[0].getToAddress(); } return null; } /** * Method to retrieve the AddressSet of the current program's initialized memory * @return the AddressSet of the current program's initialized memory * @throws CancelledException if cancelled */ private AddressSet getInitializedMemory() throws CancelledException { AddressSet dataAddresses = new AddressSet(); MemoryBlock[] blocks = currentProgram.getMemory().getBlocks(); for (MemoryBlock block : blocks) { monitor.checkCanceled(); if (block.isInitialized()) { dataAddresses.add(block.getStart(), block.getEnd()); } } return dataAddresses; } }
package org.twak.tweed.gen;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.vecmath.Point2d;
import javax.vecmath.Point3d;

import org.twak.siteplan.jme.Jme3z;
import org.twak.tweed.GenHandlesSelect;
import org.twak.tweed.IDumpObjs;
import org.twak.tweed.Tweed;
import org.twak.tweed.gen.skel.SkelGen;
import org.twak.utils.collections.LoopL;
import org.twak.utils.collections.Loopz;
import org.twak.utils.geom.HalfMesh2;
import org.twak.utils.geom.ObjDump;
import org.twak.utils.geom.HalfMesh2.HalfEdge;
import org.twak.utils.geom.HalfMesh2.HalfFace;
import org.twak.utils.ui.ListDownLayout;
import org.twak.utils.Parallel;
import org.twak.viewTrace.facades.Regularizer;

import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.thoughtworks.xstream.XStream;

/**
 * Generator that loads previously-computed solver results ("done.xml" files under a
 * solutions directory), post-processes them and renders each solution block into the
 * JME scene graph via {@link SkelGen}.
 */
public class ResultsGen extends Gen implements IDumpObjs, GenHandlesSelect {

	public static final String SOLUTIONS = "solutions", SOLVER_FILE = "solver_state.xml";

	// root directory that is walked for done.xml solution files
	File root;
	// meshes read on worker threads, drained on the JME thread in calculate();
	// synchronizedList because Parallel delivers results from multiple threads
	List<MeshFile> toAdd = Collections.synchronizedList( new ArrayList<>() );
	// solution file -> scene node currently attached for it, so reloads can replace
	Map<File, Node> rendered = new HashMap<>();
	int footprints = 0;

	public ResultsGen( File file, Tweed tweed ) {
		super ("results", tweed);
		this.root = file;
	}

	/**
	 * Walks f for done.xml files, reads each in parallel and schedules rendering.
	 *
	 * @param f directory to search
	 * @param plot if true, each solution's solver-result debug output is shown
	 */
	private void load( File f, boolean plot ) {
		// reset the before/after footprint counters for this load
		plansIn.set( 0 );
		plansOut.set( 0 );
		Regularizer.seenImages.clear();
		try {
			// NOTE(review): Files.walk returns a stream that should be closed
			// (try-with-resources) to release the directory handles it holds.
			List<File> files = Files.walk(f.toPath())
				.filter(Files::isRegularFile)
				.map (p -> p.toFile())
				.filter (s -> s.getName().equals("done.xml" ) )
				.collect( Collectors.toList() );
			// read all meshes in parallel; loaded() receives the completed set
			new Parallel<File, MeshFile>( files, x -> readMesh(x, plot), meshes -> loaded(meshes), false );
		} catch ( IOException e ) {
			e.printStackTrace();
		}
		calculateOnJmeThread();
	}

	// callback from Parallel: queue results and render on the JME thread
	private void loaded( Set<MeshFile> meshes ) {
		toAdd.addAll( meshes );
		calculateOnJmeThread();
	}

	// pairs a post-processed solution mesh with the file it was read from
	static class MeshFile {
		HalfMesh2 mesh;
		File file;
		public MeshFile (HalfMesh2 mesh, File file) {
			this.mesh = mesh;
			this.file = file;
		}
	}

	// counts of faces before/after post-processing, reported in calculate()
	AtomicInteger plansOut = new AtomicInteger(), plansIn = new AtomicInteger();

	/**
	 * Deserializes one solver state, post-processes it and wraps the mesh.
	 *
	 * @param f a done.xml solver-state file
	 * @param plot if true, show the solver's debug result
	 * @return the mesh with its source file, or null if reading failed
	 */
	private MeshFile readMesh(File f, boolean plot) {

		System.out.println("reading solution " + f.getParentFile().getName());

		try {
			SolverState SS = (SolverState) new XStream().fromXML( f );

			for (HalfFace hf : SS.mesh)
				plansIn.incrementAndGet();

			SkelFootprint.postProcesss( SS );

			for (HalfFace hf : SS.mesh) {
				plansOut.incrementAndGet();
				for (HalfEdge e : hf) {
					// NOTE(review): cast result is unused; presumably this validates
					// that every edge is a SuperEdge (a failed cast would be caught
					// below) - confirm before removing.
					SuperEdge se = (SuperEdge) e;
				}
			}

			if (plot)
				SS.debugSolverResult();

			return new MeshFile ( SS.mesh, f );
		} catch ( Throwable t ) {
			// best-effort: a corrupt solution is logged and skipped (null is
			// filtered out in calculate())
			t.printStackTrace();
			return null;
		}
	}

	Map<HalfMesh2, Node> nodes = new HashMap<>();

	/**
	 * Runs on the JME thread: drains toAdd, replacing any previously-rendered node
	 * for the same file, and attaches a freshly-built SkelGen node per mesh.
	 */
	@Override
	public void calculate() {

		synchronized ( toAdd ) {
			for ( MeshFile block : toAdd ) {
				if ( block != null ) {
					// remove any stale rendering of this solution file
					Node n = rendered.get(block.file);
					if (n != null)
						n.removeFromParent();
					rendered.remove(block.file);

					SkelGen sg = new SkelGen( tweed );
					sg.block = block.mesh;
					sg.parentNode = gNode;
					sg.calculate();

					ResultsGen.this.gNode.attachChild( sg.gNode );
					rendered.put( block.file, sg.gNode );
				}
			}
			toAdd.clear();
		}

		System.out.println(" before: " + plansIn.get() +" after " + plansOut.get() );

		gNode.updateModelBound();
		gNode.updateGeometricState();

		super.calculate();
	}

	// UI panel: "load all" kicks off a background load; "clear" empties the scene
	@Override
	public JComponent getUI() {
		JPanel out = new JPanel();
		out.setLayout( new ListDownLayout() );

		JButton all = new JButton ("load all");
		all.addActionListener( e -> new Thread ( () -> load(root, false) ).start() );
		out.add(all);

		JButton clear = new JButton ("clear");
		clear.addActionListener( e -> clear() );
		out.add(clear);

		return out;
	}

	// removes all queued and rendered solutions from the scene
	private void clear() {
		toAdd.clear();
		rendered.clear();
		for (Spatial s : gNode.getChildren())
			s.removeFromParent();
		calculateOnJmeThread();
		// detach children on the JME thread as well
		tweed.enqueue( new Runnable() {
			@Override
			public void run() {
				Jme3z.removeAllChildren( gNode );
			}
		} );
	}

	@Override
	public void dumpObj( ObjDump dump ) {
		Jme3z.dump( dump, gNode, 1 );
	}

	// selection is currently a no-op
	@Override
	public void select( Spatial target, Vector3f contactPoint, Vector2f cursorPosition ) {
		// tweed.
	}

	/**
	 * Finds and loads the solution whose stored centre is within 1 unit (squared
	 * distance) of the selected block's centre; reports a dialog if none matches.
	 */
	@Override
	public void blockSelected( LoopL<Point3d> polies, BlockGen blockGen ) {

		Point2d cen = Loopz.average( Loopz.to2dLoop( polies, 1, null ) );

		for (File f : new File (tweed.DATA+File.separator +"solutions").listFiles()) {
			try {
				// solution filenames encode their centre coordinates
				Point2d fp = FeatureCache.fileToCenter( f.getName() );
				if (fp.distanceSquared( cen ) < 1) {
					new Thread( () -> load( f, true ) ).start();
					return;
				}
			} catch (Throwable th) {
				// unparsable filename: report and try the next candidate
				System.out.println( "unable to read solution "+f.getName() );
			}
		}

		JOptionPane.showMessageDialog( tweed.frame(), "Can't find solution for center " + cen );
	}
}
/*
 * Copyright (C) 1999-2008 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.xmpp.workgroup.utils;

import java.beans.BeanDescriptor;
import java.beans.BeanInfo;
import java.beans.EventSetDescriptor;
import java.beans.IntrospectionException;
import java.beans.MethodDescriptor;
import java.beans.PropertyDescriptor;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

import org.jivesoftware.util.JiveGlobals;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for JavaBean BeanInfo implementations whose display names and
 * descriptions are loaded from a localized resource bundle. Subclasses supply
 * the bean class, its short name and the property names to expose; this class
 * looks up a bundle named either <tt>bean_&lt;fully.qualified.ClassName&gt;</tt>
 * or <tt>bean_&lt;ClassName&gt;</tt> and uses it to populate descriptors.
 */
public abstract class WorkgroupBeanInfo implements BeanInfo {

    private static final Logger Log = LoggerFactory.getLogger(WorkgroupBeanInfo.class);

    // Localized bundle with bean metadata; stays null when no bundle is found,
    // in which case descriptors fall back to their default (reflective) values.
    private ResourceBundle bundle;

    public WorkgroupBeanInfo() {
        List<String> bundleNames = new ArrayList<String>();
        String prefix = "bean_";
        // fully qualified class name: bean_com.foo.MyClass.properties
        // Use getName() rather than toString(): Class.toString() prepends the
        // word "class ", producing a base name that can never match a bundle.
        bundleNames.add(prefix + getClass().getName());
        // just class name: bean_MyClass.properties
        bundleNames.add(prefix + getName());
        // Get the locale that should be used, then load the resource bundle.
        Locale currentLocale = JiveGlobals.getLocale();
        for (String name : bundleNames) {
            try {
                // TODO - possibly use other class loaders?
                bundle = ResourceBundle.getBundle(name, currentLocale);
                break;
            }
            catch (Exception ignored) {
                // Intentionally ignored: try the next candidate bundle name.
            }
        }
    }

    /**
     * Returns the names of the properties of the bean that should be exposed.
     *
     * @return the names of the properties that should be exposed.
     */
    public abstract String[] getPropertyNames();

    /**
     * Returns the bean Class.
     *
     * @return the Class of the JavaBean that the BeanInfo is for.
     */
    public abstract Class getBeanClass();

    /**
     * Returns the name of the class that the bean info applies to (which
     * corresponds to the resource bundle that will be loaded). For example,
     * for the class <tt>com.foo.ExampleClass</tt>, the name would be
     * <tt>ExampleClass</tt>.
     *
     * @return the name of the JavaBean that the BeanInfo is for.
     */
    public abstract String getName();

    // BeanInfo Interface

    /**
     * Builds a descriptor for the bean class, filling in the name, display
     * name, descriptions and any additional key/value pairs found in the
     * resource bundle. Missing bundle keys are simply skipped.
     */
    public BeanDescriptor getBeanDescriptor() {
        BeanDescriptor descriptor = new BeanDescriptor(getBeanClass());
        if (bundle == null) {
            // No bundle was found in the constructor; return the bare
            // descriptor (same net result as before, without relying on an
            // NPE being swallowed below).
            return descriptor;
        }
        try {
            // Attempt to load the name, displayName and shortDescription explicitly.
            try {
                String name = bundle.getString("name");
                if (name != null) {
                    descriptor.setName(name);
                }
            }
            catch (MissingResourceException ignored) {
            }
            // Get the display name
            try {
                String displayName = bundle.getString("displayName");
                if (displayName != null) {
                    descriptor.setDisplayName(displayName);
                }
            }
            catch (MissingResourceException ignored) {
            }
            // Get the short description
            try {
                String shortDescription = bundle.getString("shortDescription");
                if (shortDescription != null) {
                    descriptor.setShortDescription(shortDescription);
                }
            }
            catch (MissingResourceException ignored) {
            }
            // Get a large description field
            try {
                String description = bundle.getString("description");
                if (description != null) {
                    descriptor.setValue("description", description);
                }
            }
            catch (MissingResourceException ignored) {
            }
            // Add any other properties that are specified.
            Enumeration<String> e = bundle.getKeys();
            while (e.hasMoreElements()) {
                String key = e.nextElement();
                try {
                    String value = bundle.getString(key);
                    if (value != null) {
                        descriptor.setValue(key, value);
                    }
                }
                catch (MissingResourceException ignored) {
                }
            }
        }
        catch (Exception e) {
            // Ignore any exceptions. We may get some if we try to load a
            // property that doesn't appear in the resource bundle.
        }
        return descriptor;
    }

    /**
     * Builds a property descriptor for each name from {@link #getPropertyNames()},
     * localizing the display name, short description and the "useLargeTextField"
     * GUI hint from the bundle when available.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        Class beanClass = getBeanClass();
        String[] properties = getPropertyNames();
        PropertyDescriptor[] descriptors = new PropertyDescriptor[properties.length];
        try {
            // For each property, create a property descriptor and set the
            // name and description using the localized data.
            for (int i = 0; i < descriptors.length; i++) {
                PropertyDescriptor newDescriptor =
                        new PropertyDescriptor(properties[i], beanClass);
                if (bundle != null) {
                    try {
                        newDescriptor.setDisplayName(
                                bundle.getString(properties[i] + ".displayName"));
                    }
                    catch (MissingResourceException ignored) {
                    }
                    try {
                        newDescriptor.setShortDescription(
                                bundle.getString(properties[i] + ".shortDescription"));
                    }
                    catch (MissingResourceException ignored) {
                    }
                    // Check to see if the property should be a large text field. This
                    // is a hint to the GUI saying that a large text field should be
                    // used to set this value.
                    try {
                        String largeText =
                                bundle.getString(properties[i] + ".useLargeTextField");
                        if ("true".equals(largeText)) {
                            newDescriptor.setValue("useLargeTextField", "true");
                        }
                    }
                    catch (MissingResourceException ignored) {
                    }
                }
                descriptors[i] = newDescriptor;
            }
            return descriptors;
        }
        catch (IntrospectionException ie) {
            Log.error(ie.getMessage(), ie);
            // Keep the cause so callers still see the original stack trace.
            throw new Error(ie.toString(), ie);
        }
    }

    public int getDefaultPropertyIndex() {
        // No default property.
        return -1;
    }

    public EventSetDescriptor[] getEventSetDescriptors() {
        // No event sets exposed.
        return null;
    }

    public int getDefaultEventIndex() {
        // No default event.
        return -1;
    }

    public MethodDescriptor[] getMethodDescriptors() {
        // No methods exposed.
        return null;
    }

    public BeanInfo[] getAdditionalBeanInfo() {
        return null;
    }

    public java.awt.Image getIcon(int iconKind) {
        // No icons supplied.
        return null;
    }
}
/*******************************************************************************
 * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
 *******************************************************************************/
package com.att.cadi.aaf.cass;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;

import org.apache.cassandra.auth.AuthenticatedUser;
import org.apache.cassandra.auth.IAuthorizer;
import org.apache.cassandra.auth.IResource;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.auth.PermissionDetails;
import org.apache.cassandra.exceptions.RequestExecutionException;
import org.apache.cassandra.exceptions.RequestValidationException;

import com.att.cadi.Access.Level;
import com.att.cadi.aaf.v2_0.AbsAAFLur;
import com.att.cadi.lur.LocalPermission;

/**
 * Cassandra IAuthorizer that delegates permission decisions to AAF: localized
 * IDs are checked against the local Lur, all other users against the remote
 * AAF Lur by mapping the Cassandra resource name to an AAF permission instance.
 */
public class AAFAuthorizer extends AAFBase implements IAuthorizer {

	// Returns every permission on the resource granted to the user.
	public Set<Permission> authorize(AuthenticatedUser user, IResource resource) {
		String uname, rname;
		// note: uname/rname are assigned inline inside the log call so the same
		// values are reused in the INFO log below
		access.log(Level.DEBUG,"Authorizing",uname=user.getName(),"for",rname=resource.getName());
		Set<Permission> permissions;
		if(user instanceof AAFAuthenticatedUser) {
			AAFAuthenticatedUser aafUser = (AAFAuthenticatedUser) user;
			aafUser.setAnonymous(false);
			if(aafUser.isLocal()) {
				// localized IDs (see cadi.properties): all-or-nothing local check,
				// with the resource's "data" prefix swapped for the cluster name
				permissions = checkPermissions(aafUser, new LocalPermission(
						rname.replaceFirst("data", cluster_name)
						));
			} else {
				// remote AAF check: map resource path to AAF instance syntax,
				// e.g. data/ks/cf -> :<cluster>:ks:cf
				permissions = checkPermissions( aafUser, perm_type, ':'+rname.replaceFirst("data", cluster_name).replace('/', ':'));
			}
		} else {
			// unknown user type: deny everything
			permissions = Permission.NONE;
		}
		access.log(Level.INFO,"Permissions on",rname,"for",uname,':', permissions);
		return permissions;
	}

	/**
	 * Check only for Localized IDs (see cadi.properties)
	 * @param aau the authenticated user
	 * @param perm the local permission to fish for
	 * @return ALL when the local Lur grants the permission, otherwise NONE
	 */
	private Set<Permission> checkPermissions(AAFAuthenticatedUser aau, LocalPermission perm) {
		if(localLur.fish(aau.getFullName(), perm)) {
//			aau.setSuper(true);
			return Permission.ALL;
		} else {
			return Permission.NONE;
		}
	}

	/**
	 * Check remoted AAF Permissions
	 * @param aau the authenticated user
	 * @param type the AAF permission type
	 * @param instance the AAF permission instance derived from the resource name
	 * @return the set of Cassandra permissions accumulated by the action visitors
	 */
	private Set<Permission> checkPermissions(AAFAuthenticatedUser aau, String type, String instance) {
		// Can perform ALL actions
		String fullName = aau.getFullName();
		PermHolder ph = new PermHolder(aau);
		// the shared "actions" visitors below fill ph.permissions as matches are found
		aafLur.fishOneOf(fullName, ph,type,instance,actions);
		return ph.permissions;
	}

	/**
	 * Mutable accumulator passed through the Lur visitor; starts at the shared
	 * immutable Permission.NONE and switches to a private HashSet on first grant.
	 */
	private class PermHolder {
		private AAFAuthenticatedUser aau;
		public PermHolder(AAFAuthenticatedUser aau) {
			this.aau = aau;
		}
		public Set<Permission> permissions = Permission.NONE;
		// replace the shared NONE set with a private mutable set before adding;
		// identity comparison is deliberate (NONE is a shared singleton)
		public void mutable() {
			if(permissions==Permission.NONE) {
				permissions = new HashSet<Permission>();
			}
		}
	};

	/**
	 * This specialty List avoid extra Object Creation, and allows the Lur to do a Vistor on all appropriate Perms
	 */
	// "*" returns true to stop the visit early (full grant); the named actions
	// return false so all of them are checked and accumulated
	private static final ArrayList<AbsAAFLur.Action<PermHolder>> actions = new ArrayList<AbsAAFLur.Action<PermHolder>>();
	static {
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "*";
			}
			public boolean exec(PermHolder a) {
				// wildcard action: mark user as super and grant everything
				a.aau.setSuper(true);
				a.permissions = Permission.ALL;
				return true;
			}
		});
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "SELECT";
			}
			public boolean exec(PermHolder ph) {
				ph.mutable();
				ph.permissions.add(Permission.SELECT);
				return false;
			}
		});
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "MODIFY";
			}
			public boolean exec(PermHolder ph) {
				ph.mutable();
				ph.permissions.add(Permission.MODIFY);
				return false;
			}
		});
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "CREATE";
			}
			public boolean exec(PermHolder ph) {
				ph.mutable();
				ph.permissions.add(Permission.CREATE);
				return false;
			}
		});
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "ALTER";
			}
			public boolean exec(PermHolder ph) {
				ph.mutable();
				ph.permissions.add(Permission.ALTER);
				return false;
			}
		});
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "DROP";
			}
			public boolean exec(PermHolder ph) {
				ph.mutable();
				ph.permissions.add(Permission.DROP);
				return false;
			}
		});
		actions.add(new AbsAAFLur.Action<PermHolder>() {
			public String getName() {
				return "AUTHORIZE";
			}
			public boolean exec(PermHolder ph) {
				ph.mutable();
				ph.permissions.add(Permission.AUTHORIZE);
				return false;
			}
		});
	};

	// Grant/revoke/list are managed through the AAF CLI, not through CQL;
	// these implementations only log that fact.
	public void grant(AuthenticatedUser performer, Set<Permission> permissions, IResource resource, String to)
			throws RequestExecutionException {
		access.log(Level.INFO, "Use AAF CLI to grant permission(s) to user/role");
	}

	public void revoke(AuthenticatedUser performer, Set<Permission> permissions, IResource resource, String from)
			throws RequestExecutionException {
		access.log(Level.INFO,"Use AAF CLI to revoke permission(s) for user/role");
	}

	public Set<PermissionDetails> list(AuthenticatedUser performer, Set<Permission> permissions, IResource resource, String of)
			throws RequestValidationException, RequestExecutionException {
		access.log(Level.INFO,"Use AAF CLI to find the list of permissions");
		return null;
	}

	// Called prior to deleting the user with DROP USER query. Internal hook, so no permission checks are needed here.
	public void revokeAll(String droppedUser) {
		access.log(Level.INFO,"Use AAF CLI to revoke permission(s) for user/role");
	}

	// Called after a resource is removed (DROP KEYSPACE, DROP TABLE, etc.).
	public void revokeAll(IResource droppedResource) {
		access.log(Level.INFO,"Use AAF CLI to delete the unused permission", droppedResource.getName());
	}
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.kie.services.impl.admin; import static org.jbpm.services.api.query.QueryResultMapper.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.kie.scanner.KieMavenRepository.getKieMavenRepository; import java.io.File; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.assertj.core.api.Assertions; import org.drools.compiler.kie.builder.impl.InternalKieModule; import org.jbpm.kie.services.impl.KModuleDeploymentUnit; import org.jbpm.kie.services.impl.query.SqlQueryDefinition; import org.jbpm.kie.services.impl.query.mapper.RawListQueryMapper; import org.jbpm.kie.services.impl.query.mapper.TaskSummaryQueryMapper; import org.jbpm.kie.services.impl.query.mapper.UserTaskInstanceQueryMapper; import org.jbpm.kie.services.test.KModuleDeploymentServiceTest; import org.jbpm.kie.test.util.AbstractKieServicesBaseTest; import org.jbpm.kie.test.util.CountDownListenerFactory; import org.jbpm.services.api.ProcessInstanceNotFoundException; import org.jbpm.services.api.TaskNotFoundException; import org.jbpm.services.api.admin.TaskNotification; import org.jbpm.services.api.admin.TaskReassignment; import 
org.jbpm.services.api.admin.UserTaskAdminService; import org.jbpm.services.api.model.DeploymentUnit; import org.jbpm.services.api.model.UserTaskInstanceDesc; import org.jbpm.services.api.query.model.QueryDefinition; import org.jbpm.services.api.query.model.QueryParam; import org.jbpm.services.api.query.model.QueryDefinition.Target; import org.jbpm.shared.services.impl.TransactionalCommandService; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kie.api.KieServices; import org.kie.api.builder.ReleaseId; import org.kie.api.runtime.process.ProcessInstance; import org.kie.api.runtime.query.QueryContext; import org.kie.api.task.model.OrganizationalEntity; import org.kie.api.task.model.Status; import org.kie.api.task.model.TaskSummary; import org.kie.internal.query.QueryFilter; import org.kie.internal.runtime.conf.ObjectModel; import org.kie.internal.task.api.TaskModelFactory; import org.kie.internal.task.api.TaskModelProvider; import org.kie.internal.task.api.model.EmailNotification; import org.kie.internal.task.api.model.TaskEvent; import org.kie.scanner.KieMavenRepository; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; public class UserTaskAdminServiceImplTest extends AbstractKieServicesBaseTest { private static final Logger logger = LoggerFactory.getLogger(KModuleDeploymentServiceTest.class); protected static final String ADMIN_ARTIFACT_ID = "test-admin"; protected static final String ADMIN_GROUP_ID = "org.jbpm.test"; protected static final String ADMIN_VERSION_V1 = "1.0.0"; private List<DeploymentUnit> units = new ArrayList<DeploymentUnit>(); private KModuleDeploymentUnit deploymentUnit; private Long processInstanceId = null; protected UserTaskAdminService userTaskAdminService; private TaskModelFactory factory = TaskModelProvider.getFactory(); @Before public void prepare() { configureServices(); logger.debug("Preparing kjar"); KieServices ks = 
KieServices.Factory.get(); // version 1 of kjar ReleaseId releaseId = ks.newReleaseId(ADMIN_GROUP_ID, ADMIN_ARTIFACT_ID, ADMIN_VERSION_V1); List<String> processes = new ArrayList<String>(); processes.add("repo/processes/general/humanTask.bpmn"); InternalKieModule kJar1 = createKieJar(ks, releaseId, processes); File pom = new File("target/admin", "pom.xml"); pom.getParentFile().mkdir(); try { FileOutputStream fs = new FileOutputStream(pom); fs.write(getPom(releaseId).getBytes()); fs.close(); } catch (Exception e) { } KieMavenRepository repository = getKieMavenRepository(); repository.installArtifact(releaseId, kJar1, pom); userTaskAdminService = new UserTaskAdminServiceImpl(); ((UserTaskAdminServiceImpl) userTaskAdminService).setUserTaskService(userTaskService); ((UserTaskAdminServiceImpl) userTaskAdminService).setRuntimeDataService(runtimeDataService); ((UserTaskAdminServiceImpl) userTaskAdminService).setIdentityProvider(identityProvider); ((UserTaskAdminServiceImpl) userTaskAdminService).setCommandService(new TransactionalCommandService(emf)); // now let's deploy to runtime both kjars deploymentUnit = new KModuleDeploymentUnit(ADMIN_GROUP_ID, ADMIN_ARTIFACT_ID, ADMIN_VERSION_V1); deploymentService.deploy(deploymentUnit); units.add(deploymentUnit); // set user to administrator so it will be allowed to do operations identityProvider.setName("Administrator"); identityProvider.setRoles(Collections.singletonList("")); } @After public void cleanup() { cleanupSingletonSessionId(); if (processInstanceId != null) { try { // let's abort process instance to leave the system in clear state processService.abortProcessInstance(processInstanceId); ProcessInstance pi = processService.getProcessInstance(processInstanceId); Assertions.assertThat(pi).isNull(); } catch (ProcessInstanceNotFoundException e) { // ignore it as it might already be completed/aborted } } if (units != null && !units.isEmpty()) { for (DeploymentUnit unit : units) { try { deploymentService.undeploy(unit); } 
catch (Exception e) { // do nothing in case of some failed tests to avoid next test to fail as well } } units.clear(); } close(); CountDownListenerFactory.clear(); } public void setUserTaskAdminService(UserTaskAdminService userTaskAdminService) { this.userTaskAdminService = userTaskAdminService; } @Test public void testAddPotentialOwnersNotBusinessAdmin() { identityProvider.setName("notAdmin"); processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); Assertions.assertThatThrownBy( () -> userTaskAdminService.addPotentialOwners(task.getId(), false, factory.newUser("john"))) .hasMessageContaining("User notAdmin is not business admin of task 1"); } @Test public void testAddPotentialOwnersToNonExistentTask() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); Assertions.assertThatThrownBy( () -> userTaskAdminService.addPotentialOwners(15456, false, factory.newUser("john"))) .hasMessageContaining("Task with id 15456 was not found"); } @Test public void testAddRemovePotentialOwnersAsGroup() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); 
TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); // Forward the task to HR group (Add HR as potential owners) identityProvider.setRoles(Collections.singletonList("HR")); userTaskAdminService.addPotentialOwners(task.getId(), true, factory.newGroup("HR")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("katy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); // HR has no resources to handle so lets forward it to accounting userTaskAdminService.removePotentialOwners(task.getId(), factory.newGroup("HR")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("katy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); identityProvider.setRoles(Collections.singletonList("Accounting")); userTaskAdminService.addPotentialOwners(task.getId(), false, factory.newGroup("Accounting")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("mary", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test public void testAddPotentialOwners() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); userTaskAdminService.addPotentialOwners(task.getId(), false, factory.newUser("john")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent updatedEvent = events.get(2); Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Potential owners [john] have been added"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new 
QueryFilter()); Assertions.assertThat(tasks).hasSize(1); userTaskAdminService.addPotentialOwners(task.getId(), true, factory.newUser("john")); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); } @Test public void testAddPotentialOwnersWrongDeploymentId() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); assertThatExceptionOfType(TaskNotFoundException.class).isThrownBy(() -> { userTaskAdminService.addPotentialOwners("wrong-one", task.getId(), false, factory.newUser("john")); }) .withMessageContaining("Task with id " + task.getId() + " is not associated with wrong-one"); tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter()); Assertions.assertThat(tasks).hasSize(0); } @Test public void testAddExcludedOwners() { processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument"); Assertions.assertThat(processInstanceId).isNotNull(); List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter()); Assertions.assertThat(tasks).hasSize(1); TaskSummary task = tasks.get(0); userTaskService.release(task.getId(), "salaboy"); userTaskAdminService.addExcludedOwners(task.getId(), false, factory.newUser("salaboy")); List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter()); Assertions.assertThat(events).hasSize(3); TaskEvent updatedEvent = events.get(2); 
// NOTE(review): the statements below are the tail of a test method that begins
// above this chunk; untouched apart from formatting. They verify that adding an
// excluded owner with the boolean flag set to true replaces the existing
// exclusion ("salaboy") with "john", as shown by the queue sizes asserted here.
        Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Excluded owners [salaboy] have been added");
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
        userTaskAdminService.addExcludedOwners(task.getId(), true, factory.newUser("john"));
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
    }

    /**
     * Adds "salaboy" as business administrator twice — first with the flag set to
     * false (both "salaboy" and the default "Administrator" then see the task),
     * then with true (only "salaboy" remains) — and checks the task event log
     * message written for the addition.
     */
    @Test
    public void testAddBusinessAdmins() {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskService.release(task.getId(), "salaboy");
        tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
        // flag=false: "Administrator" still sees the task afterwards (appended)
        userTaskAdminService.addBusinessAdmins(task.getId(), false, factory.newUser("salaboy"));
        List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter());
        Assertions.assertThat(events).hasSize(3);
        TaskEvent updatedEvent = events.get(2);
        Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Business administrators [salaboy] have been added");
        tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        // flag=true: "Administrator" no longer sees the task (replaced)
        userTaskAdminService.addBusinessAdmins(task.getId(), true, factory.newUser("salaboy"));
        tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
    }

    /** Removes "salaboy" from the potential owners; event log records it and the queue empties. */
    @Test
    public void testRemovePotentialOwners() {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskService.release(task.getId(), "salaboy");
        userTaskAdminService.removePotentialOwners(task.getId(), factory.newUser("salaboy"));
        List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter());
        Assertions.assertThat(events).hasSize(3);
        TaskEvent updatedEvent = events.get(2);
        Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Potential owners [salaboy] have been removed");
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
    }

    /** Excludes "salaboy" (task disappears from their queue), then removes the exclusion (task returns). */
    @Test
    public void testRemoveExcludedOwners() {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskService.release(task.getId(), "salaboy");
        userTaskAdminService.addExcludedOwners(task.getId(), false, factory.newUser("salaboy"));
        List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter());
        Assertions.assertThat(events).hasSize(3);
        TaskEvent updatedEvent = events.get(2);
        Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Excluded owners [salaboy] have been added");
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
        userTaskAdminService.removeExcludedOwners(task.getId(), factory.newUser("salaboy"));
        events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter());
        Assertions.assertThat(events).hasSize(4);
        updatedEvent = events.get(3);
        Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Excluded owners [salaboy] have been removed");
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
    }

    /** Removes "Administrator" from business admins; the Ready task is no longer listed for them. */
    @Test
    public void testRemoveBusinessAdmin() {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsBusinessAdministrator("Administrator", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskAdminService.removeBusinessAdmins(task.getId(), factory.newUser("Administrator"));
        List<TaskEvent> events = runtimeDataService.getTaskEvents(task.getId(), new QueryFilter());
        Assertions.assertThat(events).hasSize(2);
        TaskEvent updatedEvent = events.get(1);
        Assertions.assertThat(updatedEvent.getMessage()).isEqualTo("Business administrators [Administrator] have been removed");
        List<Status> readyStatuses = Arrays.asList(new Status[]{ org.kie.api.task.model.Status.Ready });
        tasks = runtimeDataService.getTasksAssignedAsBusinessAdministratorByStatus("Administrator", readyStatuses, new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
    }

    /** Adds a single input variable, then a bulk map of inputs, then removes the bulk ones. */
    @Test
    public void testAddRemoveInputData() {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Map<String, Object> inputData = userTaskService.getTaskInputContentByTaskId(task.getId());
        Assertions.assertThat(inputData).doesNotContainKey("added-input");
        userTaskAdminService.addTaskInput(task.getId(), "added-input", "just a test");
        inputData = userTaskService.getTaskInputContentByTaskId(task.getId());
        Assertions.assertThat(inputData).containsKey("added-input");
        Assertions.assertThat(inputData.get("added-input")).isEqualTo("just a test");
        Assertions.assertThat(inputData).doesNotContainKey("added-input2");
        Assertions.assertThat(inputData).doesNotContainKey("added-input3");
        Map<String, Object> extra = new HashMap<>();
        extra.put("added-input2", "1");
        extra.put("added-input3", "2");
        userTaskAdminService.addTaskInputs(task.getId(), extra);
        inputData = userTaskService.getTaskInputContentByTaskId(task.getId());
        Assertions.assertThat(inputData).containsKey("added-input2");
        Assertions.assertThat(inputData.get("added-input2")).isEqualTo("1");
        Assertions.assertThat(inputData).containsKey("added-input3");
        Assertions.assertThat(inputData.get("added-input3")).isEqualTo("2");
        userTaskAdminService.removeTaskInputs(task.getId(), "added-input2", "added-input3");
        inputData = userTaskService.getTaskInputContentByTaskId(task.getId());
        Assertions.assertThat(inputData).doesNotContainKey("added-input2");
        Assertions.assertThat(inputData).doesNotContainKey("added-input3");
    }

    /** Saves an output variable via the task service, then removes it via the admin service. */
    @Test
    public void testRemoveOutputData() {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Map<String, Object> output = new HashMap<>();
        output.put("added-output", "draft");
        userTaskService.saveContent(task.getId(), output);
        Map<String, Object> outputData = userTaskService.getTaskOutputContentByTaskId(task.getId());
        Assertions.assertThat(outputData).containsKey("added-output");
        Assertions.assertThat(outputData.get("added-output")).isEqualTo("draft");
        userTaskAdminService.removeTaskOutputs(task.getId(), "added-output");
        outputData = userTaskService.getTaskOutputContentByTaskId(task.getId());
        Assertions.assertThat(outputData).doesNotContainKey("added-output");
    }

    /** Schedules reassignment to "john" when the task is not started within 2s; waits for it to fire. */
    @Test(timeout=10000)
    public void testReassignNotStarted() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskAdminService.reassignWhenNotStarted(task.getId(), "2s", factory.newUser("john"));
        // blocks until the listener registered in getTaskListeners() below counts down
        CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted();
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
    }

    /** Unparsable, null and empty time expressions are rejected with specific messages. */
    @Test(timeout=10000)
    public void testReassignNotStartedInvalidTimeExpression() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotStarted(task.getId(), "2ssssssss", factory.newUser("john"));
        }).hasMessage("Error parsing time string: [ 2ssssssss ]");
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotStarted(task.getId(), null, factory.newUser("john"));
        }).hasMessage("Invalid time expression");
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotStarted(task.getId(), "", factory.newUser("john"));
        }).hasMessage("Invalid time expression");
    }

    /** A null org-entity argument is rejected for not-started reassignment. */
    @Test(timeout=10000)
    public void testReassignNotStartedInvalidOrgEntities() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotStarted(task.getId(), "2s", null);
        }).hasMessage("Invalid org entity");
    }

    /**
     * Schedules reassignment when a started task is not completed within 2s.
     * The assertions show that getTaskReassignments(id, true) lists only
     * not-yet-fired reassignments, while the false variant lists all of them.
     */
    @Test(timeout=10000)
    public void testReassignNotCompleted() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskService.start(task.getId(), "salaboy");
        Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(0);
        userTaskAdminService.reassignWhenNotCompleted(task.getId(), "2s", factory.newUser("john"));
        // still pending at this point
        reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(1);
        CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted();
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(0);
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        // once fired it is no longer "active" (true) but remains in the full list (false)
        reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(0);
        reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(1);
    }

    /** Same invalid-time-expression checks as above, for the not-completed variant. */
    @Test(timeout=10000)
    public void testReassignNotCompletedInvalidTimeExpression() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskService.start(task.getId(), "salaboy");
        Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(0);
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotCompleted(task.getId(), "2ssssssss", factory.newUser("john"));
        }).hasMessage("Error parsing time string: [ 2ssssssss ]");
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotCompleted(task.getId(), null, factory.newUser("john"));
        }).hasMessage("Invalid time expression");
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotCompleted(task.getId(), "", factory.newUser("john"));
        }).hasMessage("Invalid time expression");
    }

    /** A null org-entity argument is rejected for not-completed reassignment. */
    @Test(timeout=10000)
    public void testReassignNotCompletedInvalidOrgEntities() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        userTaskService.start(task.getId(), "salaboy");
        Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(0);
        Assertions.assertThatThrownBy(() -> {
            userTaskAdminService.reassignWhenNotCompleted(task.getId(), "2s", null);
        }).hasMessage("Invalid org entity");
    }

    /** Sends an email notification when the task is not started within 2s; task assignment is unchanged. */
    @Test(timeout=10000)
    public void testNotifyNotStarted() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        List<OrganizationalEntity> recipients = new ArrayList<>();
        recipients.add(factory.newUser("john"));
        EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", "");
        userTaskAdminService.notifyWhenNotStarted(task.getId(), "2s", emailNotification);
        CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted();
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
    }

    /**
     * Schedules a not-completed notification; as with reassignments, the true
     * flag of getTaskNotifications lists only pending notifications while false
     * lists them all (see the post-fire assertions).
     */
    @Test(timeout=10000)
    public void testNotifyNotCompleted() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Collection<TaskNotification> notifications = userTaskAdminService.getTaskNotifications(task.getId(), false);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(0);
        userTaskService.start(task.getId(), "salaboy");
        List<OrganizationalEntity> recipients = new ArrayList<>();
        recipients.add(factory.newUser("john"));
        EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", "");
        userTaskAdminService.notifyWhenNotCompleted(task.getId(), "2s", emailNotification);
        notifications = userTaskAdminService.getTaskNotifications(task.getId(), false);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(1);
        CountDownListenerFactory.getExistingTask("userTaskAdminService").waitTillCompleted();
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        notifications = userTaskAdminService.getTaskNotifications(task.getId(), true);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(0);
        notifications = userTaskAdminService.getTaskNotifications(task.getId(), false);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(1);
    }

    /** Schedules a not-started notification and cancels it before it fires. */
    @Test(timeout=10000)
    public void testNotifyNotStartedAndCancel() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Collection<TaskNotification> notifications = userTaskAdminService.getTaskNotifications(task.getId(), false);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(0);
        List<OrganizationalEntity> recipients = new ArrayList<>();
        recipients.add(factory.newUser("john"));
        EmailNotification emailNotification = userTaskAdminService.buildEmailNotification("test", recipients, "Simple body", "Administrator", "");
        long notificationId = userTaskAdminService.notifyWhenNotStarted(task.getId(), "2s", emailNotification);
        notifications = userTaskAdminService.getTaskNotifications(task.getId(), true);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(1);
        userTaskAdminService.cancelNotification(task.getId(), notificationId);
        notifications = userTaskAdminService.getTaskNotifications(task.getId(), true);
        Assertions.assertThat(notifications).isNotNull();
        Assertions.assertThat(notifications).hasSize(0);
    }

    /** Schedules a not-started reassignment and cancels it before it fires. */
    @Test(timeout=10000)
    public void testReassignNotStartedAndCancel() throws Exception {
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument");
        Assertions.assertThat(processInstanceId).isNotNull();
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("salaboy", new QueryFilter());
        Assertions.assertThat(tasks).hasSize(1);
        TaskSummary task = tasks.get(0);
        Collection<TaskReassignment> reassignments = userTaskAdminService.getTaskReassignments(task.getId(), false);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(0);
        Long reassignmentId = userTaskAdminService.reassignWhenNotStarted(task.getId(), "2s", factory.newUser("john"));
        reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(1);
        userTaskAdminService.cancelReassignment(task.getId(), reassignmentId);
        reassignments = userTaskAdminService.getTaskReassignments(task.getId(), true);
        Assertions.assertThat(reassignments).isNotNull();
        Assertions.assertThat(reassignments).hasSize(0);
    }

    /**
     * Registers a custom PO_TASK SQL query and verifies identity-filtered task
     * lookups through two mappers, plus a grouped/count raw query against it.
     */
    @Test
    public void testGetTaskInstancesAsPotOwners() {
        String PO_TASK_QUERY = "select ti.taskId, ti.activationTime, ti.actualOwner, ti.createdBy, ti.createdOn, ti.deploymentId, "
                + "ti.description, ti.dueDate, ti.name, ti.parentId, ti.priority, ti.processId, ti.processInstanceId, "
                + "ti.processSessionId, ti.status, ti.workItemId, oe.id, eo.entity_id "
                + "from AuditTaskImpl ti "
                + "left join PeopleAssignments_PotOwners po on ti.taskId = po.task_id "
                + "left join OrganizationalEntity oe on po.entity_id = oe.id "
                + " left join PeopleAssignments_ExclOwners eo on ti.taskId = eo.task_id ";
        SqlQueryDefinition query = new SqlQueryDefinition("getMyTaskInstances", "jdbc/testDS1", Target.PO_TASK);
        query.setExpression(PO_TASK_QUERY);
        queryService.registerQuery(query);
        List<QueryDefinition> queries = queryService.getQueries(new QueryContext());
        assertNotNull(queries);
        assertEquals(1, queries.size());
        QueryDefinition registeredQuery = queries.get(0);
        assertNotNull(registeredQuery);
        assertEquals(query.getName(), registeredQuery.getName());
        assertEquals(query.getSource(), registeredQuery.getSource());
        assertEquals(query.getExpression(), registeredQuery.getExpression());
        assertEquals(query.getTarget(), registeredQuery.getTarget());
        registeredQuery = queryService.getQuery(query.getName());
        assertNotNull(registeredQuery);
        assertEquals(query.getName(), registeredQuery.getName());
        assertEquals(query.getSource(), registeredQuery.getSource());
        assertEquals(query.getExpression(), registeredQuery.getExpression());
        assertEquals(query.getTarget(), registeredQuery.getTarget());
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("approval_document", "initial content");
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
        assertNotNull(processInstanceId);
        // PO_TASK queries are filtered by the calling identity set on identityProvider
        identityProvider.setName("notvalid");
        List<UserTaskInstanceDesc> taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext());
        assertNotNull(taskInstanceLogs);
        assertEquals(0, taskInstanceLogs.size());
        identityProvider.setName("salaboy");
        taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext());
        assertNotNull(taskInstanceLogs);
        assertEquals(1, taskInstanceLogs.size());
        List<TaskSummary> taskSummaries = queryService.query(query.getName(), TaskSummaryQueryMapper.get(), new QueryContext());
        assertNotNull(taskSummaries);
        assertEquals(1, taskSummaries.size());
        identityProvider.setName("Administrator");
        userTaskAdminService.addPotentialOwners(taskSummaries.get(0).getId(), false, factory.newUser("john"));
        identityProvider.setName("salaboy");
        taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext());
        assertNotNull(taskInstanceLogs);
        assertEquals(1, taskInstanceLogs.size());
        taskSummaries = queryService.query(query.getName(), TaskSummaryQueryMapper.get(), new QueryContext());
        assertNotNull(taskSummaries);
        assertEquals(1, taskSummaries.size());
        QueryParam[] parameters = QueryParam.getBuilder().append(QueryParam.groupBy(COLUMN_NAME)).append(QueryParam.count(COLUMN_TASKID)).get();
        Collection<List<Object>> instances = queryService.query(query.getName(), RawListQueryMapper.get(), new QueryContext(), parameters);
        assertNotNull(instances);
        assertEquals(1, instances.size());
        List<Object> result = instances.iterator().next();
        assertNotNull(result);
        assertEquals(2, result.size());
        // here we have count set to 2 because group by is on name and thus it returns duplicates
        assertTrue(result.get(1) instanceof Number);
        assertEquals(2, ((Number) result.get(1)).intValue());
        processService.abortProcessInstance(processInstanceId);
        processInstanceId = null;
    }

    /**
     * Same PO_TASK query as above but with two process instances; the
     * identity-filtered result count stays at 2 even after a potential owner
     * is added to one of the tasks.
     */
    @Test
    public void testGetTaskInstancesAsPotOwnersMultipleInstances() {
        String PO_TASK_QUERY = "select ti.taskId, ti.activationTime, ti.actualOwner, ti.createdBy, ti.createdOn, ti.deploymentId, "
                + "ti.description, ti.dueDate, ti.name, ti.parentId, ti.priority, ti.processId, ti.processInstanceId, "
                + "ti.processSessionId, ti.status, ti.workItemId, oe.id, eo.entity_id "
                + "from AuditTaskImpl ti "
                + "left join PeopleAssignments_PotOwners po on ti.taskId = po.task_id "
                + "left join OrganizationalEntity oe on po.entity_id = oe.id "
                + " left join PeopleAssignments_ExclOwners eo on ti.taskId = eo.task_id ";
        SqlQueryDefinition query = new SqlQueryDefinition("getMyTaskInstances", "jdbc/testDS1", Target.PO_TASK);
        query.setExpression(PO_TASK_QUERY);
        queryService.registerQuery(query);
        List<QueryDefinition> queries = queryService.getQueries(new QueryContext());
        assertNotNull(queries);
        assertEquals(1, queries.size());
        QueryDefinition registeredQuery = queries.get(0);
        assertNotNull(registeredQuery);
        assertEquals(query.getName(), registeredQuery.getName());
        assertEquals(query.getSource(), registeredQuery.getSource());
        assertEquals(query.getExpression(), registeredQuery.getExpression());
        assertEquals(query.getTarget(), registeredQuery.getTarget());
        registeredQuery = queryService.getQuery(query.getName());
        assertNotNull(registeredQuery);
        assertEquals(query.getName(), registeredQuery.getName());
        assertEquals(query.getSource(), registeredQuery.getSource());
        assertEquals(query.getExpression(), registeredQuery.getExpression());
        assertEquals(query.getTarget(), registeredQuery.getTarget());
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("approval_document", "initial content");
        processInstanceId = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
        assertNotNull(processInstanceId);
        Long processInstanceId2 = processService.startProcess(deploymentUnit.getIdentifier(), "org.jbpm.writedocument", params);
        assertNotNull(processInstanceId);
        assertNotNull(processInstanceId2);
        identityProvider.setName("notvalid");
        List<UserTaskInstanceDesc> taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext());
        assertNotNull(taskInstanceLogs);
        assertEquals(0, taskInstanceLogs.size());
        identityProvider.setName("salaboy");
        taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext());
        assertNotNull(taskInstanceLogs);
        assertEquals(2, taskInstanceLogs.size());
        identityProvider.setName("Administrator");
        userTaskAdminService.addPotentialOwners(taskInstanceLogs.get(0).getTaskId(), false, factory.newUser("john"));
        identityProvider.setName("salaboy");
        taskInstanceLogs = queryService.query(query.getName(), UserTaskInstanceQueryMapper.get(), new QueryContext());
        assertNotNull(taskInstanceLogs);
        assertEquals(2, taskInstanceLogs.size());
        processService.abortProcessInstance(processInstanceId);
        processInstanceId = null;
        processService.abortProcessInstance(processInstanceId2);
        processInstanceId2 = null;
    }

    /*
     * Helper methods
     */

    // Registers the MVEL task listener that backs the CountDownListenerFactory
    // waitTillCompleted() calls used by the timed tests above.
    @Override
    protected List<ObjectModel> getTaskListeners() {
        List<ObjectModel> listeners = super.getTaskListeners();
        listeners.add(new ObjectModel("mvel", "org.jbpm.kie.test.util.CountDownListenerFactory.getTask(\"userTaskAdminService\", 1)"));
        return listeners;
    }

    // Enables deployment-descriptor creation — presumably so the custom listener
    // above is actually applied; TODO confirm against the base test class.
    protected boolean createDescriptor() {
        return true;
    }
}
/**
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.process.audit;

import java.util.List;

import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.TransactionRequiredException;
import javax.transaction.NotSupportedException;
import javax.transaction.Status;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;

import org.drools.core.WorkingMemory;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.runtime.process.InternalProcessRuntime;
import org.jbpm.process.audit.variable.ProcessIndexerManager;
import org.jbpm.process.instance.impl.ProcessInstanceImpl;
import org.jbpm.workflow.instance.impl.NodeInstanceImpl;
import org.kie.api.event.KieRuntimeEvent;
import org.kie.api.event.process.ProcessCompletedEvent;
import org.kie.api.event.process.ProcessEventListener;
import org.kie.api.event.process.ProcessNodeLeftEvent;
import org.kie.api.event.process.ProcessNodeTriggeredEvent;
import org.kie.api.event.process.ProcessStartedEvent;
import org.kie.api.event.process.ProcessVariableChangedEvent;
import org.kie.api.runtime.Environment;
import org.kie.api.runtime.EnvironmentName;
import org.kie.api.runtime.KieSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Enables history log via JPA: listens to process runtime events and writes
 * the corresponding audit log entities through an {@link EntityManager}.
 */
public class JPAWorkingMemoryDbLogger extends AbstractAuditLogger {

    private static final Logger logger = LoggerFactory.getLogger(JPAWorkingMemoryDbLogger.class);

    // JNDI names probed for a UserTransaction when the EM is not already joined to one;
    // the last entry is configurable via -Djbpm.ut.jndi.lookup (may be null)
    private static final String[] KNOWN_UT_JNDI_KEYS = new String[] {"UserTransaction", "java:jboss/UserTransaction", System.getProperty("jbpm.ut.jndi.lookup")};

    // whether transactions are JTA-managed (default true, overridable via env)
    private boolean isJTA = true;
    // true when the EM came from the command-scoped env entry and must not be closed here
    private boolean sharedEM = false;

    private EntityManagerFactory emf;

    private ProcessIndexerManager indexManager = ProcessIndexerManager.get();

    /*
     * for backward compatibility
     */
    public JPAWorkingMemoryDbLogger(WorkingMemory workingMemory) {
        super(workingMemory);
        InternalProcessRuntime processRuntime = ((InternalWorkingMemory) workingMemory).getProcessRuntime();
        if (processRuntime != null) {
            processRuntime.addEventListener( (ProcessEventListener) this );
        }
    }

    public JPAWorkingMemoryDbLogger(KieSession session) {
        Environment env = session.getEnvironment();
        internalSetIsJTA(env);
        session.addEventListener(this);
    }
    /*
     * end of backward compatibility
     */

    public JPAWorkingMemoryDbLogger(EntityManagerFactory emf) {
        this.emf = emf;
    }

    public JPAWorkingMemoryDbLogger() {
        // default constructor when this is used with a persistent KieSession
    }

    public JPAWorkingMemoryDbLogger(EntityManagerFactory emf, Environment env) {
        this.emf = emf;
        internalSetIsJTA(env);
    }

    public JPAWorkingMemoryDbLogger(Environment env) {
        internalSetIsJTA(env);
    }

    // Reads the "IS_JTA_TRANSACTION" flag from the environment; leaves the
    // default (true) when the entry is absent.
    private void internalSetIsJTA(Environment env) {
        Boolean bool = (Boolean) env.get("IS_JTA_TRANSACTION");
        if (bool != null) {
            isJTA = bool.booleanValue();
        }
    }

    @Override
    public void beforeNodeTriggered(ProcessNodeTriggeredEvent event) {
        NodeInstanceLog log = (NodeInstanceLog) builder.buildEvent(event);
        persist(log, event);
        // stash the log on the node instance so afterNodeTriggered can enrich it
        ((NodeInstanceImpl) event.getNodeInstance()).getMetaData().put("NodeInstanceLog", log);
    }

    @Override
    public void afterNodeLeft(ProcessNodeLeftEvent event) {
        NodeInstanceLog log = (NodeInstanceLog) builder.buildEvent(event, null);
        persist(log, event);
    }

    @Override
    public void afterVariableChanged(ProcessVariableChangedEvent event) {
        // one variable change may produce multiple indexed log entries
        List<org.kie.api.runtime.manager.audit.VariableInstanceLog> variables = indexManager.index(getBuilder(), event);
        for (org.kie.api.runtime.manager.audit.VariableInstanceLog log : variables) {
            persist(log, event);
        }
    }

    @Override
    public void beforeProcessStarted(ProcessStartedEvent event) {
        ProcessInstanceLog log = (ProcessInstanceLog) builder.buildEvent(event);
        persist(log, event);
        // stash the log so afterProcessCompleted can update the same row
        ((ProcessInstanceImpl) event.getProcessInstance()).getMetaData().put("ProcessInstanceLog", log);
    }

    @Override
    public void afterProcessCompleted(ProcessCompletedEvent event) {
        long processInstanceId = event.getProcessInstance().getId();
        EntityManager em = getEntityManager(event);
        Object tx = joinTransaction(em);
        ProcessInstanceLog log = (ProcessInstanceLog) ((ProcessInstanceImpl) event.getProcessInstance()).getMetaData().get("ProcessInstanceLog");
        if (log == null) {
            // no stashed log (e.g. logger attached after start): fall back to the
            // most recent open log entry for this process instance
            List<ProcessInstanceLog> result = em.createQuery(
                "from ProcessInstanceLog as log where log.processInstanceId = :piId and log.end is null")
                .setParameter("piId", processInstanceId).getResultList();
            if (result != null && result.size() != 0) {
                log = result.get(result.size() - 1);
            }
        }
        if (log != null) {
            log = (ProcessInstanceLog) builder.buildEvent(event, log);
            em.merge(log);
        }
        leaveTransaction(em, tx);
    }

    @Override
    public void afterNodeTriggered(ProcessNodeTriggeredEvent event) {
        // trigger this to record some of the data (like work item id) after activity was triggered
        NodeInstanceLog log = (NodeInstanceLog) ((NodeInstanceImpl) event.getNodeInstance()).getMetaData().get("NodeInstanceLog");
        builder.buildEvent(event, log);
    }

    @Override
    public void beforeNodeLeft(ProcessNodeLeftEvent event) {
        // intentionally empty
    }

    @Override
    public void beforeVariableChanged(ProcessVariableChangedEvent event) {
        // intentionally empty
    }

    @Override
    public void afterProcessStarted(ProcessStartedEvent event) {
        // intentionally empty
    }

    @Override
    public void beforeProcessCompleted(ProcessCompletedEvent event) {
        // intentionally empty
    }

    public void dispose() {
    }

    /**
     * This method persists the entity given to it.
     * </p>
     * This method also makes sure that the entity manager used for persisting the entity, joins the existing JTA transaction.
     * @param entity An entity to be persisted.
     * @param event The runtime event whose environment supplies the entity manager.
     */
    private void persist(Object entity, KieRuntimeEvent event) {
        EntityManager em = getEntityManager(event);
        Object tx = joinTransaction(em);
        em.persist(entity);
        leaveTransaction(em, tx);
    }

    /**
     * This method creates a entity manager, preferring the explicitly configured
     * factory, then the command-scoped entity manager from the environment, then
     * the environment's entity manager factory.
     */
    private EntityManager getEntityManager(KieRuntimeEvent event) {
        Environment env = event.getKieRuntime().getEnvironment();

        /**
         * It's important to set the sharedEM flag with _every_ operation
         * otherwise, there are situations where:
         * 1. it can be set to "true"
         * 2. something can happen
         * 3. the "true" value can no longer apply
         * (I've seen this in debugging logs.. )
         */
        sharedEM = false;
        if( emf != null ) {
            return emf.createEntityManager();
        } else if (env != null) {
            EntityManager em = (EntityManager) env.get(EnvironmentName.CMD_SCOPED_ENTITY_MANAGER);
            if (em != null) {
                // shared, externally-managed EM: do not close it in leaveTransaction
                sharedEM = true;
                return em;
            }
            EntityManagerFactory emf = (EntityManagerFactory) env.get(EnvironmentName.ENTITY_MANAGER_FACTORY);
            if (emf != null) {
                return emf.createEntityManager();
            }
        }
        throw new RuntimeException("Could not find or create a new EntityManager!");
    }

    /**
     * This method opens a new transaction, if none is currently running, and joins the entity manager/persistence context
     * to that transaction.
     * @param em The entity manager we're using.
     * @return {@link UserTransaction} If we've started a new transaction, then we return it so that it can be closed;
     *         null when an existing transaction was joined (or none was needed).
     */
    private Object joinTransaction(EntityManager em) {
        boolean newTx = false;
        UserTransaction ut = null;
        if (isJTA) {
            try {
                em.joinTransaction();
            } catch (TransactionRequiredException e) {
                // no JTA transaction active yet: try to start one ourselves
                ut = findUserTransaction();
                try {
                    if( ut != null && ut.getStatus() == Status.STATUS_NO_TRANSACTION ) {
                        ut.begin();
                        newTx = true;
                        // since new transaction was started em must join it
                        em.joinTransaction();
                    }
                } catch(Exception ex) {
                    throw new IllegalStateException("Unable to find or open a transaction: " + ex.getMessage(), ex);
                }
                if (!newTx) {
                    // rethrow TransactionRequiredException if UserTransaction was not found or started
                    throw e;
                }
            }
            if( newTx ) {
                return ut;
            }
        }
        // else {
        //     EntityTransaction tx = em.getTransaction();
        //     if( ! tx.isActive() ) {
        //         tx.begin();
        //         return tx;
        //     }
        // }
        return null;
    }

    /**
     * This method closes the entity manager and transaction. It also makes sure that any objects associated
     * with the entity manager/persistence context are detached.
     * </p>
     * Obviously, if the transaction returned by the {@link #joinTransaction(EntityManager)} method is null,
     * nothing is done with the transaction parameter.
     * @param em The entity manager.
     * @param transaction The (user) transaction started by joinTransaction, or null.
     */
    private void leaveTransaction(EntityManager em, Object transaction) {
        if( isJTA ) {
            try {
                if( transaction != null ) {
                    // There's a tx running, close it.
                    ((UserTransaction) transaction).commit();
                }
            } catch(Exception e) {
                logger.error("Unable to commit transaction: ", e);
            }
        } else {
            if( transaction != null ) {
                ((EntityTransaction) transaction).commit();
            }
        }

        // only close entity managers this logger created itself
        if (!sharedEM) {
            try {
                em.close();
            } catch( Exception e ) {
                logger.error("Unable to close created EntityManager: {}", e.getMessage(), e);
            }
        }
    }

    /**
     * Looks up a {@link UserTransaction} in JNDI, first under the standard
     * "java:comp/UserTransaction" name and then under the known fallback names.
     * Returns null when none is found.
     */
    protected static UserTransaction findUserTransaction() {
        InitialContext context = null;
        try {
            context = new InitialContext();
            return (UserTransaction) context.lookup( "java:comp/UserTransaction" );
        } catch ( NamingException ex ) {
            // NOTE(review): if new InitialContext() itself threw, context is still
            // null here and the lookup below would NPE — presumably only the
            // lookup can fail in practice; TODO confirm.
            for (String utLookup : KNOWN_UT_JNDI_KEYS) {
                if (utLookup != null) {
                    try {
                        UserTransaction ut = (UserTransaction) context.lookup(utLookup);
                        return ut;
                    } catch (NamingException e) {
                        logger.debug("User Transaction not found in JNDI under {}", utLookup);
                    }
                }
            }
            logger.warn("No user transaction found under known names");
            return null;
        }
    }
}
package eu.spitfire.ssp.backend.vs.webservices; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import eu.spitfire.ssp.backend.vs.VirtualSensor; import eu.spitfire.ssp.backend.vs.VirtualSensorsComponentFactory; import eu.spitfire.ssp.backend.vs.VirtualSensorsRegistry; import eu.spitfire.ssp.server.internal.wrapper.ExpiringNamedGraph; import eu.spitfire.ssp.server.webservices.HttpWebservice; import com.hp.hpl.jena.query.Query; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; import java.net.URISyntaxException; import java.util.Locale; /** * Created by olli on 30.06.14. */ public abstract class AbstractVirtualSensorCreator extends HttpWebservice{ private static Logger LOG = LoggerFactory.getLogger(AbstractVirtualSensorCreator.class.getName()); private VirtualSensorsComponentFactory componentFactory; protected URI addPrefix(String sensorName) throws URISyntaxException { return new URI(String.format(Locale.ENGLISH, VS_GRAPHNAME_TEMPLATE, sensorName)); } protected final String VS_PREFIX; protected final String VS_GRAPHNAME_TEMPLATE; protected AbstractVirtualSensorCreator(VirtualSensorsComponentFactory componentFactory, String htmlResourcePath){ super(componentFactory.getIoExecutor(), componentFactory.getInternalTasksExecutor(), htmlResourcePath); this.componentFactory = componentFactory; String port = componentFactory.getPort() == 80 ? 
"" : ":" + componentFactory.getPort(); VS_PREFIX = "http://" + componentFactory.getHostName() + port + "/vs#"; VS_GRAPHNAME_TEMPLATE = VS_PREFIX + "%s"; } protected VirtualSensor createVirtualSensor(URI sensorName, URI sensorType, URI foi, URI property, Query query) throws Exception { return new VirtualSensor(sensorName, sensorType, foi, property, query, componentFactory.getLocalChannel(), componentFactory.getInternalTasksExecutor()); } protected ListenableFuture<Void> registerVirtualSensor(VirtualSensor virtualSensor){ SettableFuture<Void> registrationFuture = SettableFuture.create(); // await the result of the first observation Futures.addCallback(virtualSensor.makeSingleObservation(), new FutureCallback<Long>() { @Override public void onSuccess(Long aLong) { VirtualSensorsRegistry registry = componentFactory.getRegistry(); ExpiringNamedGraph graph = new ExpiringNamedGraph( virtualSensor.getGraphName(), virtualSensor.createGraphAsModel() ); //await the registration Futures.addCallback(registry.registerDataOrigin(virtualSensor, graph), new FutureCallback<Void>() { @Override public void onSuccess(Void aVoid) { registrationFuture.set(null); } @Override public void onFailure(Throwable throwable) { registrationFuture.setException(throwable); } }); } @Override public void onFailure(Throwable throwable) { registrationFuture.setException(throwable); } }); return registrationFuture; } // // creates a TTL-representation of the virtual sensors graph // private String createVirtualSensorGraph(String sensorName, URI sensorType, URI foi, URI property){ // return String.format(Locale.ENGLISH, VS_INSTANCE_TEMPLATE, VS_PREFIX, sensorName, sensorType.toString(), // property.toString(), foi.toString(), property.toString(), sensorName, foi.toString(), property.toString(), // property.toString(), property.toString()); // } // // // protected Model createModel(String sensorName, URI sensorType, URI foi, URI property){ // Model result = ModelFactory.createDefaultModel(); // String ttl 
= createVirtualSensorGraph(sensorName, sensorType, foi, property); // InputStream stream = new ByteArrayInputStream(ttl.getBytes(Charset.forName("UTF8"))); // // result.read(stream, null, Language.RDF_TURTLE.lang); // return result; // } // virtualSensor.createGraphAsModel(); // Model model = createModel(sensorName, sensorType, foi, property); // URI graphName = createGraphName(sensorName); // ListenableFuture<Model> initialStatusFuture = addSensorValueToModel(graphName, model, query); // SettableFuture<Void> sensorCreationFuture = SettableFuture.create(); // Futures.addCallback(initialStatusFuture, new FutureCallback<Model>() { // @Override // public void onSuccess(Model model) { // // //Register virtual sensor // final VirtualSensor virtualSensor = new VirtualSensor( // sensorName, sensorType, foi, property, query, "?val", localChannel, getInternalExecutor() // ); // final ExpiringNamedGraph initialStatus = new ExpiringNamedGraph(graphName, model); // // VirtualSensorRegistry registry = getVirtualSensorRegistry(); // ListenableFuture<Void> registrationFuture = registry.registerDataOrigin(virtualSensor, initialStatus); // // Futures.addCallback(registrationFuture, new FutureCallback<Void>() { // @Override // public void onSuccess(Void aVoid) { // sensorCreationFuture.set(null); // } // // @Override // public void onFailure(Throwable throwable) { // sensorCreationFuture.setException(throwable); // } // }); // } // // @Override // public void onFailure(Throwable throwable) { // sensorCreationFuture.setException(throwable); // } // }); // return sensorCreationFuture; // } // protected ListenableFuture<Model> addSensorValueToModel(final URI sensorName, final Model model, final Query query){ // final SettableFuture<Model> modelFuture = SettableFuture.create(); // // Futures.addCallback(executeSparqlQuery(query), new FutureCallback<ResultSet>() { // @Override // public void onSuccess(ResultSet resultSet) { // try { // // RDFNode sensorValue; // // if 
(resultSet.hasNext()) { // QuerySolution querySolution = resultSet.nextSolution(); // if(querySolution.contains("?aggVal")){ // sensorValue = ResourceFactory.createPlainLiteral(querySolution.get("?aggVal").toString()); // } // else{ // sensorValue = model.createTypedLiteral(0); // } // } // else{ // sensorValue = ResourceFactory.createTypedLiteral(0); // } // // Resource subject = model.getResource(sensorName + "-Result"); // // Statement statement = subject.getProperty( // model.getProperty("http://purl.oclc.org/NET/ssnx/ssn#", "hasValue") // ); // // statement.changeObject(sensorValue); // // modelFuture.set(model); // // } catch (Exception ex) { // modelFuture.setException(ex); // } // } // // @Override // public void onFailure(Throwable t) { // LOG.warn("Exception while executing SPARQL query: {}", query, t); // modelFuture.setException(t); // } // }); // // return modelFuture; // } // private SettableFuture<ResultSet> executeSparqlQuery(Query sparqlQuery){ // // SettableFuture<ResultSet> sparqlResultFuture = SettableFuture.create(); // InternalQueryExecutionRequest internalQueryExecutionRequest = new InternalQueryExecutionRequest(sparqlQuery, sparqlResultFuture); // Channels.write(this.localChannel, internalQueryExecutionRequest); // // return sparqlResultFuture; // } // protected VirtualSensorsRegistry getVirtualSensorsRegistry(){ // return this.virtualSensorsRegistry; // } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.resourcemanager;

import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.AkkaOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.clusterframework.ApplicationStatus;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.entrypoint.ClusterInformation;
import org.apache.flink.runtime.heartbeat.HeartbeatServices;
import org.apache.flink.runtime.io.network.partition.ResourceManagerPartitionTrackerFactory;
import org.apache.flink.runtime.io.network.partition.ResourceManagerPartitionTrackerImpl;
import org.apache.flink.runtime.metrics.groups.ResourceManagerMetricGroup;
import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManager;
import org.apache.flink.runtime.rpc.FatalErrorHandler;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.util.ConfigurationException;
import org.apache.flink.util.function.TriConsumer;

import javax.annotation.Nullable;

import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.BiFunction;
import java.util.function.Consumer;

/** Implementation of {@link ResourceManagerFactory} for testing purpose. */
public class TestingResourceManagerFactory extends ResourceManagerFactory<ResourceID> {

    // Hooks invoked from the corresponding lifecycle methods of the MockResourceManager below.
    // Each hook receives the leader session id of the resource manager instance reporting back.
    private final Consumer<UUID> initializeConsumer;
    private final Consumer<UUID> terminateConsumer;
    private final TriConsumer<UUID, ApplicationStatus, String> internalDeregisterApplicationConsumer;

    // Lets tests wrap or replace the termination future returned by the resource manager.
    private final BiFunction<ResourceManager<?>, CompletableFuture<Void>, CompletableFuture<Void>>
            getTerminationFutureFunction;

    public TestingResourceManagerFactory(
            Consumer<UUID> initializeConsumer,
            Consumer<UUID> terminateConsumer,
            TriConsumer<UUID, ApplicationStatus, String> internalDeregisterApplicationConsumer,
            BiFunction<ResourceManager<?>, CompletableFuture<Void>, CompletableFuture<Void>>
                    getTerminationFutureFunction) {
        this.initializeConsumer = initializeConsumer;
        this.terminateConsumer = terminateConsumer;
        this.internalDeregisterApplicationConsumer = internalDeregisterApplicationConsumer;
        this.getTerminationFutureFunction = getTerminationFutureFunction;
    }

    @Override
    protected ResourceManager<ResourceID> createResourceManager(
            Configuration configuration,
            ResourceID resourceId,
            RpcService rpcService,
            UUID leaderSessionId,
            HeartbeatServices heartbeatServices,
            FatalErrorHandler fatalErrorHandler,
            ClusterInformation clusterInformation,
            @Nullable String webInterfaceUrl,
            ResourceManagerMetricGroup resourceManagerMetricGroup,
            ResourceManagerRuntimeServices resourceManagerRuntimeServices,
            Executor ioExecutor) {
        return new MockResourceManager(
                rpcService,
                leaderSessionId,
                resourceId,
                heartbeatServices,
                resourceManagerRuntimeServices.getSlotManager(),
                ResourceManagerPartitionTrackerImpl::new,
                resourceManagerRuntimeServices.getJobLeaderIdService(),
                clusterInformation,
                fatalErrorHandler,
                resourceManagerMetricGroup,
                Time.fromDuration(configuration.get(AkkaOptions.ASK_TIMEOUT_DURATION)),
                ioExecutor);
    }

    @Override
    protected ResourceManagerRuntimeServicesConfiguration
            createResourceManagerRuntimeServicesConfiguration(Configuration configuration)
                    throws ConfigurationException {
        // Reuse the standalone factory's runtime-services configuration; this factory only
        // customizes the resource manager instance itself.
        return StandaloneResourceManagerFactory.getInstance()
                .createResourceManagerRuntimeServicesConfiguration(configuration);
    }

    /** Builder for {@link TestingResourceManagerFactory}; every hook defaults to a no-op. */
    public static class Builder {
        private Consumer<UUID> initializeConsumer = (ignore) -> {};
        private Consumer<UUID> terminateConsumer = (ignore) -> {};
        private TriConsumer<UUID, ApplicationStatus, String> internalDeregisterApplicationConsumer =
                (ignore1, ignore2, ignore3) -> {};
        private BiFunction<ResourceManager<?>, CompletableFuture<Void>, CompletableFuture<Void>>
                getTerminationFutureFunction = (rm, superTerminationFuture) -> superTerminationFuture;

        public Builder setInitializeConsumer(Consumer<UUID> initializeConsumer) {
            this.initializeConsumer = initializeConsumer;
            return this;
        }

        public Builder setTerminateConsumer(Consumer<UUID> terminateConsumer) {
            this.terminateConsumer = terminateConsumer;
            return this;
        }

        public Builder setInternalDeregisterApplicationConsumer(
                TriConsumer<UUID, ApplicationStatus, String> internalDeregisterApplicationConsumer) {
            this.internalDeregisterApplicationConsumer = internalDeregisterApplicationConsumer;
            return this;
        }

        public Builder setGetTerminationFutureFunction(
                BiFunction<ResourceManager<?>, CompletableFuture<Void>, CompletableFuture<Void>>
                        getTerminationFutureFunction) {
            this.getTerminationFutureFunction = getTerminationFutureFunction;
            return this;
        }

        public TestingResourceManagerFactory build() {
            return new TestingResourceManagerFactory(
                    initializeConsumer,
                    terminateConsumer,
                    internalDeregisterApplicationConsumer,
                    getTerminationFutureFunction);
        }
    }

    /**
     * {@link ResourceManager} stub that forwards its lifecycle callbacks to the hooks configured
     * on the enclosing factory. Worker management methods are intentionally unsupported.
     */
    private class MockResourceManager extends ResourceManager<ResourceID> {

        // Identifies which factory-created instance is reporting back through the hooks.
        private final UUID leaderSessionId;

        public MockResourceManager(
                RpcService rpcService,
                UUID leaderSessionId,
                ResourceID resourceId,
                HeartbeatServices heartbeatServices,
                SlotManager slotManager,
                ResourceManagerPartitionTrackerFactory clusterPartitionTrackerFactory,
                JobLeaderIdService jobLeaderIdService,
                ClusterInformation clusterInformation,
                FatalErrorHandler fatalErrorHandler,
                ResourceManagerMetricGroup resourceManagerMetricGroup,
                Time rpcTimeout,
                Executor ioExecutor) {
            super(
                    rpcService,
                    leaderSessionId,
                    resourceId,
                    heartbeatServices,
                    slotManager,
                    clusterPartitionTrackerFactory,
                    jobLeaderIdService,
                    clusterInformation,
                    fatalErrorHandler,
                    resourceManagerMetricGroup,
                    rpcTimeout,
                    ioExecutor);
            this.leaderSessionId = leaderSessionId;
        }

        @Override
        protected void initialize() {
            initializeConsumer.accept(leaderSessionId);
        }

        @Override
        protected void terminate() {
            terminateConsumer.accept(leaderSessionId);
        }

        @Override
        protected void internalDeregisterApplication(
                ApplicationStatus finalStatus, @Nullable String optionalDiagnostics) {
            internalDeregisterApplicationConsumer.accept(
                    leaderSessionId, finalStatus, optionalDiagnostics);
        }

        // Worker management is not supported by this mock.
        @Override
        public boolean startNewWorker(WorkerResourceSpec workerResourceSpec) {
            throw new UnsupportedOperationException();
        }

        @Override
        protected ResourceID workerStarted(ResourceID resourceID) {
            throw new UnsupportedOperationException();
        }

        @Override
        public boolean stopWorker(ResourceID worker) {
            throw new UnsupportedOperationException();
        }

        @Override
        public CompletableFuture<Void> getTerminationFuture() {
            // Let the test-provided function observe/replace the framework's termination future.
            return getTerminationFutureFunction.apply(
                    MockResourceManager.this, super.getTerminationFuture());
        }
    }
}
package aQute.lib.converter;

import static org.assertj.core.api.Assertions.assertThat;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.Stack;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;

import aQute.lib.io.IO;
import aQute.libg.cryptography.Digester;
import aQute.libg.cryptography.SHA1;
import aQute.libg.map.MAP;
import junit.framework.TestCase;

/**
 * Tests for {@link Converter}: scalar coercions, collections, generic maps,
 * interface-backed maps, method-name mangling and conversion hooks.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public class ConverterTest extends TestCase {
	Converter converter = new Converter();

	public void testOptional() throws Exception {
		Optional<String> opt = converter.convert(new TypeReference<Optional<String>>() {}, 1);
		assertTrue(opt.isPresent());
		assertEquals("1", opt.get());

		opt = converter.convert(new TypeReference<Optional<String>>() {}, null);
		assertFalse(opt.isPresent());
	}

	public void testMangling() {
		assertMangle("", "");
		assertMangle("a", "a");
		assertMangle("ab", "ab");
		assertMangle("abc", "abc");
		assertMangle("a\u0008bc", "a\bbc");
		assertMangle("$_$", "-");
		assertMangle("$_", ".");
		assertMangle("_$", ".");
		assertMangle("x$_$", "x-");
		assertMangle("$_$x", "-x");
		assertMangle("abc$_$abc", "abc-abc");
		assertMangle("$$_$x", "$.x");
		assertMangle("$_$$", "-");
		assertMangle("$_$$$", "-$");
		assertMangle("$", "");
		assertMangle("$$", "$");
		assertMangle("_", ".");
		assertMangle("$_", ".");
		assertMangle("myProperty143", "myProperty143");
		assertMangle("$new", "new");
		assertMangle("n$ew", "new");
		assertMangle("new$", "new");
		assertMangle("my$$prop", "my$prop");
		assertMangle("dot_prop", "dot.prop");
		assertMangle("_secret", ".secret");
		assertMangle("another__prop", "another_prop");
		assertMangle("three___prop", "three_.prop");
		assertMangle("four_$__prop", "four._prop");
		assertMangle("five_$_prop", "five..prop");
	}

	private void assertMangle(String methodName, String key) {
		// FIX: JUnit's assertEquals takes (expected, actual); the arguments were reversed,
		// which made failure messages report expected/actual the wrong way around.
		assertEquals(key, Converter.mangleMethodName(methodName));
	}

	interface M {
		String a();

		int b();

		int c();

		double d();
	}

	public void testMap() throws Exception {
		Map<String, String> map = MAP.$("a", "A")
			.$("b", "2");
		M m = converter.convert(M.class, map);
		assertEquals("A", m.a());
		assertEquals(2, m.b());
		assertEquals(0, m.c());
		assertEquals(0d, m.d());
		assertThat(m.toString()).endsWith("'");
	}

	public void testTypeRef() throws Exception {
		Map<String, Integer> f;
		Type type = (new TypeReference<Map<String, Integer>>() {}).getType();
		assertTrue(type instanceof ParameterizedType);
		ParameterizedType ptype = (ParameterizedType) type;
		assertEquals(Map.class, ptype.getRawType());
		assertEquals(String.class, ptype.getActualTypeArguments()[0]);
		assertEquals(Integer.class, ptype.getActualTypeArguments()[1]);

		Map<Integer, String> m = MAP.$(1, "1")
			.$(2, "2");
		f = converter.convert(new TypeReference<Map<String, Integer>>() {}, m);
		assertEquals(f.get("1"), (Integer) 1);
		assertEquals(f.get("2"), (Integer) 2);
	}

	public void testHook() throws Exception {
		// FIX: JUnit 3 (extends TestCase) only runs public instance methods named "test*";
		// the static hookTest() below was therefore never executed. Delegate so it runs.
		hookTest();
	}

	public static void hookTest() throws Exception {
		Converter converter = new Converter().hook(File.class, (dest, o) -> {
			if (o instanceof String) {
				return IO.getFile(new File(""), o.toString());
			}
			return null;
		});
		assertEquals(Integer.valueOf(6), converter.convert(Integer.class, "6"));
		assertEquals(new File("src").getAbsoluteFile(), converter.convert(File.class, "src"));

		// A null-type hook is consulted for every destination type.
		converter.hook(null, (dest, o) -> {
			if (dest instanceof Class) {
				if (Number.class.isAssignableFrom((Class<?>) dest))
					return 1;
			}
			return null;
		});
		assertEquals(Integer.valueOf(1), converter.convert(Integer.class, "6"));
		assertEquals(Integer.valueOf(1), converter.convert(Long.class, "6"));
		assertEquals("6", converter.convert(String.class, "6"));
	}

	/**
	 * Test map to object
	 */
	public static class D {
		public int					n;
		public String				s;
		// Unmatched map entries are collected here by the converter.
		public Map<String, Object>	__extra;
	}

	public void testMap2Object() throws Exception {
		Map<String, Object> map = new HashMap<>();
		map.put("n", 42);
		map.put("s", "string");
		map.put("e", Boolean.TRUE);
		D d = converter.convert(D.class, map);
		assertThat(d.n).isEqualTo(42);
		assertThat(d.s).isEqualTo("string");
		assertThat(d.__extra).containsEntry("e", Boolean.TRUE);
	}

	/**
	 * Digests as byte[]
	 *
	 * @throws Exception
	 */
	public void testDigest() throws Exception {
		Digester<SHA1> digester = SHA1.getDigester();
		try {
			IO.copy("ABC".getBytes(), digester);
			SHA1 digest = digester.digest();
			byte[] out = converter.convert(byte[].class, digest);
			assertTrue(Arrays.equals(digest.digest(), out));

			ByteArrayOutputStream bout = new ByteArrayOutputStream();
			bout.write("Hello World".getBytes());
			assertTrue(Arrays.equals("Hello World".getBytes(), converter.convert(byte[].class, bout)));
		} finally {
			digester.close();
		}
	}

	/**
	 * Map a string to a char[], Character[], or Collection<Character>
	 */
	public void testCharacters() throws Exception {
		assertTrue(Arrays.equals(new char[] {
			'A', 'B', 'C'
		}, converter.convert(char[].class, "ABC")));
		assertEquals("ABC", converter.convert(String.class, new char[] {
			'A', 'B', 'C'
		}));
	}

	/**
	 * Test string to primitives
	 *
	 * @throws Exception
	 */
	public void testStringtoPrimitives() throws Exception {
		assertEquals((Integer) (int) 'A', converter.convert(int.class, 'A'));
		assertEquals((Integer) (int) 'A', converter.convert(Integer.class, 'A'));
		assertEquals((Boolean) true, converter.convert(boolean.class, "1"));
		assertEquals((Boolean) true, converter.convert(Boolean.class, "1"));
		assertEquals((Boolean) false, converter.convert(boolean.class, "0"));
		assertEquals((Boolean) false, converter.convert(Boolean.class, "0"));
		assertEquals((Byte) (byte) 1, converter.convert(byte.class, "1"));
		assertEquals((Byte) (byte) 1, converter.convert(Byte.class, "1"));
		assertEquals((Short) (short) 1, converter.convert(short.class, "1"));
		assertEquals((Short) (short) 1, converter.convert(Short.class, "1"));
		assertEquals((Integer) 1, converter.convert(int.class, "1"));
		assertEquals((Integer) 1, converter.convert(Integer.class, "1"));
		assertEquals((Long) 1L, converter.convert(long.class, "1"));
		assertEquals((Long) 1L, converter.convert(Long.class, "1"));
		assertEquals(1f, converter.convert(float.class, "1"));
		assertEquals(1f, converter.convert(Float.class, "1"));
		assertEquals(1d, converter.convert(double.class, "1"));
		// FIX: this previously repeated double.class, leaving the Double wrapper untested;
		// every other type above is checked as primitive + wrapper pair.
		assertEquals(1d, converter.convert(Double.class, "1"));
		assertEquals((Character) 'A', converter.convert(char.class, "A"));
		assertEquals((Character) 'A', converter.convert(Character.class, "A"));
	}

	/**
	 * Test the wrappers
	 *
	 * @throws Exception
	 */
	public void testWrappers() throws Exception {
		Object[] types = {
			Boolean.FALSE, (byte) 0, '\u0000', (short) 0, 0, 0L, 0f, 0d
		};
		for (int i = 0; i < types.length; i++) {
			for (int j = 0; j < types.length; j++) {
				assertEquals("" + i + " " + j, types[i], converter.convert(types[i].getClass(), types[j]));
			}
		}
	}

	/**
	 * Create an array and see if we can convert a single number
	 *
	 * @throws Exception
	 */
	public void testPrimitives() throws Exception {
		assertPrimitives1(1);
		assertPrimitives(0);
		assertPrimitives(new Object[] {
			0, 1, 2
		});
		assertPrimitives1(new Object[] {
			1, 2
		});
		assertPrimitives(false);
		assertPrimitives1(true);
		assertPrimitives('\u0000');
		assertPrimitives1('\u0001');
	}

	/**
	 * Test enums
	 */
	public enum X {
		A,
		B,
		C;
	}

	public void testEnums() throws Exception {
		assertEquals(X.A, converter.convert(X.class, "A"));
		assertEquals(X.B, converter.convert(X.class, 1));
	}

	/**
	 * Test collections
	 */
	public static class XX {
		public ArrayList<String>				al;
		public Collection<String>				col;
		public Queue<String>					queue;
		public Stack<String>					stack;
		public Vector<String>					vector;
		public Set<String>						set;
		public TreeSet<String>					treeset;
		public SortedSet<String>				sorted;
		public ConcurrentLinkedQueue<String>	concurrent;
		public CopyOnWriteArrayList<String>		concurrentList;
		public CopyOnWriteArraySet<String>		concurrentSet;
	}

	public void testCollections() throws Exception {
		Class<XX> xx = XX.class;
		Object xxx = xx.getConstructor()
			.newInstance();
		// One field per supported collection type; count down to ensure all were visited.
		int count = 11;
		for (Field field : xx.getFields()) {
			Object o = converter.convert(field.getGenericType(), 1);
			assertTrue(o instanceof Collection);
			Collection c = (Collection) o;
			assertEquals("1", c.iterator()
				.next());
			field.set(xxx, o);
			count--;
		}
		assertEquals(0, count);
	}

	/**
	 * Test generic collections
	 */
	public static class GC {
		public Collection<String>				strings;
		public Collection<Collection<String>>	stringss;
		public Collection<String>[]				stringsarray;
		public List<X>							enums;
		public X[]								enuma;
		public List								list;
	}

	public void testGenericCollections() throws Exception {
		Class<GC> xx = GC.class;
		GC g = xx.getConstructor()
			.newInstance();
		for (Field field : xx.getFields()) {
			Object o = converter.convert(field.getGenericType(), 1);
			field.set(g, o);
		}
		assertEquals("[1]", g.strings.toString());
		assertEquals(String.class, g.strings.iterator()
			.next()
			.getClass());
		assertEquals("[[1]]", g.stringss.toString());
		assertEquals("[1]", g.stringsarray[0].toString());
		assertEquals("[1]", g.list.toString());
		assertTrue(g.list.get(0) instanceof Integer);
		assertEquals(X.B, g.enuma[0]);
		assertEquals(X.B, g.enums.get(0));
	}

	/**
	 * Test generic maps
	 */
	public static class GM {
		public Map<String, Integer>					strings;
		public SortedMap<String, Integer>			sorted;
		public TreeMap<String, Integer>				tree;
		public ConcurrentHashMap<String, Integer>	concurrenthash;
		public ConcurrentMap<String, Integer>		concurrent;
		public Map									map;
	}

	// Bean-like source with public fields; converted field-by-field into map types.
	public static class GT {
		public int		a	= 1;
		public double	b	= 2;
	}
public void testGenericMaps() throws Exception { Class<GM> xx = GM.class; GM gMap = xx.getConstructor() .newInstance(); GM gSemiMap = xx.getConstructor() .newInstance(); GT semiMap = new GT(); Map map = new HashMap<String, Integer>(); map.put("a", 1); map.put("b", 2); for (Field field : xx.getFields()) { Object o = converter.convert(field.getGenericType(), map); field.set(gMap, o); Object o2 = converter.convert(field.getGenericType(), semiMap); field.set(gSemiMap, o2); } assertEquals("{a=1, b=2}", new TreeMap(gMap.strings).toString()); assertEquals("{a=1, b=2}", new TreeMap(gSemiMap.strings).toString()); } void assertPrimitives(@SuppressWarnings("unused") Object source) throws Exception { Class[] types = { byte.class, boolean.class, char.class, short.class, int.class, long.class, float.class, double.class }; for (Class c : types) { Class at = Array.newInstance(c, 1) .getClass(); Object parray = converter.convert(at, 0); Object o = Array.get(parray, 0); if (o instanceof Number) assertEquals(0, ((Number) o).intValue()); else if (o instanceof Character) assertEquals(0, ((Character) o).charValue()); else if (o instanceof Boolean) assertEquals(false, ((Boolean) o).booleanValue()); else fail(o.getClass() + " unexpected "); assertEquals(at, parray.getClass()); assertEquals(c, parray.getClass() .getComponentType()); } } /** * Test constructor * * @throws Exception */ public void testConstructor() throws Exception { String home = System.getProperty("user.home"); assertEquals(new File(home), converter.convert(File.class, home)); // assertEquals(new Version(1, 0, 0), converter.convert(Version.class, // "1.0.0")); } /** * Test valueOf * * @throws Exception */ public void testValueOf() throws Exception { assertEquals((Byte) (byte) 12, converter.convert(Byte.class, "12")); assertEquals((Boolean) true, converter.convert(Boolean.class, "TRUE")); assertEquals((Character) '1', converter.convert(char.class, "49")); assertEquals((Boolean) true, converter.convert(Boolean.class, 
"TRUE")); assertEquals((Boolean) true, converter.convert(Boolean.class, "TRUE")); assertEquals((Boolean) true, converter.convert(Boolean.class, "TRUE")); } void assertPrimitives1(Object source) throws Exception { Class[] types = { byte.class, boolean.class, char.class, short.class, int.class, long.class, float.class, double.class }; for (Class c : types) { Class at = Array.newInstance(c, 1) .getClass(); Object parray = converter.convert(at, source); Object o = Array.get(parray, 0); if (o instanceof Number) assertEquals(1, ((Number) o).intValue()); else if (o instanceof Character) assertEquals(1, ((Character) o).charValue()); else if (o instanceof Boolean) assertEquals(true, ((Boolean) o).booleanValue()); else fail(o.getClass() + " unexpected "); assertEquals(at, parray.getClass()); assertEquals(c, parray.getClass() .getComponentType()); } } public void testProperties() throws Exception { assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, "1")); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, 1)); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, 1L)); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, 1D)); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, 1f)); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new String[] { "1" })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, Arrays.asList((byte) 1))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, Arrays.asList((short) 1))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, Arrays.asList(1))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, Arrays.asList(1L))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, Arrays.asList(1D))); 
assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, Arrays.asList(1L))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new BigDecimal(1))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new BigInteger(new byte[] { 1 }))); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new byte[] { 1 })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new short[] { 1 })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new char[] { 1 })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new long[] { 1 })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new float[] { 1 })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new double[] { 1 })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new BigInteger[] { new BigInteger(new byte[] { 1 }) })); assertEquals(Arrays.asList(1L), Converter.cnv(new TypeReference<List<Long>>() {}, new BigDecimal[] { new BigDecimal(1) })); } public void testURIs() throws Exception { URI expected = new URI("https://www.jpm4j.org/#!/p/sha/C621B54583719AC0310404463D6D99DB27E1052C//0.0.0"); assertEquals(expected, Converter.cnv(URI.class, "https://www.jpm4j.org/#!/p/sha/C621B54583719AC0310404463D6D99DB27E1052C//0.0.0")); assertEquals(expected, Converter.cnv(URI.class, "https://www.jpm4j.org/#!/p/sha/C621B54583719AC0310404463D6D99DB27E1052C//0.0.0\n")); assertEquals(expected, Converter.cnv(URI.class, "https://www.jpm4j.org/#!/p/sha/C621B54583719AC0310404463D6D99DB27E1052C//0.0.0\n1.3.1")); assertEquals(expected, Converter.cnv(URI.class, "https://www.jpm4j.org/#!/p/sha/C621B54583719AC0310404463D6D99DB27E1052C//0.0.0\r\n1.3.1")); } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.devtestlabs.v2018_09_15.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in ArmTemplates.
 */
// NOTE(review): AutoRest-generated code -- do not hand-edit; regenerate from the API spec instead.
public class ArmTemplatesInner {
    /** The Retrofit service to perform REST calls. */
    private ArmTemplatesService service;
    /** The service client containing this operation class. */
    private DevTestLabsClientImpl client;

    /**
     * Initializes an instance of ArmTemplatesInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public ArmTemplatesInner(Retrofit retrofit, DevTestLabsClientImpl client) {
        this.service = retrofit.create(ArmTemplatesService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for ArmTemplates to be
     * used by Retrofit to perform actually REST calls.
     */
    interface ArmTemplatesService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.devtestlabs.v2018_09_15.ArmTemplates list" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/artifactsources/{artifactSourceName}/armtemplates")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("labName") String labName, @Path("artifactSourceName") String artifactSourceName, @Query("$expand") String expand, @Query("$filter") String filter, @Query("$top") Integer top, @Query("$orderby") String orderby, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.devtestlabs.v2018_09_15.ArmTemplates get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevTestLab/labs/{labName}/artifactsources/{artifactSourceName}/armtemplates/{name}")
        Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("labName") String labName, @Path("artifactSourceName") String artifactSourceName, @Path("name") String name, @Query("$expand") String expand, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.devtestlabs.v2018_09_15.ArmTemplates listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }

    /**
     * List azure resource manager templates in a given artifact source.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labName The name of the lab.
     * @param artifactSourceName The name of the artifact source.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;ArmTemplateInner&gt; object if successful.
     */
    public PagedList<ArmTemplateInner> list(final String resourceGroupName, final String labName, final String artifactSourceName) {
        ServiceResponse<Page<ArmTemplateInner>> response = listSinglePageAsync(resourceGroupName, labName, artifactSourceName).toBlocking().single();
        return new PagedList<ArmTemplateInner>(response.body()) {
            @Override
            public Page<ArmTemplateInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * List azure resource manager templates in a given artifact source.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labName The name of the lab.
     * @param artifactSourceName The name of the artifact source.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<ArmTemplateInner>> listAsync(final String resourceGroupName, final String labName, final String artifactSourceName, final ListOperationCallback<ArmTemplateInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(resourceGroupName, labName, artifactSourceName),
            new Func1<String, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * List azure resource manager templates in a given artifact source.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labName The name of the lab.
     * @param artifactSourceName The name of the artifact source.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ArmTemplateInner&gt; object
     */
    public Observable<Page<ArmTemplateInner>> listAsync(final String resourceGroupName, final String labName, final String artifactSourceName) {
        return listWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName)
            .map(new Func1<ServiceResponse<Page<ArmTemplateInner>>, Page<ArmTemplateInner>>() {
                @Override
                public Page<ArmTemplateInner> call(ServiceResponse<Page<ArmTemplateInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * List azure resource manager templates in a given artifact source.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labName The name of the lab.
     * @param artifactSourceName The name of the artifact source.
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;ArmTemplateInner&gt; object */ public Observable<ServiceResponse<Page<ArmTemplateInner>>> listWithServiceResponseAsync(final String resourceGroupName, final String labName, final String artifactSourceName) { return listSinglePageAsync(resourceGroupName, labName, artifactSourceName) .concatMap(new Func1<ServiceResponse<Page<ArmTemplateInner>>, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(ServiceResponse<Page<ArmTemplateInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * List azure resource manager templates in a given artifact source. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;ArmTemplateInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<ArmTemplateInner>>> listSinglePageAsync(final String resourceGroupName, final String labName, final String artifactSourceName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (labName == null) { throw new IllegalArgumentException("Parameter labName is required and cannot be null."); } if (artifactSourceName == null) { throw new IllegalArgumentException("Parameter artifactSourceName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } final String expand = null; final String filter = null; final Integer top = null; final String orderby = null; return service.list(this.client.subscriptionId(), resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<ArmTemplateInner>> result = listDelegate(response); return Observable.just(new ServiceResponse<Page<ArmTemplateInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } /** * List azure resource manager templates in a given artifact source. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param expand Specify the $expand query. 
Example: 'properties($select=displayName)' * @param filter The filter to apply to the operation. Example: '$filter=contains(name,'myName') * @param top The maximum number of resources to return from the operation. Example: '$top=10' * @param orderby The ordering expression for the results, using OData notation. Example: '$orderby=name desc' * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;ArmTemplateInner&gt; object if successful. */ public PagedList<ArmTemplateInner> list(final String resourceGroupName, final String labName, final String artifactSourceName, final String expand, final String filter, final Integer top, final String orderby) { ServiceResponse<Page<ArmTemplateInner>> response = listSinglePageAsync(resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby).toBlocking().single(); return new PagedList<ArmTemplateInner>(response.body()) { @Override public Page<ArmTemplateInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * List azure resource manager templates in a given artifact source. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param expand Specify the $expand query. Example: 'properties($select=displayName)' * @param filter The filter to apply to the operation. Example: '$filter=contains(name,'myName') * @param top The maximum number of resources to return from the operation. Example: '$top=10' * @param orderby The ordering expression for the results, using OData notation. Example: '$orderby=name desc' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<ArmTemplateInner>> listAsync(final String resourceGroupName, final String labName, final String artifactSourceName, final String expand, final String filter, final Integer top, final String orderby, final ListOperationCallback<ArmTemplateInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listSinglePageAsync(resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby), new Func1<String, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * List azure resource manager templates in a given artifact source. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param expand Specify the $expand query. Example: 'properties($select=displayName)' * @param filter The filter to apply to the operation. Example: '$filter=contains(name,'myName') * @param top The maximum number of resources to return from the operation. Example: '$top=10' * @param orderby The ordering expression for the results, using OData notation. 
Example: '$orderby=name desc' * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;ArmTemplateInner&gt; object */ public Observable<Page<ArmTemplateInner>> listAsync(final String resourceGroupName, final String labName, final String artifactSourceName, final String expand, final String filter, final Integer top, final String orderby) { return listWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby) .map(new Func1<ServiceResponse<Page<ArmTemplateInner>>, Page<ArmTemplateInner>>() { @Override public Page<ArmTemplateInner> call(ServiceResponse<Page<ArmTemplateInner>> response) { return response.body(); } }); } /** * List azure resource manager templates in a given artifact source. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param expand Specify the $expand query. Example: 'properties($select=displayName)' * @param filter The filter to apply to the operation. Example: '$filter=contains(name,'myName') * @param top The maximum number of resources to return from the operation. Example: '$top=10' * @param orderby The ordering expression for the results, using OData notation. 
Example: '$orderby=name desc' * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;ArmTemplateInner&gt; object */ public Observable<ServiceResponse<Page<ArmTemplateInner>>> listWithServiceResponseAsync(final String resourceGroupName, final String labName, final String artifactSourceName, final String expand, final String filter, final Integer top, final String orderby) { return listSinglePageAsync(resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby) .concatMap(new Func1<ServiceResponse<Page<ArmTemplateInner>>, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(ServiceResponse<Page<ArmTemplateInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * List azure resource manager templates in a given artifact source. * ServiceResponse<PageImpl<ArmTemplateInner>> * @param resourceGroupName The name of the resource group. ServiceResponse<PageImpl<ArmTemplateInner>> * @param labName The name of the lab. ServiceResponse<PageImpl<ArmTemplateInner>> * @param artifactSourceName The name of the artifact source. ServiceResponse<PageImpl<ArmTemplateInner>> * @param expand Specify the $expand query. Example: 'properties($select=displayName)' ServiceResponse<PageImpl<ArmTemplateInner>> * @param filter The filter to apply to the operation. Example: '$filter=contains(name,'myName') ServiceResponse<PageImpl<ArmTemplateInner>> * @param top The maximum number of resources to return from the operation. Example: '$top=10' ServiceResponse<PageImpl<ArmTemplateInner>> * @param orderby The ordering expression for the results, using OData notation. 
Example: '$orderby=name desc' * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;ArmTemplateInner&gt; object wrapped in {@link ServiceResponse} if successful. */ public Observable<ServiceResponse<Page<ArmTemplateInner>>> listSinglePageAsync(final String resourceGroupName, final String labName, final String artifactSourceName, final String expand, final String filter, final Integer top, final String orderby) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (labName == null) { throw new IllegalArgumentException("Parameter labName is required and cannot be null."); } if (artifactSourceName == null) { throw new IllegalArgumentException("Parameter artifactSourceName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.list(this.client.subscriptionId(), resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<ArmTemplateInner>> result = listDelegate(response); return Observable.just(new ServiceResponse<Page<ArmTemplateInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<ArmTemplateInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return 
this.client.restClient().responseBuilderFactory().<PageImpl<ArmTemplateInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<ArmTemplateInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the ArmTemplateInner object if successful. */ public ArmTemplateInner get(String resourceGroupName, String labName, String artifactSourceName, String name) { return getWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, name).toBlocking().single().body(); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<ArmTemplateInner> getAsync(String resourceGroupName, String labName, String artifactSourceName, String name, final ServiceCallback<ArmTemplateInner> serviceCallback) { return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, name), serviceCallback); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. 
* @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the ArmTemplateInner object */ public Observable<ArmTemplateInner> getAsync(String resourceGroupName, String labName, String artifactSourceName, String name) { return getWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, name).map(new Func1<ServiceResponse<ArmTemplateInner>, ArmTemplateInner>() { @Override public ArmTemplateInner call(ServiceResponse<ArmTemplateInner> response) { return response.body(); } }); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the ArmTemplateInner object */ public Observable<ServiceResponse<ArmTemplateInner>> getWithServiceResponseAsync(String resourceGroupName, String labName, String artifactSourceName, String name) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (labName == null) { throw new IllegalArgumentException("Parameter labName is required and cannot be null."); } if (artifactSourceName == null) { throw new IllegalArgumentException("Parameter artifactSourceName is required and cannot be null."); } if (name == null) { throw new IllegalArgumentException("Parameter name is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new 
IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } final String expand = null; return service.get(this.client.subscriptionId(), resourceGroupName, labName, artifactSourceName, name, expand, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ArmTemplateInner>>>() { @Override public Observable<ServiceResponse<ArmTemplateInner>> call(Response<ResponseBody> response) { try { ServiceResponse<ArmTemplateInner> clientResponse = getDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @param expand Specify the $expand query. Example: 'properties($select=displayName)' * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the ArmTemplateInner object if successful. */ public ArmTemplateInner get(String resourceGroupName, String labName, String artifactSourceName, String name, String expand) { return getWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, name, expand).toBlocking().single().body(); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @param expand Specify the $expand query. 
Example: 'properties($select=displayName)' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<ArmTemplateInner> getAsync(String resourceGroupName, String labName, String artifactSourceName, String name, String expand, final ServiceCallback<ArmTemplateInner> serviceCallback) { return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, name, expand), serviceCallback); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @param expand Specify the $expand query. Example: 'properties($select=displayName)' * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the ArmTemplateInner object */ public Observable<ArmTemplateInner> getAsync(String resourceGroupName, String labName, String artifactSourceName, String name, String expand) { return getWithServiceResponseAsync(resourceGroupName, labName, artifactSourceName, name, expand).map(new Func1<ServiceResponse<ArmTemplateInner>, ArmTemplateInner>() { @Override public ArmTemplateInner call(ServiceResponse<ArmTemplateInner> response) { return response.body(); } }); } /** * Get azure resource manager template. * * @param resourceGroupName The name of the resource group. * @param labName The name of the lab. * @param artifactSourceName The name of the artifact source. * @param name The name of the azure resource manager template. * @param expand Specify the $expand query. 
Example: 'properties($select=displayName)' * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the ArmTemplateInner object */ public Observable<ServiceResponse<ArmTemplateInner>> getWithServiceResponseAsync(String resourceGroupName, String labName, String artifactSourceName, String name, String expand) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (labName == null) { throw new IllegalArgumentException("Parameter labName is required and cannot be null."); } if (artifactSourceName == null) { throw new IllegalArgumentException("Parameter artifactSourceName is required and cannot be null."); } if (name == null) { throw new IllegalArgumentException("Parameter name is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.get(this.client.subscriptionId(), resourceGroupName, labName, artifactSourceName, name, expand, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ArmTemplateInner>>>() { @Override public Observable<ServiceResponse<ArmTemplateInner>> call(Response<ResponseBody> response) { try { ServiceResponse<ArmTemplateInner> clientResponse = getDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<ArmTemplateInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<ArmTemplateInner, 
CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<ArmTemplateInner>() { }.getType()) .registerError(CloudException.class) .build(response); } /** * List azure resource manager templates in a given artifact source. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;ArmTemplateInner&gt; object if successful. */ public PagedList<ArmTemplateInner> listNext(final String nextPageLink) { ServiceResponse<Page<ArmTemplateInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<ArmTemplateInner>(response.body()) { @Override public Page<ArmTemplateInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * List azure resource manager templates in a given artifact source. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<ArmTemplateInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<ArmTemplateInner>> serviceFuture, final ListOperationCallback<ArmTemplateInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * List azure resource manager templates in a given artifact source. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;ArmTemplateInner&gt; object */ public Observable<Page<ArmTemplateInner>> listNextAsync(final String nextPageLink) { return listNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<ArmTemplateInner>>, Page<ArmTemplateInner>>() { @Override public Page<ArmTemplateInner> call(ServiceResponse<Page<ArmTemplateInner>> response) { return response.body(); } }); } /** * List azure resource manager templates in a given artifact source. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;ArmTemplateInner&gt; object */ public Observable<ServiceResponse<Page<ArmTemplateInner>>> listNextWithServiceResponseAsync(final String nextPageLink) { return listNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<ArmTemplateInner>>, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(ServiceResponse<Page<ArmTemplateInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * List azure resource manager templates in a given artifact source. * ServiceResponse<PageImpl<ArmTemplateInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;ArmTemplateInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<ArmTemplateInner>>> listNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ArmTemplateInner>>>>() { @Override public Observable<ServiceResponse<Page<ArmTemplateInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<ArmTemplateInner>> result = listNextDelegate(response); return Observable.just(new ServiceResponse<Page<ArmTemplateInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<ArmTemplateInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<ArmTemplateInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<ArmTemplateInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } }
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 1997-2010 Oracle and/or its affiliates. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can * obtain a copy of the License at * https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html * or packager/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at packager/legal/LICENSE.txt. * * GPL Classpath Exception: * Oracle designates this particular file as subject to the "Classpath" * exception as provided by Oracle in the GPL Version 2 section of the License * file that accompanied this code. * * Modifications: * If applicable, add the following below the License Header, with the fields * enclosed by brackets [] replaced by your own identifying information: * "Portions Copyright [year] [name of copyright owner]" * * Contributor(s): * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. 
*/ /* * $Id: Duration.java,v 1.10 2010-11-01 04:36:08 joehw Exp $ * %W% %E% */ package mf.javax.xml.datatype; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import mf.javax.xml.namespace.QName; /** * <p>Immutable representation of a time span as defined in * the W3C XML Schema 1.0 specification.</p> * * <p>A Duration object represents a period of Gregorian time, * which consists of six fields (years, months, days, hours, * minutes, and seconds) plus a sign (+/-) field.</p> * * <p>The first five fields have non-negative (>=0) integers or null * (which represents that the field is not set), * and the seconds field has a non-negative decimal or null. * A negative sign indicates a negative duration.</p> * * <p>This class provides a number of methods that make it easy * to use for the duration datatype of XML Schema 1.0 with * the errata.</p> * * <h2>Order relationship</h2> * <p>Duration objects only have partial order, where two values A and B * maybe either:</p> * <ol> * <li>A&lt;B (A is shorter than B) * <li>A&gt;B (A is longer than B) * <li>A==B (A and B are of the same duration) * <li>A&lt;>B (Comparison between A and B is indeterminate) * </ol> * * <p>For example, 30 days cannot be meaningfully compared to one month. * The {@link #compare(Duration duration)} method implements this * relationship.</p> * * <p>See the {@link #isLongerThan(Duration)} method for details about * the order relationship among <code>Duration</code> objects.</p> * * <h2>Operations over Duration</h2> * <p>This class provides a set of basic arithmetic operations, such * as addition, subtraction and multiplication. * Because durations don't have total order, an operation could * fail for some combinations of operations. For example, you cannot * subtract 15 days from 1 month. 
See the javadoc of those methods * for detailed conditions where this could happen.</p> * * <p>Also, division of a duration by a number is not provided because * the <code>Duration</code> class can only deal with finite precision * decimal numbers. For example, one cannot represent 1 sec divided by 3.</p> * * <p>However, you could substitute a division by 3 with multiplying * by numbers such as 0.3 or 0.333.</p> * * <h2>Range of allowed values</h2> * <p> * Because some operations of <code>Duration</code> rely on {@link Calendar} * even though {@link Duration} can hold very large or very small values, * some of the methods may not work correctly on such <code>Duration</code>s. * The impacted methods document their dependency on {@link Calendar}. * * @author <a href="mailto:Joseph.Fialli@Sun.COM">Joseph Fialli</a> * @author <a href="mailto:Kohsuke.Kawaguchi@Sun.com">Kohsuke Kawaguchi</a> * @author <a href="mailto:Jeff.Suttor@Sun.com">Jeff Suttor</a> * @author <a href="mailto:Sunitha.Reddy@Sun.com">Sunitha Reddy</a> * @version $Revision: 1.10 $, $Date: 2010-11-01 04:36:08 $ * @see XMLGregorianCalendar#add(Duration) * @since 1.5 */ public abstract class Duration { /** * <p>Debugging <code>true</code> or <code>false</code>.</p> */ private static final boolean DEBUG = true; /** * Default no-arg constructor. * * <p>Note: Always use the {@link DatatypeFactory} to * construct an instance of <code>Duration</code>. * The constructor on this class cannot be guaranteed to * produce an object with a consistent state and may be * removed in the future.</p> */ public Duration() { } /** * <p>Return the name of the XML Schema date/time type that this instance * maps to. Type is computed based on fields that are set, * i.e. 
{@link #isSet(DatatypeConstants.Field field)} == <code>true</code>.</p> * * <table border="2" rules="all" cellpadding="2"> * <thead> * <tr> * <th align="center" colspan="7"> * Required fields for XML Schema 1.0 Date/Time Datatypes.<br/> * <i>(timezone is optional for all date/time datatypes)</i> * </th> * </tr> * </thead> * <tbody> * <tr> * <td>Datatype</td> * <td>year</td> * <td>month</td> * <td>day</td> * <td>hour</td> * <td>minute</td> * <td>second</td> * </tr> * <tr> * <td>{@link DatatypeConstants#DURATION}</td> * <td>X</td> * <td>X</td> * <td>X</td> * <td>X</td> * <td>X</td> * <td>X</td> * </tr> * <tr> * <td>{@link DatatypeConstants#DURATION_DAYTIME}</td> * <td></td> * <td></td> * <td>X</td> * <td>X</td> * <td>X</td> * <td>X</td> * </tr> * <tr> * <td>{@link DatatypeConstants#DURATION_YEARMONTH}</td> * <td>X</td> * <td>X</td> * <td></td> * <td></td> * <td></td> * <td></td> * </tr> * </tbody> * </table> * * @return one of the following constants: * {@link DatatypeConstants#DURATION}, * {@link DatatypeConstants#DURATION_DAYTIME} or * {@link DatatypeConstants#DURATION_YEARMONTH}. * * @throws IllegalStateException If the combination of set fields does not match one of the XML Schema date/time datatypes. 
*/ public QName getXMLSchemaType() { boolean yearSet = isSet(DatatypeConstants.YEARS); boolean monthSet = isSet(DatatypeConstants.MONTHS); boolean daySet = isSet(DatatypeConstants.DAYS); boolean hourSet = isSet(DatatypeConstants.HOURS); boolean minuteSet = isSet(DatatypeConstants.MINUTES); boolean secondSet = isSet(DatatypeConstants.SECONDS); // DURATION if (yearSet && monthSet && daySet && hourSet && minuteSet && secondSet) { return DatatypeConstants.DURATION; } // DURATION_DAYTIME if (!yearSet && !monthSet && daySet && hourSet && minuteSet && secondSet) { return DatatypeConstants.DURATION_DAYTIME; } // DURATION_YEARMONTH if (yearSet && monthSet && !daySet && !hourSet && !minuteSet && !secondSet) { return DatatypeConstants.DURATION_YEARMONTH; } // nothing matches throw new IllegalStateException( "javax.xml.datatype.Duration#getXMLSchemaType():" + " this Duration does not match one of the XML Schema date/time datatypes:" + " year set = " + yearSet + " month set = " + monthSet + " day set = " + daySet + " hour set = " + hourSet + " minute set = " + minuteSet + " second set = " + secondSet ); } /** * Returns the sign of this duration in -1,0, or 1. * * @return * -1 if this duration is negative, 0 if the duration is zero, * and 1 if the duration is positive. */ public abstract int getSign(); /** * <p>Get the years value of this <code>Duration</code> as an <code>int</code> or <code>0</code> if not present.</p> * * <p><code>getYears()</code> is a convenience method for * {@link #getField(DatatypeConstants.Field field) getField(DatatypeConstants.YEARS)}.</p> * * <p>As the return value is an <code>int</code>, an incorrect value will be returned for <code>Duration</code>s * with years that go beyond the range of an <code>int</code>. * Use {@link #getField(DatatypeConstants.Field field) getField(DatatypeConstants.YEARS)} to avoid possible loss of precision.</p> * * @return If the years field is present, return its value as an <code>int</code>, else return <code>0</code>. 
*/ public int getYears() { return getField(DatatypeConstants.YEARS).intValue(); } /** * Obtains the value of the MONTHS field as an integer value, * or 0 if not present. * * This method works just like {@link #getYears()} except * that this method works on the MONTHS field. * * @return Months of this <code>Duration</code>. */ public int getMonths() { return getField(DatatypeConstants.MONTHS).intValue(); } /** * Obtains the value of the DAYS field as an integer value, * or 0 if not present. * * This method works just like {@link #getYears()} except * that this method works on the DAYS field. * * @return Days of this <code>Duration</code>. */ public int getDays() { return getField(DatatypeConstants.DAYS).intValue(); } /** * Obtains the value of the HOURS field as an integer value, * or 0 if not present. * * This method works just like {@link #getYears()} except * that this method works on the HOURS field. * * @return Hours of this <code>Duration</code>. * */ public int getHours() { return getField(DatatypeConstants.HOURS).intValue(); } /** * Obtains the value of the MINUTES field as an integer value, * or 0 if not present. * * This method works just like {@link #getYears()} except * that this method works on the MINUTES field. * * @return Minutes of this <code>Duration</code>. * */ public int getMinutes() { return getField(DatatypeConstants.MINUTES).intValue(); } /** * Obtains the value of the SECONDS field as an integer value, * or 0 if not present. * * This method works just like {@link #getYears()} except * that this method works on the SECONDS field. * * @return seconds in the integer value. 
The fraction of seconds * will be discarded (for example, if the actual value is 2.5, * this method returns 2) */ public int getSeconds() { return getField(DatatypeConstants.SECONDS).intValue(); } /** * <p>Returns the length of the duration in milli-seconds.</p> * * <p>If the seconds field carries more digits than milli-second order, * those will be simply discarded (or in other words, rounded to zero.) * For example, for any Calendar value <code>x</code>,</p> * <pre> * <code>new Duration("PT10.00099S").getTimeInMills(x) == 10000</code>. * <code>new Duration("-PT10.00099S").getTimeInMills(x) == -10000</code>. * </pre> * * <p> * Note that this method uses the {@link #addTo(Calendar)} method, * which may work incorrectly with <code>Duration</code> objects with * very large values in its fields. See the {@link #addTo(Calendar)} * method for details. * * @param startInstant * The length of a month/year varies. The <code>startInstant</code> is * used to disambiguate this variance. Specifically, this method * returns the difference between <code>startInstant</code> and * <code>startInstant+duration</code> * * @return milliseconds between <code>startInstant</code> and * <code>startInstant</code> plus this <code>Duration</code> * * @throws NullPointerException if <code>startInstant</code> parameter * is null. * */ public long getTimeInMillis(final Calendar startInstant) { Calendar cal = (Calendar) startInstant.clone(); addTo(cal); return getCalendarTimeInMillis(cal) - getCalendarTimeInMillis(startInstant); } /** * <p>Returns the length of the duration in milli-seconds.</p> * * <p>If the seconds field carries more digits than milli-second order, * those will be simply discarded (or in other words, rounded to zero.) * For example, for any <code>Date</code> value <code>x</code>,</p> * <pre> * <code>new Duration("PT10.00099S").getTimeInMills(x) == 10000</code>. * <code>new Duration("-PT10.00099S").getTimeInMills(x) == -10000</code>. 
* </pre> * * <p> * Note that this method uses the {@link #addTo(Date)} method, * which may work incorrectly with <code>Duration</code> objects with * very large values in its fields. See the {@link #addTo(Date)} * method for details. * * @param startInstant * The length of a month/year varies. The <code>startInstant</code> is * used to disambiguate this variance. Specifically, this method * returns the difference between <code>startInstant</code> and * <code>startInstant+duration</code>. * * @throws NullPointerException * If the startInstant parameter is null. * * @return milliseconds between <code>startInstant</code> and * <code>startInstant</code> plus this <code>Duration</code> * * @see #getTimeInMillis(Calendar) */ public long getTimeInMillis(final Date startInstant) { Calendar cal = new GregorianCalendar(); cal.setTime(startInstant); this.addTo(cal); return getCalendarTimeInMillis(cal) - startInstant.getTime(); } /** * Gets the value of a field. * * Fields of a duration object may contain arbitrary large value. * Therefore this method is designed to return a {@link Number} object. * * In case of YEARS, MONTHS, DAYS, HOURS, and MINUTES, the returned * number will be a non-negative integer. In case of seconds, * the returned number may be a non-negative decimal value. * * @param field * one of the six Field constants (YEARS,MONTHS,DAYS,HOURS, * MINUTES, or SECONDS.) * @return * If the specified field is present, this method returns * a non-null non-negative {@link Number} object that * represents its value. If it is not present, return null. * For YEARS, MONTHS, DAYS, HOURS, and MINUTES, this method * returns a {@link java.math.BigInteger} object. For SECONDS, this * method returns a {@link java.math.BigDecimal}. * * @throws NullPointerException If the <code>field</code> is <code>null</code>. */ public abstract Number getField(final DatatypeConstants.Field field); /** * Checks if a field is set. * * A field of a duration object may or may not be present. 
* This method can be used to test if a field is present. * * @param field * one of the six Field constants (YEARS,MONTHS,DAYS,HOURS, * MINUTES, or SECONDS.) * @return * true if the field is present. false if not. * * @throws NullPointerException * If the field parameter is null. */ public abstract boolean isSet(final DatatypeConstants.Field field); /** * <p>Computes a new duration whose value is <code>this+rhs</code>.</p> * * <p>For example,</p> * <pre> * "1 day" + "-3 days" = "-2 days" * "1 year" + "1 day" = "1 year and 1 day" * "-(1 hour,50 minutes)" + "-20 minutes" = "-(1 hours,70 minutes)" * "15 hours" + "-3 days" = "-(2 days,9 hours)" * "1 year" + "-1 day" = IllegalStateException * </pre> * * <p>Since there's no way to meaningfully subtract 1 day from 1 month, * there are cases where the operation fails in * {@link IllegalStateException}.</p> * * <p> * Formally, the computation is defined as follows.</p> * <p> * Firstly, we can assume that two <code>Duration</code>s to be added * are both positive without losing generality (i.e., * <code>(-X)+Y=Y-X</code>, <code>X+(-Y)=X-Y</code>, * <code>(-X)+(-Y)=-(X+Y)</code>) * * <p> * Addition of two positive <code>Duration</code>s are simply defined as * field by field addition where missing fields are treated as 0. * <p> * A field of the resulting <code>Duration</code> will be unset if and * only if respective fields of two input <code>Duration</code>s are unset. * <p> * Note that <code>lhs.add(rhs)</code> will be always successful if * <code>lhs.signum()*rhs.signum()!=-1</code> or both of them are * normalized.</p> * * @param rhs <code>Duration</code> to add to this <code>Duration</code> * * @return * non-null valid Duration object. * * @throws NullPointerException * If the rhs parameter is null. * @throws IllegalStateException * If two durations cannot be meaningfully added. For * example, adding negative one day to one month causes * this exception. 
* * * @see #subtract(Duration) */ public abstract Duration add(final Duration rhs); /** * Adds this duration to a {@link Calendar} object. * * <p> * Calls {@link java.util.Calendar#add(int,int)} in the * order of YEARS, MONTHS, DAYS, HOURS, MINUTES, SECONDS, and MILLISECONDS * if those fields are present. Because the {@link Calendar} class * uses int to hold values, there are cases where this method * won't work correctly (for example if values of fields * exceed the range of int.) * </p> * * <p> * Also, since this duration class is a Gregorian duration, this * method will not work correctly if the given {@link Calendar} * object is based on some other calendar systems. * </p> * * <p> * Any fractional parts of this <code>Duration</code> object * beyond milliseconds will be simply ignored. For example, if * this duration is "P1.23456S", then 1 is added to SECONDS, * 234 is added to MILLISECONDS, and the rest will be unused. * </p> * * <p> * Note that because {@link Calendar#add(int, int)} is using * <code>int</code>, <code>Duration</code> with values beyond the * range of <code>int</code> in its fields * will cause overflow/underflow to the given {@link Calendar}. * {@link XMLGregorianCalendar#add(Duration)} provides the same * basic operation as this method while avoiding * the overflow/underflow issues. * * @param calendar * A calendar object whose value will be modified. * @throws NullPointerException * if the calendar parameter is null. */ public abstract void addTo(Calendar calendar); /** * Adds this duration to a {@link Date} object. * * <p> * The given date is first converted into * a {@link java.util.GregorianCalendar}, then the duration * is added exactly like the {@link #addTo(Calendar)} method. * * <p> * The updated time instant is then converted back into a * {@link Date} object and used to update the given {@link Date} object. * * <p> * This somewhat redundant computation is necessary to unambiguously * determine the duration of months and years. 
* * @param date * A date object whose value will be modified. * @throws NullPointerException * if the date parameter is null. */ public void addTo(Date date) { // check data parameter if (date == null) { throw new NullPointerException( "Cannot call " + this.getClass().getName() + "#addTo(Date date) with date == null." ); } Calendar cal = new GregorianCalendar(); cal.setTime(date); this.addTo(cal); date.setTime(getCalendarTimeInMillis(cal)); } /** * <p>Computes a new duration whose value is <code>this-rhs</code>.</p> * * <p>For example:</p> * <pre> * "1 day" - "-3 days" = "4 days" * "1 year" - "1 day" = IllegalStateException * "-(1 hour,50 minutes)" - "-20 minutes" = "-(1hours,30 minutes)" * "15 hours" - "-3 days" = "3 days and 15 hours" * "1 year" - "-1 day" = "1 year and 1 day" * </pre> * * <p>Since there's no way to meaningfully subtract 1 day from 1 month, * there are cases where the operation fails in {@link IllegalStateException}.</p> * * <p>Formally the computation is defined as follows. * First, we can assume that two <code>Duration</code>s are both positive * without losing generality. (i.e., * <code>(-X)-Y=-(X+Y)</code>, <code>X-(-Y)=X+Y</code>, * <code>(-X)-(-Y)=-(X-Y)</code>)</p> * * <p>Then two durations are subtracted field by field. * If the sign of any non-zero field <code>F</code> is different from * the sign of the most significant field, * 1 (if <code>F</code> is negative) or -1 (otherwise) * will be borrowed from the next bigger unit of <code>F</code>.</p> * * <p>This process is repeated until all the non-zero fields have * the same sign.</p> * * <p>If a borrow occurs in the days field (in other words, if * the computation needs to borrow 1 or -1 month to compensate * days), then the computation fails by throwing an * {@link IllegalStateException}.</p> * * @param rhs <code>Duration</code> to subtract from this <code>Duration</code>. * * @return New <code>Duration</code> created from subtracting <code>rhs</code> from this <code>Duration</code>. 
* * @throws IllegalStateException * If two durations cannot be meaningfully subtracted. For * example, subtracting one day from one month causes * this exception. * * @throws NullPointerException * If the rhs parameter is null. * * @see #add(Duration) */ public Duration subtract(final Duration rhs) { return add(rhs.negate()); } /** * <p>Computes a new duration whose value is <code>factor</code> times * longer than the value of this duration.</p> * * <p>This method is provided for the convenience. * It is functionally equivalent to the following code:</p> * <pre> * multiply(new BigDecimal(String.valueOf(factor))) * </pre> * * @param factor Factor times longer of new <code>Duration</code> to create. * * @return New <code>Duration</code> that is <code>factor</code>times longer than this <code>Duration</code>. * * @see #multiply(BigDecimal) */ public Duration multiply(int factor) { return multiply(new BigDecimal(String.valueOf(factor))); } /** * Computes a new duration whose value is <code>factor</code> times * longer than the value of this duration. * * <p> * For example, * <pre> * "P1M" (1 month) * "12" = "P12M" (12 months) * "PT1M" (1 min) * "0.3" = "PT18S" (18 seconds) * "P1M" (1 month) * "1.5" = IllegalStateException * </pre> * * <p> * Since the <code>Duration</code> class is immutable, this method * doesn't change the value of this object. It simply computes * a new Duration object and returns it. * * <p> * The operation will be performed field by field with the precision * of {@link BigDecimal}. Since all the fields except seconds are * restricted to hold integers, * any fraction produced by the computation will be * carried down toward the next lower unit. For example, * if you multiply "P1D" (1 day) with "0.5", then it will be 0.5 day, * which will be carried down to "PT12H" (12 hours). * When fractions of month cannot be meaningfully carried down * to days, or year to months, this will cause an * {@link IllegalStateException} to be thrown. 
* For example if you multiple one month by 0.5.</p> * * <p> * To avoid {@link IllegalStateException}, use * the {@link #normalizeWith(Calendar)} method to remove the years * and months fields. * * @param factor to multiply by * * @return * returns a non-null valid <code>Duration</code> object * * @throws IllegalStateException if operation produces fraction in * the months field. * * @throws NullPointerException if the <code>factor</code> parameter is * <code>null</code>. * */ public abstract Duration multiply(final BigDecimal factor); /** * Returns a new <code>Duration</code> object whose * value is <code>-this</code>. * * <p> * Since the <code>Duration</code> class is immutable, this method * doesn't change the value of this object. It simply computes * a new Duration object and returns it. * * @return * always return a non-null valid <code>Duration</code> object. */ public abstract Duration negate(); /** * <p>Converts the years and months fields into the days field * by using a specific time instant as the reference point.</p> * * <p>For example, duration of one month normalizes to 31 days * given the start time instance "July 8th 2003, 17:40:32".</p> * * <p>Formally, the computation is done as follows:</p> * <ol> * <li>the given Calendar object is cloned</li> * <li>the years, months and days fields will be added to the {@link Calendar} object * by using the {@link Calendar#add(int,int)} method</li> * <li>the difference between the two Calendars in computed in milliseconds and converted to days, * if a remainder occurs due to Daylight Savings Time, it is discarded</li> * <li>the computed days, along with the hours, minutes and seconds * fields of this duration object is used to construct a new * Duration object.</li> * </ol> * * <p>Note that since the Calendar class uses <code>int</code> to * hold the value of year and month, this method may produce * an unexpected result if this duration object holds * a very large value in the years or months fields.</p> * * 
@param startTimeInstant <code>Calendar</code> reference point. * * @return <code>Duration</code> of years and months of this <code>Duration</code> as days. * * @throws NullPointerException If the startTimeInstant parameter is null. */ public abstract Duration normalizeWith(final Calendar startTimeInstant); /** * <p>Partial order relation comparison with this <code>Duration</code> instance.</p> * * <p>Comparison result must be in accordance with * <a href="http://www.w3.org/TR/xmlschema-2/#duration-order">W3C XML Schema 1.0 Part 2, Section 3.2.7.6.2, * <i>Order relation on duration</i></a>.</p> * * <p>Return:</p> * <ul> * <li>{@link DatatypeConstants#LESSER} if this <code>Duration</code> is shorter than <code>duration</code> parameter</li> * <li>{@link DatatypeConstants#EQUAL} if this <code>Duration</code> is equal to <code>duration</code> parameter</li> * <li>{@link DatatypeConstants#GREATER} if this <code>Duration</code> is longer than <code>duration</code> parameter</li> * <li>{@link DatatypeConstants#INDETERMINATE} if a conclusive partial order relation cannot be determined</li> * </ul> * * @param duration to compare * * @return the relationship between <code>this</code> <code>Duration</code>and <code>duration</code> parameter as * {@link DatatypeConstants#LESSER}, {@link DatatypeConstants#EQUAL}, {@link DatatypeConstants#GREATER} * or {@link DatatypeConstants#INDETERMINATE}. * * @throws UnsupportedOperationException If the underlying implementation * cannot reasonably process the request, e.g. W3C XML Schema allows for * arbitrarily large/small/precise values, the request may be beyond the * implementations capability. * @throws NullPointerException if <code>duration</code> is <code>null</code>. 
* * @see #isShorterThan(Duration) * @see #isLongerThan(Duration) */ public abstract int compare(final Duration duration); /** * <p>Checks if this duration object is strictly longer than * another <code>Duration</code> object.</p> * * <p>Duration X is "longer" than Y if and only if X>Y * as defined in the section 3.2.6.2 of the XML Schema 1.0 * specification.</p> * * <p>For example, "P1D" (one day) > "PT12H" (12 hours) and * "P2Y" (two years) > "P23M" (23 months).</p> * * @param duration <code>Duration</code> to test this <code>Duration</code> against. * * @throws UnsupportedOperationException If the underlying implementation * cannot reasonably process the request, e.g. W3C XML Schema allows for * arbitrarily large/small/precise values, the request may be beyond the * implementations capability. * @throws NullPointerException If <code>duration</code> is null. * * @return * true if the duration represented by this object * is longer than the given duration. false otherwise. * * @see #isShorterThan(Duration) * @see #compare(Duration duration) */ public boolean isLongerThan(final Duration duration) { return compare(duration) == DatatypeConstants.GREATER; } /** * <p>Checks if this duration object is strictly shorter than * another <code>Duration</code> object.</p> * * @param duration <code>Duration</code> to test this <code>Duration</code> against. * * @return <code>true</code> if <code>duration</code> parameter is shorter than this <code>Duration</code>, * else <code>false</code>. * * @throws UnsupportedOperationException If the underlying implementation * cannot reasonably process the request, e.g. W3C XML Schema allows for * arbitrarily large/small/precise values, the request may be beyond the * implementations capability. * @throws NullPointerException if <code>duration</code> is null. 
* * @see #isLongerThan(Duration duration) * @see #compare(Duration duration) */ public boolean isShorterThan(final Duration duration) { return compare(duration) == DatatypeConstants.LESSER; } /** * <p>Checks if this duration object has the same duration * as another <code>Duration</code> object.</p> * * <p>For example, "P1D" (1 day) is equal to "PT24H" (24 hours).</p> * * <p>Duration X is equal to Y if and only if time instant * t+X and t+Y are the same for all the test time instants * specified in the section 3.2.6.2 of the XML Schema 1.0 * specification.</p> * * <p>Note that there are cases where two <code>Duration</code>s are * "incomparable" to each other, like one month and 30 days. * For example,</p> * <pre> * !new Duration("P1M").isShorterThan(new Duration("P30D")) * !new Duration("P1M").isLongerThan(new Duration("P30D")) * !new Duration("P1M").equals(new Duration("P30D")) * </pre> * * @param duration * The object to compare this <code>Duration</code> against. * * @return * <code>true</code> if this duration is the same length as * <code>duration</code>. * <code>false</code> if <code>duration</code> is <code>null</code>, * is not a * <code>Duration</code> object, * or its length is different from this duration. * * @throws UnsupportedOperationException If the underlying implementation * cannot reasonably process the request, e.g. W3C XML Schema allows for * arbitrarily large/small/precise values, the request may be beyond the * implementations capability. * * @see #compare(Duration duration) */ public boolean equals(final Object duration) { if (duration == null || !(duration instanceof Duration)) { return false; } return compare((Duration) duration) == DatatypeConstants.EQUAL; } /** * Returns a hash code consistent with the definition of the equals method. 
* * @see Object#hashCode() */ public abstract int hashCode(); /** * <p>Returns a <code>String</code> representation of this <code>Duration</code> <code>Object</code>.</p> * * <p>The result is formatted according to the XML Schema 1.0 spec and can be always parsed back later into the * equivalent <code>Duration</code> <code>Object</code> by {@link DatatypeFactory#newDuration(String lexicalRepresentation)}.</p> * * <p>Formally, the following holds for any <code>Duration</code> * <code>Object</code> x:</p> * <pre> * new Duration(x.toString()).equals(x) * </pre> * * @return A non-<code>null</code> valid <code>String</code> representation of this <code>Duration</code>. */ public String toString() { StringBuffer buf = new StringBuffer(); if (getSign() < 0) { buf.append('-'); } buf.append('P'); BigInteger years = (BigInteger) getField(DatatypeConstants.YEARS); if (years != null) { buf.append(years + "Y"); } BigInteger months = (BigInteger) getField(DatatypeConstants.MONTHS); if (months != null) { buf.append(months + "M"); } BigInteger days = (BigInteger) getField(DatatypeConstants.DAYS); if (days != null) { buf.append(days + "D"); } BigInteger hours = (BigInteger) getField(DatatypeConstants.HOURS); BigInteger minutes = (BigInteger) getField(DatatypeConstants.MINUTES); BigDecimal seconds = (BigDecimal) getField(DatatypeConstants.SECONDS); if (hours != null || minutes != null || seconds != null) { buf.append('T'); if (hours != null) { buf.append(hours + "H"); } if (minutes != null) { buf.append(minutes + "M"); } if (seconds != null) { buf.append(toString(seconds) + "S"); } } return buf.toString(); } /** * <p>Turns {@link BigDecimal} to a string representation.</p> * * <p>Due to a behavior change in the {@link BigDecimal#toString()} * method in JDK1.5, this had to be implemented here.</p> * * @param bd <code>BigDecimal</code> to format as a <code>String</code> * * @return <code>String</code> representation of <code>BigDecimal</code> */ private String toString(BigDecimal bd) 
{ String intString = bd.unscaledValue().toString(); int scale = bd.scale(); if (scale == 0) { return intString; } /* Insert decimal point */ StringBuffer buf; int insertionPoint = intString.length() - scale; if (insertionPoint == 0) { /* Point goes right before intVal */ return "0." + intString; } else if (insertionPoint > 0) { /* Point goes inside intVal */ buf = new StringBuffer(intString); buf.insert(insertionPoint, '.'); } else { /* We must insert zeros between point and intVal */ buf = new StringBuffer(3 - insertionPoint + intString.length()); buf.append("0."); for (int i = 0; i < -insertionPoint; i++) { buf.append('0'); } buf.append(intString); } return buf.toString(); } /** * <p>Calls the {@link Calendar#getTimeInMillis} method. * Prior to JDK1.4, this method was protected and therefore * cannot be invoked directly.</p> * * <p>TODO: In future, this should be replaced by <code>cal.getTimeInMillis()</code>.</p> * * @param cal <code>Calendar</code> to get time in milliseconds. * * @return Milliseconds of <code>cal</code>. */ private static long getCalendarTimeInMillis(final Calendar cal) { return cal.getTime().getTime(); } }
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hisp.dhis.webapi.controller; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static org.hisp.dhis.webapi.utils.ContextUtils.setNoStore; import static org.springframework.http.CacheControl.noStore; import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; import java.io.IOException; import java.util.Collection; import java.util.Deque; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.hisp.dhis.common.CodeGenerator; import org.hisp.dhis.common.DhisApiVersion; import org.hisp.dhis.fieldfiltering.FieldFilterParams; import org.hisp.dhis.fieldfiltering.FieldFilterService; import org.hisp.dhis.i18n.I18n; import org.hisp.dhis.i18n.I18nManager; import org.hisp.dhis.render.RenderService; import org.hisp.dhis.scheduling.JobType; import org.hisp.dhis.setting.StyleManager; import org.hisp.dhis.setting.StyleObject; import org.hisp.dhis.setting.SystemSettingManager; import org.hisp.dhis.statistics.StatisticsProvider; import org.hisp.dhis.system.SystemInfo; import org.hisp.dhis.system.SystemService; import org.hisp.dhis.system.notification.Notification; import org.hisp.dhis.system.notification.Notifier; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.webapi.mvc.annotation.ApiVersion; import org.hisp.dhis.webapi.utils.ContextUtils; import org.hisp.dhis.webapi.webdomain.CodeList; import org.hisp.dhis.webapi.webdomain.ObjectCount; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; 
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.dataformat.csv.CsvFactory; import com.fasterxml.jackson.dataformat.csv.CsvGenerator; import com.fasterxml.jackson.dataformat.csv.CsvMapper; import com.fasterxml.jackson.dataformat.csv.CsvSchema; /** * @author Morten Olav Hansen <mortenoh@gmail.com> */ @Controller @RequestMapping( value = SystemController.RESOURCE_PATH ) @ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } ) public class SystemController { public static final String RESOURCE_PATH = "/system"; @Autowired private CurrentUserService currentUserService; @Autowired private SystemService systemService; @Autowired private StyleManager styleManager; @Autowired private SystemSettingManager systemSettingManager; @Autowired private Notifier notifier; @Autowired private RenderService renderService; @Autowired private I18nManager i18nManager; @Autowired private StatisticsProvider statisticsProvider; @Autowired private FieldFilterService fieldFilterService; private static final CsvFactory CSV_FACTORY = new CsvMapper().getFactory(); // ------------------------------------------------------------------------- // UID Generator // ------------------------------------------------------------------------- @GetMapping( value = { "/uid", "/id" } ) public @ResponseBody CodeList getUid( @RequestParam( required = false, defaultValue = "1" ) Integer limit, HttpServletResponse response ) { setNoStore( response ); return generateCodeList( Math.min( limit, 10000 ), CodeGenerator::generateUid ); } @GetMapping( value = { "/uid", "/id" }, produces = "application/csv" ) public void getUidCsv( @RequestParam( required = false, defaultValue = "1" ) Integer limit, HttpServletResponse response ) throws 
IOException { CodeList codeList = generateCodeList( Math.min( limit, 10000 ), CodeGenerator::generateUid ); CsvSchema schema = CsvSchema.builder() .addColumn( "uid" ) .setUseHeader( true ) .build(); CsvGenerator csvGenerator = CSV_FACTORY.createGenerator( response.getOutputStream() ); csvGenerator.setSchema( schema ); for ( String code : codeList.getCodes() ) { csvGenerator.writeStartObject(); csvGenerator.writeStringField( "uid", code ); csvGenerator.writeEndObject(); } csvGenerator.flush(); } @GetMapping( value = "/uuid", produces = { APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE } ) public @ResponseBody CodeList getUuid( @RequestParam( required = false, defaultValue = "1" ) Integer limit, HttpServletResponse response ) { CodeList codeList = generateCodeList( Math.min( limit, 10000 ), () -> UUID.randomUUID().toString() ); setNoStore( response ); return codeList; } // ------------------------------------------------------------------------- // Tasks // ------------------------------------------------------------------------- @GetMapping( value = "/tasks", produces = { "*/*", APPLICATION_JSON_VALUE } ) public ResponseEntity<Map<JobType, Map<String, Deque<Notification>>>> getTasksJson() { return ResponseEntity.ok().cacheControl( noStore() ).body( notifier.getNotifications() ); } @GetMapping( value = "/tasks/{jobType}", produces = { "*/*", APPLICATION_JSON_VALUE } ) public ResponseEntity<Map<String, Deque<Notification>>> getTasksExtendedJson( @PathVariable( "jobType" ) String jobType ) { Map<String, Deque<Notification>> notifications = jobType == null ? 
emptyMap() : notifier.getNotificationsByJobType( JobType.valueOf( jobType.toUpperCase() ) ); return ResponseEntity.ok().cacheControl( noStore() ).body( notifications ); } @GetMapping( value = "/tasks/{jobType}/{jobId}", produces = { "*/*", APPLICATION_JSON_VALUE } ) public ResponseEntity<Collection<Notification>> getTaskJsonByUid( @PathVariable( "jobType" ) String jobType, @PathVariable( "jobId" ) String jobId ) { Collection<Notification> notifications = jobType == null ? emptyList() : notifier.getNotificationsByJobId( JobType.valueOf( jobType.toUpperCase() ), jobId ); return ResponseEntity.ok().cacheControl( noStore() ).body( notifications ); } // ------------------------------------------------------------------------- // Tasks summary // ------------------------------------------------------------------------- @GetMapping( value = "/taskSummaries/{jobType}", produces = { "*/*", APPLICATION_JSON_VALUE } ) public ResponseEntity<Map<String, Object>> getTaskSummaryExtendedJson( @PathVariable( "jobType" ) String jobType ) { if ( jobType != null ) { Map<String, Object> summary = notifier .getJobSummariesForJobType( JobType.valueOf( jobType.toUpperCase() ) ); if ( summary != null ) { return ResponseEntity.ok().cacheControl( noStore() ).body( summary ); } } return ResponseEntity.ok().cacheControl( noStore() ).build(); } @GetMapping( value = "/taskSummaries/{jobType}/{jobId}", produces = { "*/*", APPLICATION_JSON_VALUE } ) public ResponseEntity<Object> getTaskSummaryJson( @PathVariable( "jobType" ) String jobType, @PathVariable( "jobId" ) String jobId ) { if ( jobType != null ) { Object summary = notifier.getJobSummaryByJobId( JobType.valueOf( jobType.toUpperCase() ), jobId ); if ( summary != null ) { return ResponseEntity.ok().cacheControl( noStore() ).body( summary ); } } return ResponseEntity.ok().cacheControl( noStore() ).build(); } // ------------------------------------------------------------------------- // Various // 
------------------------------------------------------------------------- @GetMapping( value = "/info", produces = { APPLICATION_JSON_VALUE, "application/javascript" } ) public @ResponseBody ResponseEntity<ObjectNode> getSystemInfo( @RequestParam( defaultValue = "*" ) List<String> fields, HttpServletRequest request, HttpServletResponse response ) { SystemInfo info = systemService.getSystemInfo(); info.setContextPath( ContextUtils.getContextPath( request ) ); info.setUserAgent( request.getHeader( ContextUtils.HEADER_USER_AGENT ) ); if ( !currentUserService.currentUserIsSuper() ) { info.clearSensitiveInfo(); } setNoStore( response ); FieldFilterParams<SystemInfo> params = FieldFilterParams.of( info, fields ); List<ObjectNode> objectNodes = fieldFilterService.toObjectNodes( params ); return ResponseEntity.ok( objectNodes.get( 0 ) ); } @GetMapping( value = "/objectCounts" ) public @ResponseBody ObjectCount getObjectCounts() { return new ObjectCount( statisticsProvider.getObjectCounts() ); } @GetMapping( "/ping" ) @ResponseStatus( HttpStatus.OK ) public @ResponseBody String ping( HttpServletResponse response ) { setNoStore( response ); return "pong"; } @GetMapping( value = "/flags", produces = APPLICATION_JSON_VALUE ) public @ResponseBody List<StyleObject> getFlags() { return getFlagObjects(); } @GetMapping( value = "/styles", produces = APPLICATION_JSON_VALUE ) public @ResponseBody List<StyleObject> getStyles() { return styleManager.getStyles(); } // ------------------------------------------------------------------------- // Supportive methods // ------------------------------------------------------------------------- private List<StyleObject> getFlagObjects() { I18n i18n = i18nManager.getI18n(); return systemSettingManager.getFlags().stream() .map( flag -> new StyleObject( i18n.getString( flag ), flag, (flag + ".png") ) ) .collect( Collectors.toList() ); } private CodeList generateCodeList( Integer limit, Supplier<String> codeSupplier ) { CodeList codeList = new 
CodeList(); for ( int i = 0; i < limit; i++ ) { codeList.getCodes().add( codeSupplier.get() ); } return codeList; } }
package visnode.commons.swing;

import java.io.File;
import java.util.function.Consumer;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.filechooser.FileFilter;
import javax.swing.filechooser.FileNameExtensionFilter;
import visnode.application.ExceptionHandler;
import visnode.application.InvalidOpenFileException;
import visnode.gui.FileFilterFactory;

/**
 * Factory of pre-configured file chooser dialogs
 */
public class FileChooserFactory {

    /**
     * Creates a dialog for saving projects
     *
     * @return SingleFileChooserBuilder
     */
    public static SingleFileChooserBuilder saveProject() {
        FileChooserBuilder dialog = new FileChooserBuilder()
                .method(Method.SAVE)
                .files()
                .filter(FileFilterFactory.projectFileFilter())
                .title("Save");
        return new SingleFileChooserBuilder(dialog);
    }

    /**
     * Creates a dialog for opening projects
     *
     * @return SingleFileChooserBuilder
     */
    public static SingleFileChooserBuilder openProject() {
        FileChooserBuilder dialog = new FileChooserBuilder()
                .method(Method.OPEN)
                .files()
                .filter(FileFilterFactory.projectFileFilter())
                .title("Open");
        return new SingleFileChooserBuilder(dialog);
    }

    /**
     * Creates a dialog for opening images
     *
     * @return SingleFileChooserBuilder
     */
    public static SingleFileChooserBuilder openImage() {
        FileChooserBuilder dialog = new FileChooserBuilder()
                .method(Method.OPEN)
                .files()
                .filter(FileFilterFactory.inputFileFilter())
                .title("Open");
        return new SingleFileChooserBuilder(dialog);
    }

    /**
     * Creates a dialog for opening images with multiple file selections
     *
     * @return MultiFileChooserBuilder
     */
    public static MultiFileChooserBuilder openImages() {
        FileChooserBuilder dialog = new FileChooserBuilder()
                .method(Method.OPEN)
                .files()
                .filter(FileFilterFactory.inputFileFilter())
                .title("Open");
        return new MultiFileChooserBuilder(dialog);
    }

    /**
     * Creates a dialog for exporting images
     *
     * @return SingleFileChooserBuilder
     */
    public static SingleFileChooserBuilder exportImage() {
        FileChooserBuilder dialog = new FileChooserBuilder()
                .method(Method.SAVE)
                .files()
                .filter(FileFilterFactory.exportFileFilter())
                .title("Export");
        return new SingleFileChooserBuilder(dialog);
    }

    /**
     * Builder for file choosers with single files
     */
    public static class SingleFileChooserBuilder {

        /** Underlying chooser configuration */
        private final FileChooserBuilder builder;

        public SingleFileChooserBuilder(FileChooserBuilder builder) {
            this.builder = builder;
        }

        /**
         * Accepts the file chooser and calls the consumer when ready
         *
         * @param consumer
         */
        public void accept(Consumer<File> consumer) {
            try {
                if (builder.method == Method.OPEN) {
                    // Open: hand the selection straight to the consumer
                    if (builder.chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
                        consumer.accept(builder.chooser.getSelectedFile());
                    }
                    return;
                }
                // Save: validate the (possibly extension-completed) target first
                if (builder.chooser.showSaveDialog(null) != JFileChooser.APPROVE_OPTION) {
                    return;
                }
                FileFilter filter = builder.chooser.getFileFilter();
                File target = builder.addExtensionIfRequired(builder.chooser.getSelectedFile(), filter);
                if (!filter.accept(target)) {
                    throw new InvalidOpenFileException();
                }
                if (builder.checkOverride(target)) {
                    consumer.accept(target);
                }
            } catch (Exception ex) {
                ExceptionHandler.get().handle(ex);
            }
        }
    }

    /**
     * Builder for file choosers with multi files
     */
    public static class MultiFileChooserBuilder {

        /** Underlying chooser configuration */
        private final FileChooserBuilder builder;

        public MultiFileChooserBuilder(FileChooserBuilder builder) {
            this.builder = builder;
            this.builder.chooser.setMultiSelectionEnabled(true);
        }

        /**
         * Accepts the file chooser and calls the consumer when ready
         *
         * @param consumer
         */
        public void accept(Consumer<File[]> consumer) {
            try {
                // Multi-selection is only supported for the open method
                if (builder.method != Method.OPEN) {
                    return;
                }
                if (builder.chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
                    consumer.accept(builder.chooser.getSelectedFiles());
                }
            } catch (Exception ex) {
                ExceptionHandler.get().handle(ex);
            }
        }
    }

    /**
     * Builder for file choosers
     */
    public static class FileChooserBuilder {

        /** Chooser */
        private final JFileChooser chooser;
        /** Method for the chooser */
        private Method method;

        /**
         * Creates a new Chooser Builder
         */
        public FileChooserBuilder() {
            chooser = new JFileChooser();
        }

        /**
         * Selects only files
         *
         * @return FileChooserBuilder
         */
        private FileChooserBuilder files() {
            chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
            return this;
        }

        /**
         * Sets the method for the dialog
         *
         * @param method
         * @return FileChooserBuilder
         */
        private FileChooserBuilder method(Method method) {
            this.method = method;
            return this;
        }

        /**
         * Sets the filter
         *
         * @param filter
         * @return FileChooserBuilder
         */
        private FileChooserBuilder filter(FileFilterFactory.FileFilterList filter) {
            filter.apply(chooser);
            return this;
        }

        /**
         * Sets the dialog title
         *
         * @param title
         * @return FileChooserBuilder
         */
        private FileChooserBuilder title(String title) {
            chooser.setDialogTitle(title);
            return this;
        }

        /**
         * Checks file override
         *
         * @param file
         * @return boolean
         */
        private boolean checkOverride(File file) {
            if (!file.exists()) {
                return true;
            }
            int answer = JOptionPane.showConfirmDialog(null,
                    "The file " + file + " already exists. Do you wish to override it?");
            return answer == JOptionPane.OK_OPTION;
        }

        /**
         * Adds the extension to the file if required
         *
         * @param file
         * @param fileFilter
         * @return File
         */
        private File addExtensionIfRequired(File file, FileFilter fileFilter) {
            String name = file.getName();
            // A dot within the last 4 characters means an extension is already present
            boolean hasExtension = name.indexOf('.', name.length() - 4) >= 0;
            if (!hasExtension && fileFilter instanceof FileNameExtensionFilter) {
                String extension = ((FileNameExtensionFilter) fileFilter).getExtensions()[0];
                return new File(file.getAbsolutePath() + '.' + extension);
            }
            return file;
        }
    }

    /**
     * Method for the dialog
     */
    private enum Method {
        OPEN,
        SAVE
    }
}
/* * Copyright (c) 2010, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of California, Berkeley * nor the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
* * Author Original authors * Author: Marco Guazzone (marco.guazzone@gmail.com), 2013 */ package radlab.rain.workload.olio; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.Calendar; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.mime.MultipartEntity; import org.apache.http.entity.mime.content.FileBody; import org.apache.http.entity.mime.content.StringBody; import org.apache.http.HttpStatus; import radlab.rain.IScoreboard; import radlab.rain.workload.olio.model.OlioPerson; import radlab.rain.workload.olio.model.OlioSocialEvent; import radlab.rain.workload.olio.model.OlioTag; /** * The AddEventOperation is an operation that creates a new event. The user * must be logged on. The creation of the POST involves populating the request * with event details, an image, a document, and address data.<br /> * <br /> * The requests made include loading the event form, loading the static URLs * (CSS/JS), and sending the POST data to the application. * <br/> * NOTE: Code based on {@code org.apache.olio.workload.driver.UIDriver} class * and adapted for RAIN. * * @author Original authors * @author <a href="mailto:marco.guazzone@gmail.com">Marco Guazzone</a> */ public class AddEventOperation extends OlioOperation { public AddEventOperation(boolean interactive, IScoreboard scoreboard) { super(interactive, scoreboard); this._operationName = OlioGenerator.ADD_EVENT_OP_NAME; this._operationIndex = OlioGenerator.ADD_EVENT_OP; } @Override public void execute() throws Throwable { // Need a logged person OlioPerson loggedPerson = this.getUtility().getPerson(this.getSessionState().getLoggedPersonId()); if (!this.getUtility().isRegisteredPerson(loggedPerson)) { this.getLogger().severe("Login required for adding an event"); //throw new Exception("Login required for adding an event"); this.setFailed(true); return; } StringBuilder response = null; // Fetch the add event form. 
response = this.getHttpTransport().fetchUrl(this.getGenerator().getAddEventURL()); this.trace(this.getGenerator().getAddEventURL()); // Verify that the request succeeded. if (!this.getGenerator().checkHttpResponse(response.toString())) { this.getLogger().severe("Problems in performing request to URL: " + this.getGenerator().getAddEventURL() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + "). Server response: " + response); throw new IOException("Problems in performing request to URL: " + this.getGenerator().getAddEventURL() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + ")"); } // Load the static files associated with the add event form. this.loadStatics(this.getGenerator().getAddEventStatics()); this.trace(this.getGenerator().getAddEventStatics()); // Get the authentication token needed to create the POST request. String token = null; switch (this.getConfiguration().getIncarnation()) { case OlioConfiguration.JAVA_INCARNATION: // No token to parse break; case OlioConfiguration.PHP_INCARNATION: // No token to parse break; case OlioConfiguration.RAILS_INCARNATION: token = this.parseAuthToken(response.toString()); if ( token == null ) { throw new Exception( "Authentication token could not be parsed" ); } break; } // Generate a new Olio social event OlioSocialEvent event = this.getUtility().newSocialEvent(); event.submitterUserName = loggedPerson.userName; // Submit the add event form to create the event. 
HttpPost reqPost = new HttpPost(this.getGenerator().getAddEventResultURL()); MultipartEntity entity = new MultipartEntity(); this.populateEntity(entity, event); switch (this.getConfiguration().getIncarnation()) { case OlioConfiguration.JAVA_INCARNATION: // No token to set break; case OlioConfiguration.PHP_INCARNATION: // No token to set break; case OlioConfiguration.RAILS_INCARNATION: entity.addPart("authenticity_token", new StringBody(token)); break; } reqPost.setEntity(entity); response = this.getHttpTransport().fetch(reqPost); this.trace(this.getGenerator().getAddEventResultURL()); //FIXME: In Apache Olio there is also a check for redirection. Do we need it? // Probably no, since HttpTransport#fecth already take care of it //String[] locationHeader = this.getHttpTransport().getHeadersMap().get("location"); //if (redirectionLocation != null) //{ // String redirectUrl = null; // switch (this.getConfiguration().getIncarnation()) // { // case OlioConfiguration.JAVA_INCARNATION: // redirectUrl = this.getGenerator().getBaseURL() + '/' + locationHeader[0]; // break; // case OlioConfiguration.PHP_INCARNATION: // redirectUrl = this.getGenerator().getBaseURL() + '/' + locationHeader[0]; // break; // case OlioConfiguration.RAILS_INCARNATION: // redirectUrl = locationHeader[0]; // break; // } // response = this.getHttpTransport().fetchURL(redirectUrl); //} // Verify that the request succeeded. if (!this.getGenerator().checkHttpResponse(response.toString())) { this.getLogger().severe("Problems in performing request to URL: " + reqPost.getURI() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + "). Server response: " + response); throw new IOException("Problems in performing request to URL: " + reqPost.getURI() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + ")"); } // Verify that the operation succeeded. 
switch (this.getConfiguration().getIncarnation()) { case OlioConfiguration.JAVA_INCARNATION: // No check to do break; case OlioConfiguration.PHP_INCARNATION: // No check to do break; case OlioConfiguration.RAILS_INCARNATION: int index = response.toString().toLowerCase().indexOf("event was successfully created."); if (index == -1) { this.getLogger().severe("Problems in performing request to URL: " + reqPost.getURI() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + "): Could not find success message in result body. Server response: " + response); throw new IOException("Problems in performing request to URL: " + reqPost.getURI() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + "): Could not find success message in result body"); } break; } // Save session data this.getSessionState().setLastResponse(response.toString()); this.setFailed(false); } /** * Adds the details and files needed to create a new event in Olio. * * @param entity The request entity in which to add event details. 
*/ protected void populateEntity(MultipartEntity entity, OlioSocialEvent evt) throws Throwable { Calendar cal = Calendar.getInstance(); cal.setTime(evt.eventTimestamp); StringBuilder tags = new StringBuilder(); for (OlioTag tag : evt.tags) { tags.append(tag.name).append(' '); } tags.setLength(tags.length()-1); // Trim trailing space switch (this.getConfiguration().getIncarnation()) { case OlioConfiguration.JAVA_INCARNATION: entity.addPart("title", new StringBody(evt.title)); entity.addPart("summary", new StringBody(evt.summary)); entity.addPart("description", new StringBody(evt.description)); entity.addPart("submitter_user_name", new StringBody(evt.submitterUserName)); entity.addPart("telephone", new StringBody(evt.telephone)); entity.addPart("timezone", new StringBody(evt.timezone)); entity.addPart("year", new StringBody(Integer.toString(cal.get(Calendar.YEAR)))); entity.addPart("month", new StringBody(Integer.toString(cal.get(Calendar.MONTH)))); entity.addPart("day", new StringBody(Integer.toString(cal.get(Calendar.DAY_OF_MONTH)))); entity.addPart("hour", new StringBody(Integer.toString(cal.get(Calendar.HOUR_OF_DAY)))); entity.addPart("minute", new StringBody(Integer.toString(cal.get(Calendar.MINUTE)))); entity.addPart("tags", new StringBody(tags.toString())); entity.addPart("street1", new StringBody(evt.address[0])); entity.addPart("street2", new StringBody(evt.address[1])); entity.addPart("city", new StringBody(evt.address[2])); entity.addPart("state", new StringBody(evt.address[3])); entity.addPart("zip", new StringBody(evt.address[4])); entity.addPart("country", new StringBody(evt.address[5])); entity.addPart("upload_event_image", new FileBody(this.getGenerator().getEventImgFile())); entity.addPart("upload_event_literature", new FileBody(this.getGenerator().getEventPdfFile())); entity.addPart("submit", new StringBody("Create")); break; case OlioConfiguration.PHP_INCARNATION: entity.addPart("title", new StringBody(evt.title)); //entity.addPart("summary", new 
StringBody(evt.summary)); entity.addPart("description", new StringBody(evt.description)); entity.addPart("submitter_user_name", new StringBody(evt.submitterUserName)); entity.addPart("telephone", new StringBody(evt.telephone)); entity.addPart("timezone", new StringBody(evt.timezone)); entity.addPart("year", new StringBody(Integer.toString(cal.get(Calendar.YEAR)))); entity.addPart("month", new StringBody(Integer.toString(cal.get(Calendar.MONTH)))); entity.addPart("day", new StringBody(Integer.toString(cal.get(Calendar.DAY_OF_MONTH)))); entity.addPart("hour", new StringBody(Integer.toString(cal.get(Calendar.HOUR_OF_DAY)))); entity.addPart("minute", new StringBody(Integer.toString(cal.get(Calendar.MINUTE)))); entity.addPart("tags", new StringBody(tags.toString())); entity.addPart("street1", new StringBody(evt.address[0])); entity.addPart("street2", new StringBody(evt.address[1])); entity.addPart("city", new StringBody(evt.address[2])); entity.addPart("state", new StringBody(evt.address[3])); entity.addPart("zip", new StringBody(evt.address[4])); entity.addPart("country", new StringBody(evt.address[5])); entity.addPart("upload_event_image", new FileBody(this.getGenerator().getEventImgFile())); entity.addPart("upload_event_literature", new FileBody(this.getGenerator().getEventPdfFile())); entity.addPart("addeventsubmit", new StringBody("Create")); break; case OlioConfiguration.RAILS_INCARNATION: entity.addPart("event[title]", new StringBody(evt.title)); entity.addPart("event[summary]", new StringBody(evt.summary)); entity.addPart("event[description]", new StringBody(evt.description)); //FIXME: Submitter user name? entity.addPart("event[telephone]", new StringBody(evt.telephone)); //FIXME: Timezone? 
entity.addPart("event[event_timestamp(1i)]", new StringBody(Integer.toString(cal.get(Calendar.YEAR)))); entity.addPart("event[event_timestamp(2i)]", new StringBody(Integer.toString(cal.get(Calendar.MONTH)))); entity.addPart("event[event_timestamp(3i)]", new StringBody(Integer.toString(cal.get(Calendar.DAY_OF_MONTH)))); entity.addPart("event[event_timestamp(4i)]", new StringBody(Integer.toString(cal.get(Calendar.HOUR_OF_DAY)))); entity.addPart("event[event_timestamp(5i)]", new StringBody(Integer.toString(cal.get(Calendar.MINUTE)))); entity.addPart("tag_list", new StringBody(tags.toString())); entity.addPart("event_image", new FileBody(this.getGenerator().getEventImgFile())); entity.addPart("event_document", new FileBody(this.getGenerator().getEventPdfFile())); entity.addPart("address[street1]", new StringBody(evt.address[0])); entity.addPart("address[street2]", new StringBody(evt.address[1])); entity.addPart("address[city]", new StringBody(evt.address[2])); entity.addPart("address[state]", new StringBody(evt.address[3])); entity.addPart("address[zip]", new StringBody(evt.address[4])); entity.addPart("address[country]", new StringBody(evt.address[5])); entity.addPart("commit", new StringBody("Create")); break; } } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.convertToInstanceMethod; import com.intellij.codeInsight.ChangeContextUtil; import com.intellij.history.LocalHistory; import com.intellij.history.LocalHistoryAction; import com.intellij.ide.util.EditorHelper; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Ref; import com.intellij.psi.*; import com.intellij.psi.impl.source.javadoc.PsiDocParamRef; import com.intellij.psi.javadoc.PsiDocTagValue; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.TypeConversionUtil; import com.intellij.refactoring.BaseRefactoringProcessor; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.listeners.RefactoringEventData; import com.intellij.refactoring.move.moveInstanceMethod.MoveInstanceMethodViewDescriptor; import com.intellij.refactoring.util.*; import com.intellij.usageView.UsageInfo; import com.intellij.usageView.UsageViewDescriptor; import com.intellij.util.IncorrectOperationException; import com.intellij.util.VisibilityUtil; import java.util.HashMap; import com.intellij.util.containers.MultiMap; import 
org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * @author dsl */ public class ConvertToInstanceMethodProcessor extends BaseRefactoringProcessor { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.convertToInstanceMethod.ConvertToInstanceMethodProcessor"); private PsiMethod myMethod; private @Nullable PsiParameter myTargetParameter; private PsiClass myTargetClass; private Map<PsiTypeParameter, PsiTypeParameter> myTypeParameterReplacements; private static final Key<PsiTypeParameter> BIND_TO_TYPE_PARAMETER = Key.create("REPLACEMENT"); private final String myOldVisibility; private final String myNewVisibility; public ConvertToInstanceMethodProcessor(final Project project, final PsiMethod method, @Nullable final PsiParameter targetParameter, final String newVisibility) { super(project); myMethod = method; myTargetParameter = targetParameter; LOG.assertTrue(method.hasModifierProperty(PsiModifier.STATIC)); if (myTargetParameter != null) { LOG.assertTrue(myTargetParameter.getDeclarationScope() == myMethod); LOG.assertTrue(myTargetParameter.getType() instanceof PsiClassType); final PsiType type = myTargetParameter.getType(); LOG.assertTrue(type instanceof PsiClassType); myTargetClass = ((PsiClassType)type).resolve(); } else { myTargetClass = method.getContainingClass(); } myOldVisibility = VisibilityUtil.getVisibilityModifier(method.getModifierList()); myNewVisibility = newVisibility; } public PsiClass getTargetClass() { return myTargetClass; } @NotNull protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) { return new MoveInstanceMethodViewDescriptor(myMethod, myTargetParameter, myTargetClass); } @Override protected void refreshElements(@NotNull PsiElement[] elements) { LOG.assertTrue(elements.length > 1); myMethod = (PsiMethod)elements[0]; myTargetParameter = elements.length == 3 ? 
(PsiParameter)elements[1] : null;
    // NOTE(review): this chunk begins mid-initializer; the enclosing class header and the
    // start of this constructor are outside the visible range.
    myTargetClass = (PsiClass)elements[elements.length - 1];
  }

  /**
   * Collects every usage the refactoring must rewrite: calls and method references to
   * {@code myMethod}, javadoc references, in-body references to the target parameter, and
   * (when the target is an interface) all implementing classes.
   */
  @NotNull
  protected UsageInfo[] findUsages() {
    LOG.assertTrue(myTargetParameter == null || myTargetParameter.getDeclarationScope() == myMethod);
    final Project project = myMethod.getProject();
    final PsiReference[] methodReferences =
      ReferencesSearch.search(myMethod, GlobalSearchScope.projectScope(project), false).toArray(PsiReference.EMPTY_ARRAY);
    List<UsageInfo> result = new ArrayList<>();
    for (final PsiReference ref : methodReferences) {
      final PsiElement element = ref.getElement();
      if (element instanceof PsiReferenceExpression) {
        PsiElement parent = element.getParent();
        if (parent instanceof PsiMethodCallExpression) {
          result.add(new MethodCallUsageInfo((PsiMethodCallExpression)parent));
        }
        else if (element instanceof PsiMethodReferenceExpression) {
          // "applicable by second search" when there is no target parameter or it is the first one
          result.add(new MethodReferenceUsageInfo((PsiMethodReferenceExpression)element,
                                                  myTargetParameter == null ||
                                                  myMethod.getParameterList().getParameterIndex(myTargetParameter) == 0));
        }
      }
      else if (element instanceof PsiDocTagValue) {
        result.add(new JavaDocUsageInfo(ref)); //TODO:!!!
      }
    }

    if (myTargetParameter != null) {
      // references to the parameter inside the method body / javadoc
      for (final PsiReference ref : ReferencesSearch.search(myTargetParameter, new LocalSearchScope(myMethod), false)) {
        final PsiElement element = ref.getElement();
        if (element instanceof PsiReferenceExpression || element instanceof PsiDocParamRef) {
          result.add(new ParameterUsageInfo(ref));
        }
      }
    }

    if (myTargetClass.isInterface()) {
      // implementors may need a concrete copy of the method (pre-Java-8 targets)
      PsiClass[] implementingClasses = RefactoringHierarchyUtil.findImplementingClasses(myTargetClass);
      for (final PsiClass implementingClass : implementingClasses) {
        result.add(new ImplementingClassUsageInfo(implementingClass));
      }
    }

    return result.toArray(UsageInfo.EMPTY_ARRAY);
  }

  @Nullable
  @Override
  protected String getRefactoringId() {
    return "refactoring.makeInstance";
  }

  @Nullable
  @Override
  protected RefactoringEventData getBeforeData() {
    RefactoringEventData data = new RefactoringEventData();
    data.addElements(new PsiElement[]{myMethod, myTargetClass});
    return data;
  }

  @Nullable
  @Override
  protected RefactoringEventData getAfterData(@NotNull UsageInfo[] usages) {
    RefactoringEventData data = new RefactoringEventData();
    data.addElement(myTargetClass);
    return data;
  }

  /**
   * Detects conflicts before the refactoring runs: accessibility problems, call sites passing
   * a null argument for the instance parameter, method references that must be expanded to
   * lambdas, and cases where class type arguments cannot be inferred (raw type would result).
   */
  protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    UsageInfo[] usagesIn = refUsages.get();
    MultiMap<PsiElement, String> conflicts = new MultiMap<>();
    final Set<PsiMember> methods = Collections.singleton((PsiMember)myMethod);
    if (!myTargetClass.isInterface()) {
      RefactoringConflictsUtil.analyzeAccessibilityConflicts(methods, myTargetClass, conflicts, myNewVisibility);
    }
    else {
      // method lands in every implementor as PUBLIC, so check accessibility from each of them
      for (final UsageInfo usage : usagesIn) {
        if (usage instanceof ImplementingClassUsageInfo) {
          RefactoringConflictsUtil
            .analyzeAccessibilityConflicts(methods, ((ImplementingClassUsageInfo)usage).getPsiClass(), conflicts, PsiModifier.PUBLIC);
        }
      }
    }

    for (final UsageInfo usageInfo : usagesIn) {
      PsiElement place = null;
      if (usageInfo instanceof MethodCallUsageInfo) {
        place = ((MethodCallUsageInfo)usageInfo).getMethodCall();
        if (myTargetParameter != null) {
          final PsiExpression[] expressions = ((PsiMethodCallExpression)place).getArgumentList().getExpressions();
          final int index = myMethod.getParameterList().getParameterIndex(myTargetParameter);
          if (index < expressions.length) {
            // a literal null argument would become the call qualifier -> guaranteed NPE
            PsiExpression instanceValue = expressions[index];
            instanceValue = RefactoringUtil.unparenthesizeExpression(instanceValue);
            if (instanceValue instanceof PsiLiteralExpression && ((PsiLiteralExpression)instanceValue).getValue() == null) {
              String message = RefactoringBundle.message("0.contains.call.with.null.argument.for.parameter.1",
                                                         RefactoringUIUtil.getDescription(ConflictsUtil.getContainer(place), true),
                                                         CommonRefactoringUtil.htmlEmphasize(myTargetParameter.getName()));
              conflicts.putValue(place, message);
            }
          }
        }
      }
      else if (usageInfo instanceof MethodReferenceUsageInfo) {
        place = ((MethodReferenceUsageInfo)usageInfo).getExpression();
        if (!((MethodReferenceUsageInfo)usageInfo).isApplicableBySecondSearch()) {
          conflicts.putValue(place, RefactoringBundle.message("expand.method.reference.warning"));
        }
      }

      if (myTargetParameter == null && place != null && myTargetClass.hasTypeParameters() && !thisAccessExpressionApplicable(place)) {
        conflicts.putValue(place, "Impossible to infer class type arguments. When proceed, raw " + myTargetClass.getName() + " would be created");
      }
    }
    return showConflicts(conflicts, usagesIn);
  }

  /** Runs the refactoring under a local-history action so it appears as one undoable step. */
  protected void performRefactoring(@NotNull UsageInfo[] usages) {
    LocalHistoryAction a = LocalHistory.getInstance().startAction(getCommandName());
    try {
      doRefactoring(usages);
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
    finally {
      a.finish();
    }
  }

  /**
   * Core transformation: rewrite all usages, delete the instance parameter, move the method
   * into the target class (or interface plus implementors), then delete the original.
   * NOTE(review): the ordering here (usages first, then parameter deletion, then context
   * encode/move/decode) is significant — do not reorder.
   */
  private void doRefactoring(UsageInfo[] usages) throws IncorrectOperationException {
    myTypeParameterReplacements = buildTypeParameterReplacements();
    List<PsiClass> inheritors = new ArrayList<>();

    // deepest/rightmost usages first so earlier edits don't invalidate later PSI elements
    CommonRefactoringUtil.sortDepthFirstRightLeftOrder(usages);

    // Process usages
    for (final UsageInfo usage : usages) {
      if (usage instanceof MethodCallUsageInfo) {
        processMethodCall(((MethodCallUsageInfo)usage).getMethodCall());
      }
      else if (usage instanceof ParameterUsageInfo) {
        processParameterUsage((ParameterUsageInfo)usage);
      }
      else if (usage instanceof ImplementingClassUsageInfo) {
        inheritors.add(((ImplementingClassUsageInfo)usage).getPsiClass());
      }
      else if (usage instanceof MethodReferenceUsageInfo) {
        processMethodReference((MethodReferenceUsageInfo)usage);
      }
    }

    prepareTypeParameterReplacement();
    if (myTargetParameter != null) myTargetParameter.delete();
    ChangeContextUtil.encodeContextInfo(myMethod, true);
    if (!myTargetClass.isInterface()) {
      PsiMethod method = addMethodToClass(myTargetClass);
      fixVisibility(method, usages);
      EditorHelper.openInEditor(method);
    }
    else {
      final PsiMethod interfaceMethod = addMethodToClass(myTargetClass);
      final PsiModifierList modifierList = interfaceMethod.getModifierList();
      // Java 8+: keep the body as a default method; otherwise make it abstract and copy
      // the implementation into every inheritor
      final boolean markAsDefault = PsiUtil.isLanguageLevel8OrHigher(myTargetClass);
      if (markAsDefault) {
        modifierList.setModifierProperty(PsiModifier.DEFAULT, true);
      }
      RefactoringUtil.makeMethodAbstract(myTargetClass, interfaceMethod);
      EditorHelper.openInEditor(interfaceMethod);
      if (!markAsDefault) {
        for (final PsiClass psiClass : inheritors) {
          final PsiMethod newMethod = addMethodToClass(psiClass);
          PsiUtil.setModifierProperty(newMethod,
                                      myNewVisibility != null && !myNewVisibility.equals(VisibilityUtil.ESCALATE_VISIBILITY)
                                      ? myNewVisibility : PsiModifier.PUBLIC,
                                      true);
        }
      }
    }
    myMethod.delete();
  }

  /**
   * Rewrites a method-reference usage: either re-qualifies it in place, or expands it to a
   * lambda and processes the resulting call like a normal method call.
   */
  private void processMethodReference(MethodReferenceUsageInfo usage) {
    PsiMethodReferenceExpression expression = usage.getExpression();
    if (usage.isApplicableBySecondSearch()) {
      PsiExpression qualifierExpression = expression.getQualifierExpression();
      LOG.assertTrue(qualifierExpression != null);
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(myProject);
      PsiElement qualifier;
      if (myTargetParameter != null) {
        qualifier = factory.createReferenceExpression(myTargetClass);
      }
      else {
        boolean thisAccess = thisAccessExpressionApplicable(expression);
        qualifier = thisAccess ? factory.createExpressionFromText("this", qualifierExpression)
                               : createSyntheticAccessExpression(factory, expression);
      }
      qualifierExpression.replace(qualifier);
    }
    else {
      PsiLambdaExpression lambdaExpression = LambdaRefactoringUtil.convertMethodReferenceToLambda(expression, false, true);
      List<PsiExpression> returnExpressions = LambdaUtil.getReturnExpressions(lambdaExpression);
      if (!returnExpressions.isEmpty()) {
        PsiMethodCallExpression methodCall = (PsiMethodCallExpression)returnExpressions.get(0);
        processMethodCall(methodCall);
        usage.setReplacement(methodCall);
      }
    }
  }

  /**
   * Applies the requested visibility to the moved method, escalating it just enough to stay
   * reachable from every usage when ESCALATE_VISIBILITY was selected.
   */
  private void fixVisibility(final PsiMethod method, final UsageInfo[] usages) throws IncorrectOperationException {
    final PsiModifierList modifierList = method.getModifierList();
    if (VisibilityUtil.ESCALATE_VISIBILITY.equals(myNewVisibility)) {
      for (UsageInfo usage : usages) {
        PsiElement place = null;
        if (usage instanceof MethodCallUsageInfo) {
          place = usage.getElement();
        }
        else if (usage instanceof MethodReferenceUsageInfo) {
          PsiMethodReferenceExpression expression = ((MethodReferenceUsageInfo)usage).getExpression();
          if (expression != null && expression.isValid()) {
            place = expression;
          }
          else {
            // the reference was expanded to a lambda earlier; use the replacement call
            place = ((MethodReferenceUsageInfo)usage).getReplacement();
          }
        }
        if (place != null) {
          VisibilityUtil.escalateVisibility(method, place);
        }
      }
    }
    else if (myNewVisibility != null && !myNewVisibility.equals(myOldVisibility)) {
      modifierList.setModifierProperty(myNewVisibility, true);
    }
  }

  /**
   * Marks references to method type parameters with the class type parameter they must be
   * rebound to after the move, then deletes the now-redundant method type parameters.
   */
  private void prepareTypeParameterReplacement() throws IncorrectOperationException {
    if (myTypeParameterReplacements == null) return;
    final Collection<PsiTypeParameter> typeParameters = myTypeParameterReplacements.keySet();
    for (final PsiTypeParameter parameter : typeParameters) {
      for (final PsiReference reference : ReferencesSearch.search(parameter, new LocalSearchScope(myMethod), false)) {
        if (reference.getElement() instanceof PsiJavaCodeReferenceElement) {
          reference.getElement().putCopyableUserData(BIND_TO_TYPE_PARAMETER, myTypeParameterReplacements.get(parameter));
        }
      }
    }
    final Set<PsiTypeParameter> methodTypeParameters = myTypeParameterReplacements.keySet();
    for (final PsiTypeParameter methodTypeParameter : methodTypeParameters) {
      methodTypeParameter.delete();
    }
  }

  /**
   * Copies the method into {@code targetClass}, drops the static modifier, and rebinds type
   * parameter references marked by {@link #prepareTypeParameterReplacement()}.
   */
  private PsiMethod addMethodToClass(final PsiClass targetClass) throws IncorrectOperationException {
    final PsiMethod newMethod = (PsiMethod)targetClass.add(myMethod);
    final PsiModifierList modifierList = newMethod.getModifierList();
    modifierList.setModifierProperty(PsiModifier.STATIC, false);
    ChangeContextUtil.decodeContextInfo(newMethod, null, null);
    if (myTypeParameterReplacements == null) return newMethod;

    final Map<PsiTypeParameter, PsiTypeParameter> additionalReplacements;
    if (targetClass != myTargetClass) {
      // copying into an implementor: map target-class type parameters onto the implementor's
      final PsiSubstitutor superClassSubstitutor =
        TypeConversionUtil.getSuperClassSubstitutor(myTargetClass, targetClass, PsiSubstitutor.EMPTY);
      final Map<PsiTypeParameter, PsiTypeParameter> map = calculateReplacementMap(superClassSubstitutor, myTargetClass, targetClass);
      if (map == null) return newMethod;
      additionalReplacements = new HashMap<>();
      for (final Map.Entry<PsiTypeParameter, PsiTypeParameter> entry : map.entrySet()) {
        additionalReplacements.put(entry.getValue(), entry.getKey());
      }
    }
    else {
      additionalReplacements = null;
    }
    newMethod.accept(new JavaRecursiveElementVisitor() {
      @Override
      public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
        PsiTypeParameter typeParameterToBind = reference.getCopyableUserData(BIND_TO_TYPE_PARAMETER);
        if (typeParameterToBind != null) {
          reference.putCopyableUserData(BIND_TO_TYPE_PARAMETER, null);
          try {
            if (additionalReplacements != null) {
              typeParameterToBind = additionalReplacements.get(typeParameterToBind);
            }
            reference.bindToElement(typeParameterToBind);
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
        else {
          visitElement(reference);
        }
      }
    });
    return newMethod;
  }

  /**
   * Rewrites a use of the target parameter inside the method body: deletes it when it is a
   * redundant qualifier, otherwise replaces it with {@code this}; javadoc @param tags are removed.
   */
  private void processParameterUsage(ParameterUsageInfo usage) throws IncorrectOperationException {
    final PsiReference reference = usage.getReferenceExpression();
    if (reference instanceof PsiReferenceExpression) {
      final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)reference;
      PsiElement parent = referenceExpression.getParent();
      if (parent instanceof PsiReferenceExpression && sameUnqualified(parent)) {
        // parameter was a qualifier (param.foo); the unqualified name resolves the same, so drop it
        referenceExpression.delete();
      }
      else {
        final PsiExpression expression =
          JavaPsiFacade.getInstance(myMethod.getProject()).getElementFactory().createExpressionFromText("this", null);
        referenceExpression.replace(expression);
      }
    }
    else {
      final PsiElement element = reference.getElement();
      if (element instanceof PsiDocParamRef) {
        element.getParent().delete();
      }
    }
  }

  /**
   * Checks whether removing the qualifier from {@code parent} still resolves to the same
   * field (i.e. no local shadows it); returns true for non-field targets as well.
   */
  private static boolean sameUnqualified(PsiElement parent) {
    if (parent instanceof PsiMethodReferenceExpression) return false;
    PsiElement resolve = ((PsiReferenceExpression)parent).resolve();
    if (resolve instanceof PsiField) {
      final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(resolve.getProject());
      final PsiExpression unqualifiedFieldReference =
        elementFactory.createExpressionFromText(((PsiField)resolve).getName(), parent);
      return resolve == ((PsiReferenceExpression)unqualifiedFieldReference).resolve();
    }
    return true;
  }

  /**
   * Rewrites a call site: the argument that held the instance (or a synthesized/`this`
   * access) becomes the call qualifier and is removed from the argument list.
   */
  private void processMethodCall(final PsiMethodCallExpression methodCall) throws IncorrectOperationException {
    PsiElementFactory factory = JavaPsiFacade.getInstance(myMethod.getProject()).getElementFactory();
    final PsiReferenceExpression methodExpression = methodCall.getMethodExpression();
    PsiExpression argument;
    if (myTargetParameter != null) {
      PsiParameterList parameterList = myMethod.getParameterList();
      int parameterIndex = parameterList.getParameterIndex(myTargetParameter);
      PsiExpression[] arguments = methodCall.getArgumentList().getExpressions();
      if (arguments.length <= parameterIndex) return;
      argument = arguments[parameterIndex];
    }
    else {
      if (thisAccessExpressionApplicable(methodCall)) {
        // call already happens on an instance of the target class: just drop any qualifier
        PsiExpression qualifierExpression = methodExpression.getQualifierExpression();
        if (qualifierExpression != null) {
          qualifierExpression.delete();
        }
        return;
      }
      argument = createSyntheticAccessExpression(factory, methodCall);
    }

    final PsiExpression qualifier;
    if (methodExpression.getQualifierExpression() != null) {
      qualifier = methodExpression.getQualifierExpression();
    }
    else {
      // synthesize a qualified reference ("x.method") so there is a qualifier node to replace
      final PsiReferenceExpression newRefExpr =
        (PsiReferenceExpression)factory.createExpressionFromText("x." + myMethod.getName(), null);
      qualifier = ((PsiReferenceExpression)methodExpression.replace(newRefExpr)).getQualifierExpression();
    }
    qualifier.replace(argument);
    argument.delete();
  }

  /** Builds a {@code new TargetClass()} expression used when no instance is otherwise available. */
  private PsiExpression createSyntheticAccessExpression(PsiElementFactory factory, PsiElement context) {
    return factory.createExpressionFromText("new " + myTargetClass.getName() + "()", context);
  }

  /** True when {@code expression} sits in a non-static context inside the target class, so {@code this} works. */
  private boolean thisAccessExpressionApplicable(PsiElement expression) {
    return PsiTreeUtil.isAncestor(myTargetClass, expression, false) &&
           PsiUtil.getEnclosingStaticElement(expression, myTargetClass) == null;
  }

  @NotNull
  protected String getCommandName() {
    return ConvertToInstanceMethodHandler.REFACTORING_NAME;
  }

  /**
   * Maps each method type parameter onto the class type parameter it corresponds to via the
   * target parameter's type; null when the mapping is not a clean one-to-one correspondence.
   */
  @Nullable
  public Map<PsiTypeParameter, PsiTypeParameter> buildTypeParameterReplacements() {
    if (myTargetParameter == null) {
      return Collections.emptyMap();
    }
    final PsiClassType type = (PsiClassType)myTargetParameter.getType();
    final PsiSubstitutor substitutor = type.resolveGenerics().getSubstitutor();
    return calculateReplacementMap(substitutor, myTargetClass, myMethod);
  }

  @Nullable
  private static Map<PsiTypeParameter, PsiTypeParameter> calculateReplacementMap(final PsiSubstitutor substitutor,
                                                                                 final PsiClass targetClass,
                                                                                 final PsiElement containingElement) {
    final HashMap<PsiTypeParameter, PsiTypeParameter> result = new HashMap<>();
    for (PsiTypeParameter classTypeParameter : PsiUtil.typeParametersIterable(targetClass)) {
      final PsiType substitution = substitutor.substitute(classTypeParameter);
      if (!(substitution instanceof PsiClassType)) return null;
      final PsiClass aClass = ((PsiClassType)substitution).resolve();
      if (!(aClass instanceof PsiTypeParameter)) return null;
      final PsiTypeParameter methodTypeParameter = (PsiTypeParameter)aClass;
      // each class type parameter must map to a distinct type parameter owned by the method
      if (methodTypeParameter.getOwner() != containingElement) return null;
      if (result.keySet().contains(methodTypeParameter)) return null;
      result.put(methodTypeParameter, classTypeParameter);
    }
    return result;
  }

  public PsiMethod getMethod() {
    return myMethod;
  }

  @Nullable
  public PsiParameter getTargetParameter() {
    return myTargetParameter;
  }
}
/*------------------------------------------------------------------------------ Copyright (c) CovertJaguar, 2011-2017 This work (the API) is licensed under the "MIT" License, see LICENSE.md for details. -----------------------------------------------------------------------------*/ package mods.railcraft.api.signals; import mods.railcraft.api.carts.CartToolsAPI; import mods.railcraft.api.tracks.TrackScanner; import net.minecraft.block.Block; import net.minecraft.entity.item.EntityMinecart; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.MessageFormatMessage; import javax.annotation.Nullable; import java.util.*; import static mods.railcraft.api.signals.TrackLocator.Status; /** * @author CovertJaguar <http://www.railcraft.info> */ public abstract class SignalBlock extends AbstractPair { public static final int VALIDATION_CHECK_INTERVAL = 16384; private static final Level DEBUG_LEVEL = Level.INFO; // private static final Map<UUID, Deque<WorldCoordinate>> savedData = new HashMap<UUID, Deque<WorldCoordinate>>(); private final Map<BlockPos, BlockPos> trackCache = new HashMap<>(); private final Map<BlockPos, TrackScanner.ScanResult> trackScans = new HashMap<>(); private final Set<BlockPos> waitingForRetest = new HashSet<>(); private final TrackLocator trackLocator; private int update = rand.nextInt(); // private UUID uuid = UUID.randomUUID(); private boolean changedAspect; protected SignalBlock(String locTag, TileEntity tile, int numPairs) { super(locTag, tile, numPairs); this.trackLocator = new TrackLocator(tile); } @Nullable private SignalBlock getSignalAt(BlockPos coord) { TileEntity recv = getPairAt(coord); if (recv != null) return ((ISignalTileBlock) recv).getSignalBlock(); return null; } 
public abstract SignalAspect getSignalAspect(); public TrackLocator getTrackLocator() { return trackLocator; } public void log(Level level, String msg, Object... args) { LogManager.getLogger("railcraft").log(level, new MessageFormatMessage(msg, args)); } private void printDebug(String msg, Object... args) { if (SignalTools.printSignalDebug) log(DEBUG_LEVEL, msg, args); } private void printDebugPair(String msg, @Nullable TileEntity ot) { if (SignalTools.printSignalDebug) if (ot == null) log(DEBUG_LEVEL, msg + " source:[{0}] target:[null]", tile.getPos()); else log(DEBUG_LEVEL, msg + " source:[{0}] target:[{1}] target class:{2}", tile.getPos(), ot.getPos(), ot.getClass()); } private void printDebugPair(String msg, @Nullable BlockPos coord) { if (SignalTools.printSignalDebug) if (coord == null) log(DEBUG_LEVEL, msg + " source:[{0}] target:[null]", tile.getPos()); else log(DEBUG_LEVEL, msg + " source:[{0}] target:[{1}]", tile.getPos(), coord); } @Override protected void saveNBT(NBTTagCompound data) { super.saveNBT(data); // MiscTools.writeUUID(data, "uuid", uuid); NBTTagList tagList = new NBTTagList(); for (Map.Entry<BlockPos, BlockPos> cache : trackCache.entrySet()) { NBTTagCompound entry = new NBTTagCompound(); if (cache.getKey() != null && cache.getValue() != null) { SignalTools.writeToNBT(entry, "key", cache.getKey()); SignalTools.writeToNBT(entry, "value", cache.getValue()); tagList.appendTag(entry); } } data.setTag("trackCache", tagList); // if (RailcraftConfig.printSignalDebug()) { // Deque<WorldCoordinate> test = new LinkedList<WorldCoordinate>(); // NBTTagList list = data.getTagList("pairings", 10); // for (byte entry = 0; entry < list.tagCount(); entry++) { // NBTTagCompound tag = list.getCompoundTagAt(entry); // int[] c = tag.getIntArray("coords"); // test.add(new WorldCoordinate(c[0], c[1], c[2], c[3])); // } // boolean isConsistent = test.containsAll(getPairs()); // printDebug("Signal Block saved NBT. 
[{0}, {1}, {2}] [verified: {3}] [changedAspect: {4}] [data: {5}]", tile.xCoord, tile.yCoord, tile.zCoord, isConsistent, changedAspect, test); printDebug("Signal Block saved NBT. [{0}] [changedAspect: {1}] [data: {1}]", tile.getPos(), changedAspect, pairings); // savedData.put(uuid, new LinkedList<WorldCoordinate>(pairings)); // } } @Override protected void loadNBT(NBTTagCompound data) { super.loadNBT(data); // uuid = MiscTools.readUUID(data, "uuid"); if (data.hasKey("trackCache")) { NBTTagList tagList = data.getTagList("trackCache", 10); for (int i = 0; i < tagList.tagCount(); i++) { NBTTagCompound nbt = tagList.getCompoundTagAt(i); BlockPos key = SignalTools.readFromNBT(nbt, "key"); BlockPos value = SignalTools.readFromNBT(nbt, "value"); trackCache.put(key, value); } } // if (RailcraftConfig.printSignalDebug()) { // String isConsistent = "unknown"; // Deque<WorldCoordinate> lastSave = savedData.get(uuid); // if (lastSave != null) { // if (pairings.containsAll(lastSave)) // isConsistent = "true"; // else // isConsistent = "false"; // } printDebug("Signal Block loaded NBT. [{0}] [data: {1}]", tile.getPos(), pairings); // } } @Override public void clearPairing(BlockPos other) { printDebugPair("Signal Block pair cleared. ", other); if (SignalTools.printSignalDebug) { // logTrace(DEBUG_LEVEL, 10, "Signal Block code Path"); Block block = tile.getWorld().getBlockState(other).getBlock(); log(DEBUG_LEVEL, "Signal Block target block [{0}, {1}, {2}] = {3}, {4}", other, block.getClass(), block.getUnlocalizedName()); TileEntity t = tile.getWorld().getTileEntity(other); if (t != null) log(DEBUG_LEVEL, "Signal Block target tile [{0}] = {1}", t.getPos(), t.getClass()); else log(DEBUG_LEVEL, "Signal Block target tile [{0}] = null", other); } super.clearPairing(other); } private void clearSignalBlockPairing(@Nullable BlockPos other, String reason, Object... 
args) { printDebug(reason, args); if (other == null) clearPairings(); else clearPairing(other); } @Override protected void addPairing(BlockPos other) { pairings.remove(other); pairings.add(other); while (pairings.size() > getMaxPairings()) { BlockPos pair = pairings.remove(); printDebugPair("Signal Block dropped because too many pairs.", pair); } SignalTools.packetBuilder.sendPairPacketUpdate(this); } @Override public boolean isValidPair(BlockPos otherCoord, TileEntity otherTile) { if (otherTile instanceof ISignalTileBlock) { SignalBlock signalBlock = ((ISignalTileBlock) otherTile).getSignalBlock(); return signalBlock.isPairedWith(getCoords()); } return false; } @Override public void cleanPairings() { if (!invalidPairings.isEmpty()) printDebug("Signal Block pairs cleaned: source:[{0}] targets: {1}", tile.getPos(), invalidPairings); super.cleanPairings(); } // @Override // public void startPairing() { // clearSignalBlockPairing("Signal Block pairing cleared in preparation to start a new pairing. 
[{0}, {1}, {2}]", tile.xCoord, tile.yCoord, tile.zCoord); // super.startPairing(); // } @Override public boolean createPair(TileEntity other) { if (!(other instanceof ISignalTileBlock)) { return false; } ISignalTileBlock otherTile = (ISignalTileBlock) other; SignalBlock otherSignal = otherTile.getSignalBlock(); if (otherSignal == this) { printDebugPair("Signal Block creation was aborted, cannot pair with self.", otherSignal.getTile()); return false; } printDebugPair("Signal Block creation being attempted.", otherSignal.getTile()); Status myTrackStatus = trackLocator.getTrackStatus(); Status otherTrackStatus = otherSignal.getTrackLocator().getTrackStatus(); if (myTrackStatus == Status.INVALID || otherTrackStatus == Status.INVALID) { printDebugPair("Signal Block creation failed, could not find Track.", otherSignal.getTile()); return false; } BlockPos myTrack = trackLocator.getTrackLocation(); BlockPos otherTrack = otherSignal.getTrackLocator().getTrackLocation(); assert myTrack != null; assert otherTrack != null; TrackScanner.ScanResult scan = TrackScanner.scanStraightTrackSection(tile.getWorld(), myTrack, otherTrack); if (!scan.areConnected) { printDebugPair("Signal Block creation failed, could not find Path.", otherSignal.getTile()); return false; } addPairing(otherSignal.getCoords()); otherSignal.addPairing(getCoords()); endPairing(); otherSignal.endPairing(); trackScans.put(otherTrack, scan); printDebugPair("Signal Block created successfully.", otherSignal.getTile()); return true; } protected abstract void updateSignalAspect(); protected abstract SignalAspect getSignalAspectForPair(BlockPos otherCoord); public SignalAspect determineAspect(BlockPos otherCoord) { if (isWaitingForRetest() || isBeingPaired()) return SignalAspect.BLINK_YELLOW; if (!isPaired()) return SignalAspect.BLINK_RED; SignalAspect otherAspect = SignalAspect.GREEN; SignalBlock other = getSignalAt(otherCoord); if (other != null) otherAspect = other.getSignalAspectForPair(getCoords()); SignalAspect 
myAspect = determineMyAspect(otherCoord); return SignalAspect.mostRestrictive(myAspect, otherAspect); } private SignalAspect determineMyAspect(BlockPos otherCoord) { BlockPos myTrack = trackLocator.getTrackLocation(); if (myTrack == null) return SignalAspect.RED; BlockPos otherTrack = getOtherTrackLocation(otherCoord); if (otherTrack == null) return SignalAspect.YELLOW; TrackScanner.ScanResult scan = getOrCreateTrackScan(otherTrack); if (scan == null) return SignalAspect.RED; int y1 = scan.minY; int y2 = scan.maxY + 1; int x1 = Math.min(myTrack.getX(), otherTrack.getX()); int z1 = Math.min(myTrack.getZ(), otherTrack.getZ()); int x2 = Math.max(myTrack.getX(), otherTrack.getX()) + 1; int z2 = Math.max(myTrack.getZ(), otherTrack.getZ()) + 1; boolean zAxis = Math.abs(myTrack.getX() - otherTrack.getX()) < Math.abs(myTrack.getZ() - otherTrack.getZ()); int xOffset = otherTrack.getX() > myTrack.getX() ? -3 : 3; int zOffset = otherTrack.getZ() > myTrack.getZ() ? -3 : 3; List<EntityMinecart> carts = CartToolsAPI.getMinecartsIn(tile.getWorld(), new BlockPos(x1, y1, z1), new BlockPos(x2, y2, z2)); // System.out.printf("%d, %d, %d, %d, %d, %d\n", i1, j1, k1, i2, j2, k2); // System.out.println("carts = " + carts.size()); SignalAspect newAspect = SignalAspect.GREEN; for (EntityMinecart cart : carts) { int cartX = MathHelper.floor(cart.posX); int cartZ = MathHelper.floor(cart.posZ); if (Math.abs(cart.motionX) < 0.08 && Math.abs(cart.motionZ) < 0.08) return SignalAspect.RED; else if (zAxis) if (cartZ > myTrack.getZ() + zOffset && cart.motionZ < 0) return SignalAspect.RED; else if (cartZ < myTrack.getZ() + zOffset && cart.motionZ > 0) return SignalAspect.RED; else newAspect = SignalAspect.YELLOW; else if (cartX > myTrack.getX() + xOffset && cart.motionX < 0) return SignalAspect.RED; else if (cartX < myTrack.getX() + xOffset && cart.motionX > 0) return SignalAspect.RED; else newAspect = SignalAspect.YELLOW; } return newAspect; } @Nullable private TrackScanner.ScanResult 
getOrCreateTrackScan(BlockPos otherTrack) { TrackScanner.ScanResult scan = trackScans.get(otherTrack); if (scan == null) { BlockPos myTrack = trackLocator.getTrackLocation(); if (myTrack != null) { scan = TrackScanner.scanStraightTrackSection(tile.getWorld(), myTrack, otherTrack); trackScans.put(otherTrack, scan); } } return scan; } @Nullable private BlockPos getOtherTrackLocation(BlockPos otherCoord) { SignalBlock other = getSignalAt(otherCoord); if (other != null) { BlockPos track = other.trackLocator.getTrackLocation(); if (track != null) trackCache.put(otherCoord, track); return track; } return trackCache.get(otherCoord); } private TrackValidationStatus isSignalBlockValid(BlockPos other) { // if (other == null) // return new TrackValidationStatus(true, "UNVERIFIABLE_COORD_NULL"); SignalBlock otherSignalBlock = getSignalAt(other); if (otherSignalBlock == null) return new TrackValidationStatus(true, "UNVERIFIABLE_OTHER_SIGNAL_NULL"); Status trackStatus = trackLocator.getTrackStatus(); if (trackStatus == Status.INVALID) return new TrackValidationStatus(false, "INVALID_MY_TRACK_NULL"); Status otherTrackStatus = otherSignalBlock.trackLocator.getTrackStatus(); if (otherTrackStatus == Status.INVALID) return new TrackValidationStatus(false, "INVALID_OTHER_TRACK_INVALID"); BlockPos otherTrack = trackCache.get(other); if (otherTrackStatus == Status.UNKNOWN) { if (otherTrack == null) return new TrackValidationStatus(true, "UNVERIFIABLE_OTHER_TRACK_UNKNOWN"); } else { otherTrack = otherSignalBlock.trackLocator.getTrackLocation(); if (otherTrack != null) trackCache.put(other, otherTrack); } if (otherTrack == null) return new TrackValidationStatus(true, "UNVERIFIABLE_OTHER_TRACK_NULL"); BlockPos myTrack = trackLocator.getTrackLocation(); if (myTrack == null) return new TrackValidationStatus(true, "INVALID_MY_TRACK_NULL"); TrackScanner.ScanResult scan = TrackScanner.scanStraightTrackSection(tile.getWorld(), myTrack, otherTrack); trackScans.put(otherTrack, scan); if 
(scan.verdict == TrackScanner.ScanResult.Verdict.VALID) return new TrackValidationStatus(true, "VALID"); if (scan.verdict == TrackScanner.ScanResult.Verdict.UNKNOWN) return new TrackValidationStatus(true, "UNVERIFIABLE_UNLOADED_CHUNK"); return new TrackValidationStatus(false, "INVALID_SCAN_FAIL: " + scan.verdict.name()); } @Override public void tickServer() { super.tickServer(); update++; try { if (!isLoaded()) return; } catch (Throwable ex) { // Game.logErrorAPI("Railcraft", ex, AbstractPair.class); } if (update % SignalTools.signalUpdateInterval == 0) { SignalAspect prev = getSignalAspect(); if (prev != SignalAspect.BLINK_RED) changedAspect = true; updateSignalAspect(); if (getSignalAspect() == SignalAspect.BLINK_RED && prev != SignalAspect.BLINK_RED) printDebug("Signal Block changed aspect to BLINK_RED: source:[{0}] pairs: {1}", tile.getPos(), pairings); } if (update % VALIDATION_CHECK_INTERVAL == 0) { Status trackStatus = trackLocator.getTrackStatus(); switch (trackStatus) { case INVALID: clearSignalBlockPairing(null, "Signal Block dropped because no track was found near Signal. [{0}]", tile.getPos()); break; case VALID: for (BlockPos otherCoord : waitingForRetest) { TrackValidationStatus status = isSignalBlockValid(otherCoord); if (!status.isValid) clearSignalBlockPairing(otherCoord, "Signal Block dropped because track between Signals was invalid. source:[{0}] target:[{1}, {2}, {3}] reason:{4}", tile.getPos(), otherCoord, status.message); } waitingForRetest.clear(); for (BlockPos otherCoord : getPairs()) { if (!isSignalBlockValid(otherCoord).isValid) waitingForRetest.add(otherCoord); } break; } } } public boolean isWaitingForRetest() { return !waitingForRetest.isEmpty(); } @Override protected String getTagName() { return "SignalBlock"; } private static class TrackValidationStatus { public final boolean isValid; public final String message; public TrackValidationStatus(boolean isValid, String message) { this.isValid = isValid; this.message = message; } } }
/*
 * Copyright (c) 2021, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.alg.fiducial.qrcode;

import boofcv.alg.distort.LensDistortionNarrowFOV;
import boofcv.struct.image.ImageGray;
import org.ddogleg.struct.FastArray;
import org.jetbrains.annotations.Nullable;

/**
 * Searches the image for alignment patterns. First it computes a transform that removes perspective distortion
 * using previously detected position patterns. Then it searches inside the image for the position patterns. If
 * available, adjacent position patterns are used to adjust the search so that locations distant from position
 * patterns can be compensated for.
 *
 * NOTE: The current algorithm is a bit brittle and is a good target for further improvement. It has issues converging
 * when its initial guess is off.
 *
 * @author Peter Abeles
 */
@SuppressWarnings({"NullAway.Init"})
public class QrCodeAlignmentPatternLocator<T extends ImageGray<T>> {

	// grid for quick look up of alignment patterns to adjust search
	private final FastArray<QrCode.Alignment> lookup = new FastArray<>(QrCode.Alignment.class);

	// reads pixel values in undistorted "grid" coordinates of the QR code
	QrCodeBinaryGridReader<T> reader;

	// pixel value storage used when localizing
	float[] arrayX = new float[12];
	float[] arrayY = new float[12];

	// marker currently being processed and its binarization threshold
	QrCode qr;
	float threshold;

	public QrCodeAlignmentPatternLocator( Class<T> imageType ) {
		reader = new QrCodeBinaryGridReader<>(imageType);
	}

	/**
	 * Uses the previously detected position patterns to seed the search for the alignment patterns
	 */
	public boolean process( T image, QrCode qr ) {
		this.qr = qr;
		// this must be cleared before calling setMarker or else the distortion will be messed up
		qr.alignment.reset();
		reader.setImage(image);
		reader.setMarker(qr);
		threshold = (float)qr.threshCorner;
		initializePatterns(qr);

		// version 1 has no alignment patterns
		if (qr.version <= 1)
			return true;
		return localizePositionPatterns(QrCode.VERSION_INFO[qr.version].alignment);
	}

	public void setLensDistortion( int width, int height, @Nullable LensDistortionNarrowFOV model ) {
		reader.setLensDistortion(width, height, model);
	}

	/**
	 * Creates a list of alignment patterns to look for and their grid coordinates
	 */
	void initializePatterns( QrCode qr ) {
		int[] where = QrCode.VERSION_INFO[qr.version].alignment;
		qr.alignment.reset();
		lookup.reset();
		for (int row = 0; row < where.length; row++) {
			for (int col = 0; col < where.length; col++) {
				boolean skip = false;
				// the three corners occupied by position patterns have no alignment pattern
				if (row == 0 && col == 0)
					skip = true;
				else if (row == 0 && col == where.length - 1)
					skip = true;
				else if (row == where.length - 1 && col == 0)
					skip = true;

				if (skip) {
					// null placeholder keeps the lookup grid rectangular
					lookup.add(null);
				} else {
					QrCode.Alignment a = qr.alignment.grow();
					a.moduleX = where[col];
					a.moduleY = where[row];
					lookup.add(a);
				}
			}
		}
	}

	// Localizes each expected alignment pattern, using already-found neighbors (above and to
	// the left) to correct the initial guess for accumulated distortion.
	boolean localizePositionPatterns( int[] alignmentLocations ) {
		int size = alignmentLocations.length;

		for (int row = 0; row < size; row++) {
			for (int col = 0; col < size; col++) {
				QrCode.Alignment a = lookup.get(row*size + col);
				if (a == null)
					continue;

				// adjustment from previously found alignment patterns
				double adjY = 0, adjX = 0;

				if (row > 0) {
					QrCode.Alignment p = lookup.get((row - 1)*size + col);
					if (p != null)
						adjY = p.moduleY + 0.5 - p.moduleFound.y;
				}
				if (col > 0) {
					QrCode.Alignment p = lookup.get(row*size + col - 1);
					if (p != null)
						adjX = p.moduleX + 0.5 - p.moduleFound.x;
				}

				if (!centerOnSquare(a, (float)(a.moduleY + 0.5 + adjY), (float)(a.moduleX + 0.5 + adjX))) {
					return false;
				}

//				if( !localize(a, (float)a.moduleFound.y, (float)a.moduleFound.x) ) {
//					return false;
//				}
				if (!meanshift(a, (float)a.moduleFound.y, (float)a.moduleFound.x)) {
					return false;
				}
			}
		}
		return true;
	}

	// 3x3 sample window used by centerOnSquare
	float[] samples = new float[9];

	/**
	 * If the initial guess is within the inner white circle or black dot this will ensure that it is centered
	 * on the black dot
	 */
	boolean centerOnSquare( QrCode.Alignment pattern, float guessY, float guessX ) {
		float step = 1;
		float bestMag = Float.MAX_VALUE;
		float bestX = guessX;
		float bestY = guessY;

		for (int i = 0; i < 10; i++) {
			// sample a 3x3 grid around the current guess
			for (int row = 0; row < 3; row++) {
				float gridy = guessY - 1f + row;
				for (int col = 0; col < 3; col++) {
					float gridx = guessX - 1f + col;
					samples[row*3 + col] = reader.read(gridy, gridx);
				}
			}

			// intensity gradient across the window; zero gradient means centered
			float dx = (samples[2] + samples[5] + samples[8]) - (samples[0] + samples[3] + samples[6]);
			float dy = (samples[6] + samples[7] + samples[8]) - (samples[0] + samples[1] + samples[2]);

			float r = (float)Math.sqrt(dx*dx + dy*dy);
			if (bestMag > r) {
//				System.out.println("good step at "+i);
				bestMag = r;
				bestX = guessX;
				bestY = guessY;
			} else {
//				System.out.println("bad step at "+i);
				// overshoot: shrink the step and retry from the best point
				step *= 0.75f;
			}
			if (r > 0) {
				guessX = bestX + step*dx/r;
				guessY = bestY + step*dy/r;
			} else {
				break;
			}
		}

		pattern.moduleFound.x = bestX;
		pattern.moduleFound.y = bestY;

		reader.gridToImage((float)pattern.moduleFound.y, (float)pattern.moduleFound.x, pattern.pixel);

		return true;
	}

	/**
	 * Localizes the alignment pattern crudely by searching for the black box in the center by looking
	 * for its edges in the gray scale image
	 *
	 * @return true if success or false if it doesn't resemble an alignment pattern
	 */
	boolean localize( QrCode.Alignment pattern, float guessY, float guessX ) {
		// sample along the middle. Try to not sample the outside edges which could confuse it
		for (int i = 0; i < arrayY.length; i++) {
			float x = guessX - 1.5f + i*3f/12.0f;
			float y = guessY - 1.5f + i*3f/12.0f;
			arrayX[i] = reader.read(guessY, x);
			arrayY[i] = reader.read(y, guessX);
		}

		// TODO turn this into an exhaustive search of the array for best up and down point?
		int downX = greatestDown(arrayX);
		if (downX == -1) return false;
		int upX = greatestUp(arrayX, downX);
		if (upX == -1) return false;

		int downY = greatestDown(arrayY);
		if (downY == -1) return false;
		int upY = greatestUp(arrayY, downY);
		if (upY == -1) return false;

		// center of the dot = midpoint between the falling and rising edges, in grid units
		pattern.moduleFound.x = guessX - 1.5f + (downX + upX)*3f/24.0f;
		pattern.moduleFound.y = guessY - 1.5f + (downY + upY)*3f/24.0f;

		reader.gridToImage((float)pattern.moduleFound.y, (float)pattern.moduleFound.x, pattern.pixel);

		return true;
	}

	// Refines the center with a mean-shift-style update: pixels darker than the threshold near
	// the center and brighter pixels away from it pull the estimate toward the black dot.
	boolean meanshift( QrCode.Alignment pattern, float guessY, float guessX ) {
//		System.out.println("before "+guessX+" "+guessY);
		float step = 1;
		float decay = 0.7f;
		for (int i = 0; i < 10; i++) {
			float sumX = 0;
			float sumY = 0;
			float total = 0;
			for (int y = 0; y < 8; y++) {
				float dy = -1.5f + 3f*y/7f;
				float gridY = guessY + dy;
				for (int x = 0; x < 8; x++) {
					float dx = -1.5f + 3f*x/7f;
					float gridX = guessX + dx;
					float v = reader.read(gridY, gridX);
					float r = (float)Math.sqrt(dx*dx + dy*dy);
					// weight flips sign at r = 0.5 (dot radius); clamp keeps outliers from dominating
					float w = Math.max(-10, (r > 0.5 ? v - threshold : threshold - v));
					total += Math.abs(w);
					sumX += w*dx;
					sumY += w*dy;
				}
			}
			guessX += step*sumX/total;
			guessY += step*sumY/total;
			step *= decay;
		}
//		System.out.println("after "+guessX+" "+guessY+"\n");
		pattern.moduleFound.x = guessX;
		pattern.moduleFound.y = guessY;

		reader.gridToImage((float)pattern.moduleFound.y, (float)pattern.moduleFound.x, pattern.pixel);
		return true;
	}

	/**
	 * Searches for the greatest down slope in the list
	 */
	static int greatestDown( float[] array ) {
		int best = -1;
		float bestScore = 0;

		for (int i = 5; i < array.length; i++) {
			// score = 2*(endpoints) - sum of the four interior samples: large when the window
			// drops from bright to dark
			float diff = (4.0f/2.0f)*(array[i - 5] + array[i]);
			diff -= array[i - 4] + array[i - 3] + array[i - 2] + array[i - 1];

			if (diff > bestScore) {
				bestScore = diff;
				best = i - 4;
			}
		}
		return best;
	}

	/** Searches for the greatest single-step rise at or after {@code start}. */
	static int greatestUp( float[] array, int start ) {
		int best = -1;
		float bestScore = 0;

		for (int i = start; i < array.length; i++) {
			float diff = array[i] - array[i - 1];
			if (diff > bestScore) {
				bestScore = diff;
				best = i - 1;
			}
		}
		return best;
	}
}
/* * Copyright (c) 2005-2010 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.accounts.struts.actionforms; import java.sql.Date; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.Locale; import java.util.ResourceBundle; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.commons.lang.StringUtils; import org.apache.struts.Globals; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.joda.time.LocalDate; import org.mifos.accounts.servicefacade.AccountTypeDto; import org.mifos.accounts.util.helpers.AccountConstants; import org.mifos.application.admin.servicefacade.InvalidDateException; import org.mifos.application.master.business.MifosCurrency; import org.mifos.config.AccountingRules; import org.mifos.framework.business.util.helpers.MethodNameConstants; import org.mifos.framework.struts.actionforms.BaseActionForm; import org.mifos.framework.util.helpers.Constants; import org.mifos.framework.util.helpers.DateUtils; import org.mifos.framework.util.helpers.DoubleConversionResult; import org.mifos.framework.util.helpers.FilePaths; import org.mifos.security.login.util.helpers.LoginConstants; import 
org.mifos.security.util.UserContext;

/**
 * Struts action form backing the "apply payment" screen for an account. Holds the payment
 * amount, mode of payment, and the transaction/receipt dates split into day/month/year fields
 * (as posted by the JSP), and performs server-side validation of those inputs.
 */
public class AccountApplyPaymentActionForm extends BaseActionForm {

    private String input;

    // transaction date split into the three request parameters posted by the form
    private String transactionDateDD;

    private String transactionDateMM;

    private String transactionDateYY;

    private String amount;

    private Short currencyId;

    private String receiptId;

    // receipt date split into the three request parameters posted by the form
    private String receiptDateDD;

    private String receiptDateMM;

    private String receiptDateYY;

    /*
     * Among other things, this field holds the PaymentTypes value for disbursements.
     */
    private String paymentTypeId;

    private String waiverInterest;

    private String globalAccountNum;

    private String accountId;

    private String prdOfferingName;

    // when true (default) validateAmount rejects amounts that are not strictly positive
    private boolean amountCannotBeZero = true;

    public boolean amountCannotBeZero() {
        return this.amountCannotBeZero;
    }

    public void setAmountCannotBeZero(boolean amountCannotBeZero) {
        this.amountCannotBeZero = amountCannotBeZero;
    }

    public String getPrdOfferingName() {
        return prdOfferingName;
    }

    public void setPrdOfferingName(String prdOfferingName) {
        this.prdOfferingName = prdOfferingName;
    }

    public String getAmount() {
        return amount;
    }

    public void setAmount(String amount) {
        this.amount = amount;
    }

    public String getInput() {
        return input;
    }

    /**
     * Validates the form when the "preview" method is invoked: transaction date (mandatory, not
     * in the future), mode of payment (mandatory), receipt date (optional but must be valid),
     * amount (mandatory for loan accounts), and the amount's numeric value. Any errors found are
     * also stashed in the request under {@link Globals#ERROR_KEY} together with the method name.
     */
    @Override
    public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) {
        String methodCalled = request.getParameter(MethodNameConstants.METHOD);
        ActionErrors errors = new ActionErrors();
        ResourceBundle resources = ResourceBundle.getBundle(FilePaths.ACCOUNTS_UI_RESOURCE_PROPERTYFILE,
                getUserLocale(request));
        if (methodCalled != null && methodCalled.equals("preview")) {
            ActionErrors errors2 = validateDate(getTransactionDate(), resources.getString("accounts.date_of_trxn"), request);
            if (null != errors2 && !errors2.isEmpty()) {
                errors.add(errors2);
            }
            if (StringUtils.isEmpty(getPaymentTypeId())) {
                errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
                        resources.getString("accounts.mode_of_payment")));
            }
            // receipt date is optional; only validate it when something was entered
            if (getReceiptDate() != null && !getReceiptDate().equals("")) {
                errors2 = validateDate(getReceiptDate(), resources.getString("accounts.receiptdate"), request);
                if (null != errors2 && !errors2.isEmpty()) {
                    errors.add(errors2);
                }
            }
            String accountType = (String) request.getSession().getAttribute(Constants.ACCOUNT_TYPE);
            if (accountType != null && accountType.equals(AccountTypeDto.LOAN_ACCOUNT.name())) {
                if (getAmount() == null || getAmount().equals("")) {
                    errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
                            resources.getString("accounts.amt")));
                }
            }
            validateAmount(errors,getUserLocale(request));
        }
        if (!errors.isEmpty()) {
            request.setAttribute(Globals.ERROR_KEY, errors);
            request.setAttribute("methodCalled", methodCalled);
        }
        return errors;
    }

    /**
     * Validates a single date string: mandatory, parseable, and not in the future.
     *
     * @param date date as sent from the browser (dd/MM/yyyy style), may be null/empty
     * @param fieldName localized field label used in the error message
     * @return an ActionErrors with the failure, or null if the date is valid
     */
    protected ActionErrors validateDate(String date, String fieldName, HttpServletRequest request) {
        ActionErrors errors = null;
        java.sql.Date sqlDate = null;
        if (date != null && !date.equals("")) {
            try {
                sqlDate = DateUtils.getDateAsSentFromBrowser(date);
                // whichDirection > 0 means the date lies in the future
                if (DateUtils.whichDirection(sqlDate) > 0) {
                    errors = new ActionErrors();
                    errors.add(AccountConstants.ERROR_FUTUREDATE, new ActionMessage(AccountConstants.ERROR_FUTUREDATE,
                            fieldName));
                }
            } catch (InvalidDateException ide) {
                errors = new ActionErrors();
                errors.add(AccountConstants.ERROR_INVALIDDATE, new ActionMessage(AccountConstants.ERROR_INVALIDDATE,
                        fieldName));
            }
        } else {
            errors = new ActionErrors();
            errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY, fieldName));
        }
        return errors;
    }

    /**
     * Returns the locale of the logged-in user from the session's UserContext, or null when no
     * session/user context is available (callers must tolerate a null locale).
     */
    protected Locale getUserLocale(HttpServletRequest request) {
        Locale locale = null;
        HttpSession session = request.getSession();
        if (session != null) {
            UserContext userContext = (UserContext) session.getAttribute(LoginConstants.USERCONTEXT);
            if (null != userContext) {
                locale = userContext.getCurrentLocale();
            }
        }
        return locale;
    }

    /**
     * Validates the numeric amount (conversion errors are added by the inherited
     * validateAmount overload) and, when {@link #amountCannotBeZero()} is set, rejects values
     * that are not strictly greater than zero.
     */
    protected void validateAmount(ActionErrors errors, Locale locale) {
        MifosCurrency currency = null;
        // only resolve the currency when multi-currency support is switched on
        if (getCurrencyId() != null && AccountingRules.isMultiCurrencyEnabled()) {
            currency = AccountingRules.getCurrencyByCurrencyId(getCurrencyId());
        }
        DoubleConversionResult conversionResult = validateAmount(getAmount(), currency , AccountConstants.ACCOUNT_AMOUNT,
                errors, locale, FilePaths.ACCOUNTS_UI_RESOURCE_PROPERTYFILE, "");
        if (amountCannotBeZero() && conversionResult.getErrors().size() == 0
                && !(conversionResult.getDoubleValue() > 0.0)) {
            addError(errors, AccountConstants.ACCOUNT_AMOUNT, AccountConstants.ERRORS_MUST_BE_GREATER_THAN_ZERO,
                    lookupLocalizedPropertyValue(AccountConstants.ACCOUNT_AMOUNT, locale,
                            FilePaths.ACCOUNTS_UI_RESOURCE_PROPERTYFILE));
        }
    }

    public void setInput(String input) {
        this.input = input;
    }

    public String getPaymentTypeId() {
        return paymentTypeId;
    }

    public void setPaymentTypeId(String paymentTypeId) {
        this.paymentTypeId = paymentTypeId;
    }

    /**
     * Reassembles the receipt date as "dd/mm/yyyy" from its three parts, or null when any part is
     * blank.
     * NOTE(review): unlike getTransactionDate() this does not zero-pad single-digit day/month —
     * presumably DateUtils.getDateAsSentFromBrowser tolerates that; confirm.
     */
    public String getReceiptDate() {
        if (StringUtils.isNotBlank(receiptDateDD) && StringUtils.isNotBlank(receiptDateMM)
                && StringUtils.isNotBlank(receiptDateYY)) {
            return receiptDateDD + "/" + receiptDateMM + "/" + receiptDateYY;
        }
        return null;
    }

    /**
     * Parses the browser-format date and stores its day/month/year parts; a blank argument clears
     * all three fields.
     */
    public void setReceiptDate(String receiptDate) throws InvalidDateException {
        if (StringUtils.isBlank(receiptDate)) {
            receiptDateDD = null;
            receiptDateMM = null;
            receiptDateYY = null;
        } else {
            Calendar cal = new GregorianCalendar();
            java.sql.Date date = DateUtils.getDateAsSentFromBrowser(receiptDate);
            cal.setTime(date);
            receiptDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
            // Calendar.MONTH is zero-based
            receiptDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
            receiptDateYY = Integer.toString(cal.get(Calendar.YEAR));
        }
    }

    public String getReceiptId() {
        return receiptId;
    }

    public void setReceiptId(String receiptId) {
        this.receiptId = receiptId;
    }

    /**
     * Reassembles the transaction date as zero-padded "dd/mm/yyyy" from its three parts, or null
     * when any part is blank.
     */
    public String getTransactionDate() {
        if (StringUtils.isNotBlank(transactionDateDD) && StringUtils.isNotBlank(transactionDateMM)
                && StringUtils.isNotBlank(transactionDateYY)) {
            String transactionDate = "";
            // zero-pad day and month to two digits
            if (transactionDateDD.length() < 2) {
                transactionDate = transactionDate + "0" + transactionDateDD;
            } else {
                transactionDate = transactionDate + transactionDateDD;
            }
            if (transactionDateMM.length() < 2) {
                transactionDate = transactionDate + "/" + "0" + transactionDateMM;
            } else {
                transactionDate = transactionDate + "/" + transactionDateMM;
            }
            transactionDate = transactionDate + "/" + transactionDateYY;
            return transactionDate;
        }
        return null;
    }

    /**
     * Parses the browser-format date and stores its day/month/year parts; a blank argument clears
     * all three fields.
     */
    public void setTransactionDate(String receiptDate) throws InvalidDateException {
        if (StringUtils.isBlank(receiptDate)) {
            transactionDateDD = null;
            transactionDateMM = null;
            transactionDateYY = null;
        } else {
            Calendar cal = new GregorianCalendar();
            java.sql.Date date = DateUtils.getDateAsSentFromBrowser(receiptDate);
            cal.setTime(date);
            transactionDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
            // Calendar.MONTH is zero-based
            transactionDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
            transactionDateYY = Integer.toString(cal.get(Calendar.YEAR));
        }
    }

    public String getAccountId() {
        return accountId;
    }

    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    public String getGlobalAccountNum() {
        return globalAccountNum;
    }

    public void setGlobalAccountNum(String globalAccountNum) {
        this.globalAccountNum = globalAccountNum;
    }

    /** Resets the payment-specific fields (amount, payment type, receipt date/id). */
    protected void clear() throws InvalidDateException {
        this.amount = null;
        this.paymentTypeId = null;
        setReceiptDate(null);
        this.receiptId = null;
    }

    public String getReceiptDateDD() {
        return receiptDateDD;
    }

    public void setReceiptDateDD(String receiptDateDD) {
        this.receiptDateDD = receiptDateDD;
    }

    public String getReceiptDateMM() {
        return receiptDateMM;
    }

    public void setReceiptDateMM(String receiptDateMM) {
        this.receiptDateMM = receiptDateMM;
    }

    public String getReceiptDateYY() {
        return receiptDateYY;
    }

    public void setReceiptDateYY(String receiptDateYY) {
        this.receiptDateYY = receiptDateYY;
    }

    public String getTransactionDateDD() {
        return transactionDateDD;
    }

    public void setTransactionDateDD(String transactionDateDD) {
        this.transactionDateDD = transactionDateDD;
    }

    public String getTransactionDateMM() {
        return transactionDateMM;
    }

    public void setTransactionDateMM(String transactionDateMM) {
        this.transactionDateMM = transactionDateMM;
    }

    public String getTransactionDateYY() {
        return transactionDateYY;
    }

    public void setTransactionDateYY(String transactionDateYY) {
        this.transactionDateYY = transactionDateYY;
    }

    public Short getCurrencyId() {
        return this.currencyId;
    }

    public void setCurrencyId(Short currencyId) {
        this.currencyId = currencyId;
    }

    public String getWaiverInterest() {
        return waiverInterest;
    }

    public void setWaiverInterest(String waiverInterest) {
        this.waiverInterest = waiverInterest;
    }

    /** @return the receipt date as a Joda LocalDate, or null when no receipt date was entered */
    public LocalDate getReceiptDateAsLocalDate() throws InvalidDateException {
        Date receiptDateStr = DateUtils.getDateAsSentFromBrowser(getReceiptDate());
        return (receiptDateStr != null) ? new LocalDate(receiptDateStr.getTime()) : null;
    }

    /** @return the transaction date as a Joda LocalDate; NPEs if the date parts are blank */
    public LocalDate getTrxnDateAsLocalDate() throws InvalidDateException {
        return new LocalDate(getTrxnDate().getTime());
    }

    /** @return the transaction date parsed to java.sql.Date, or null when the parts are blank */
    public Date getTrxnDate() throws InvalidDateException {
        return DateUtils.getDateAsSentFromBrowser(getTransactionDate());
    }
}
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.atomix.utils.concurrent;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * A {@link CompletableFuture} that tracks whether the future or one of its descendants has been blocked on
 * a {@link CompletableFuture#get()} or {@link CompletableFuture#join()} call.
 *
 * <p>Blocking accessors bracket the wait with {@code ThreadContext.block()}/{@code unblock()} on the
 * calling thread's context, and every composition method ({@code thenApply}, {@code thenCompose}, ...)
 * wraps its result via {@link #wrap(CompletableFuture)} so descendants keep the same tracking behavior.
 */
public class AtomixFuture<T> extends CompletableFuture<T> {

  /**
   * Wraps the given future in a new blockable future.
   *
   * @param future the future to wrap
   * @param <T> the future value type
   * @return a new blockable future
   */
  public static <T> AtomixFuture<T> wrap(CompletableFuture<T> future) {
    AtomixFuture<T> newFuture = new AtomixFuture<>();
    // mirror the source future's outcome (value or exception) onto the wrapper
    future.whenComplete((result, error) -> {
      if (error == null) {
        newFuture.complete(result);
      } else {
        newFuture.completeExceptionally(error);
      }
    });
    return newFuture;
  }

  /**
   * Returns a new completed Atomix future.
   *
   * @param result the future result
   * @param <T> the future result type
   * @return the completed future
   */
  public static <T> CompletableFuture<T> completedFuture(T result) {
    CompletableFuture<T> future = new AtomixFuture<>();
    future.complete(result);
    return future;
  }

  /**
   * Returns a new exceptionally completed Atomix future.
   *
   * @param t the future exception
   * @param <T> the future result type
   * @return the completed future
   */
  public static <T> CompletableFuture<T> exceptionalFuture(Throwable t) {
    CompletableFuture<T> future = new AtomixFuture<>();
    future.completeExceptionally(t);
    return future;
  }

  // fallback context used when the calling thread has no associated ThreadContext,
  // so the block()/unblock() calls below are always safe to make
  private static final ThreadContext NULL_CONTEXT = new NullThreadContext();

  /** Returns the calling thread's context, or a no-op null context when none is set. */
  private ThreadContext getThreadContext() {
    ThreadContext context = ThreadContext.currentContext();
    return context != null ? context : NULL_CONTEXT;
  }

  // ---- blocking accessors: mark the thread context blocked for the duration of the wait ----

  @Override
  public T get() throws InterruptedException, ExecutionException {
    ThreadContext context = getThreadContext();
    context.block();
    try {
      return super.get();
    } finally {
      context.unblock();
    }
  }

  @Override
  public T get(long timeout, TimeUnit unit)
      throws InterruptedException, ExecutionException, TimeoutException {
    ThreadContext context = getThreadContext();
    context.block();
    try {
      return super.get(timeout, unit);
    } finally {
      context.unblock();
    }
  }

  // NOTE(review): join() is synchronized while get() is not — looks deliberate but the reason is
  // not visible here; confirm before changing.
  @Override
  public synchronized T join() {
    ThreadContext context = getThreadContext();
    context.block();
    try {
      return super.join();
    } finally {
      context.unblock();
    }
  }

  // ---- composition methods: every override simply wraps the superclass result so that
  // ---- descendant futures are also blockable AtomixFutures ----

  @Override
  public <U> CompletableFuture<U> thenApply(Function<? super T, ? extends U> fn) {
    return wrap(super.thenApply(fn));
  }

  @Override
  public <U> CompletableFuture<U> thenApplyAsync(Function<? super T, ? extends U> fn) {
    return wrap(super.thenApplyAsync(fn));
  }

  @Override
  public <U> CompletableFuture<U> thenApplyAsync(Function<? super T, ? extends U> fn, Executor executor) {
    return wrap(super.thenApplyAsync(fn, executor));
  }

  @Override
  public CompletableFuture<Void> thenAccept(Consumer<? super T> action) {
    return wrap(super.thenAccept(action));
  }

  @Override
  public CompletableFuture<Void> thenAcceptAsync(Consumer<? super T> action) {
    return wrap(super.thenAcceptAsync(action));
  }

  @Override
  public CompletableFuture<Void> thenAcceptAsync(Consumer<? super T> action, Executor executor) {
    return wrap(super.thenAcceptAsync(action, executor));
  }

  @Override
  public CompletableFuture<Void> thenRun(Runnable action) {
    return wrap(super.thenRun(action));
  }

  @Override
  public CompletableFuture<Void> thenRunAsync(Runnable action) {
    return wrap(super.thenRunAsync(action));
  }

  @Override
  public CompletableFuture<Void> thenRunAsync(Runnable action, Executor executor) {
    return wrap(super.thenRunAsync(action, executor));
  }

  @Override
  public <U, V> CompletableFuture<V> thenCombine(
      CompletionStage<? extends U> other, BiFunction<? super T, ? super U, ? extends V> fn) {
    return wrap(super.thenCombine(other, fn));
  }

  @Override
  public <U, V> CompletableFuture<V> thenCombineAsync(
      CompletionStage<? extends U> other, BiFunction<? super T, ? super U, ? extends V> fn) {
    return wrap(super.thenCombineAsync(other, fn));
  }

  @Override
  public <U, V> CompletableFuture<V> thenCombineAsync(
      CompletionStage<? extends U> other, BiFunction<? super T, ? super U, ? extends V> fn, Executor executor) {
    return wrap(super.thenCombineAsync(other, fn, executor));
  }

  @Override
  public <U> CompletableFuture<Void> thenAcceptBoth(
      CompletionStage<? extends U> other, BiConsumer<? super T, ? super U> action) {
    return wrap(super.thenAcceptBoth(other, action));
  }

  @Override
  public <U> CompletableFuture<Void> thenAcceptBothAsync(
      CompletionStage<? extends U> other, BiConsumer<? super T, ? super U> action) {
    return wrap(super.thenAcceptBothAsync(other, action));
  }

  @Override
  public <U> CompletableFuture<Void> thenAcceptBothAsync(
      CompletionStage<? extends U> other, BiConsumer<? super T, ? super U> action, Executor executor) {
    return wrap(super.thenAcceptBothAsync(other, action, executor));
  }

  @Override
  public CompletableFuture<Void> runAfterBoth(CompletionStage<?> other, Runnable action) {
    return wrap(super.runAfterBoth(other, action));
  }

  @Override
  public CompletableFuture<Void> runAfterBothAsync(CompletionStage<?> other, Runnable action) {
    return wrap(super.runAfterBothAsync(other, action));
  }

  @Override
  public CompletableFuture<Void> runAfterBothAsync(CompletionStage<?> other, Runnable action, Executor executor) {
    return wrap(super.runAfterBothAsync(other, action, executor));
  }

  @Override
  public <U> CompletableFuture<U> applyToEither(CompletionStage<? extends T> other, Function<? super T, U> fn) {
    return wrap(super.applyToEither(other, fn));
  }

  @Override
  public <U> CompletableFuture<U> applyToEitherAsync(CompletionStage<? extends T> other, Function<? super T, U> fn) {
    return wrap(super.applyToEitherAsync(other, fn));
  }

  @Override
  public <U> CompletableFuture<U> applyToEitherAsync(
      CompletionStage<? extends T> other, Function<? super T, U> fn, Executor executor) {
    return wrap(super.applyToEitherAsync(other, fn, executor));
  }

  @Override
  public CompletableFuture<Void> acceptEither(CompletionStage<? extends T> other, Consumer<? super T> action) {
    return wrap(super.acceptEither(other, action));
  }

  @Override
  public CompletableFuture<Void> acceptEitherAsync(CompletionStage<? extends T> other, Consumer<? super T> action) {
    return wrap(super.acceptEitherAsync(other, action));
  }

  @Override
  public CompletableFuture<Void> acceptEitherAsync(
      CompletionStage<? extends T> other, Consumer<? super T> action, Executor executor) {
    return wrap(super.acceptEitherAsync(other, action, executor));
  }

  @Override
  public CompletableFuture<Void> runAfterEither(CompletionStage<?> other, Runnable action) {
    return wrap(super.runAfterEither(other, action));
  }

  @Override
  public CompletableFuture<Void> runAfterEitherAsync(CompletionStage<?> other, Runnable action) {
    return wrap(super.runAfterEitherAsync(other, action));
  }

  @Override
  public CompletableFuture<Void> runAfterEitherAsync(CompletionStage<?> other, Runnable action, Executor executor) {
    return wrap(super.runAfterEitherAsync(other, action, executor));
  }

  @Override
  public <U> CompletableFuture<U> thenCompose(Function<? super T, ? extends CompletionStage<U>> fn) {
    return wrap(super.thenCompose(fn));
  }

  @Override
  public <U> CompletableFuture<U> thenComposeAsync(Function<? super T, ? extends CompletionStage<U>> fn) {
    return wrap(super.thenComposeAsync(fn));
  }

  @Override
  public <U> CompletableFuture<U> thenComposeAsync(
      Function<? super T, ? extends CompletionStage<U>> fn, Executor executor) {
    return wrap(super.thenComposeAsync(fn, executor));
  }

  @Override
  public CompletableFuture<T> whenComplete(BiConsumer<? super T, ? super Throwable> action) {
    return wrap(super.whenComplete(action));
  }

  @Override
  public CompletableFuture<T> whenCompleteAsync(BiConsumer<? super T, ? super Throwable> action) {
    return wrap(super.whenCompleteAsync(action));
  }

  @Override
  public CompletableFuture<T> whenCompleteAsync(BiConsumer<? super T, ? super Throwable> action, Executor executor) {
    return wrap(super.whenCompleteAsync(action, executor));
  }

  @Override
  public <U> CompletableFuture<U> handle(BiFunction<? super T, Throwable, ? extends U> fn) {
    return wrap(super.handle(fn));
  }

  @Override
  public <U> CompletableFuture<U> handleAsync(BiFunction<? super T, Throwable, ? extends U> fn) {
    return wrap(super.handleAsync(fn));
  }

  @Override
  public <U> CompletableFuture<U> handleAsync(BiFunction<? super T, Throwable, ? extends U> fn, Executor executor) {
    return wrap(super.handleAsync(fn, executor));
  }
}
/** * Copyright (C) 2015-Present McLeod Moores Software Limited. All rights reserved. */ package com.mcleodmoores.integration.testutils; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; import net.finmath.marketdata.model.AnalyticModelInterface; import net.finmath.marketdata.model.curves.Curve; import net.finmath.marketdata.model.curves.CurveInterface; import net.finmath.marketdata.model.volatilities.VolatilitySurfaceInterface; import net.finmath.marketdata.model.volatilities.VolatilitySurfaceInterface.QuotingConvention; import com.opengamma.util.ArgumentChecker; /** * Utility methods for testing the serialization of Finmath objects. */ public final class FinmathSerializationTestUtils { /** The accuracy */ private static final double EPS = 1e-15; /** * Restricted constructor. */ private FinmathSerializationTestUtils() { } /** * Tests the equality of two curves by sampling. * @param curveInterface1 The first curve * @param curveInterface2 The second curve */ public static void assertCurveEquals(final CurveInterface curveInterface1, final CurveInterface curveInterface2) { if (curveInterface1 instanceof Curve) { assertTrue(curveInterface2 instanceof Curve); final Curve curve1 = (Curve) curveInterface1; final Curve curve2 = (Curve) curveInterface2; assertEquals(curve1.getExtrapolationMethod(), curve2.getExtrapolationMethod()); assertEquals(curve1.getInterpolationEntity(), curve2.getInterpolationEntity()); assertEquals(curve1.getInterpolationMethod(), curve2.getInterpolationMethod()); } if (curveInterface1 == null) { assertNull(curveInterface2); return; } assertEquals(curveInterface1.getName(), curveInterface2.getName()); if (curveInterface1.getReferenceDate() != null) { if (curveInterface2.getReferenceDate() != null) { assertEquals(curveInterface1.getReferenceDate(), curveInterface2.getReferenceDate()); } else { assertNull(curveInterface2.getReferenceDate()); } } for (int i = 0; i < 
100; i++) { assertEquals(curveInterface1.getValue(i), curveInterface2.getValue(i), EPS, "(" + i + ")"); } } /** * Tests the equality of two curves by sampling. An analytic model is supplied that should provide curves * that are not stored internally in the curve objects. * @param curveInterface1 The first curve * @param curveInterface2 The second curve * @param model The analytic model used to get the value */ public static void assertCurveEquals(final CurveInterface curveInterface1, final CurveInterface curveInterface2, final AnalyticModelInterface model) { if (curveInterface1 instanceof Curve) { assertTrue(curveInterface2 instanceof Curve); final Curve curve1 = (Curve) curveInterface1; final Curve curve2 = (Curve) curveInterface2; assertEquals(curve1.getExtrapolationMethod(), curve2.getExtrapolationMethod()); assertEquals(curve1.getInterpolationEntity(), curve2.getInterpolationEntity()); assertEquals(curve1.getInterpolationMethod(), curve2.getInterpolationMethod()); } if (curveInterface1 == null) { assertNull(curveInterface2); return; } assertEquals(curveInterface1.getName(), curveInterface2.getName()); if (curveInterface1.getReferenceDate() != null) { if (curveInterface2.getReferenceDate() != null) { assertEquals(curveInterface1.getReferenceDate(), curveInterface2.getReferenceDate()); } else { assertNull(curveInterface2.getReferenceDate()); } } for (int i = 0; i < 100; i++) { assertEquals(curveInterface1.getValue(model, i), curveInterface2.getValue(model, i), EPS, "(" + i + ")"); } } /** * Tests the equality of two surfaces by sampling. An analytic model is supplied that should provide curves * that are not stored internally in the curve objects. 
* @param surfaceInterface1 The first surface * @param surfaceInterface2 The second surface * @param model The analytic model used to get the value */ public static void assertSurfaceEquals(final VolatilitySurfaceInterface surfaceInterface1, final VolatilitySurfaceInterface surfaceInterface2, final AnalyticModelInterface model) { if (surfaceInterface1 == null) { assertNull(surfaceInterface2); return; } assertEquals(surfaceInterface1.getName(), surfaceInterface2.getName()); assertEquals(surfaceInterface1.getQuotingConvention(), surfaceInterface2.getQuotingConvention()); if (surfaceInterface1.getReferenceDate() != null) { if (surfaceInterface2.getReferenceDate() != null) { assertEquals(surfaceInterface1.getReferenceDate(), surfaceInterface2.getReferenceDate()); } else { assertNull(surfaceInterface2.getReferenceDate()); } } for (double i = 0; i < 10; i += 0.1) { for (double j = 0; j < 10; j += 0.1) { assertEquals(surfaceInterface1.getValue(model, i, j, QuotingConvention.VOLATILITYNORMAL), surfaceInterface2.getValue(model, i, j, QuotingConvention.VOLATILITYNORMAL), EPS, "(" + i + ", " + j + ")"); assertEquals(surfaceInterface1.getValue(model, i, j, QuotingConvention.VOLATILITYLOGNORMAL), surfaceInterface2.getValue(model, i, j, QuotingConvention.VOLATILITYLOGNORMAL), EPS, "(" + i + ", " + j + ")"); assertEquals(surfaceInterface1.getValue(model, i, j, QuotingConvention.PRICE), surfaceInterface2.getValue(model, i, j, QuotingConvention.PRICE), EPS, "(" + i + ", " + j + ")"); } } } /** * Tests the equality of two surfaces by sampling. An analytic model is supplied that should provide curves * that are not stored internally in the curve objects. The quoting convention is supplied because some classes do no handle all quoting * convention types correctly. 
* @param surfaceInterface1 The first surface * @param surfaceInterface2 The second surface * @param model The analytic model used to get the value * @param quotingConventionToTest The quoting convention to test, not null */ public static void assertSurfaceEquals(final VolatilitySurfaceInterface surfaceInterface1, final VolatilitySurfaceInterface surfaceInterface2, final AnalyticModelInterface model, final QuotingConvention quotingConventionToTest) { ArgumentChecker.notNull(quotingConventionToTest, "quotingConventionToTest"); if (surfaceInterface1 == null) { assertNull(surfaceInterface2); return; } assertEquals(surfaceInterface1.getName(), surfaceInterface2.getName()); assertEquals(surfaceInterface1.getQuotingConvention(), surfaceInterface2.getQuotingConvention()); if (surfaceInterface1.getReferenceDate() != null) { if (surfaceInterface2.getReferenceDate() != null) { assertEquals(surfaceInterface1.getReferenceDate(), surfaceInterface2.getReferenceDate()); } else { assertNull(surfaceInterface2.getReferenceDate()); } } for (double i = 0; i < 10; i += 0.1) { for (double j = 0; j < 10; j += 0.1) { assertEquals(surfaceInterface1.getValue(model, i, j, quotingConventionToTest), surfaceInterface2.getValue(model, i, j, quotingConventionToTest), EPS, "(" + i + ", " + j + ")"); } } } /** * Tests the equality of two surfaces by sampling. 
* @param surfaceInterface1 The first surface * @param surfaceInterface2 The second surface */ public static void assertSurfaceEquals(final VolatilitySurfaceInterface surfaceInterface1, final VolatilitySurfaceInterface surfaceInterface2) { if (surfaceInterface1 == null) { assertNull(surfaceInterface2); return; } assertEquals(surfaceInterface1.getName(), surfaceInterface2.getName()); assertEquals(surfaceInterface1.getQuotingConvention(), surfaceInterface2.getQuotingConvention()); if (surfaceInterface1.getReferenceDate() != null) { if (surfaceInterface2.getReferenceDate() != null) { assertEquals(surfaceInterface1.getReferenceDate(), surfaceInterface2.getReferenceDate()); } else { assertNull(surfaceInterface2.getReferenceDate()); } } for (double i = 0; i < 10; i += 0.1) { for (double j = 0; j < 10; j += 0.1) { assertEquals(surfaceInterface1.getValue(i, j, QuotingConvention.VOLATILITYNORMAL), surfaceInterface2.getValue(i, j, QuotingConvention.VOLATILITYNORMAL), EPS, "(" + i + ", " + j + ")"); assertEquals(surfaceInterface1.getValue(i, j, QuotingConvention.VOLATILITYLOGNORMAL), surfaceInterface2.getValue(i, j, QuotingConvention.VOLATILITYLOGNORMAL), EPS, "(" + i + ", " + j + ")"); assertEquals(surfaceInterface1.getValue(i, j, QuotingConvention.PRICE), surfaceInterface2.getValue(i, j, QuotingConvention.PRICE), EPS, "(" + i + ", " + j + ")"); } } } /** * Tests the equality of two surfaces by sampling. The quoting convention is supplied because some classes do no handle all quoting * convention types correctly. 
* @param surfaceInterface1 The first surface * @param surfaceInterface2 The second surface * @param quotingConventionToTest The quoting convention to test, not null */ public static void assertSurfaceEquals(final VolatilitySurfaceInterface surfaceInterface1, final VolatilitySurfaceInterface surfaceInterface2, final QuotingConvention quotingConventionToTest) { ArgumentChecker.notNull(quotingConventionToTest, "quotingConventionToTest"); if (surfaceInterface1 == null) { assertNull(surfaceInterface2); return; } assertEquals(surfaceInterface1.getName(), surfaceInterface2.getName()); assertEquals(surfaceInterface1.getQuotingConvention(), surfaceInterface2.getQuotingConvention()); if (surfaceInterface1.getReferenceDate() != null) { if (surfaceInterface2.getReferenceDate() != null) { assertEquals(surfaceInterface1.getReferenceDate(), surfaceInterface2.getReferenceDate()); } else { assertNull(surfaceInterface2.getReferenceDate()); } } for (double i = 0; i < 10; i += 0.1) { for (double j = 0; j < 10; j += 0.1) { assertEquals(surfaceInterface1.getValue(i, j, quotingConventionToTest), surfaceInterface2.getValue(i, j, quotingConventionToTest), EPS, "(" + i + ", " + j + ")"); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.store.kafka; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.physical.EndpointAffinity; import org.apache.drill.exec.physical.base.AbstractGroupScan; import org.apache.drill.exec.physical.base.GroupScan; import org.apache.drill.exec.physical.base.PhysicalOperator; import org.apache.drill.exec.physical.base.ScanStats; import org.apache.drill.exec.physical.base.ScanStats.GroupScanProperty; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; import org.apache.drill.exec.store.StoragePluginRegistry; import org.apache.drill.exec.store.kafka.KafkaSubScan.KafkaSubScanSpec; import org.apache.drill.exec.store.schedule.AffinityCreator; import org.apache.drill.exec.store.schedule.AssignmentCreator; import org.apache.drill.exec.store.schedule.CompleteWork; import org.apache.drill.exec.store.schedule.EndpointByteMap; import 
org.apache.drill.exec.store.schedule.EndpointByteMapImpl;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Group scan over a single Kafka topic. At construction time ({@code init()}) it contacts the
 * Kafka cluster to discover the topic's partitions and their start/end offsets, and builds one
 * unit of work ({@link PartitionScanWork}) per partition. The planner then uses these units for
 * Drillbit affinity, parallelization width, and per-minor-fragment sub-scan creation.
 */
@JsonTypeName("kafka-scan")
public class KafkaGroupScan extends AbstractGroupScan {
  private static final Logger logger = LoggerFactory.getLogger(KafkaGroupScan.class);

  // Assuming default average topic message size as 1KB, which will be used to
  // compute the stats and work assignments
  private static final long MSG_SIZE = 1024;

  private final KafkaStoragePlugin kafkaStoragePlugin;
  private final KafkaScanSpec kafkaScanSpec;

  // Projected columns; replaced wholesale by clone(columns) on projection pushdown.
  private List<SchemaPath> columns;
  // One work unit per topic partition, populated by init().
  private List<PartitionScanWork> partitionWorkList;
  // minor fragment id -> work units, populated by applyAssignments().
  private ListMultimap<Integer, PartitionScanWork> assignments;
  // Lazily computed in getOperatorAffinity().
  private List<EndpointAffinity> affinities;

  /** Deserialization constructor: resolves the storage plugin from the registry by its config. */
  @JsonCreator
  public KafkaGroupScan(@JsonProperty("userName") String userName,
      @JsonProperty("kafkaStoragePluginConfig") KafkaStoragePluginConfig kafkaStoragePluginConfig,
      @JsonProperty("columns") List<SchemaPath> columns, @JsonProperty("kafkaScanSpec") KafkaScanSpec scanSpec,
      @JacksonInject StoragePluginRegistry pluginRegistry) throws ExecutionSetupException {
    this(userName, (KafkaStoragePlugin) pluginRegistry.getPlugin(kafkaStoragePluginConfig), columns, scanSpec);
  }

  /** Convenience constructor with an empty user name. Triggers partition/offset discovery. */
  public KafkaGroupScan(KafkaStoragePlugin kafkaStoragePlugin, KafkaScanSpec kafkaScanSpec, List<SchemaPath> columns) {
    super(StringUtils.EMPTY);
    this.kafkaStoragePlugin = kafkaStoragePlugin;
    this.columns = columns;
    this.kafkaScanSpec = kafkaScanSpec;
    init();
  }

  /** Main constructor. Triggers partition/offset discovery via init(). */
  public KafkaGroupScan(String userName, KafkaStoragePlugin kafkaStoragePlugin, List<SchemaPath> columns,
      KafkaScanSpec kafkaScanSpec) {
    super(userName);
    this.kafkaStoragePlugin = kafkaStoragePlugin;
    this.columns = columns;
    this.kafkaScanSpec = kafkaScanSpec;
    init();
  }

  /**
   * Copy constructor: shares the already-computed partition work list and assignments with the
   * source scan instead of re-querying Kafka (note: init() is intentionally NOT called here).
   */
  public KafkaGroupScan(KafkaGroupScan that) {
    super(that);
    this.kafkaStoragePlugin = that.kafkaStoragePlugin;
    this.columns = that.columns;
    this.kafkaScanSpec = that.kafkaScanSpec;
    this.partitionWorkList = that.partitionWorkList;
    this.assignments = that.assignments;
  }

  /**
   * One schedulable unit of work: a topic partition together with its begin/end offsets.
   * Total bytes are estimated as (message count) * MSG_SIZE.
   */
  private static class PartitionScanWork implements CompleteWork {

    private final EndpointByteMapImpl byteMap = new EndpointByteMapImpl();

    private final TopicPartition topicPartition;
    private final long beginOffset;
    private final long latestOffset;

    public PartitionScanWork(TopicPartition topicPartition, long beginOffset, long latestOffset) {
      this.topicPartition = topicPartition;
      this.beginOffset = beginOffset;
      this.latestOffset = latestOffset;
    }

    public TopicPartition getTopicPartition() {
      return topicPartition;
    }

    public long getBeginOffset() {
      return beginOffset;
    }

    public long getLatestOffset() {
      return latestOffset;
    }

    // Orders work units by their estimated byte size.
    @Override
    public int compareTo(CompleteWork o) {
      return Long.compare(getTotalBytes(), o.getTotalBytes());
    }

    // Estimated size: offset span times the assumed 1KB average message size.
    @Override
    public long getTotalBytes() {
      return (latestOffset - beginOffset) * MSG_SIZE;
    }

    @Override
    public EndpointByteMap getByteMap() {
      return byteMap;
    }
  }

  /**
   * Computes work per topic partition, based on start and end offset of each
   * corresponding topicPartition
   */
  private void init() {
    partitionWorkList = Lists.newArrayList();
    // Map Drillbit host address -> endpoint, used to credit bytes to endpoints co-located
    // with a partition's in-sync replicas.
    Collection<DrillbitEndpoint> endpoints = kafkaStoragePlugin.getContext().getBits();
    Map<String, DrillbitEndpoint> endpointMap = Maps.newHashMap();
    for (DrillbitEndpoint endpoint : endpoints) {
      endpointMap.put(endpoint.getAddress(), endpoint);
    }

    Map<TopicPartition, Long> startOffsetsMap = Maps.newHashMap();
    Map<TopicPartition, Long> endOffsetsMap = Maps.newHashMap();
    List<PartitionInfo> topicPartitions = null;
    String topicName = kafkaScanSpec.getTopicName();

    // Short-lived consumer used purely for metadata/offset discovery; closed by try-with-resources.
    try (KafkaConsumer<?, ?> kafkaConsumer = new KafkaConsumer<>(kafkaStoragePlugin.getConfig().getKafkaConsumerProps(),
        new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
      if (!kafkaConsumer.listTopics().keySet().contains(topicName)) {
        throw UserException.dataReadError()
            .message("Table '%s' does not exist", topicName)
            .build(logger);
      }

      kafkaConsumer.subscribe(Arrays.asList(topicName));
      // based on KafkaConsumer JavaDoc, seekToBeginning/seekToEnd functions
      // evaluates lazily, seeking to the first/last offset in all partitions only
      // when poll(long) or
      // position(TopicPartition) are called
      kafkaConsumer.poll(0);
      Set<TopicPartition> assignments = kafkaConsumer.assignment();
      topicPartitions = kafkaConsumer.partitionsFor(topicName);

      // fetch start offsets for each topicPartition
      kafkaConsumer.seekToBeginning(assignments);
      for (TopicPartition topicPartition : assignments) {
        startOffsetsMap.put(topicPartition, kafkaConsumer.position(topicPartition));
      }

      // fetch end offsets for each topicPartition
      kafkaConsumer.seekToEnd(assignments);
      for (TopicPartition topicPartition : assignments) {
        endOffsetsMap.put(topicPartition, kafkaConsumer.position(topicPartition));
      }
    } catch (Exception e) {
      throw UserException.dataReadError(e).message("Failed to fetch start/end offsets of the topic %s", topicName)
          .addContext(e.getMessage()).build(logger);
    }

    // computes work for each end point
    for (PartitionInfo partitionInfo : topicPartitions) {
      TopicPartition topicPartition = new TopicPartition(topicName, partitionInfo.partition());
      long lastCommittedOffset = startOffsetsMap.get(topicPartition);
      long latestOffset = endOffsetsMap.get(topicPartition);
      logger.debug("Latest offset of {} is {}", topicPartition, latestOffset);
      logger.debug("Last committed offset of {} is {}", topicPartition, lastCommittedOffset);
      PartitionScanWork work = new PartitionScanWork(topicPartition, lastCommittedOffset, latestOffset);
      // Credit the full partition size to every Drillbit hosting an in-sync replica,
      // so the assignment/affinity logic prefers local reads.
      Node[] inSyncReplicas = partitionInfo.inSyncReplicas();
      for (Node isr : inSyncReplicas) {
        String host = isr.host();
        DrillbitEndpoint ep = endpointMap.get(host);
        if (ep != null) {
          work.getByteMap().add(ep, work.getTotalBytes());
        }
      }
      partitionWorkList.add(work);
    }
  }

  /** Distributes the partition work units across the given minor-fragment endpoints. */
  @Override
  public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) {
    assignments = AssignmentCreator.getMappings(incomingEndpoints, partitionWorkList);
  }

  /** Builds the sub-scan for one minor fragment from the work units assigned to it. */
  @Override
  public KafkaSubScan getSpecificScan(int minorFragmentId) {
    List<PartitionScanWork> workList = assignments.get(minorFragmentId);

    List<KafkaSubScanSpec> scanSpecList = Lists.newArrayList();

    for (PartitionScanWork work : workList) {
      scanSpecList.add(new KafkaSubScanSpec(work.getTopicPartition().topic(), work.getTopicPartition().partition(),
          work.getBeginOffset(), work.getLatestOffset()));
    }

    return new KafkaSubScan(getUserName(), kafkaStoragePlugin, columns, scanSpecList);
  }

  // At most one minor fragment per topic partition.
  @Override
  public int getMaxParallelizationWidth() {
    return partitionWorkList.size();
  }

  // Row count is exact (sum of offset spans); byte size uses the assumed 1KB message size.
  @Override
  public ScanStats getScanStats() {
    long messageCount = 0;
    for (PartitionScanWork work : partitionWorkList) {
      messageCount += (work.getLatestOffset() - work.getBeginOffset());
    }
    return new ScanStats(GroupScanProperty.EXACT_ROW_COUNT, messageCount, 1, messageCount * MSG_SIZE);
  }

  @Override
  public String getDigest() {
    return toString();
  }

  // Group scans are leaves of the physical plan; children must be empty.
  @Override
  public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children) throws ExecutionSetupException {
    Preconditions.checkArgument(children.isEmpty());
    return new KafkaGroupScan(this);
  }

  @Override
  public List<EndpointAffinity> getOperatorAffinity() {
    if (affinities == null) {
      affinities = AffinityCreator.getAffinityMap(partitionWorkList);
    }
    return affinities;
  }

  @Override
  @JsonIgnore
  public boolean canPushdownProjects(List<SchemaPath> columns) {
    return true;
  }

  /** Projection pushdown: copy this scan (reusing discovered offsets) with the new column list. */
  @Override
  public GroupScan clone(List<SchemaPath> columns) {
    KafkaGroupScan clone = new KafkaGroupScan(this);
    clone.columns = columns;
    return clone;
  }

  @JsonProperty
  public KafkaStoragePluginConfig getKafkaStoragePluginConfig() {
    return kafkaStoragePlugin.getConfig();
  }

  @JsonProperty
  public List<SchemaPath> getColumns() {
    return columns;
  }

  @JsonProperty
  public KafkaScanSpec getKafkaScanSpec() {
    return kafkaScanSpec;
  }

  @JsonIgnore
  public KafkaStoragePlugin getStoragePlugin() {
    return kafkaStoragePlugin;
  }

  @Override
  public String toString() {
    return String.format("KafkaGroupScan [KafkaScanSpec=%s, columns=%s]", kafkaScanSpec, columns);
  }
}
/******************************************************************************
 * Compilation: javac BinaryIn.java
 * Execution: java BinaryIn input output
 * Dependencies: none
 *
 * This library is for reading binary data from an input stream.
 *
 * % java BinaryIn http://introcs.cs.princeton.edu/cover.jpg output.jpg
 *
 ******************************************************************************/

package com.afcrowther.algorithms.library;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.net.URL;
import java.net.URLConnection;

/**
 * <i>Binary input</i>. This class provides methods for reading
 * in bits from a binary input stream, either
 * one bit at a time (as a {@code boolean}),
 * 8 bits at a time (as a {@code byte} or {@code char}),
 * 16 bits at a time (as a {@code short}),
 * 32 bits at a time (as an {@code int} or {@code float}), or
 * 64 bits at a time (as a {@code double} or {@code long}).
 * <p>
 * The binary input stream can be from standard input, a filename,
 * a URL name, a Socket, or an InputStream.
 * <p>
 * All primitive types are assumed to be represented using their
 * standard Java representations, in big-endian (most significant
 * byte first) order.
 * <p>
 * The client should not intermix calls to {@code BinaryIn} with calls
 * to {@code In}; otherwise unexpected behavior will result.
 *
 * @author Robert Sedgewick
 * @author Kevin Wayne
 */
public final class BinaryIn {
    private static final int EOF = -1;   // end of file

    private BufferedInputStream in;      // the input stream
    private int buffer;                  // one character buffer (next byte read; EOF when exhausted)
    private int n;                       // number of bits left in buffer (counted down from 8)

    /**
     * Initializes a binary input stream from standard input.
     */
    public BinaryIn() {
        in = new BufferedInputStream(System.in);
        fillBuffer();
    }

    /**
     * Initializes a binary input stream from an {@code InputStream}.
     *
     * @param is the {@code InputStream} object
     */
    public BinaryIn(InputStream is) {
        in = new BufferedInputStream(is);
        fillBuffer();
    }

    /**
     * Initializes a binary input stream from a socket.
     * NOTE: on I/O failure this constructor only prints an error; {@code in} stays
     * null and {@code exists()} will return false.
     *
     * @param socket the socket
     */
    public BinaryIn(Socket socket) {
        try {
            InputStream is = socket.getInputStream();
            in = new BufferedInputStream(is);
            fillBuffer();
        } catch (IOException ioe) {
            System.err.println("Could not open " + socket);
        }
    }

    /**
     * Initializes a binary input stream from a URL.
     * NOTE: on I/O failure this constructor only prints an error; {@code in} stays
     * null and {@code exists()} will return false.
     *
     * @param url the URL
     */
    public BinaryIn(URL url) {
        try {
            URLConnection site = url.openConnection();
            InputStream is = site.getInputStream();
            in = new BufferedInputStream(is);
            fillBuffer();
        } catch (IOException ioe) {
            System.err.println("Could not open " + url);
        }
    }

    /**
     * Initializes a binary input stream from a filename or URL name.
     * Resolution order: local file system, then a classpath resource, then a web URL.
     *
     * @param name the name of the file or URL
     */
    public BinaryIn(String name) {
        try {
            // first try to read file from local file system
            File file = new File(name);
            if (file.exists()) {
                FileInputStream fis = new FileInputStream(file);
                in = new BufferedInputStream(fis);
                fillBuffer();
                return;
            }

            // next try for files included in jar
            URL url = getClass().getResource(name);

            // or URL from web
            if (url == null) {
                url = new URL(name);
            }

            URLConnection site = url.openConnection();
            InputStream is = site.getInputStream();
            in = new BufferedInputStream(is);
            fillBuffer();
        } catch (IOException ioe) {
            System.err.println("Could not open " + name);
        }
    }

    // Reads the next byte into buffer and resets the bit counter to 8.
    // On EOF/IOException, buffer becomes EOF (-1) and n becomes -1, which makes
    // isEmpty() true for all subsequent reads.
    private void fillBuffer() {
        try {
            buffer = in.read();
            n = 8;
        } catch (IOException e) {
            System.err.println("EOF");
            buffer = EOF;
            n = -1;
        }
    }

    /**
     * Returns true if this binary input stream exists.
     *
     * @return {@code true} if this binary input stream exists;
     *         {@code false} otherwise
     */
    public boolean exists() {
        return in != null;
    }

    /**
     * Returns true if this binary input stream is empty.
     *
     * @return {@code true} if this binary input stream is empty;
     *         {@code false} otherwise
     */
    public boolean isEmpty() {
        return buffer == EOF;
    }

    /**
     * Reads the next bit of data from this binary input stream and return as a boolean.
     *
     * @return the next bit of data from this binary input stream as a {@code boolean}
     * @throws RuntimeException if this binary input stream is empty
     */
    public boolean readBoolean() {
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");
        n--;
        // Bits are consumed most-significant first: bit index n counts down from 7 to 0.
        boolean bit = ((buffer >> n) & 1) == 1;
        if (n == 0) fillBuffer();
        return bit;
    }

    /**
     * Reads the next 8 bits from this binary input stream and return as an 8-bit char.
     *
     * @return the next 8 bits of data from this binary input stream as a {@code char}
     * @throws RuntimeException if there are fewer than 8 bits available
     */
    public char readChar() {
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");

        // special case when aligned byte
        if (n == 8) {
            int x = buffer;
            fillBuffer();
            return (char) (x & 0xff);
        }

        // combine last N bits of current buffer with first 8-N bits of new buffer
        int x = buffer;
        x <<= (8 - n);
        // fillBuffer() overwrites n, so remember the current bit count and restore it after
        int oldN = n;
        fillBuffer();
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");
        n = oldN;
        x |= (buffer >>> n);
        return (char) (x & 0xff);
        // the above code doesn't quite work for the last character if N = 8
        // because buffer will be -1
    }

    /**
     * Reads the next r bits from this binary input stream and return as an r-bit character.
     *
     * @param r number of bits to read
     * @return the next r bits of data from this binary input stream as a {@code char}
     * @throws RuntimeException if there are fewer than r bits available
     */
    public char readChar(int r) {
        if (r < 1 || r > 16) throw new RuntimeException("Illegal value of r = " + r);

        // optimize r = 8 case
        if (r == 8) return readChar();

        // assemble the value one bit at a time, most-significant bit first
        char x = 0;
        for (int i = 0; i < r; i++) {
            x <<= 1;
            boolean bit = readBoolean();
            if (bit) x |= 1;
        }
        return x;
    }

    /**
     * Reads the remaining bytes of data from this binary input stream and return as a string.
     *
     * @return the remaining bytes of data from this binary input stream as a {@code String}
     * @throws RuntimeException if this binary input stream is empty or if the number of bits
     *         available is not a multiple of 8 (byte-aligned)
     */
    public String readString() {
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");

        StringBuilder sb = new StringBuilder();
        while (!isEmpty()) {
            char c = readChar();
            sb.append(c);
        }
        return sb.toString();
    }

    /**
     * Reads the next 16 bits from this binary input stream and return as a 16-bit short.
     *
     * @return the next 16 bits of data from this binary input stream as a {@code short}
     * @throws RuntimeException if there are fewer than 16 bits available
     */
    public short readShort() {
        short x = 0;
        for (int i = 0; i < 2; i++) {
            char c = readChar();
            x <<= 8;
            x |= c;
        }
        return x;
    }

    /**
     * Reads the next 32 bits from this binary input stream and return as a 32-bit int.
     *
     * @return the next 32 bits of data from this binary input stream as a {@code int}
     * @throws RuntimeException if there are fewer than 32 bits available
     */
    public int readInt() {
        int x = 0;
        for (int i = 0; i < 4; i++) {
            char c = readChar();
            x <<= 8;
            x |= c;
        }
        return x;
    }

    /**
     * Reads the next r bits from this binary input stream return as an r-bit int.
     *
     * @param r number of bits to read
     * @return the next r bits of data from this binary input stream as a {@code int}
     * @throws RuntimeException if there are fewer than r bits available on standard input
     */
    public int readInt(int r) {
        if (r < 1 || r > 32) throw new RuntimeException("Illegal value of r = " + r);

        // optimize r = 32 case
        if (r == 32) return readInt();

        int x = 0;
        for (int i = 0; i < r; i++) {
            x <<= 1;
            boolean bit = readBoolean();
            if (bit) x |= 1;
        }
        return x;
    }

    /**
     * Reads the next 64 bits from this binary input stream and return as a 64-bit long.
     *
     * @return the next 64 bits of data from this binary input stream as a {@code long}
     * @throws RuntimeException if there are fewer than 64 bits available
     */
    public long readLong() {
        long x = 0;
        for (int i = 0; i < 8; i++) {
            char c = readChar();
            x <<= 8;
            x |= c;
        }
        return x;
    }

    /**
     * Reads the next 64 bits from this binary input stream and return as a 64-bit double.
     *
     * @return the next 64 bits of data from this binary input stream as a {@code double}
     * @throws RuntimeException if there are fewer than 64 bits available
     */
    public double readDouble() {
        return Double.longBitsToDouble(readLong());
    }

    /**
     * Reads the next 32 bits from this binary input stream and return as a 32-bit float.
     *
     * @return the next 32 bits of data from this binary input stream as a {@code float}
     * @throws RuntimeException if there are fewer than 32 bits available
     */
    public float readFloat() {
        return Float.intBitsToFloat(readInt());
    }

    /**
     * Reads the next 8 bits from this binary input stream and return as an 8-bit byte.
     *
     * @return the next 8 bits of data from this binary input stream as a {@code byte}
     * @throws RuntimeException if there are fewer than 8 bits available
     */
    public byte readByte() {
        char c = readChar();
        byte x = (byte) (c & 0xff);
        return x;
    }

    /**
     * Unit tests the {@code BinaryIn} data type.
     * Reads the name of a file or URL (first command-line argument)
     * and writes it to a file (second command-line argument).
     *
     * @param args the command-line arguments
     */
    public static void main(String[] args) {
        BinaryIn in = new BinaryIn(args[0]);
        BinaryOut out = new BinaryOut(args[1]);

        // read one 8-bit char at a time
        while (!in.isEmpty()) {
            char c = in.readChar();
            out.write(c);
        }
        out.flush();
    }
}

/******************************************************************************
 * Copyright 2002-2016, Robert Sedgewick and Kevin Wayne.
 *
 * This file is part of algs4.jar, which accompanies the textbook
 *
 * Algorithms, 4th edition by Robert Sedgewick and Kevin Wayne,
 * Addison-Wesley Professional, 2011, ISBN 0-321-57351-X.
 * http://algs4.cs.princeton.edu
 *
 *
 * algs4.jar is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * algs4.jar is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with algs4.jar. If not, see http://www.gnu.org/licenses.
 ******************************************************************************/
package fr.adrienbrault.idea.symfony2plugin.tests.completion.yaml;

import com.intellij.codeInsight.completion.CompletionType;
import fr.adrienbrault.idea.symfony2plugin.tests.SymfonyLightCodeInsightFixtureTestCase;
import org.jetbrains.yaml.YAMLFileType;

import java.util.Arrays;
import java.util.List;

/**
 * @author Thomas Schulz <mail@king2500.net>
 *
 * @see fr.adrienbrault.idea.symfony2plugin.completion.yaml.YamlCompletionContributor
 */
public class YamlCompletionContributorTest extends SymfonyLightCodeInsightFixtureTestCase {
    public String getTestDataPath() {
        return "src/test/java/fr/adrienbrault/idea/symfony2plugin/tests/completion/yaml/fixtures";
    }

    // --- YAML tag completion: standard "!!" tags are offered on first invocation ---
    public void testTagsCompletionContainsStandardTags() {
        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: <caret>\n",
            "!!binary", "!!float", "!!str"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: !<caret>\n",
            "!!binary", "!!float", "!!str"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: !!<caret>\n",
            "!!binary", "!!float", "!!str"
        );
    }

    // Custom Symfony tags must not appear in a generic YAML file.
    public void testTagsCompletionNotContainsCustomTags() {
        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: <caret>\n",
            "!php/const", "!php/object", "!tagged"
        );
    }

    // Third completion invocation widens the result to all known tags.
    public void testAllTagsCompletionAt3rdInvocation() {
        assertCompletion3rdInvocationContains("" +
                "root:\n" +
                " key: <caret>\n",
            "!php/const", "!php/object", "!!binary", "!!float", "!!str"
        );
    }

    public void testTagsCompletionContainsPhpConstTagInsideConfigAndServices() {
        assertCompletionContains("config.yaml", "" +
                "root:\n" +
                " key: <caret>\n",
            "!php/const"
        );

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " key: <caret>\n",
            "!php/const"
        );
    }

    public void testTagsCompletionNotContainsPhpObjectTagInsideServices() {
        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " key: <caret>\n",
            "!php/object"
        );

        assertCompletion3rdInvocationNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " key: <caret>\n",
            "!php/object"
        );
    }

    // --- Service argument tags, gated by the detected Symfony version fixture ---
    public void testServiceArgumentCompletion() {
        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged", "!tagged_locator", "!service", "!service_locator", "!iterator"
        );
    }

    public void testServiceArgumentCompletionForSymfony32() {
        myFixture.copyFileToProject("Symfony32.php");

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged", "!tagged_locator", "!tagged_iterator", "!service", "!service_locator", "!iterator"
        );
    }

    public void testServiceArgumentCompletionForSymfony33() {
        myFixture.copyFileToProject("Symfony33.php");

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!iterator", "!service"
        );

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged", "!tagged_locator", "!tagged_iterator", "!service_locator"
        );
    }

    public void testServiceArgumentCompletionForSymfony34() {
        myFixture.copyFileToProject("Symfony34.php");

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged", "!iterator", "!service"
        );

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged_locator", "!tagged_iterator", "!service_locator"
        );
    }

    public void testServiceArgumentCompletionForSymfony41() {
        myFixture.copyFileToProject("Symfony41.php");

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!service_locator"
        );
    }

    public void testServiceArgumentCompletionForSymfony42() {
        myFixture.copyFileToProject("Symfony42.php");

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!service_locator", "!tagged", "!iterator", "!service"
        );

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged_locator", "!tagged_iterator"
        );
    }

    public void testServiceArgumentCompletionForSymfony43() {
        myFixture.copyFileToProject("Symfony43.php");

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged_locator", "!service_locator", "!tagged", "!iterator", "!service"
        );

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged_iterator"
        );
    }

    public void testServiceArgumentCompletionForSymfony44() {
        myFixture.copyFileToProject("Symfony44.php");

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged_iterator", "!tagged_locator", "!service_locator", "!iterator", "!service"
        );

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged"
        );
    }

    public void testServiceArgumentCompletionForSymfony50() {
        myFixture.copyFileToProject("Symfony50.php");

        assertCompletionContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged_iterator", "!tagged_locator", "!service_locator", "!iterator", "!service"
        );

        assertCompletionNotContains("services.yaml", "" +
                "services:\n" +
                " my_service:\n" +
                " arguments: [<caret>]\n",
            "!tagged"
        );
    }

    // --- YAML keyword completion (true, .inf, ...) in the various value positions ---
    public void testKeywordsCompletion() {
        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: <caret>\n",
            "true", ".inf"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: <caret>",
            "true", ".inf"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: tr<caret>",
            "true"
        );

        // assertCompletionContains(YAMLFileType.YML, "" +
        //     "root:\n" +
        //     " key: .i<caret>",
        //     ".inf"
        // );
    }

    public void testKeywordsCompletionInsideArray() {
        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: [<caret>]\n",
            "true", ".inf"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: [FOO, <caret>]\n",
            "true", ".inf"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: [FOO, tr<caret>]\n",
            "true"
        );
    }

    public void testKeywordsCompletionInsideSequence() {
        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key:\n" +
                " - <caret>\n",
            "true", ".inf"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key:\n" +
                " - tr<caret>\n",
            "true"
        );
    }

    public void testKeywordsCompletionInsideMapping() {
        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: { foo: <caret> }\n",
            "true", ".inf"
        );

        assertCompletionContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: { foo: tr<caret> }\n",
            "true"
        );
    }

    public void testThatKeywordsAreNotCompletedAfterYamlTag() {
        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: !mytag <caret>\n" +
                " foo: bar",
            "true"
        );

        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " key: !mytag <caret>",
            "true"
        );
    }

    public void testThatKeywordsAreNotCompletedInNewLine() {
        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " <caret>" +
                " foo: bar\n",
            "true"
        );

        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " foo: bar\n" +
                "<caret>",
            "true"
        );

        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " foo: bar\n" +
                " <caret>",
            "true"
        );
    }

    public void testThatKeywordsAreNotCompletedInsideStringLiteral() {
        assertCompletionNotContains(YAMLFileType.YML, "" +
                "root:\n" +
                " foo: '<caret>'\n"
            , "true"
        );
    }

    // Every known service-definition key should be completed under a service id.
    public void testKeywordsCompletionInsideServiceDefinition() {
        var exceptedKeywords = new String[] {
            "abstract", "alias", "arguments", "autoconfigure", "autowire", "autowiring_types",
            "bind", "class", "configurator", "decorates", "decoration_inner_name",
            "decoration_on_invalid", "decoration_priority", "deprecated", "exclude",
            "factory", "factory_class", "factory_method", "factory_service", "file",
            "lazy", "parent", "properties", "public", "resource", "scope",
            "shared", "synchronized", "synthetic", "tags"
        };

        assertCompletionContains(YAMLFileType.YML, "" +
                "services:\n" +
                " app.foo.service:\n" +
                " <caret>\n",
            exceptedKeywords
        );
    }

    /**
     * Configures a YAML file, runs completion at the 3rd invocation level and fails
     * unless every given lookup string is present in the result.
     */
    private void assertCompletion3rdInvocationContains(String configureByText, String... lookupStrings) {
        myFixture.configureByText(YAMLFileType.YML, configureByText);
        myFixture.complete(CompletionType.BASIC, 3);

        if(lookupStrings.length == 0) {
            fail("No lookup element given");
        }

        List<String> lookupElements = myFixture.getLookupElementStrings();
        if(lookupElements == null || lookupElements.size() == 0) {
            fail(String.format("failed that empty completion contains %s", Arrays.toString(lookupStrings)));
        }

        for (String s : lookupStrings) {
            if(!lookupElements.contains(s)) {
                fail(String.format("failed that completion contains %s in %s", s, lookupElements.toString()));
            }
        }
    }

    /**
     * Configures the given file, runs completion at the 3rd invocation level and asserts
     * that NOT all of the given lookup strings are present in the result.
     */
    public void assertCompletion3rdInvocationNotContains(String filename, String configureByText, String... lookupStrings) {
        myFixture.configureByText(filename, configureByText);
        myFixture.complete(CompletionType.BASIC, 3);

        List<String> lookupElementStrings = myFixture.getLookupElementStrings();
        assertNotNull(lookupElementStrings);

        assertFalse(lookupElementStrings.containsAll(Arrays.asList(lookupStrings)));
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hssf.usermodel;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.apache.poi.POITestCase.assertBetween;

import java.io.IOException;
import java.util.List;

import org.apache.poi.ddf.EscherDgRecord;
import org.apache.poi.hssf.HSSFITestDataProvider;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.hssf.model.DrawingManager2;
import org.apache.poi.hssf.model.InternalSheet;
import org.apache.poi.hssf.model.InternalWorkbook;
import org.apache.poi.hssf.record.AutoFilterInfoRecord;
import org.apache.poi.hssf.record.CommonObjectDataSubRecord;
import org.apache.poi.hssf.record.DimensionsRecord;
import org.apache.poi.hssf.record.FtCblsSubRecord;
import org.apache.poi.hssf.record.GridsetRecord;
import org.apache.poi.hssf.record.HCenterRecord;
import org.apache.poi.hssf.record.LbsDataSubRecord;
import org.apache.poi.hssf.record.NameRecord;
import org.apache.poi.hssf.record.ObjRecord;
import org.apache.poi.hssf.record.ObjectProtectRecord;
import org.apache.poi.hssf.record.PasswordRecord;
import org.apache.poi.hssf.record.ProtectRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.SCLRecord;
import org.apache.poi.hssf.record.ScenarioProtectRecord;
import org.apache.poi.hssf.record.SubRecord;
import org.apache.poi.hssf.record.VCenterRecord;
import org.apache.poi.hssf.record.WSBoolRecord;
import org.apache.poi.hssf.record.WindowTwoRecord;
import org.apache.poi.hssf.record.aggregates.WorksheetProtectionBlock;
import org.apache.poi.hssf.usermodel.RecordInspector.RecordCollector;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.ss.util.CellRangeAddressList;
import org.junit.Test;

/**
 * Tests HSSFSheet. This test case is very incomplete at the moment.
 *
 *
 * @author Glen Stampoultzis (glens at apache.org)
 * @author Andrew C. Oliver (acoliver apache org)
 */
public final class TestHSSFSheet extends BaseTestSheet {

    public TestHSSFSheet() {
        // Wire the shared BaseTestSheet cases to the HSSF (binary .xls) implementation.
        super(HSSFITestDataProvider.instance);
    }

    /**
     * Test for Bugzilla #29747.
     * Moved from TestHSSFWorkbook#testSetRepeatingRowsAndColumns().
     */
    @Test
    public void setRepeatingRowsAndColumnsBug29747() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        wb.createSheet();
        wb.createSheet();
        HSSFSheet sheet2 = wb.createSheet();
        sheet2.setRepeatingRows(CellRangeAddress.valueOf("1:2"));
        // The built-in repeating-rows name must point at the third sheet (1-based index 3).
        NameRecord nameRecord = wb.getWorkbook().getNameRecord(0);
        assertEquals(3, nameRecord.getSheetNumber());
        wb.close();
    }

    /** Margins: left, right, top, bottom, header, footer (inches) — HSSF defaults. */
    @Test
    public void getSetMargin() throws IOException {
        baseTestGetSetMargin(new double[]{0.75, 0.75, 1.0, 1.0, 0.3, 0.3});
    }

    /**
     * Test the gridset field gets set as expected.
     */
    @Test
    public void backupRecord() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet s = wb.createSheet();
        GridsetRecord gridsetRec = s.getSheet().getGridsetRecord();
        // By default the gridset flag is on; printing grids flips it off.
        assertTrue(gridsetRec.getGridset());
        s.setGridsPrinted(true);
        assertFalse(gridsetRec.getGridset());
        wb.close();
    }

    /**
     * Test vertically centered output.
     */
    @Test
    public void verticallyCenter() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet s = wb.createSheet();
        VCenterRecord record = s.getSheet().getPageSettings().getVCenter();

        // Setter must update both the low-level record and the user-model view.
        assertFalse(record.getVCenter());
        assertFalse(s.getVerticallyCenter());
        s.setVerticallyCenter(true);
        assertTrue(record.getVCenter());
        assertTrue(s.getVerticallyCenter());
        wb.close();
    }

    /**
     * Test horizontally centered output.
     */
    @Test
    public void horizontallyCenter() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet s = wb.createSheet();
        HCenterRecord record = s.getSheet().getPageSettings().getHCenter();

        // Setter must update both the low-level record and the user-model view.
        assertFalse(record.getHCenter());
        assertFalse(s.getHorizontallyCenter());
        s.setHorizontallyCenter(true);
        assertTrue(record.getHCenter());
        assertTrue(s.getHorizontallyCenter());
        wb.close();
    }

    /**
     * Test WSBboolRecord fields get set in the user model.
*/ @Test public void wsBool() throws IOException { HSSFWorkbook wb = new HSSFWorkbook(); HSSFSheet s = wb.createSheet(); WSBoolRecord record = (WSBoolRecord) s.getSheet().findFirstRecordBySid(WSBoolRecord.sid); // Check defaults assertTrue(record.getAlternateExpression()); assertTrue(record.getAlternateFormula()); assertFalse(record.getAutobreaks()); assertFalse(record.getDialog()); assertFalse(record.getDisplayGuts()); assertTrue(record.getFitToPage()); assertFalse(record.getRowSumsBelow()); assertFalse(record.getRowSumsRight()); // Alter s.setAlternativeExpression(false); s.setAlternativeFormula(false); s.setAutobreaks(true); s.setDialog(true); s.setDisplayGuts(true); s.setFitToPage(false); s.setRowSumsBelow(true); s.setRowSumsRight(true); // Check assertTrue(record.getAlternateExpression()); //sheet.setRowSumsBelow alters this field too assertFalse(record.getAlternateFormula()); assertTrue(record.getAutobreaks()); assertTrue(record.getDialog()); assertTrue(record.getDisplayGuts()); assertFalse(record.getFitToPage()); assertTrue(record.getRowSumsBelow()); assertTrue(record.getRowSumsRight()); assertTrue(s.getAlternateExpression()); assertFalse(s.getAlternateFormula()); assertTrue(s.getAutobreaks()); assertTrue(s.getDialog()); assertTrue(s.getDisplayGuts()); assertFalse(s.getFitToPage()); assertTrue(s.getRowSumsBelow()); assertTrue(s.getRowSumsRight()); wb.close(); } /** * Setting landscape and portrait stuff on existing sheets */ @Test public void printSetupLandscapeExisting() throws IOException { HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("SimpleWithPageBreaks.xls"); assertEquals(3, wb1.getNumberOfSheets()); HSSFSheet sheetL = wb1.getSheetAt(0); HSSFSheet sheetPM = wb1.getSheetAt(1); HSSFSheet sheetLS = wb1.getSheetAt(2); // Check two aspects of the print setup assertFalse(sheetL.getPrintSetup().getLandscape()); assertTrue(sheetPM.getPrintSetup().getLandscape()); assertTrue(sheetLS.getPrintSetup().getLandscape()); assertEquals(1, 
sheetL.getPrintSetup().getCopies()); assertEquals(1, sheetPM.getPrintSetup().getCopies()); assertEquals(1, sheetLS.getPrintSetup().getCopies()); // Change one on each sheetL.getPrintSetup().setLandscape(true); sheetPM.getPrintSetup().setLandscape(false); sheetPM.getPrintSetup().setCopies((short)3); // Check taken assertTrue(sheetL.getPrintSetup().getLandscape()); assertFalse(sheetPM.getPrintSetup().getLandscape()); assertTrue(sheetLS.getPrintSetup().getLandscape()); assertEquals(1, sheetL.getPrintSetup().getCopies()); assertEquals(3, sheetPM.getPrintSetup().getCopies()); assertEquals(1, sheetLS.getPrintSetup().getCopies()); // Save and re-load, and check still there Workbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1); wb1.close(); sheetL = wb1.getSheetAt(0); sheetPM = wb1.getSheetAt(1); sheetLS = wb1.getSheetAt(2); assertTrue(sheetL.getPrintSetup().getLandscape()); assertFalse(sheetPM.getPrintSetup().getLandscape()); assertTrue(sheetLS.getPrintSetup().getLandscape()); assertEquals(1, sheetL.getPrintSetup().getCopies()); assertEquals(3, sheetPM.getPrintSetup().getCopies()); assertEquals(1, sheetLS.getPrintSetup().getCopies()); wb2.close(); } @Test public void groupRows() throws IOException { HSSFWorkbook wb1 = new HSSFWorkbook(); HSSFSheet s = wb1.createSheet(); HSSFRow r1 = s.createRow(0); HSSFRow r2 = s.createRow(1); HSSFRow r3 = s.createRow(2); HSSFRow r4 = s.createRow(3); HSSFRow r5 = s.createRow(4); assertEquals(0, r1.getOutlineLevel()); assertEquals(0, r2.getOutlineLevel()); assertEquals(0, r3.getOutlineLevel()); assertEquals(0, r4.getOutlineLevel()); assertEquals(0, r5.getOutlineLevel()); s.groupRow(2,3); assertEquals(0, r1.getOutlineLevel()); assertEquals(0, r2.getOutlineLevel()); assertEquals(1, r3.getOutlineLevel()); assertEquals(1, r4.getOutlineLevel()); assertEquals(0, r5.getOutlineLevel()); // Save and re-open HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1); wb1.close(); s = wb2.getSheetAt(0); r1 = s.getRow(0); r2 = s.getRow(1); 
        r3 = s.getRow(2);
        r4 = s.getRow(3);
        r5 = s.getRow(4);
        // Outline levels must survive the write/read round trip.
        assertEquals(0, r1.getOutlineLevel());
        assertEquals(0, r2.getOutlineLevel());
        assertEquals(1, r3.getOutlineLevel());
        assertEquals(1, r4.getOutlineLevel());
        assertEquals(0, r5.getOutlineLevel());
        wb2.close();
    }

    /** Grouping rows in a file that lacks guts (outline) records must not fail. */
    @Test
    public void groupRowsExisting() throws IOException {
        HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("NoGutsRecords.xls");

        HSSFSheet s = wb1.getSheetAt(0);
        HSSFRow r1 = s.getRow(0);
        HSSFRow r2 = s.getRow(1);
        HSSFRow r3 = s.getRow(2);
        HSSFRow r4 = s.getRow(3);
        HSSFRow r5 = s.getRow(4);
        HSSFRow r6 = s.getRow(5);

        assertEquals(0, r1.getOutlineLevel());
        assertEquals(0, r2.getOutlineLevel());
        assertEquals(0, r3.getOutlineLevel());
        assertEquals(0, r4.getOutlineLevel());
        assertEquals(0, r5.getOutlineLevel());
        assertEquals(0, r6.getOutlineLevel());

        // This used to complain about lacking guts records
        s.groupRow(2, 4);

        assertEquals(0, r1.getOutlineLevel());
        assertEquals(0, r2.getOutlineLevel());
        assertEquals(1, r3.getOutlineLevel());
        assertEquals(1, r4.getOutlineLevel());
        assertEquals(1, r5.getOutlineLevel());
        assertEquals(0, r6.getOutlineLevel());

        // Save and re-open.
        // NOTE(review): if the OutOfMemoryError branch were taken, wb2 would stay
        // null and the code below would NPE — but fail() aborts the test first.
        HSSFWorkbook wb2 = null;
        try {
            wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1);
        } catch (OutOfMemoryError e) {
            fail("Identified bug 39903");
        }

        s = wb2.getSheetAt(0);
        r1 = s.getRow(0);
        r2 = s.getRow(1);
        r3 = s.getRow(2);
        r4 = s.getRow(3);
        r5 = s.getRow(4);
        r6 = s.getRow(5);

        assertEquals(0, r1.getOutlineLevel());
        assertEquals(0, r2.getOutlineLevel());
        assertEquals(1, r3.getOutlineLevel());
        assertEquals(1, r4.getOutlineLevel());
        assertEquals(1, r5.getOutlineLevel());
        assertEquals(0, r6.getOutlineLevel());
        wb2.close();
        wb1.close();
    }

    /** createDrawingPatriarch must be idempotent — same patriarch on repeat calls. */
    @Test
    public void createDrawings() throws IOException {
        HSSFWorkbook workbook = new HSSFWorkbook();
        HSSFSheet sheet = workbook.createSheet();
        HSSFPatriarch p1 = sheet.createDrawingPatriarch();
        HSSFPatriarch p2 = sheet.createDrawingPatriarch();
        assertSame(p1, p2);
        workbook.close();
    }

    @Test
    public void
getDrawings() throws IOException {
        HSSFWorkbook wb1c = HSSFTestDataSamples.openSampleWorkbook("WithChart.xls");
        HSSFWorkbook wb2c = HSSFTestDataSamples.openSampleWorkbook("WithTwoCharts.xls");

        // 1 chart sheet -> data on 1st, chart on 2nd
        assertNotNull(wb1c.getSheetAt(0).getDrawingPatriarch());
        assertSame(wb1c.getSheetAt(0).getDrawingPatriarch(), wb1c.getSheetAt(0).getDrawingPatriarch());
        assertNotNull(wb1c.getSheetAt(1).getDrawingPatriarch());
        assertSame(wb1c.getSheetAt(1).getDrawingPatriarch(), wb1c.getSheetAt(1).getDrawingPatriarch());
        assertFalse(wb1c.getSheetAt(0).getDrawingPatriarch().containsChart());
        assertTrue(wb1c.getSheetAt(1).getDrawingPatriarch().containsChart());

        // 2 chart sheet -> data on 1st, chart on 2nd+3rd
        assertNotNull(wb2c.getSheetAt(0).getDrawingPatriarch());
        assertNotNull(wb2c.getSheetAt(1).getDrawingPatriarch());
        assertNotNull(wb2c.getSheetAt(2).getDrawingPatriarch());
        assertFalse(wb2c.getSheetAt(0).getDrawingPatriarch().containsChart());
        assertTrue(wb2c.getSheetAt(1).getDrawingPatriarch().containsChart());
        assertTrue(wb2c.getSheetAt(2).getDrawingPatriarch().containsChart());
        wb2c.close();
        wb1c.close();
    }

    /**
     * Test that the ProtectRecord is included when creating or cloning a sheet
     */
    @Test
    public void cloneWithProtect() throws IOException {
        // "secrect" is intentional (sic) — the hash constants below match this exact string.
        String passwordA = "secrect";
        int expectedHashA = -6810;
        String passwordB = "admin";
        int expectedHashB = -14556;
        HSSFWorkbook workbook = new HSSFWorkbook();
        HSSFSheet hssfSheet = workbook.createSheet();
        assertFalse(hssfSheet.getObjectProtect());
        hssfSheet.protectSheet(passwordA);
        assertTrue(hssfSheet.getObjectProtect());
        assertEquals(expectedHashA, hssfSheet.getPassword());
        assertEquals(expectedHashA, hssfSheet.getSheet().getProtectionBlock().getPasswordHash());

        // Clone the sheet, and make sure the password hash is preserved
        HSSFSheet sheet2 = workbook.cloneSheet(0);
        assertTrue(hssfSheet.getObjectProtect());
        assertEquals(expectedHashA, sheet2.getSheet().getProtectionBlock().getPasswordHash());

        // change the password on the first sheet
        hssfSheet.protectSheet(passwordB);
        assertTrue(hssfSheet.getObjectProtect());
        assertEquals(expectedHashB, hssfSheet.getSheet().getProtectionBlock().getPasswordHash());
        assertEquals(expectedHashB, hssfSheet.getPassword());
        // but the cloned sheet's password should remain unchanged
        assertEquals(expectedHashA, sheet2.getSheet().getProtectionBlock().getPasswordHash());
        workbook.close();
    }

    /** Protecting a sheet sets all three protection flags and stores the password hash. */
    @Test
    public void protectSheetA() throws IOException {
        int expectedHash = (short)0xfef1;
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet s = wb.createSheet();
        s.protectSheet("abcdefghij");
        WorksheetProtectionBlock pb = s.getSheet().getProtectionBlock();
        assertTrue("protection should be on", pb.isSheetProtected());
        assertTrue("object protection should be on",pb.isObjectProtected());
        assertTrue("scenario protection should be on",pb.isScenarioProtected());
        assertEquals("well known value for top secret hash should be "+Integer.toHexString(expectedHash).substring(4), expectedHash, pb.getPasswordHash());
        wb.close();
    }

    /**
     * {@link PasswordRecord} belongs with the rest of the Worksheet Protection Block
     * (which should be before {@link DimensionsRecord}).
     */
    @Test
    public void protectSheetRecordOrder_bug47363a() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet s = wb.createSheet();
        s.protectSheet("secret");
        RecordCollector rc = new RecordCollector();
        s.getSheet().visitContainedRecords(rc, 0);
        Record[] recs = rc.getRecords();
        int nRecs = recs.length;
        if (recs[nRecs-2] instanceof PasswordRecord && recs[nRecs-5] instanceof DimensionsRecord) {
            fail("Identified bug 47363a - PASSWORD after DIMENSION");
        }
        // Check that protection block is together, and before DIMENSION
        confirmRecordClass(recs, nRecs-4, DimensionsRecord.class);
        confirmRecordClass(recs, nRecs-9, ProtectRecord.class);
        confirmRecordClass(recs, nRecs-8, ObjectProtectRecord.class);
        confirmRecordClass(recs, nRecs-7, ScenarioProtectRecord.class);
        confirmRecordClass(recs, nRecs-6, PasswordRecord.class);
        wb.close();
    }

    /** Asserts the record at {@code index} is exactly of class {@code cls}, with a helpful failure message. */
    private static void confirmRecordClass(Record[] recs, int index, Class<? extends Record> cls) {
        if (recs.length <= index) {
            fail("Expected (" + cls.getName() + ") at index "
                    + index + " but array length is " + recs.length + ".");
        }
        assertEquals(cls, recs[index].getClass());
    }

    /**
     * There should be no problem with adding data validations after sheet protection
     */
    @Test
    public void dvProtectionOrder_bug47363b() throws IOException {
        HSSFWorkbook workbook = new HSSFWorkbook();
        HSSFSheet sheet = workbook.createSheet("Sheet1");
        sheet.protectSheet("secret");

        DataValidationHelper dataValidationHelper = sheet.getDataValidationHelper();
        DataValidationConstraint dvc = dataValidationHelper.createIntegerConstraint(DataValidationConstraint.OperatorType.BETWEEN, "10", "100");
        CellRangeAddressList numericCellAddressList = new CellRangeAddressList(0, 0, 1, 1);
        DataValidation dv = dataValidationHelper.createValidation(dvc,numericCellAddressList);
        try {
            sheet.addValidationData(dv);
        } catch (IllegalStateException e) {
            String expMsg = "Unexpected (org.apache.poi.hssf.record.PasswordRecord) while looking for DV Table insert pos";
            if (expMsg.equals(e.getMessage())) {
                fail("Identified bug 47363b");
            }
            workbook.close();
            throw e;
        }

        // Un-protecting must remove exactly the 4 protection-block records.
        RecordCollector rc;
        rc = new RecordCollector();
        sheet.getSheet().visitContainedRecords(rc, 0);
        int nRecsWithProtection = rc.getRecords().length;
        sheet.protectSheet(null);
        rc = new RecordCollector();
        sheet.getSheet().visitContainedRecords(rc, 0);
        int nRecsWithoutProtection = rc.getRecords().length;
        assertEquals(4, nRecsWithProtection - nRecsWithoutProtection);
        workbook.close();
    }

    /** Zoom: SCL record creation, placement after WINDOW2, and numerator/denominator limits. */
    @SuppressWarnings("deprecation")
    @Test
    public void zoom() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet sheet = wb.createSheet();
        assertEquals(-1, sheet.getSheet().findFirstRecordLocBySid(SCLRecord.sid));
        sheet.setZoom(75);
        assertTrue(sheet.getSheet().findFirstRecordLocBySid(SCLRecord.sid) > 0);
        SCLRecord sclRecord = (SCLRecord) sheet.getSheet().findFirstRecordBySid(SCLRecord.sid);
        assertEquals(75, 100*sclRecord.getNumerator()/sclRecord.getDenominator());

        // The SCL record must sit directly after WINDOW2.
        int sclLoc = sheet.getSheet().findFirstRecordLocBySid(SCLRecord.sid);
        int window2Loc = sheet.getSheet().findFirstRecordLocBySid(WindowTwoRecord.sid);
        assertTrue(sclLoc == window2Loc + 1);

        // verify limits
        try {
            sheet.setZoom(0);
            fail("Should catch Exception here");
        } catch (IllegalArgumentException e) {
            assertEquals("Numerator must be greater than 0 and less than 65536", e.getMessage());
        }
        try {
            sheet.setZoom(65536);
            fail("Should catch Exception here");
        } catch (IllegalArgumentException e) {
            assertEquals("Numerator must be greater than 0 and less than 65536", e.getMessage());
        }
        try {
            sheet.setZoom(2, 0);
            fail("Should catch Exception here");
        } catch (IllegalArgumentException e) {
            assertEquals("Denominator must be greater than 0 and less than 65536", e.getMessage());
        }
        try {
            sheet.setZoom(2, 65536);
            fail("Should catch Exception here");
        } catch (IllegalArgumentException e) {
            assertEquals("Denominator must be greater than 0 and less than 65536", e.getMessage());
        }

        wb.close();
    }

    /**
     * When removing one merged region, it would
break
     * Make sure the excel file loads work
     */
    @Test
    public void pageBreakFiles() throws IOException {
        HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("SimpleWithPageBreaks.xls");

        HSSFSheet sheet = wb1.getSheetAt(0);
        assertNotNull(sheet);

        assertEquals("1 row page break", 1, sheet.getRowBreaks().length);
        assertEquals("1 column page break", 1, sheet.getColumnBreaks().length);

        assertTrue("No row page break", sheet.isRowBroken(22));
        assertTrue("No column page break", sheet.isColumnBroken((short)4));

        sheet.setRowBreak(10);
        sheet.setColumnBreak((short)13);

        assertEquals("row breaks number", 2, sheet.getRowBreaks().length);
        assertEquals("column breaks number", 2, sheet.getColumnBreaks().length);

        // New and pre-existing breaks must survive the round trip.
        HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1);
        wb1.close();
        sheet = wb2.getSheetAt(0);

        assertTrue("No row page break", sheet.isRowBroken(22));
        assertTrue("No column page break", sheet.isColumnBroken((short)4));

        assertEquals("row breaks number", 2, sheet.getRowBreaks().length);
        assertEquals("column breaks number", 2, sheet.getColumnBreaks().length);
        wb2.close();
    }

    /** Double-byte (DBCS) sheet names must round-trip from file. */
    @Test
    public void dbcsName () throws IOException {
        HSSFWorkbook wb = HSSFTestDataSamples.openSampleWorkbook("DBCSSheetName.xls");
        wb.getSheetAt(1);
        // NOTE(review): the JUnit expected/actual arguments are swapped here
        // (first String is the message) — only failure output is affected.
        assertEquals ("DBCS Sheet Name 2", wb.getSheetName(1),"\u090f\u0915" );
        assertEquals("DBCS Sheet Name 1", wb.getSheetName(0),"\u091c\u093e");
        wb.close();
    }

    /**
     * Testing newly added method that exposes the WINDOW2.toprow
     * parameter to allow setting the toprow in the visible view
     * of the sheet when it is first opened.
     */
    @Test
    public void topRow() throws IOException {
        HSSFWorkbook wb = HSSFTestDataSamples.openSampleWorkbook("SimpleWithPageBreaks.xls");

        HSSFSheet sheet = wb.getSheetAt(0);
        assertNotNull(sheet);

        short toprow = (short) 100;
        short leftcol = (short) 50;
        sheet.showInPane(toprow,leftcol);
        assertEquals("HSSFSheet.getTopRow()", toprow, sheet.getTopRow());
        assertEquals("HSSFSheet.getLeftCol()", leftcol, sheet.getLeftCol());
        wb.close();
    }

    @Test
    public void addEmptyRow() throws IOException {
        //try to add 5 empty rows to a new sheet
        HSSFWorkbook wb1 = new HSSFWorkbook();
        HSSFSheet sheet = wb1.createSheet();
        for (int i = 0; i < 5; i++) {
            sheet.createRow(i);
        }
        HSSFTestDataSamples.writeOutAndReadBack(wb1).close();
        wb1.close();

        //try adding empty rows in an existing worksheet
        HSSFWorkbook wb2 = HSSFTestDataSamples.openSampleWorkbook("Simple.xls");
        sheet = wb2.getSheetAt(0);
        for (int i = 3; i < 10; i++) sheet.createRow(i);
        HSSFTestDataSamples.writeOutAndReadBack(wb2).close();
        wb2.close();
    }

    @Test
    public void autoSizeColumn() throws IOException {
        HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("43902.xls");
        String sheetName = "my sheet";
        HSSFSheet sheet = wb1.getSheet(sheetName);

        // Can't use literal numbers for column sizes, as
        // will come out with different values on different
        // machines based on the fonts available.
        // So, we use ranges, which are pretty large, but
        // thankfully don't overlap!
int minWithRow1And2 = 6400; int maxWithRow1And2 = 7800; int minWithRow1Only = 2750; int maxWithRow1Only = 3400; // autoSize the first column and check its size before the merged region (1,0,1,1) is set: // it has to be based on the 2nd row width sheet.autoSizeColumn((short)0); assertTrue("Column autosized with only one row: wrong width", sheet.getColumnWidth(0) >= minWithRow1And2); assertTrue("Column autosized with only one row: wrong width", sheet.getColumnWidth(0) <= maxWithRow1And2); //create a region over the 2nd row and auto size the first column sheet.addMergedRegion(new CellRangeAddress(1,1,0,1)); assertNotNull(sheet.getMergedRegion(0)); sheet.autoSizeColumn((short)0); HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1); // check that the autoSized column width has ignored the 2nd row // because it is included in a merged region (Excel like behavior) HSSFSheet sheet2 = wb2.getSheet(sheetName); assertTrue(sheet2.getColumnWidth(0) >= minWithRow1Only); assertTrue(sheet2.getColumnWidth(0) <= maxWithRow1Only); // remove the 2nd row merged region and check that the 2nd row value is used to the autoSizeColumn width sheet2.removeMergedRegion(1); sheet2.autoSizeColumn((short)0); HSSFWorkbook wb3 = HSSFTestDataSamples.writeOutAndReadBack(wb2); HSSFSheet sheet3 = wb3.getSheet(sheetName); assertTrue(sheet3.getColumnWidth(0) >= minWithRow1And2); assertTrue(sheet3.getColumnWidth(0) <= maxWithRow1And2); wb3.close(); wb2.close(); wb1.close(); } /** * Setting ForceFormulaRecalculation on sheets */ @Test public void forceRecalculation() throws IOException { HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("UncalcedRecord.xls"); HSSFSheet sheet = wb1.getSheetAt(0); HSSFSheet sheet2 = wb1.getSheetAt(0); HSSFRow row = sheet.getRow(0); row.createCell(0).setCellValue(5); row.createCell(1).setCellValue(8); assertFalse(sheet.getForceFormulaRecalculation()); assertFalse(sheet2.getForceFormulaRecalculation()); // Save and manually verify that on column C we have 
0, value in template HSSFTestDataSamples.writeOutAndReadBack(wb1).close(); sheet.setForceFormulaRecalculation(true); assertTrue(sheet.getForceFormulaRecalculation()); // Save and manually verify that on column C we have now 13, calculated value // Try it can be opened HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1); wb1.close(); // And check correct sheet settings found sheet = wb2.getSheetAt(0); sheet2 = wb2.getSheetAt(1); assertTrue(sheet.getForceFormulaRecalculation()); assertFalse(sheet2.getForceFormulaRecalculation()); // Now turn if back off again sheet.setForceFormulaRecalculation(false); HSSFWorkbook wb3 = HSSFTestDataSamples.writeOutAndReadBack(wb2); wb2.close(); assertFalse(wb3.getSheetAt(0).getForceFormulaRecalculation()); assertFalse(wb3.getSheetAt(1).getForceFormulaRecalculation()); assertFalse(wb3.getSheetAt(2).getForceFormulaRecalculation()); // Now add a new sheet, and check things work // with old ones unset, new one set HSSFSheet s4 = wb3.createSheet(); s4.setForceFormulaRecalculation(true); assertFalse(sheet.getForceFormulaRecalculation()); assertFalse(sheet2.getForceFormulaRecalculation()); assertTrue(s4.getForceFormulaRecalculation()); HSSFWorkbook wb4 = HSSFTestDataSamples.writeOutAndReadBack(wb3); wb3.close(); assertFalse(wb4.getSheetAt(0).getForceFormulaRecalculation()); assertFalse(wb4.getSheetAt(1).getForceFormulaRecalculation()); assertFalse(wb4.getSheetAt(2).getForceFormulaRecalculation()); assertTrue(wb4.getSheetAt(3).getForceFormulaRecalculation()); wb4.close(); } @Test public void columnWidthA() throws IOException { //check we can correctly read column widths from a reference workbook HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("colwidth.xls"); //reference values int[] ref = {365, 548, 731, 914, 1097, 1280, 1462, 1645, 1828, 2011, 2194, 2377, 2560, 2742, 2925, 3108, 3291, 3474, 3657}; HSSFSheet sh = wb1.getSheetAt(0); for (char i = 'A'; i <= 'S'; i++) { int idx = i - 'A'; int w = sh.getColumnWidth(idx); 
            assertEquals(ref[idx], w);
        }

        //the second sheet doesn't have overridden column widths
        sh = wb1.getSheetAt(1);
        int def_width = sh.getDefaultColumnWidth();
        for (char i = 'A'; i <= 'S'; i++) {
            int idx = i - 'A';
            int w = sh.getColumnWidth(idx);
            //getDefaultColumnWidth returns width measured in characters
            //getColumnWidth returns width measured in 1/256th units
            assertEquals(def_width*256, w);
        }
        wb1.close();

        //test new workbook
        HSSFWorkbook wb2 = new HSSFWorkbook();
        sh = wb2.createSheet();
        sh.setDefaultColumnWidth(10);
        assertEquals(10, sh.getDefaultColumnWidth());
        assertEquals(256*10, sh.getColumnWidth(0));
        assertEquals(256*10, sh.getColumnWidth(1));
        assertEquals(256*10, sh.getColumnWidth(2));
        for (char i = 'D'; i <= 'F'; i++) {
            short w = (256*12);
            sh.setColumnWidth(i, w);
            assertEquals(w, sh.getColumnWidth(i));
        }

        //serialize and read again
        HSSFWorkbook wb3 = HSSFTestDataSamples.writeOutAndReadBack(wb2);
        wb2.close();

        sh = wb3.getSheetAt(0);
        assertEquals(10, sh.getDefaultColumnWidth());
        //columns A-C have default width
        assertEquals(256*10, sh.getColumnWidth(0));
        assertEquals(256*10, sh.getColumnWidth(1));
        assertEquals(256*10, sh.getColumnWidth(2));
        //columns D-F have custom width
        for (char i = 'D'; i <= 'F'; i++) {
            short w = (256*12);
            assertEquals(w, sh.getColumnWidth(i));
        }

        // check for 16-bit signed/unsigned error:
        sh.setColumnWidth(0, 40000);
        assertEquals(40000, sh.getColumnWidth(0));
        wb3.close();
    }

    @Test
    public void defaultColumnWidth() throws IOException {
        HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook( "12843-1.xls" );
        HSSFSheet sheet = wb1.getSheetAt( 7 );
        // shall not be NPE
        assertEquals(8, sheet.getDefaultColumnWidth());
        assertEquals(8*256, sheet.getColumnWidth(0));

        assertEquals(0xFF, sheet.getDefaultRowHeight());

        wb1.close();

        HSSFWorkbook wb2 = HSSFTestDataSamples.openSampleWorkbook( "34775.xls" );
        // second and third sheets miss DefaultColWidthRecord
        for(int i = 1; i <= 2; i++){
            int dw = wb2.getSheetAt( i ).getDefaultColumnWidth();
            assertEquals(8, dw);
            int cw = wb2.getSheetAt( i ).getColumnWidth(0);
            assertEquals(8*256, cw);

            // NOTE(review): 'sheet' here still refers to wb1 (already closed), not a
            // wb2 sheet — this re-checks the same value each iteration. Presumably
            // wb2.getSheetAt(i) was intended; confirm against 34775.xls before changing.
            assertEquals(0xFF, sheet.getDefaultRowHeight());
        }

        wb2.close();
    }

    /**
     * Some utilities write Excel files without the ROW records.
     * Excel, ooo, and google docs are OK with this.
     * Now POI is too.
     */
    @Test
    public void missingRowRecords_bug41187() throws IOException {
        HSSFWorkbook wb = HSSFTestDataSamples.openSampleWorkbook("ex41187-19267.xls");

        HSSFSheet sheet = wb.getSheetAt(0);
        HSSFRow row = sheet.getRow(0);
        assertNotNull("Identified bug 41187 a", row);

        assertNotEquals("Identified bug 41187 b", (short)0, row.getHeight());

        assertEquals("Hi Excel!", row.getCell(0).getRichStringCellValue().getString());
        // check row height for 'default' flag
        assertEquals((short)0xFF, row.getHeight());

        HSSFTestDataSamples.writeOutAndReadBack(wb).close();

        wb.close();
    }

    /**
     * If we clone a sheet containing drawings,
     * we must allocate a new ID of the drawing group and re-create shape IDs
     *
     * See bug #45720.
     */
    @Test
    public void cloneSheetWithDrawings() throws IOException {
        HSSFWorkbook wb1 = HSSFTestDataSamples.openSampleWorkbook("45720.xls");

        HSSFSheet sheet1 = wb1.getSheetAt(0);

        wb1.getWorkbook().findDrawingGroup();
        DrawingManager2 dm1 = wb1.getWorkbook().getDrawingManager();

        wb1.cloneSheet(0);

        HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1);
        wb1.close();
        wb2.getWorkbook().findDrawingGroup();
        DrawingManager2 dm2 = wb2.getWorkbook().getDrawingManager();

        //check EscherDggRecord - a workbook-level registry of drawing objects
        assertEquals(dm1.getDgg().getMaxDrawingGroupId() + 1, dm2.getDgg().getMaxDrawingGroupId());

        HSSFSheet sheet2 = wb2.getSheetAt(1);

        //check that id of the drawing group was updated
        EscherDgRecord dg1 = (EscherDgRecord)sheet1.getDrawingPatriarch().getBoundAggregate().findFirstWithId(EscherDgRecord.RECORD_ID);
        EscherDgRecord dg2 = (EscherDgRecord)sheet2.getDrawingPatriarch().getBoundAggregate().findFirstWithId(EscherDgRecord.RECORD_ID);
        int dg_id_1 = dg1.getOptions() >> 4;
        int
dg_id_2 = dg2.getOptions() >> 4;
        assertEquals(dg_id_1 + 1, dg_id_2);

        //TODO: check shapeId in the cloned sheet
        wb2.close();
    }

    /**
     * POI now (Sep 2008) allows sheet names longer than 31 chars (for other apps besides Excel).
     * Since Excel silently truncates to 31, make sure that POI enforces uniqueness on the first
     * 31 chars.
     */
    @Test
    public void longSheetNames() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        final String SAME_PREFIX = "A123456789B123456789C123456789"; // 30 chars

        wb.createSheet(SAME_PREFIX + "Dxxxx");
        try {
            wb.createSheet(SAME_PREFIX + "Dyyyy"); // identical up to the 32nd char
            fail("Expected exception not thrown");
        } catch (IllegalArgumentException e) {
            assertEquals("The workbook already contains a sheet named 'A123456789B123456789C123456789Dyyyy'", e.getMessage());
        }
        wb.createSheet(SAME_PREFIX + "Exxxx"); // OK - differs in the 31st char
        wb.close();
    }

    /**
     * Tests that we can read existing column styles
     */
    @Test
    public void readColumnStyles() throws IOException {
        HSSFWorkbook wbNone = HSSFTestDataSamples.openSampleWorkbook("ColumnStyleNone.xls");
        HSSFWorkbook wbSimple = HSSFTestDataSamples.openSampleWorkbook("ColumnStyle1dp.xls");
        HSSFWorkbook wbComplex = HSSFTestDataSamples.openSampleWorkbook("ColumnStyle1dpColoured.xls");

        // Presence / absence checks
        assertNull(wbNone.getSheetAt(0).getColumnStyle(0));
        assertNull(wbNone.getSheetAt(0).getColumnStyle(1));

        assertNull(wbSimple.getSheetAt(0).getColumnStyle(0));
        assertNotNull(wbSimple.getSheetAt(0).getColumnStyle(1));

        assertNull(wbComplex.getSheetAt(0).getColumnStyle(0));
        assertNotNull(wbComplex.getSheetAt(0).getColumnStyle(1));

        // Details checks
        HSSFCellStyle bs = wbSimple.getSheetAt(0).getColumnStyle(1);
        assertEquals(62, bs.getIndex());
        assertEquals("#,##0.0_ ;\\-#,##0.0\\ ", bs.getDataFormatString());
        assertEquals("Calibri", bs.getFont(wbSimple).getFontName());
        assertEquals(11*20, bs.getFont(wbSimple).getFontHeight());
        assertEquals(8, bs.getFont(wbSimple).getColor());
        assertFalse(bs.getFont(wbSimple).getItalic());
        assertEquals(HSSFFont.BOLDWEIGHT_NORMAL, bs.getFont(wbSimple).getBoldweight());

        HSSFCellStyle cs = wbComplex.getSheetAt(0).getColumnStyle(1);
        assertEquals(62, cs.getIndex());
        assertEquals("#,##0.0_ ;\\-#,##0.0\\ ", cs.getDataFormatString());
        assertEquals("Arial", cs.getFont(wbComplex).getFontName());
        assertEquals(8*20, cs.getFont(wbComplex).getFontHeight());
        assertEquals(10, cs.getFont(wbComplex).getColor());
        assertFalse(cs.getFont(wbComplex).getItalic());
        assertTrue(cs.getFont(wbComplex).getBold());

        wbComplex.close();
        wbSimple.close();
        wbNone.close();
    }

    /**
     * Tests the arabic setting
     */
    @Test
    public void arabic() throws IOException {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet s = wb.createSheet();

        assertFalse(s.isRightToLeft());
        s.setRightToLeft(true);
        assertTrue(s.isRightToLeft());
        wb.close();
    }

    @Test
    public void autoFilter() throws IOException {
        HSSFWorkbook wb1 = new HSSFWorkbook();
        HSSFSheet sh = wb1.createSheet();
        InternalWorkbook iwb = wb1.getWorkbook();
        InternalSheet ish = sh.getSheet();

        assertNull( iwb.getSpecificBuiltinRecord(NameRecord.BUILTIN_FILTER_DB, 1) );
        assertNull( ish.findFirstRecordBySid(AutoFilterInfoRecord.sid) );

        CellRangeAddress range = CellRangeAddress.valueOf("A1:B10");
        sh.setAutoFilter(range);

        NameRecord name = iwb.getSpecificBuiltinRecord(NameRecord.BUILTIN_FILTER_DB, 1);
        assertNotNull( name );

        // The built-in name for auto-filter must consist of a single Area3d Ptg.
        Ptg[] ptg = name.getNameDefinition();
        assertEquals("The built-in name for auto-filter must consist of a single Area3d Ptg", 1, ptg.length);
        assertTrue("The built-in name for auto-filter must consist of a single Area3d Ptg", ptg[0] instanceof Area3DPtg);

        // The name's area must match the filtered range exactly.
        Area3DPtg aref = (Area3DPtg)ptg[0];
        assertEquals(range.getFirstColumn(), aref.getFirstColumn());
        assertEquals(range.getFirstRow(), aref.getFirstRow());
        assertEquals(range.getLastColumn(), aref.getLastColumn());
        assertEquals(range.getLastRow(), aref.getLastRow());

        // verify AutoFilterInfoRecord
        AutoFilterInfoRecord afilter = (AutoFilterInfoRecord)ish.findFirstRecordBySid(AutoFilterInfoRecord.sid);
        assertNotNull(afilter );
        assertEquals(2, afilter.getNumEntries()); //filter covers two columns

        HSSFPatriarch dr = sh.getDrawingPatriarch();
        assertNotNull(dr);
        HSSFSimpleShape comboBoxShape = (HSSFSimpleShape)dr.getChildren().get(0);
        assertEquals(comboBoxShape.getShapeType(), HSSFSimpleShape.OBJECT_TYPE_COMBO_BOX);

        assertNull( ish.findFirstRecordBySid(ObjRecord.sid) ); // ObjRecord will appear after serializetion

        HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1);
        wb1.close();
        sh = wb2.getSheetAt(0);
        ish = sh.getSheet();
        ObjRecord objRecord = (ObjRecord)ish.findFirstRecordBySid(ObjRecord.sid);
        List<SubRecord> subRecords = objRecord.getSubRecords();
        assertEquals(3, subRecords.size());
        assertTrue(subRecords.get(0) instanceof CommonObjectDataSubRecord );
        assertTrue(subRecords.get(1) instanceof FtCblsSubRecord ); // must be present, see Bug 51481
        assertTrue(subRecords.get(2) instanceof LbsDataSubRecord );
        wb2.close();
    }

    /** Hiding a column via the deprecated short-index API must be readable back. */
    @Test
    public void getSetColumnHiddenShort() throws IOException {
        Workbook workbook = new HSSFWorkbook();
        Sheet sheet = workbook.createSheet("Sheet 1");
        sheet.setColumnHidden((short)2, true);
        assertTrue(sheet.isColumnHidden((short)2));
        workbook.close();
    }

    @Test
    public void columnWidthShort() throws IOException {
        HSSFWorkbook wb1 = new HSSFWorkbook();
        Sheet sheet = wb1.createSheet();

        //default column width measured in characters
        sheet.setDefaultColumnWidth((short)10);
        assertEquals(10, sheet.getDefaultColumnWidth());
        //columns A-C have default width
        assertEquals(256*10, sheet.getColumnWidth((short)0));
        assertEquals(256*10, sheet.getColumnWidth((short)1));
        assertEquals(256*10, sheet.getColumnWidth((short)2));

        //set custom width for D-F
        for (char i = 'D'; i <= 'F'; i++) {
            //Sheet#setColumnWidth accepts the width in units of 1/256th of a character width
            int w = 256*12;
            sheet.setColumnWidth((short)i, w);
            assertEquals(w, sheet.getColumnWidth((short)i));
        }
        //reset the default column width, columns A-C change, D-F still have custom width
        sheet.setDefaultColumnWidth((short)20);
        assertEquals(20, sheet.getDefaultColumnWidth());
        assertEquals(256*20, sheet.getColumnWidth((short)0));
        assertEquals(256*20, sheet.getColumnWidth((short)1));
        assertEquals(256*20, sheet.getColumnWidth((short)2));
        for (char i = 'D'; i <= 'F'; i++) {
            int w = 256*12;
            assertEquals(w, sheet.getColumnWidth((short)i));
        }

        // check for 16-bit signed/unsigned error:
        sheet.setColumnWidth((short)10, 40000);
        assertEquals(40000, sheet.getColumnWidth((short)10));

        //The maximum column width for an individual cell is 255 characters
        try {
            sheet.setColumnWidth((short)9, 256*256);
            fail("expected exception");
        } catch(IllegalArgumentException e){
            assertEquals("The maximum column width for an individual cell is 255 characters.", e.getMessage());
        }

        //serialize and read again
        HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1);
        wb1.close();

        sheet = wb2.getSheetAt(0);
        assertEquals(20, sheet.getDefaultColumnWidth());
        //columns A-C have default width
        assertEquals(256*20, sheet.getColumnWidth((short)0));
        assertEquals(256*20, sheet.getColumnWidth((short)1));
        assertEquals(256*20, sheet.getColumnWidth((short)2));
        //columns D-F have custom width
        for (char i = 'D'; i <= 'F'; i++) {
            short w = (256*12);
            assertEquals(w, sheet.getColumnWidth((short)i));
        }
        assertEquals(40000, sheet.getColumnWidth((short)10));
        wb2.close();
    }
@Test
public void showInPane() throws IOException {
    Workbook wb = new HSSFWorkbook();
    Sheet sheet = wb.createSheet();
    // Row 2 / leftmost column 3 is within BIFF8 limits, so this call must succeed.
    sheet.showInPane(2, 3);

    // HSSF rows are limited to 0..65535; a larger top-row must be rejected.
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Maximum row number is 65535");
    sheet.showInPane(Integer.MAX_VALUE, 3);
    // NOTE(review): not reached when the expected exception fires, so wb is never closed here.
    wb.close();
}

@Test
public void drawingRecords() throws IOException {
    HSSFWorkbook wb = new HSSFWorkbook();
    HSSFSheet sheet = wb.createSheet();
    /* TODO: NPE? sheet.dumpDrawingRecords(false); sheet.dumpDrawingRecords(true);*/
    // A freshly created sheet has no drawing/escher aggregate yet.
    assertNull(sheet.getDrawingEscherAggregate());
    wb.close();
}

@Test
public void bug55723b() throws IOException {
    HSSFWorkbook wb = new HSSFWorkbook();
    Sheet sheet = wb.createSheet();

    // No auto-filter yet, so the built-in _FilterDatabase name record must be absent.
    // stored with a special name
    assertNull(wb.getWorkbook().getSpecificBuiltinRecord(NameRecord.BUILTIN_FILTER_DB, 1));

    CellRangeAddress range = CellRangeAddress.valueOf("A:B");
    AutoFilter filter = sheet.setAutoFilter(range);
    assertNotNull(filter);

    // Setting the auto-filter must create the built-in record.
    // stored with a special name
    NameRecord record = wb.getWorkbook().getSpecificBuiltinRecord(NameRecord.BUILTIN_FILTER_DB, 1);
    assertNotNull(record);
    wb.close();
}

@Test
public void test58746() throws IOException {
    HSSFWorkbook wb = new HSSFWorkbook();

    HSSFSheet first = wb.createSheet("first");
    first.createRow(0).createCell(0).setCellValue(1);

    HSSFSheet second = wb.createSheet("second");
    second.createRow(0).createCell(0).setCellValue(2);

    HSSFSheet third = wb.createSheet("third");
    HSSFRow row = third.createRow(0);
    row.createCell(0).setCellFormula("first!A1");
    row.createCell(1).setCellFormula("second!A1");

    // re-order for sheet "third"
    wb.setSheetOrder("third", 0);

    // verify results
    assertEquals("third", wb.getSheetAt(0).getSheetName());
    assertEquals("first", wb.getSheetAt(1).getSheetName());
    assertEquals("second", wb.getSheetAt(2).getSheetName());

    // Cross-sheet formulas must still reference the original sheets after re-ordering.
    assertEquals("first!A1", wb.getSheetAt(0).getRow(0).getCell(0).getCellFormula());
    assertEquals("second!A1", wb.getSheetAt(0).getRow(0).getCell(1).getCellFormula());

    wb.close();
}

@Test
public void bug59135() throws IOException {
    HSSFWorkbook wb1 = new HSSFWorkbook();
    wb1.createSheet().protectSheet("1111.2222.3333.1234");
    HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb1);
    wb1.close();

    // Expected stored password hash for this protection string (survives write/read round trip).
    assertEquals((short)0xb86b, wb2.getSheetAt(0).getPassword());
    wb2.close();

    HSSFWorkbook wb3 = new HSSFWorkbook();
    wb3.createSheet().protectSheet("1111.2222.3333.12345");
    HSSFWorkbook wb4 = HSSFTestDataSamples.writeOutAndReadBack(wb3);
    wb3.close();

    // One extra trailing character yields a different hash.
    assertEquals((short)0xbecc, wb4.getSheetAt(0).getPassword());
    wb4.close();
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.client.builder; import org.apache.hadoop.hive.metastore.api.GetProjectionsSpec; import java.util.ArrayList; import java.util.List; /** * Builder for the GetProjectionsSpec. This is a projection specification for tables returned from the HMS. 
*/
public class GetTableProjectionsSpecBuilder {

    /** Explicit list of projected field paths; stays {@code null} until a column is added. */
    private List<String> columnList = null;
    /** Pattern of column names to include; {@code null} means no include filter. */
    private String includeColumnPattern = null;
    /** Pattern of column names to exclude; {@code null} means no exclude filter. */
    private String excludeColumnPattern = null;

    /**
     * Creates a builder pre-populated with a column list and include/exclude patterns.
     *
     * @param columnList           field paths to project, may be null
     * @param includeColumnPattern include pattern, may be null
     * @param excludeColumnPattern exclude pattern, may be null
     */
    public GetTableProjectionsSpecBuilder(List<String> columnList, String includeColumnPattern,
                                          String excludeColumnPattern) {
        this.columnList = columnList;
        this.includeColumnPattern = includeColumnPattern;
        this.excludeColumnPattern = excludeColumnPattern;
    }

    /** Creates an empty builder; add projections via the fluent {@code include*} methods. */
    public GetTableProjectionsSpecBuilder() {
    }

    /** Replaces the whole projection column list. */
    public GetTableProjectionsSpecBuilder setColumnList(List<String> columnList) {
        this.columnList = columnList;
        return this;
    }

    /** Sets the include pattern for projected columns. */
    public GetTableProjectionsSpecBuilder setIncludeColumnPattern(String includeColumnPattern) {
        this.includeColumnPattern = includeColumnPattern;
        return this;
    }

    /** Sets the exclude pattern for projected columns. */
    public GetTableProjectionsSpecBuilder setExcludeColumnPattern(String excludeColumnPattern) {
        this.excludeColumnPattern = excludeColumnPattern;
        return this;
    }

    /**
     * Lazily creates the column list and appends one projected field path.
     *
     * @param colName field path understood by the HMS projection machinery (e.g. "sd.location")
     */
    private void initColumnListAndAddCol(String colName) {
        if (this.columnList == null) {
            this.columnList = new ArrayList<>();
        }
        this.columnList.add(colName);
    }

    // --- Fluent projections. Each method appends a single well-known field path.
    //     NOTE: the method names below (including "SdCd"/"Sdparameters" spellings) are
    //     existing public API and must not be renamed.

    public GetTableProjectionsSpecBuilder includeTableName() {
        initColumnListAndAddCol("tableName");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeDatabase() {
        initColumnListAndAddCol("dbName");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdCdColsName() {
        initColumnListAndAddCol("sd.cols.name");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdCdColsType() {
        initColumnListAndAddCol("sd.cols.type");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdCdColsComment() {
        initColumnListAndAddCol("sd.cols.comment");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdLocation() {
        initColumnListAndAddCol("sd.location");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdInputFormat() {
        initColumnListAndAddCol("sd.inputFormat");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdOutputFormat() {
        initColumnListAndAddCol("sd.outputFormat");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdIsCompressed() {
        initColumnListAndAddCol("sd.compressed");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdNumBuckets() {
        initColumnListAndAddCol("sd.numBuckets");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoName() {
        initColumnListAndAddCol("sd.serdeInfo.name");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoSerializationLib() {
        initColumnListAndAddCol("sd.serdeInfo.serializationLib");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoParameters() {
        initColumnListAndAddCol("sd.serdeInfo.parameters");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoDescription() {
        initColumnListAndAddCol("sd.serdeInfo.description");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoSerializerClass() {
        initColumnListAndAddCol("sd.serdeInfo.serializerClass");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoDeserializerClass() {
        initColumnListAndAddCol("sd.serdeInfo.deserializerClass");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSerDeInfoSerdeType() {
        initColumnListAndAddCol("sd.serdeInfo.serdeType");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdBucketCols() {
        initColumnListAndAddCol("sd.bucketCols");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSortColsCol() {
        initColumnListAndAddCol("sd.sortCols.col");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSortColsOrder() {
        initColumnListAndAddCol("sd.sortCols.order");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdparameters() {
        initColumnListAndAddCol("sd.parameters");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSkewedColNames() {
        initColumnListAndAddCol("sd.skewedInfo.skewedColNames");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSkewedColValues() {
        initColumnListAndAddCol("sd.skewedInfo.skewedColValues");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdSkewedColValueLocationMaps() {
        initColumnListAndAddCol("sd.skewedInfo.skewedColValueLocationMaps");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeSdIsStoredAsSubDirectories() {
        initColumnListAndAddCol("sd.storedAsSubDirectories");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeOwner() {
        initColumnListAndAddCol("owner");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeOwnerType() {
        initColumnListAndAddCol("ownerType");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeCreateTime() {
        initColumnListAndAddCol("createTime");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeLastAccessTime() {
        initColumnListAndAddCol("lastAccessTime");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeRetention() {
        initColumnListAndAddCol("retention");
        return this;
    }

    public GetTableProjectionsSpecBuilder includePartitionKeysName() {
        initColumnListAndAddCol("partitionKeys.name");
        return this;
    }

    public GetTableProjectionsSpecBuilder includePartitionKeysType() {
        initColumnListAndAddCol("partitionKeys.type");
        return this;
    }

    public GetTableProjectionsSpecBuilder includePartitionKeysComment() {
        initColumnListAndAddCol("partitionKeys.comment");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeParameters() {
        initColumnListAndAddCol("parameters");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeViewOriginalText() {
        initColumnListAndAddCol("viewOriginalText");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeViewExpandedText() {
        initColumnListAndAddCol("viewExpandedText");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeRewriteEnabled() {
        initColumnListAndAddCol("rewriteEnabled");
        return this;
    }

    public GetTableProjectionsSpecBuilder includeTableType() {
        initColumnListAndAddCol("tableType");
        return this;
    }

    /**
     * Builds the projection spec from the accumulated column list and patterns.
     *
     * @return a new {@link GetProjectionsSpec}
     */
    public GetProjectionsSpec build() {
        return new GetProjectionsSpec(columnList, includeColumnPattern, excludeColumnPattern);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.settings; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.hamcrest.Matchers; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.hamcrest.Matchers.containsString; 
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Integration tests for dynamic cluster-settings updates: transient vs persistent
 * routing of keys, rejection of unknown/invalid values, and behaviour under the
 * cluster read-only block. Runs against a fresh test cluster per test (scope TEST).
 */
@ClusterScope(scope = TEST)
public class ClusterSettingsIT extends ESIntegTestCase {

    // An unknown setting key must be silently dropped: acked, but absent from the response.
    public void testClusterNonExistingSettingsUpdate() {
        String key1 = "no_idea_what_you_are_talking_about";
        int value1 = 10;
        ClusterUpdateSettingsResponse response = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(Settings.builder().put(key1, value1).build())
                .get();
        assertAcked(response);
        assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable());
    }

    // Each key must appear only in the settings scope (transient/persistent) it was sent in.
    public void testClusterSettingsUpdateResponse() {
        String key1 = IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC;
        int value1 = 10;
        String key2 = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE;
        boolean value2 = false;

        // key1 transient, key2 persistent.
        Settings transientSettings1 = Settings.builder().put(key1, value1, ByteSizeUnit.BYTES).build();
        Settings persistentSettings1 = Settings.builder().put(key2, value2).build();
        ClusterUpdateSettingsResponse response1 = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(transientSettings1)
                .setPersistentSettings(persistentSettings1)
                .execute()
                .actionGet();
        assertAcked(response1);
        assertThat(response1.getTransientSettings().get(key1), notNullValue());
        assertThat(response1.getTransientSettings().get(key2), nullValue());
        assertThat(response1.getPersistentSettings().get(key1), nullValue());
        assertThat(response1.getPersistentSettings().get(key2), notNullValue());

        // Both keys transient, persistent empty.
        Settings transientSettings2 = Settings.builder().put(key1, value1, ByteSizeUnit.BYTES).put(key2, value2).build();
        Settings persistentSettings2 = Settings.EMPTY;
        ClusterUpdateSettingsResponse response2 = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(transientSettings2)
                .setPersistentSettings(persistentSettings2)
                .execute()
                .actionGet();
        assertAcked(response2);
        assertThat(response2.getTransientSettings().get(key1), notNullValue());
        assertThat(response2.getTransientSettings().get(key2), notNullValue());
        assertThat(response2.getPersistentSettings().get(key1), nullValue());
        assertThat(response2.getPersistentSettings().get(key2), nullValue());

        // Both keys persistent, transient empty.
        Settings transientSettings3 = Settings.EMPTY;
        Settings persistentSettings3 = Settings.builder().put(key1, value1, ByteSizeUnit.BYTES).put(key2, value2).build();
        ClusterUpdateSettingsResponse response3 = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(transientSettings3)
                .setPersistentSettings(persistentSettings3)
                .execute()
                .actionGet();
        assertAcked(response3);
        assertThat(response3.getTransientSettings().get(key1), nullValue());
        assertThat(response3.getTransientSettings().get(key2), nullValue());
        assertThat(response3.getPersistentSettings().get(key1), notNullValue());
        assertThat(response3.getPersistentSettings().get(key2), notNullValue());
    }

    // Valid timeout values are applied; invalid strings and negative values are dropped
    // and the previously applied value ("1s") is kept.
    public void testUpdateDiscoveryPublishTimeout() {
        DiscoverySettings discoverySettings = internalCluster().getInstance(DiscoverySettings.class);

        assertThat(discoverySettings.getPublishTimeout(), equalTo(DiscoverySettings.DEFAULT_PUBLISH_TIMEOUT));

        ClusterUpdateSettingsResponse response = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "1s").build())
                .get();
        assertAcked(response);
        assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT), equalTo("1s"));
        assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));

        // A non-parseable value must be rejected without changing the applied timeout.
        response = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "whatever").build())
                .get();
        assertAcked(response);
        assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable());
        assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));

        // A negative value must likewise be rejected.
        response = client().admin().cluster()
                .prepareUpdateSettings()
                .setTransientSettings(Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, -1).build())
                .get();
        assertAcked(response);
        assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable());
        assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
    }

    // Settings updates are blocked while the cluster is read-only, except for the
    // read-only flag itself, and work again once the block is lifted.
    public void testClusterUpdateSettingsWithBlocks() {
        String key1 = "cluster.routing.allocation.enable";
        Settings transientSettings = Settings.builder().put(key1, false).build();

        String key2 = "cluster.routing.allocation.node_concurrent_recoveries";
        Settings persistentSettings = Settings.builder().put(key2, "5").build();

        ClusterUpdateSettingsRequestBuilder request = client().admin().cluster().prepareUpdateSettings()
                .setTransientSettings(transientSettings)
                .setPersistentSettings(persistentSettings);

        // Cluster settings updates are blocked when the cluster is read only
        try {
            setClusterReadOnly(true);
            assertBlocked(request, MetaData.CLUSTER_READ_ONLY_BLOCK);

            // But it's possible to update the settings to update the "cluster.blocks.read_only" setting
            Settings settings = settingsBuilder().put(MetaData.SETTING_READ_ONLY, false).build();
            assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
        } finally {
            setClusterReadOnly(false);
        }

        // It should work now
        ClusterUpdateSettingsResponse response = request.execute().actionGet();
        assertAcked(response);
        assertThat(response.getTransientSettings().get(key1), notNullValue());
        assertThat(response.getTransientSettings().get(key2), nullValue());
        assertThat(response.getPersistentSettings().get(key1), nullValue());
        assertThat(response.getPersistentSettings().get(key2), notNullValue());
    }

    // A time setting without a unit (e.g. "10") must be rejected by default.
    public void testMissingUnits() {
        assertAcked(prepareCreate("test"));
        try {
            client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.refresh_interval", "10")).execute().actionGet();
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("[index.refresh_interval] with value [10]"));
            assertThat(e.getMessage(), containsString("unit is missing or unrecognized"));
        }
    }

    // With the lenient flag a unit-less value is accepted. NOTE(review): the flag is
    // global static state, hence the finally block restoring the strict default.
    public void testMissingUnitsLenient() {
        try {
            createNode(Settings.builder().put(Settings.SETTINGS_REQUIRE_UNITS, "false").build());
            assertAcked(prepareCreate("test"));
            ensureGreen();
            client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.refresh_interval", "10")).execute().actionGet();
        } finally {
            // Restore the default so subsequent tests require units:
            assertFalse(Settings.getSettingsRequireUnits());
            Settings.setSettingsRequireUnits(true);
        }
    }

    // Starts a single minimal node for this suite's cluster, merging in caller settings.
    private void createNode(Settings settings) {
        internalCluster().startNode(Settings.builder()
                .put(ClusterName.SETTING, "ClusterSettingsIT")
                .put("node.name", "ClusterSettingsIT")
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created
                .put("http.enabled", false)
                .put("config.ignore_system_properties", true) // make sure we get what we set :)
                .put(settings)
        );
    }
}
/*
 * Copyright 2016 Russian Post
 *
 * This source code is Russian Post Confidential Proprietary.
 * This software is protected by copyright. All rights and titles are reserved.
 * You shall not use, copy, distribute, modify, decompile, disassemble or reverse engineer the software.
 * Otherwise this violation would be treated by law and would be subject to legal prosecution.
 * Legal use of the software provides receipt of a license from the right holder only.
 */
package org.maxur.ldoc.model;

import com.sun.javadoc.AnnotationDesc;
import com.sun.javadoc.AnnotationTypeDoc;
import com.sun.javadoc.AnnotationValue;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.PackageDoc;
import com.sun.javadoc.ProgramElementDoc;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.maxur.ldoc.annotation.BusinessDomain;
import org.maxur.ldoc.annotation.Concept;
import org.maxur.ldoc.annotation.Link;
import org.maxur.ldoc.annotation.Links;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * The type Domain model.
 *
 * <p>Built from a javadoc {@link PackageDoc} annotated with {@code @BusinessDomain};
 * aggregates the package's links ({@code @Link}/{@code @Links}) and the
 * {@code @Concept}-annotated classes found in it and its sub-packages.
 *
 * @author Maxim Yunusov
 * @version 1.0
 * @since <pre>8/11/2016</pre>
 */
@Slf4j
public class SubDomain {

    // Fully qualified name of the package this sub-domain was created from.
    private final String codeName;

    @Getter
    private String description;

    @Getter
    private String title;

    @Getter
    private final List<LinkModel> links;

    @Getter
    private List<ConceptModel> concepts;

    /**
     * Instantiates a new Domain model.
     *
     * Reads "name" and "description" from the {@code @BusinessDomain} annotation members
     * and collects all links declared directly via {@code @Link} or wrapped in {@code @Links}.
     */
    private SubDomain(final PackageDoc doc, final AnnotationDesc desc) {
        this.codeName = doc.name();
        for (AnnotationDesc.ElementValuePair member : desc.elementValues()) {
            switch (member.element().name()) {
                case "name":
                    this.title = getString(member);
                    break;
                case "description":
                    this.description = getString(member);
                    break;
                default:
                    // other annotation members are ignored
            }
        }
        // Single @Link annotations on the package.
        this.links = Arrays.stream(doc.annotations())
            .filter(ad -> isAnnotatedAsLink(ad.annotationType()))
            .map(LinkModel::makeByLink)
            .collect(Collectors.toList());
        // Plus any links wrapped in a @Links container annotation.
        this.links.addAll(Arrays.stream(doc.annotations())
            .filter(ad -> isAnnotatedAsLinks(ad.annotationType()))
            .map(LinkModel::makeByLinks)
            .flatMap(Collection::stream)
            .collect(Collectors.toList())
        );
        this.concepts = new ArrayList<>();
    }

    /**
     * Adds every {@code @Concept}-annotated class of the given package to this domain,
     * provided the package belongs to this domain (its name starts with {@code codeName}).
     */
    void add(PackageDoc doc) {
        if (doc.name().startsWith(codeName)) {
            Arrays.stream(doc.allClasses())
                .map(ConceptModel::makeBy)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .forEach(concepts::add);
        }
    }

    /**
     * Make by optional.
     *
     * @param aPackage the a package
     * @return a SubDomain when the package carries exactly one {@code @BusinessDomain}
     *         annotation; empty when it carries none
     * @throws IllegalStateException when more than one such annotation is present
     */
    static Optional<SubDomain> makeBy(final PackageDoc aPackage) {
        final List<AnnotationDesc> types = Arrays.stream(aPackage.annotations())
            .filter(ad -> isAnnotatedAsBoundedContext(ad.annotationType()))
            .collect(Collectors.toList());
        switch (types.size()) {
            case 0:
                return Optional.empty();
            case 1:
                return Optional.of(new SubDomain(aPackage, types.get(0)));
            default:
                throw new IllegalStateException("There are more than one BoundedContext annotations");
        }
    }

    /**
     * Gets id.
     *
     * @return the id — the last segment of the package name, lower-cased
     */
    public String getId() {
        final String[] strings = codeName.split("\\.");
        return strings[strings.length - 1].toLowerCase();
    }

    /**
     * Gets name.
     *
     * @return the name — the last segment of the package name, capitalized
     */
    public String getName() {
        final String[] strings = codeName.split("\\.");
        return capitalize(strings[strings.length - 1]);
    }

    // True when the annotation type is @BusinessDomain.
    private static boolean isAnnotatedAsBoundedContext(final AnnotationTypeDoc annotationType) {
        return BusinessDomain.class.getCanonicalName().equals(annotationType.qualifiedTypeName());
    }

    // True when the annotation type is @Link.
    private static boolean isAnnotatedAsLink(final AnnotationTypeDoc annotationType) {
        return Link.class.getCanonicalName().equals(annotationType.qualifiedTypeName());
    }

    // True when the annotation type is the @Links container.
    private static boolean isAnnotatedAsLinks(final AnnotationTypeDoc annotationType) {
        return Links.class.getCanonicalName().equals(annotationType.qualifiedTypeName());
    }

    // String value of an annotation member.
    private static String getString(AnnotationDesc.ElementValuePair member) {
        return member.value().value().toString();
    }

    // "fooBAR" -> "Foobar": first char upper-cased, the rest lower-cased.
    private static String capitalize(final String line) {
        return Character.toUpperCase(line.charAt(0)) + line.substring(1).toLowerCase();
    }

    /**
     * The type Concept model.
     *
     * <p>A domain concept read from a {@code @Concept}-annotated class.
     */
    public static class ConceptModel {

        @Getter
        private final String name;

        @Getter
        private String title;

        @Getter
        private String description;

        // Populates name from the class and title/description from the annotation members.
        private ConceptModel(final ClassDoc doc, final AnnotationDesc desk) {
            this.name = doc.simpleTypeName();
            for (AnnotationDesc.ElementValuePair member : desk.elementValues()) {
                switch (member.element().name()) {
                    case "name":
                        this.title = getString(member);
                        break;
                    case "description":
                        this.description = getString(member);
                        break;
                    default:
                        // other annotation members are ignored
                }
            }
        }

        /**
         * Make by optional.
         *
         * @param doc the doc
         * @return a ConceptModel when the class is annotated with {@code @Concept}, else empty
         */
        static Optional<ConceptModel> makeBy(final ClassDoc doc) {
            final Optional<AnnotationDesc> desc = conceptAnnotation(doc);
            return desc.isPresent() ?
                Optional.of(new ConceptModel(doc, desc.get())) :
                Optional.empty();
        }

        // Finds the first @Concept annotation on the element, if any.
        private static Optional<AnnotationDesc> conceptAnnotation(final ProgramElementDoc doc) {
            return Arrays.stream(doc.annotations())
                .filter(d -> isAnnotatedAsConcept(d.annotationType()))
                .findFirst();
        }

        private static boolean isAnnotatedAsConcept(final AnnotationTypeDoc annotationType) {
            return Concept.class.getCanonicalName().equals(annotationType.qualifiedTypeName());
        }
    }

    /**
     * The type Link model.
     *
     * <p>A directed link to a related sub-domain, read from a {@code @Link} annotation.
     */
    public static class LinkModel {

        @Getter
        private String label;

        @Getter
        private String related;

        // "related" is lower-cased to match SubDomain#getId().
        private LinkModel(final AnnotationDesc desc) {
            for (AnnotationDesc.ElementValuePair member : desc.elementValues()) {
                switch (member.element().name()) {
                    case "related":
                        this.related = getString(member).toLowerCase();
                        break;
                    case "label":
                        this.label = getString(member);
                        break;
                    default:
                        // other annotation members are ignored
                }
            }
        }

        // One model from a single @Link annotation.
        private static LinkModel makeByLink(final AnnotationDesc desc) {
            return new LinkModel(desc);
        }

        // All models from the "value" array of a @Links container annotation.
        private static List<LinkModel> makeByLinks(final AnnotationDesc desc) {
            final ArrayList<LinkModel> result = new ArrayList<>();
            for (AnnotationDesc.ElementValuePair member : desc.elementValues())
                if ("value".equals(member.element().name())) {
                    Arrays.stream((AnnotationValue[]) member.value().value())
                        .map(AnnotationValue::value)
                        .map(AnnotationDesc.class::cast)
                        .forEach(ad -> result.add(new LinkModel(ad)));
                }
            return result;
        }
    }
}
package org.wso2.maven.axis2.aar;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.axis2.maven2.aar.FileSet;
import org.apache.maven.archiver.MavenArchiveConfiguration;
import org.apache.maven.archiver.MavenArchiver;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.artifact.resolver.filter.ScopeArtifactFilter;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.codehaus.plexus.archiver.ArchiverException;
import org.codehaus.plexus.archiver.jar.JarArchiver;
import org.codehaus.plexus.archiver.jar.ManifestException;
import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.FileUtils;

/**
 * Packages the project's classes, services.xml, optional WSDL, configured file sets
 * and (optionally) runtime dependency jars into an Axis2 AAR archive.
 *
 * @goal aar
 * @phase package
 *
 */
public class CSAarMojo extends AbstractMojo {

    /**
     * The projects base directory.
     *
     * @parameter expression="${project.basedir}"
     * @required
     * @readonly
     */
    protected File baseDir;

    /**
     * The maven project.
     *
     * @parameter expression="${project}"
     * @required
     * @readonly
     */
    protected MavenProject project;

    /**
     * The directory containing generated classes.
     *
     * @parameter expression="${project.build.outputDirectory}"
     * @required
     */
    private File classesDirectory;

    /**
     * The directory where the aar is built.
     *
     * @parameter expression="${project.build.directory}/aar"
     * @required
     */
    protected File aarDirectory;

    /**
     * The location of the services.xml file. If it is present in the META-INF directory in
     * src/main/resources with that name then it will automatically be included. Otherwise this
     * parameter must be set.
     *
     * @parameter
     */
    private File servicesXmlFile;

    /**
     * The location of the WSDL file, if any. By default, no WSDL file is added and it is assumed,
     * that Axis 2 will automatically generate a WSDL file.
     *
     * @parameter
     */
    private File wsdlFile;

    /**
     * Name, to which the wsdl file shall be mapped. By default, the name will be computed from the
     * files path by removing the directory.
     *
     * @parameter default-value="service.wsdl"
     */
    private String wsdlFileName;

    /**
     * Additional file sets, which are being added to the archive.
     *
     * @parameter
     */
    private FileSet[] fileSets;

    /**
     * Whether the dependency jars should be included in the aar
     *
     * @parameter expression="${includeDependencies}" default-value="true"
     */
    private boolean includeDependencies;

    /**
     * The directory for the generated aar.
     *
     * @parameter expression="${project.build.directory}"
     * @required
     */
    private String outputDirectory;

    /**
     * The name of the generated aar.
     *
     * @parameter expression="${project.build.finalName}"
     * @required
     */
    private String aarName;

    /**
     * The Jar archiver.
     *
     * @parameter expression="${component.org.codehaus.plexus.archiver.Archiver#jar}"
     * @required
     */
    private JarArchiver jarArchiver;

    /**
     * The maven archive configuration to use.
     *
     * @parameter
     */
    private MavenArchiveConfiguration archive = new MavenArchiveConfiguration();

    /**
     * Classifier to add to the artifact generated. If given, the artifact will be an attachment
     * instead.
     *
     * @parameter
     */
    private String classifier;

    /**
     * Whether this is the main artifact being built. Set to <code>false</code> if you don't want to
     * install or deploy it to the local repository instead of the default one in an execution.
     *
     * @parameter expression="${primaryArtifact}" default-value="true"
     */
    private boolean primaryArtifact;

    /** @component */
    private MavenProjectHelper projectHelper;

    /**
     * Executes the AarMojo on the current project.
     *
     * @throws MojoExecutionException if an error occured while building the webapp
     */
    public void execute() throws MojoExecutionException {

        File aarFile = new File(outputDirectory, aarName + ".aar");

        try {
            performPackaging(aarFile);
        } catch (Exception e) {
            throw new MojoExecutionException("Error assembling aar", e);
        }
    }

    /**
     * Generates the aar.
     *
     * @param aarFile the target aar file
     * @throws IOException
     * @throws ArchiverException
     * @throws ManifestException
     * @throws DependencyResolutionRequiredException
     */
    private void performPackaging(File aarFile)
            throws IOException, ArchiverException, ManifestException,
            DependencyResolutionRequiredException, MojoExecutionException {

        buildExplodedAar();

        // generate aar file
        getLog().info("Generating aar " + aarFile.getAbsolutePath());
        MavenArchiver archiver = new MavenArchiver();
        archiver.setArchiver(jarArchiver);
        archiver.setOutputFile(aarFile);
        jarArchiver.addDirectory(aarDirectory);

        // create archive
        archiver.createArchive(project, archive);

        if (classifier != null) {
            projectHelper.attachArtifact(project, "aar", classifier, aarFile);
        } else {
            Artifact artifact = project.getArtifact();
            if (primaryArtifact) {
                artifact.setFile(aarFile);
            } else if (artifact.getFile() == null || artifact.getFile().isDirectory()) {
                artifact.setFile(aarFile);
            } else {
                projectHelper.attachArtifact(project, "aar", aarFile);
            }
        }
    }

    /**
     * Builds the exploded AAR file.
     *
     * @throws MojoExecutionException
     */
    protected void buildExplodedAar() throws MojoExecutionException {
        getLog().debug("Exploding aar...");

        aarDirectory.mkdirs();
        getLog().debug("Assembling aar " + project.getArtifactId() + " in " + aarDirectory);

        try {
            final File metaInfDir = new File(aarDirectory, "META-INF");
            final File libDir = new File(aarDirectory, "lib");
            final File servicesFileTarget = new File(metaInfDir, "services.xml");
            boolean existsBeforeCopyingClasses = servicesFileTarget.exists();

            // Resolve the target WSDL name: explicit wsdlFileName wins, otherwise fall
            // back to the source file's own name.
            String wsdlName = wsdlFileName;
            if (wsdlName == null && wsdlFile != null) {
                wsdlName = wsdlFile.getName();
            }
            File wsdlFileTarget = null;
            if (wsdlFile != null) {
                // FIX: use the computed wsdlName (previously wsdlFileName was used directly,
                // which ignored the fallback above and broke when only wsdlFile was set).
                wsdlFileTarget = new File(metaInfDir, wsdlName);
            }
            boolean wsdlExistsBeforeCopyingClasses =
                    wsdlFileTarget == null ? false : wsdlFileTarget.exists();

            if (classesDirectory.exists() && (!classesDirectory.equals(aarDirectory))) {
                FileUtils.copyDirectoryStructure(classesDirectory, aarDirectory);
            }

            if (fileSets != null) {
                for (int i = 0; i < fileSets.length; i++) {
                    FileSet fileSet = fileSets[i];
                    copyFileSet(fileSet, aarDirectory);
                }
            }

            copyMetaInfFile(servicesXmlFile, servicesFileTarget, existsBeforeCopyingClasses,
                    "services.xml file");
            copyMetaInfFile(wsdlFile, wsdlFileTarget, wsdlExistsBeforeCopyingClasses, "WSDL file");

            if (includeDependencies) {
                Set artifacts = project.getArtifacts();

                List duplicates = findDuplicates(artifacts);

                for (Iterator iter = artifacts.iterator(); iter.hasNext();) {
                    Artifact artifact = (Artifact)iter.next();
                    String targetFileName = getDefaultFinalName(artifact);

                    getLog().debug("Processing: " + targetFileName);

                    // Disambiguate artifacts that would otherwise collide on file name.
                    if (duplicates.contains(targetFileName)) {
                        getLog().debug("Duplicate found: " + targetFileName);
                        targetFileName = artifact.getGroupId() + "-" + targetFileName;
                        getLog().debug("Renamed to: " + targetFileName);
                    }

                    // TODO: utilise appropriate methods from project builder
                    ScopeArtifactFilter filter = new ScopeArtifactFilter(Artifact.SCOPE_RUNTIME);
                    if (!artifact.isOptional() && filter.include(artifact)) {
                        String type = artifact.getType();
                        if ("jar".equals(type)) {
                            copyFileIfModified(artifact.getFile(), new File(libDir, targetFileName));
                        }
                    }
                }
            }
        } catch (IOException e) {
            throw new MojoExecutionException("Could not explode aar...", e);
        }
    }

    /**
     * Searches a set of artifacts for duplicate filenames and returns a list of duplicates.
     *
     * @param artifacts set of artifacts
     * @return List of duplicated artifacts
     */
    private List findDuplicates(Set artifacts) {
        List duplicates = new ArrayList();
        List identifiers = new ArrayList();
        for (Iterator iter = artifacts.iterator(); iter.hasNext();) {
            Artifact artifact = (Artifact)iter.next();
            String candidate = getDefaultFinalName(artifact);
            if (identifiers.contains(candidate)) {
                duplicates.add(candidate);
            } else {
                identifiers.add(candidate);
            }
        }
        return duplicates;
    }

    /**
     * Converts the filename of an artifact to artifactId-version.type format.
     *
     * @param artifact
     * @return converted filename of the artifact
     */
    private String getDefaultFinalName(Artifact artifact) {
        return artifact.getArtifactId() + "-" + artifact.getVersion() + "." +
                artifact.getArtifactHandler().getExtension();
    }

    /**
     * Copy file from source to destination only if source timestamp is later than the destination
     * timestamp. The directories up to <code>destination</code> will be created if they don't
     * already exist. <code>destination</code> will be overwritten if it already exists.
     *
     * @param source An existing non-directory <code>File</code> to copy bytes from.
     * @param destination A non-directory <code>File</code> to write bytes to (possibly
     *                    overwriting).
     * @throws IOException if <code>source</code> does not exist,
     *                     <code>destination</code> cannot be written to, or an IO
     *                     error occurs during copying.
     * @throws java.io.FileNotFoundException if <code>destination</code> is a directory
     *                                       <p/>
     *                                       TO DO: Remove this method when Maven moves to
     *                                       plexus-utils version 1.4
     */
    private void copyFileIfModified(File source, File destination) throws IOException {
        // TO DO: Remove this method and use the method in WarFileUtils when Maven 2 changes
        // to plexus-utils 1.2.
        if (destination.lastModified() < source.lastModified()) {
            FileUtils.copyFile(source.getCanonicalFile(), destination);
            // preserve timestamp
            destination.setLastModified(source.lastModified());
        }
    }

    /**
     * Copies one configured file set into the target directory, honouring the set's
     * includes/excludes and optional output sub-directory.
     */
    private void copyFileSet(FileSet fileSet, File targetDirectory) throws IOException {
        File dir = fileSet.getDirectory();
        if (dir == null) {
            dir = baseDir;
        }
        File targetDir = targetDirectory;
        if (fileSet.getOutputDirectory() != null) {
            targetDir = new File(targetDir, fileSet.getOutputDirectory());
        }
        // Copying a directory onto itself would be a no-op (or worse); skip it.
        if (targetDir.equals(dir)) {
            return;
        }

        DirectoryScanner ds = new DirectoryScanner();
        ds.setBasedir(dir);
        if (!fileSet.isSkipDefaultExcludes()) {
            ds.addDefaultExcludes();
        }
        final String[] excludes = fileSet.getExcludes();
        if (excludes != null) {
            ds.setExcludes(excludes);
        }
        final String[] includes = fileSet.getIncludes();
        if (includes != null) {
            ds.setIncludes(includes);
        }
        ds.scan();
        String[] files = ds.getIncludedFiles();
        for (int i = 0; i < files.length; i++) {
            File sourceFile = new File(dir, files[i]);
            File targetFile = new File(targetDir, files[i]);
            FileUtils.copyFile(sourceFile, targetFile);
        }
    }

    /**
     * Copies a configured META-INF file (services.xml or WSDL) to its target location,
     * warning when the copy overwrites a file that came from the classpath.
     *
     * @param pSource source file configured by the user; may be null (nothing to copy)
     * @param pTarget target location inside META-INF; may be null
     * @param pExistsBeforeCopying whether the target already existed before classes were copied
     * @param pDescription human-readable description used in error/warning messages
     * @throws MojoExecutionException if the configured source file does not exist
     */
    private void copyMetaInfFile(final File pSource, final File pTarget,
                                 final boolean pExistsBeforeCopying, final String pDescription)
            throws MojoExecutionException, IOException {
        if (pSource != null && pTarget != null) {
            if (!pSource.exists()) {
                throw new MojoExecutionException(
                        "The configured " + pDescription + " could not be found at " + pSource);
            }

            if (!pExistsBeforeCopying && pTarget.exists()) {
                getLog().warn("The configured " + pDescription +
                        " overwrites another file from the classpath.");
            }

            FileUtils.copyFile(pSource, pTarget);
        }
    }
}
package org.wisdom.orientdb.runtime;

import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.query.OQuery;
import com.orientechnologies.orient.object.db.OObjectDatabaseTx;
import org.wisdom.api.model.EntityFilter;
import org.wisdom.api.model.FluentTransaction;
import org.wisdom.api.model.HasBeenRollBackException;
import org.wisdom.api.model.TransactionManager;
import org.wisdom.orientdb.object.OrientDbCrud;
import org.wisdom.orientdb.object.OrientDbRepository;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;

/**
 * CRUD Service Implementation using Orientdb ODatabaseObject.
 * <p>
 * Every public operation acquires a database connection from the transaction
 * manager (reusing the thread-bound one when inside a transaction) and releases
 * it in a {@code finally} block so connections always return to the pool.
 * <p>
 * created: 5/9/14.
 *
 * @author barjo
 */
class OrientDbCrudService<T> implements OrientDbCrud<T, String> {

    /** Manages the thread-bound transaction and the underlying connection pool. */
    private final OrientDbTransactionManager txManager;

    /** Repository this CRUD service belongs to. */
    private final OrientDbRepository repo;

    /** The entity class handled by this service. */
    private final Class<T> entityClass;

    OrientDbCrudService(OrientDbRepositoryImpl repo, Class<T> entityClass) {
        this.repo = repo;
        this.txManager = new OrientDbTransactionManager(repo);
        this.entityClass = entityClass;
    }

    /**
     * Ask the Transaction manager to give us a db. If we are in a transaction running on the
     * local thread, the existing db is returned, otherwise a db is retrieved from the pool.
     *
     * @return An OObjectDatabaseTx db
     */
    private OObjectDatabaseTx acquireDb() {
        return txManager.acquireDb();
    }

    /**
     * Release the database connection to the pool.
     */
    private void releaseDb() {
        txManager.releaseDb();
    }

    @Override
    public Class<T> getEntityClass() {
        return entityClass;
    }

    @Override
    public Class<String> getIdClass() {
        return String.class;
    }

    @Override
    public T delete(T t) {
        try {
            acquireDb().delete(t);
        } finally {
            releaseDb();
        }
        return t;
    }

    @Override
    public void delete(String id) {
        ORecordId rid = new ORecordId(id);
        try {
            acquireDb().delete(rid);
        } finally {
            releaseDb();
        }
    }

    @Override
    public Iterable<T> delete(Iterable<T> ts) {
        OObjectDatabaseTx db = acquireDb();
        List<T> deleted = new ArrayList<>();
        try {
            for (T todel : ts) {
                deleted.add((T) db.delete(todel));
            }
        } finally {
            releaseDb();
        }
        return deleted;
    }

    @Override
    public T save(T t) {
        try {
            return acquireDb().save(t);
        } finally {
            releaseDb();
        }
    }

    @Override
    public Iterable<T> save(Iterable<T> ts) {
        List<T> saved = new ArrayList<>();
        OObjectDatabaseTx db = acquireDb();
        try {
            for (T tosave : ts) {
                saved.add((T) db.save(tosave));
            }
        } finally {
            releaseDb();
        }
        return saved;
    }

    @Override
    public T findOne(String id) {
        try {
            return acquireDb().load(new ORecordId(id));
        } finally {
            releaseDb();
        }
    }

    @Override
    public T findOne(final EntityFilter<T> tEntityFilter) {
        OObjectDatabaseTx db = acquireDb();
        try {
            // Linear scan over the class cluster; returns the first accepted entity.
            for (T entity : db.browseClass(entityClass)) {
                if (tEntityFilter.accept(entity)) {
                    return entity;
                }
            }
        } finally {
            releaseDb();
        }
        return null;
    }

    @Override
    public boolean exists(String id) {
        try {
            return acquireDb().existsUserObjectByRID(new ORecordId(id));
        } finally {
            releaseDb();
        }
    }

    @Override
    public Iterable<T> findAll() {
        try {
            // NOTE(review): browseClass returns a lazy iterable; callers iterate it after
            // the connection is released here — confirm this is safe with the pool in use.
            return acquireDb().browseClass(entityClass);
        } finally {
            releaseDb();
        }
    }

    @Override
    public Iterable<T> findAll(Iterable<String> ids) {
        OObjectDatabaseTx db = acquireDb();
        List<T> entities = new ArrayList<>();
        try {
            for (String id : ids) {
                entities.add((T) db.load(new ORecordId(id)));
            }
        } finally {
            releaseDb();
        }
        return entities;
    }

    @Override
    public Iterable<T> findAll(EntityFilter<T> tEntityFilter) {
        OObjectDatabaseTx db = acquireDb();
        List<T> entities = new ArrayList<>();
        try {
            for (T entity : db.browseClass(entityClass)) {
                if (tEntityFilter.accept(entity)) {
                    entities.add(entity);
                }
            }
        } finally {
            releaseDb();
        }
        return entities;
    }

    @Override
    public List<T> query(OQuery<T> command, Object... args) {
        try {
            return acquireDb().query(command, args);
        } finally {
            releaseDb();
        }
    }

    @Override
    public <RET> RET execute(OCommandRequest command, Object... args) {
        try {
            return acquireDb().command(command).execute(args);
        } finally {
            releaseDb();
        }
    }

    @Override
    public T load(T entity) {
        try {
            return acquireDb().load(entity);
        } finally {
            releaseDb();
        }
    }

    @Override
    public T load(T entity, String fetchPlan) {
        try {
            return acquireDb().load(entity, fetchPlan);
        } finally {
            releaseDb();
        }
    }

    @Override
    public void attach(T entity) {
        try {
            acquireDb().attach(entity);
        } finally {
            releaseDb();
        }
    }

    @Override
    public T detach(T attachedEntity) {
        try {
            return acquireDb().detach(attachedEntity);
        } finally {
            releaseDb();
        }
    }

    @Override
    public T detach(T entity, Boolean returnNonProxyInstance) {
        try {
            return acquireDb().detach(entity, returnNonProxyInstance);
        } finally {
            releaseDb();
        }
    }

    @Override
    public long count() {
        // BUGFIX: the connection acquired here was never released, leaking one pooled
        // connection per call. Release it in a finally block like every other method.
        try {
            return acquireDb().countClass(entityClass.getSimpleName());
        } finally {
            releaseDb();
        }
    }

    @Override
    public OrientDbRepository getRepository() {
        return repo;
    }

    @Override
    public void executeTransactionalBlock(final Runnable runnable) throws HasBeenRollBackException {
        txManager.begin();
        try {
            runnable.run();
            txManager.commit();
        } catch (Exception e) {
            // Any failure rolls the transaction back and is reported to the caller.
            txManager.rollback();
            throw new HasBeenRollBackException(e);
        } finally {
            txManager.close();
        }
    }

    @Override
    public <A> A executeTransactionalBlock(Callable<A> aCallable) throws HasBeenRollBackException {
        txManager.begin();
        try {
            A ret = aCallable.call();
            txManager.commit();
            return ret;
        } catch (Exception e) {
            txManager.rollback();
            throw new HasBeenRollBackException(e);
        } finally {
            txManager.close();
        }
    }

    @Override
    public TransactionManager getTransactionManager() {
        return txManager;
    }

    @Override
    public <R> FluentTransaction<R> transaction() {
        return FluentTransaction.transaction(txManager);
    }

    @Override
    public <R> FluentTransaction.Intermediate transaction(Callable<R> callable) {
        return FluentTransaction.transaction(txManager).with(callable);
    }
}
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <p>
 */
package org.olat.search.service.document.file;

import java.io.IOException;
import java.util.List;

import org.apache.lucene.document.Document;
import org.olat.core.commons.services.search.SearchModule;
import org.olat.core.logging.OLog;
import org.olat.core.logging.Tracing;
import org.olat.core.util.vfs.VFSLeaf;
import org.olat.search.service.SearchResourceContext;

/**
 * Lucene document mapper.
 * <p>
 * Supported file-types:
 * <ul>
 * <li>pdf => PDF document</li>
 * <li>xls => Excel document</li>
 * <li>doc => Word document</li>
 * <li>ppt => Power-point document</li>
 * <li>odt, ods, odp, odf, odg => OpenDocument document</li>
 * <li>htm, html, xhtml, xml => HTML document</li>
 * <li>txt, tex, README, csv => Text document</li>
 * </ul>
 *
 * @author Christian Guretzki
 */
public class FileDocumentFactory {

    private static OLog log = Tracing.createLoggerFor(FileDocumentFactory.class);
    static FileDocumentFactory instance;
    private static boolean pptFileEnabled;
    private static boolean excelFileEnabled;

    private final static String PDF_SUFFIX = "pdf";
    private final static String EXCEL_SUFFIX = "xls";
    private final static String WORD_SUFFIX = "doc";
    private final static String POWERPOINT_SUFFIX = "ppt";
    private final static String EXCEL_X_SUFFIX = "xlsx";
    private final static String WORD_X_SUFFIX = "docx";
    private final static String POWERPOINT_X_SUFFIX = "pptx";
    private final static String OD_TEXT_SUFFIX = "odt";
    private final static String OD_SPREADSHEET_SUFFIX = "ods";
    private final static String OD_PRESENTATION_SUFFIX = "odp";
    private final static String OD_FORMULA_SUFFIX = "odf";
    private final static String OD_GRAPHIC_SUFFIX = "odg";
    // Multi-suffix lists: space-separated tokens, matched exactly (see isSuffixIn).
    private final static String HTML_SUFFIX = "htm html xhtml";
    private final static String XML_SUFFIX = "xml";
    private final static String TEXT_SUFFIX = "txt tex readme csv";

    // as a special parser;
    private static final String IMS_MANIFEST_FILE = "imsmanifest.xml";

    private static List<String> checkFileSizeSuffixes;
    private static long maxFileSize;
    private int excludedFileSizeCount = 0;
    private final List<String> fileBlackList;

    /**
     * [used by spring]
     *
     * @param searchModule module providing indexer configuration (blacklist, size limits, flags)
     */
    public FileDocumentFactory(final SearchModule searchModule) {
        instance = this;
        fileBlackList = searchModule.getFileBlackList();
        pptFileEnabled = searchModule.getPptFileEnabled();
        if (!pptFileEnabled) {
            log.info("PPT files are disabled in indexer.");
        }
        excelFileEnabled = searchModule.getExcelFileEnabled();
        if (!excelFileEnabled) {
            log.info("Excel files are disabled in indexer.");
        }
        checkFileSizeSuffixes = searchModule.getFileSizeSuffixes();
        maxFileSize = searchModule.getMaxFileSize();
    }

    /**
     * Returns true when {@code suffix} exactly matches one of the space-separated
     * tokens in {@code supportedSuffixes}.
     * <p>
     * BUGFIX: the previous code used {@code indexOf} in both directions (e.g.
     * {@code EXCEL_SUFFIX.indexOf(suffix) >= 0}), so a suffix like "x" or "m"
     * matched "xls" or "htm html xhtml" and routed files to the wrong parser.
     */
    private static boolean isSuffixIn(final String supportedSuffixes, final String suffix) {
        for (final String candidate : supportedSuffixes.split(" ")) {
            if (candidate.equals(suffix)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Creates the Lucene document for the given leaf, dispatching on the file suffix.
     *
     * @param leafResourceContext search context of the resource
     * @param leaf                the file to index
     * @return the Lucene document, or null when the file type is disabled in the indexer
     * @throws DocumentNotImplementedException when no suffix can be detected
     */
    public static Document createDocument(final SearchResourceContext leafResourceContext, final VFSLeaf leaf)
            throws DocumentNotImplementedException, IOException, DocumentException, DocumentAccessException {
        final String fileName = leaf.getName();
        final String suffix = getSuffix(fileName);
        if (log.isDebug()) {
            log.debug("suffix=" + suffix);
        }

        if (PDF_SUFFIX.equals(suffix)) {
            return PdfDocument.createDocument(leafResourceContext, leaf);
        }
        if (isSuffixIn(HTML_SUFFIX, suffix)) {
            return HtmlDocument.createDocument(leafResourceContext, leaf);
        }
        if (XML_SUFFIX.equals(suffix)) {
            // IMS manifests get a dedicated metadata parser.
            if (IMS_MANIFEST_FILE.equals(fileName)) {
                return IMSMetadataDocument.createDocument(leafResourceContext, leaf);
            }
            return XmlDocument.createDocument(leafResourceContext, leaf);
        }
        if (isSuffixIn(TEXT_SUFFIX, suffix)) {
            return TextDocument.createDocument(leafResourceContext, leaf);
        }
        // microsoft openxml
        if (WORD_X_SUFFIX.equals(suffix)) {
            return WordOOXMLDocument.createDocument(leafResourceContext, leaf);
        }
        if (EXCEL_X_SUFFIX.equals(suffix)) {
            if (excelFileEnabled) {
                return ExcelOOXMLDocument.createDocument(leafResourceContext, leaf);
            }
            return null;
        }
        if (POWERPOINT_X_SUFFIX.equals(suffix)) {
            if (pptFileEnabled) {
                return PowerPointOOXMLDocument.createDocument(leafResourceContext, leaf);
            }
            return null;
        }
        // microsoft
        if (WORD_SUFFIX.equals(suffix)) {
            return WordDocument.createDocument(leafResourceContext, leaf);
        }
        if (POWERPOINT_SUFFIX.equals(suffix)) {
            if (pptFileEnabled) {
                return PowerPointDocument.createDocument(leafResourceContext, leaf);
            }
            return null;
        }
        if (EXCEL_SUFFIX.equals(suffix)) {
            if (excelFileEnabled) {
                return ExcelDocument.createDocument(leafResourceContext, leaf);
            }
            return null;
        }
        // open document
        if (OD_TEXT_SUFFIX.equals(suffix) || OD_SPREADSHEET_SUFFIX.equals(suffix)
                || OD_PRESENTATION_SUFFIX.equals(suffix) || OD_FORMULA_SUFFIX.equals(suffix)
                || OD_GRAPHIC_SUFFIX.equals(suffix)) {
            return OpenDocument.createDocument(leafResourceContext, leaf);
        }
        // Unknown types are still indexed via their metadata.
        return UnkownDocument.createDocument(leafResourceContext, leaf);
    }

    /**
     * Extracts the lower-cased suffix after the last '.' of the file name.
     *
     * @throws DocumentNotImplementedException when the name has no '.' or ends with one
     */
    private static String getSuffix(final String fileName) throws DocumentNotImplementedException {
        final int dotpos = fileName.lastIndexOf('.');
        if (dotpos < 0 || dotpos == fileName.length() - 1) {
            if (log.isDebug()) {
                log.debug("I cannot detect the document suffix (marked with '.').");
            }
            throw new DocumentNotImplementedException("I cannot detect the document suffix (marked with '.') for " + fileName);
        }
        final String suffix = fileName.substring(dotpos + 1).toLowerCase();
        return suffix;
    }

    /**
     * Check if certain file is supported.
     *
     * @param leaf the file to check
     * @return false for hidden files, blacklisted names, or oversized files of a
     *         size-checked suffix; true otherwise (all other files are indexed via metadata)
     */
    public boolean isFileSupported(final VFSLeaf leaf) {
        final String fileName = leaf.getName();
        if (fileName != null && fileName.startsWith(".")) {
            // don't index all mac os x hidden files
            return false;
        }
        String suffix;
        try {
            suffix = getSuffix(fileName);
        } catch (final DocumentNotImplementedException e) {
            return false;
        }
        // 1. Check if file is not on fileBlackList
        if (fileBlackList.contains(fileName)) {
            // File name is on blacklist
            return false;
        }
        // 2. Check for certain file-type the file size
        if (checkFileSizeSuffixes.contains(suffix)) {
            if ((maxFileSize != 0) && (leaf.getSize() > maxFileSize)) {
                log.info("File too big, exlude from search index. filename=" + fileName);
                excludedFileSizeCount++;
                return false;
            }
        }
        // All remaining files are indexed (at least their metadata).
        return true;
    }

    public int getExcludedFileSizeCount() {
        return excludedFileSizeCount;
    }

    public void resetExcludedFileSizeCount() {
        excludedFileSizeCount = 0;
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.compiled;

import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.impl.InheritanceImplUtil;
import com.intellij.psi.impl.PsiClassImplUtil;
import com.intellij.psi.impl.PsiSuperMethodImplUtil;
import com.intellij.psi.impl.java.stubs.JavaStubElementTypes;
import com.intellij.psi.impl.java.stubs.PsiTypeParameterStub;
import com.intellij.psi.impl.light.LightEmptyImplementsList;
import com.intellij.psi.impl.meta.MetaRegistry;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.meta.PsiMetaData;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.util.Collection;
import java.util.List;

/**
 * Stub-backed PSI for a type parameter of a compiled (.class) declaration.
 * A type parameter is itself a {@link PsiTypeParameter} (a degenerate PsiClass),
 * so most PsiClass queries below return empty/null results or delegate to
 * {@link PsiClassImplUtil}; the only real structure is the extends-bound list
 * read from the stub. Compiled elements are read-only: mutators throw
 * {@link IncorrectOperationException}.
 *
 * @author max
 */
public class ClsTypeParameterImpl extends ClsRepositoryPsiElement<PsiTypeParameterStub> implements PsiTypeParameter {
  // A type parameter never has an implements clause; share one empty light list.
  private final LightEmptyImplementsList myLightEmptyImplementsList;

  public ClsTypeParameterImpl(@NotNull PsiTypeParameterStub stub) {
    super(stub);
    myLightEmptyImplementsList = new LightEmptyImplementsList(getManager());
  }

  // Type parameters have no qualified name.
  @Override
  public String getQualifiedName() {
    return null;
  }

  @Override
  public boolean isInterface() {
    return false;
  }

  @Override
  public boolean isAnnotationType() {
    return false;
  }

  @Override
  public boolean isEnum() {
    return false;
  }

  @Override
  @NotNull
  public PsiField[] getFields() {
    return PsiField.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiMethod[] getMethods() {
    return PsiMethod.EMPTY_ARRAY;
  }

  // Member lookups delegate to the shared PsiClassImplUtil machinery so that
  // members inherited from the bounds are still found.
  @Override
  public PsiMethod findMethodBySignature(PsiMethod patternMethod, boolean checkBases) {
    return PsiClassImplUtil.findMethodBySignature(this, patternMethod, checkBases);
  }

  @Override
  @NotNull
  public PsiMethod[] findMethodsBySignature(PsiMethod patternMethod, boolean checkBases) {
    return PsiClassImplUtil.findMethodsBySignature(this, patternMethod, checkBases);
  }

  @Override
  public PsiField findFieldByName(String name, boolean checkBases) {
    return PsiClassImplUtil.findFieldByName(this, name, checkBases);
  }

  @Override
  @NotNull
  public PsiMethod[] findMethodsByName(String name, boolean checkBases) {
    return PsiClassImplUtil.findMethodsByName(this, name, checkBases);
  }

  @Override
  @NotNull
  public List<Pair<PsiMethod, PsiSubstitutor>> findMethodsAndTheirSubstitutorsByName(String name, boolean checkBases) {
    return PsiClassImplUtil.findMethodsAndTheirSubstitutorsByName(this, name, checkBases);
  }

  @Override
  @NotNull
  public List<Pair<PsiMethod, PsiSubstitutor>> getAllMethodsAndTheirSubstitutors() {
    return PsiClassImplUtil.getAllWithSubstitutorsByMap(this, PsiClassImplUtil.MemberType.METHOD);
  }

  @Override
  public PsiClass findInnerClassByName(String name, boolean checkBases) {
    return PsiClassImplUtil.findInnerByName(this, name, checkBases);
  }

  @Override
  public PsiTypeParameterList getTypeParameterList() {
    return null;
  }

  @Override
  public boolean hasTypeParameters() {
    return false;
  }

  // very special method! The scope is the owner declaration: parent is the
  // type-parameter list, whose parent is the class/method that declares it.
  @Override
  public PsiElement getScope() {
    return getParent().getParent();
  }

  @Override
  public boolean isInheritorDeep(PsiClass baseClass, PsiClass classToByPass) {
    return InheritanceImplUtil.isInheritorDeep(this, baseClass, classToByPass);
  }

  @Override
  public boolean isInheritor(@NotNull PsiClass baseClass, boolean checkDeep) {
    return InheritanceImplUtil.isInheritor(this, baseClass, checkDeep);
  }

  @Override
  public PsiIdentifier getNameIdentifier() {
    return null;
  }

  @Override
  public boolean processDeclarations(@NotNull PsiScopeProcessor processor, @NotNull ResolveState state, PsiElement lastParent, @NotNull PsiElement place) {
    return PsiClassImplUtil.processDeclarationsInClass(this, processor, state, null, lastParent, place, PsiUtil.getLanguageLevel(place), false);
  }

  // Name comes straight from the stub.
  @Override
  public String getName() {
    return getStub().getName();
  }

  @Override
  public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
    throw new IncorrectOperationException("Cannot change compiled classes");
  }

  @Override
  @NotNull
  public PsiMethod[] getConstructors() {
    return PsiMethod.EMPTY_ARRAY;
  }

  @Override
  public PsiDocComment getDocComment() {
    return null;
  }

  @Override
  public boolean isDeprecated() {
    return false;
  }

  // The extends list holds the type parameter's bounds, materialized from the
  // stub tree. NOTE(review): assumes the EXTENDS_BOUND_LIST child stub always
  // exists for a compiled type parameter — confirm; otherwise this NPEs.
  @Override
  @NotNull
  public PsiReferenceList getExtendsList() {
    return getStub().findChildStubByType(JavaStubElementTypes.EXTENDS_BOUND_LIST).getPsi();
  }

  @Override
  public PsiReferenceList getImplementsList() {
    return myLightEmptyImplementsList;
  }

  @Override
  @NotNull
  public PsiClassType[] getExtendsListTypes() {
    return getExtendsList().getReferencedTypes();
  }

  @Override
  @NotNull
  public PsiClassType[] getImplementsListTypes() {
    return PsiClassType.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiClass[] getInnerClasses() {
    return PsiClass.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiField[] getAllFields() {
    return PsiField.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiMethod[] getAllMethods() {
    return PsiMethod.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiClass[] getAllInnerClasses() {
    return PsiClass.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiClassInitializer[] getInitializers() {
    return PsiClassInitializer.EMPTY_ARRAY;
  }

  @Override
  @NotNull
  public PsiTypeParameter[] getTypeParameters() {
    return PsiTypeParameter.EMPTY_ARRAY;
  }

  @Override
  public PsiClass getSuperClass() {
    return PsiClassImplUtil.getSuperClass(this);
  }

  @Override
  public PsiClass[] getInterfaces() {
    return PsiClassImplUtil.getInterfaces(this);
  }

  @Override
  @NotNull
  public PsiClass[] getSupers() {
    return PsiClassImplUtil.getSupers(this);
  }

  @Override
  @NotNull
  public PsiClassType[] getSuperTypes() {
    return PsiClassImplUtil.getSuperTypes(this);
  }

  @Override
  public PsiClass getContainingClass() {
    return null;
  }

  @Override
  @NotNull
  public Collection<HierarchicalMethodSignature> getVisibleSignatures() {
    return PsiSuperMethodImplUtil.getVisibleSignatures(this);
  }

  @Override
  public PsiModifierList getModifierList() {
    return null;
  }

  @Override
  public boolean hasModifierProperty(@NotNull String name) {
    return false;
  }

  @Override
  public PsiJavaToken getLBrace() {
    return null;
  }

  @Override
  public PsiJavaToken getRBrace() {
    return null;
  }

  @Override
  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof JavaElementVisitor) {
      ((JavaElementVisitor)visitor).visitTypeParameter(this);
    }
    else {
      visitor.visitElement(this);
    }
  }

  @NonNls
  public String toString() {
    return "PsiTypeParameter:" + getName();
  }

  // Renders the decompiled text: "T extends A & B".
  @Override
  public void appendMirrorText(int indentLevel, @NotNull StringBuilder buffer) {
    buffer.append(getName());
    PsiJavaCodeReferenceElement[] bounds = getExtendsList().getReferenceElements();
    if (bounds.length > 0) {
      buffer.append(" extends ");
      for (int i = 0; i < bounds.length; i++) {
        if (i > 0) buffer.append(" & ");
        buffer.append(bounds[i].getCanonicalText());
      }
    }
  }

  // Binds this compiled element to its decompiled source mirror, including bounds.
  @Override
  public void setMirror(@NotNull TreeElement element) throws InvalidMirrorException {
    setMirrorCheckingType(element, null);
    setMirror(getExtendsList(), SourceTreeToPsiMap.<PsiTypeParameter>treeToPsiNotNull(element).getExtendsList());
  }

  @Override
  @NotNull
  public PsiElement[] getChildren() {
    return PsiElement.EMPTY_ARRAY;
  }

  @Override
  public PsiTypeParameterListOwner getOwner() {
    return (PsiTypeParameterListOwner)getParent().getParent();
  }

  // Position of this parameter within the owner's type-parameter list.
  @Override
  public int getIndex() {
    final PsiTypeParameterStub stub = getStub();
    return stub.getParentStub().getChildrenStubs().indexOf(stub);
  }

  public PsiMetaData getMetaData() {
    return MetaRegistry.getMeta(this);
  }

  @Override
  public Icon getElementIcon(final int flags) {
    return PsiClassImplUtil.getClassIcon(flags, this);
  }

  @Override
  public boolean isEquivalentTo(final PsiElement another) {
    return PsiClassImplUtil.isClassEquivalentTo(this, another);
  }

  @Override
  @NotNull
  public SearchScope getUseScope() {
    return PsiClassImplUtil.getClassUseScope(this);
  }

  // TODO: parse annotations (type-use annotations on bounds are not yet exposed)
  @Override
  @NotNull
  public PsiAnnotation[] getAnnotations() {
    return PsiAnnotation.EMPTY_ARRAY;
  }

  @Override
  public PsiAnnotation findAnnotation(@NotNull @NonNls String qualifiedName) {
    return null;
  }

  @Override
  @NotNull
  public PsiAnnotation addAnnotation(@NotNull @NonNls String qualifiedName) {
    throw new IncorrectOperationException();
  }

  @Override
  @NotNull
  public PsiAnnotation[] getApplicableAnnotations() {
    return getAnnotations();
  }
}
package uk.co.benjiweber.expressions.exceptions;

import org.junit.Test;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;

import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static uk.co.benjiweber.expressions.exceptions.Exceptions.unchecked;
import static uk.co.benjiweber.expressions.exceptions.Exceptions.wrappingAll;
import static uk.co.benjiweber.expressions.exceptions.Exceptions.wrappingChecked;
import static uk.co.benjiweber.expressions.exceptions.Example.Do;

/**
 * Tests for the {@code Exceptions} helpers: converting checked exceptions to
 * unchecked ones, wrapping exceptions in caller-chosen types, and carrying
 * exceptional outcomes through streams/futures via {@code Result}.
 */
public class ExceptionsTest {

    @Test
    public void checked_to_unchecked() {
        String foo = unchecked(() -> Example.methodThatThrowsCheckedException(Do.NOT_THROW));
        assertEquals("foo", foo);
    }

    @Test
    public void checked_to_unchecked_method_reference() {
        unchecked(Example::methodThatThrowsCheckedExceptionNoParams);
    }

    @Test
    public void checked_to_unchecked_void() {
        unchecked(() -> Example.voidMethodThatThrowsACheckedException(Example.Do.NOT_THROW));
    }

    @Test(expected = RuntimeException.class)
    public void checked_to_unchecked_should_wrap_in_runtime_exception() {
        // Return value intentionally discarded (the call must throw);
        // the previously-unused local assignment was removed.
        unchecked(() -> Example.methodThatThrowsCheckedException(Do.THROW));
    }

    @Test(expected = RuntimeException.class)
    public void checked_to_unchecked_void_should_wrap_in_runtime_exception() {
        unchecked(() -> Example.voidMethodThatThrowsACheckedException(Do.THROW));
    }

    @Test(expected = Wrapped.class)
    public void wrapping_checked() throws Wrapped {
        wrappingChecked(() -> Example.methodThatThrowsCheckedException(Do.THROW)).in(Wrapped::new);
    }

    @Test(expected = NullPointerException.class)
    public void wrapping_checked_should_not_wrap_npe() throws Wrapped {
        // Unchecked exceptions must propagate unwrapped.
        wrappingChecked(() -> Example.methodThatThrowsNPE(Do.THROW)).in(Wrapped::new);
    }

    @Test
    public void wrapping_checked_should_return_value_when_no_exception() throws Wrapped {
        String foo = wrappingChecked(() -> Example.methodThatThrowsCheckedException(Example.Do.NOT_THROW)).in(Wrapped::new);
        assertEquals("foo", foo);
    }

    @Test
    public void wrapping_all_should_return_value_when_no_exception() throws Wrapped {
        String foo = wrappingAll(() -> Example.methodThatThrowsCheckedException(Do.NOT_THROW)).in(Wrapped::new);
        assertEquals("foo", foo);
    }

    @Test(expected = RuntimeException.class)
    public void wrapping_checked_should_not_wrap_runtime_exception() throws Wrapped {
        wrappingChecked(() -> Example.methodThatThrowsRuntimeException(Do.THROW)).in(Wrapped::new);
    }

    @Test(expected = Wrapped.class)
    public void wrapping_all_should_wrap_npe() throws Wrapped {
        // Unlike wrappingChecked, wrappingAll wraps unchecked exceptions too.
        wrappingAll(() -> Example.methodThatThrowsNPE(Do.THROW)).in(Wrapped::new);
    }

    @Test(expected = Wrapped.class)
    public void wrapping_all_should_wrap_runtime_exception() throws Wrapped {
        wrappingAll(() -> Example.methodThatThrowsRuntimeException(Do.THROW)).in(Wrapped::new);
    }

    @Test(expected = Wrapped.class)
    public void wrapping_checked_using_supplier() throws Wrapped {
        wrappingChecked(() -> Example.methodThatThrowsCheckedException(Do.THROW)).in(() -> new Wrapped(null));
    }

    @Test(expected = Wrapped.class)
    public void wrapping_checked_using_reflection() throws Wrapped {
        wrappingChecked(() -> Example.methodThatThrowsCheckedException(Do.THROW)).in(Wrapped.class);
    }

    @Test(expected = Wrapped.class)
    public void wrapping_all_using_supplier() throws Wrapped {
        wrappingAll(() -> Example.methodThatThrowsCheckedException(Do.THROW)).in(() -> new Wrapped(null));
    }

    @Test(expected = Wrapped.class)
    public void wrapping_all_using_reflection() throws Wrapped {
        wrappingAll(() -> Example.methodThatThrowsCheckedException(Do.THROW)).in(Wrapped.class);
    }

    @Test
    public void streams_and_exceptions() {
        List<String> result = asList("foo", "bar", "baz", "boooo")
            .stream()
            .map(Result.wrapReturn(Example::duplicatesShortStrings))
            .map(Result.wrap(s -> s.toUpperCase()))
            .filter(Result::success)
            .map(Result::unwrap)
            .collect(toList());

        // "boooo" fails inside the pipeline and is filtered out by Result::success.
        assertEquals(asList("FOOFOO", "BARBAR", "BAZBAZ"), result);
    }

    @Test
    public void streams_and_exceptions_exceptions_mid_stream() {
        List<String> result = asList("foo", "bar", "baz", "UPR", "boooo")
            .stream()
            .map(Result.wrapReturn(Example::duplicatesShortStrings))
            .map(Result.wrapExceptional(Example::uppercasesStrings))
            .filter(Result::success)
            .map(Result::unwrap)
            .collect(toList());

        assertEquals(asList("FOOFOO", "BARBAR", "BAZBAZ"), result);
    }

    @Test
    public void streams_and_exceptions_exceptions_map_failure_cases() {
        List<String> result = asList("foo", "bar", "baz", "UPR", "boooo")
            .stream()
            .map(Result.wrapReturn(Example::duplicatesShortStrings))
            .map(Result.wrapExceptional(Example::uppercasesStrings))
            .map(Result.onSuccess(Function.<String>identity()).on(InputTooLongException.class, s -> "OhNoes").mapper())
            .filter(Result::success)
            .map(Result::unwrap)
            .collect(toList());

        // The InputTooLongException failure is mapped to a recovery value instead of dropped.
        assertEquals(asList("FOOFOO","BARBAR","BAZBAZ", "OhNoes"), result);
    }

    @Test
    public void streams_and_exceptions_exceptions_throw_unfiltered_failures() {
        try {
            asList("foo", "bar", "baz", "UPR", "boooo")
                .stream()
                .map(Result.wrapReturn(Example::duplicatesShortStrings))
                .map(Result.wrapExceptional(Example::uppercasesStrings))
                .map(Result::unwrap)
                .collect(toList());
            // BUGFIX: previously this test passed silently when no exception was
            // thrown; make the expectation explicit.
            fail("unwrap of a failed Result should have thrown");
        } catch (RuntimeException e) {
            assertTrue(e.getCause() instanceof InputContainsUppercaseException);
        }
    }

    @Test
    public void completable_future_supplyaync_exceptional() {
        // BUGFIX: join() the futures so the test actually waits for completion;
        // previously any async failure went unnoticed.
        CompletableFuture
            .supplyAsync(Result.wrapReturn(Example::throwingSupplier))
            .thenApply(Result.wrap(String::toUpperCase))
            .thenAccept(Result.wrapConsumer(System.out::println))
            .join();

        CompletableFuture
            .supplyAsync(Result.wrapReturn(Example::notThrowingSupplier))
            .thenApply(Result.wrap(String::toUpperCase))
            .thenAccept(Result.wrapConsumer(System.out::println))
            .join();
    }

    @Test
    public void exceptional_stream_flatmap() {
        Book book = () -> "book";
        List<String> books = asList(book).stream()
            .flatMap(Exceptions.stream(Book::name))
            .collect(toList());
        assertEquals(asList("book"), books);
    }

    public static class ACheckedExceptionIDontHaveAGoodWayToDealWith extends Exception { }

    public static class Wrapped extends Exception {
        public Wrapped(Exception e) {
            super(e);
        }
    }

    public static class InputTooLongException extends Exception { }

    public static class InputContainsUppercaseException extends Exception { }

    public interface Book {
        String name() throws NoNameException;
    }

    public static class NoNameException extends Exception {}
}
package com.faforever.client.map; import com.faforever.client.fa.FaStrings; import com.faforever.client.fx.Controller; import com.faforever.client.fx.JavaFxUtil; import com.faforever.client.i18n.I18n; import com.faforever.client.main.event.HostGameEvent; import com.faforever.client.map.MapServiceImpl.PreviewSize; import com.faforever.client.notification.ImmediateNotification; import com.faforever.client.notification.NotificationService; import com.faforever.client.notification.ReportAction; import com.faforever.client.notification.Severity; import com.faforever.client.player.Player; import com.faforever.client.player.PlayerService; import com.faforever.client.reporting.ReportingService; import com.faforever.client.util.IdenticonUtil; import com.faforever.client.util.TimeService; import com.faforever.client.vault.review.Review; import com.faforever.client.vault.review.ReviewService; import com.faforever.client.vault.review.ReviewsController; import com.faforever.commons.io.Bytes; import com.google.common.base.Strings; import com.google.common.eventbus.EventBus; import javafx.application.Platform; import javafx.beans.binding.Bindings; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.collections.WeakListChangeListener; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.control.Label; import javafx.scene.control.ProgressBar; import javafx.scene.control.ScrollPane; import javafx.scene.image.ImageView; import javafx.scene.input.KeyCode; import javafx.scene.input.MouseEvent; import javafx.scene.layout.VBox; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.config.ConfigurableBeanFactory; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import javax.inject.Inject; import java.time.LocalDateTime; import java.util.Optional; import static java.util.Collections.singletonList; @Component 
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE) @Slf4j public class MapDetailController implements Controller<Node> { private final MapService mapService; private final NotificationService notificationService; private final I18n i18n; private final TimeService timeService; private final ReportingService reportingService; private final PlayerService playerService; private final ReviewService reviewService; public Label progressLabel; public Button uninstallButton; public Button installButton; public ImageView thumbnailImageView; public Label nameLabel; public Label authorLabel; public ProgressBar progressBar; public Label mapDescriptionLabel; public Node mapDetailRoot; public ScrollPane scrollPane; public Label dimensionsLabel; public Label maxPlayersLabel; public Label dateLabel; public ReviewsController reviewsController; public VBox loadingContainer; private final EventBus eventBus; private MapBean map; private ListChangeListener<MapBean> installStatusChangeListener; @Inject public MapDetailController(MapService mapService, NotificationService notificationService, I18n i18n, ReportingService reportingService, TimeService timeService, PlayerService playerService, ReviewService reviewService, EventBus eventBus) { this.mapService = mapService; this.notificationService = notificationService; this.i18n = i18n; this.reportingService = reportingService; this.timeService = timeService; this.playerService = playerService; this.reviewService = reviewService; this.eventBus = eventBus; } public void initialize() { JavaFxUtil.fixScrollSpeed(scrollPane); uninstallButton.managedProperty().bind(uninstallButton.visibleProperty()); installButton.managedProperty().bind(installButton.visibleProperty()); progressBar.managedProperty().bind(progressBar.visibleProperty()); progressBar.visibleProperty().bind(uninstallButton.visibleProperty().not().and(installButton.visibleProperty().not())); progressLabel.managedProperty().bind(progressLabel.visibleProperty()); 
progressLabel.visibleProperty().bind(progressBar.visibleProperty()); loadingContainer.visibleProperty().bind(progressBar.visibleProperty()); reviewsController.setCanWriteReview(false); mapDetailRoot.setOnKeyPressed(keyEvent -> { if (keyEvent.getCode() == KeyCode.ESCAPE) { onCloseButtonClicked(); } }); installStatusChangeListener = change -> { while (change.next()) { for (MapBean mapBean : change.getAddedSubList()) { if (map.getFolderName().equalsIgnoreCase(mapBean.getFolderName())) { setInstalled(true); return; } } for (MapBean mapBean : change.getRemoved()) { if (map.getFolderName().equals(mapBean.getFolderName())) { setInstalled(false); return; } } } }; } public void onCloseButtonClicked() { getRoot().setVisible(false); } private void setInstalled(boolean installed) { installButton.setVisible(!installed); uninstallButton.setVisible(installed); } public Node getRoot() { return mapDetailRoot; } public void setMap(MapBean map) { this.map = map; if (map.getLargeThumbnailUrl() != null) { thumbnailImageView.setImage(mapService.loadPreview(map, PreviewSize.LARGE)); } else { thumbnailImageView.setImage(IdenticonUtil.createIdenticon(map.getId())); } nameLabel.setText(map.getDisplayName()); authorLabel.setText(Optional.ofNullable(map.getAuthor()).orElse(i18n.get("map.unknownAuthor"))); maxPlayersLabel.setText(i18n.number(map.getPlayers())); MapSize mapSize = map.getSize(); dimensionsLabel.setText(i18n.get("mapPreview.size", mapSize.getWidthInKm(), mapSize.getHeightInKm())); LocalDateTime createTime = map.getCreateTime(); dateLabel.setText(timeService.asDate(createTime)); boolean mapInstalled = mapService.isInstalled(map.getFolderName()); installButton.setVisible(!mapInstalled); Player player = playerService.getCurrentPlayer().orElseThrow(() -> new IllegalStateException("No user is logged in")); reviewsController.setCanWriteReview(false); mapService.hasPlayedMap(player.getId(), map.getId()) .thenAccept(hasPlayed -> reviewsController.setCanWriteReview(hasPlayed)); 
reviewsController.setOnSendReviewListener(this::onSendReview); reviewsController.setOnDeleteReviewListener(this::onDeleteReview); reviewsController.setReviews(map.getReviews()); reviewsController.setOwnReview(map.getReviews().stream() .filter(review -> review.getPlayer().getId() == player.getId()) .findFirst()); mapService.getFileSize(map.getDownloadUrl()) .thenAccept(mapFileSize -> Platform.runLater(() -> { if (mapFileSize > -1) { installButton.setText(i18n.get("mapVault.installButtonFormat", Bytes.formatSize(mapFileSize, i18n.getUserSpecificLocale()))); installButton.setDisable(false); } else { installButton.setText(i18n.get("notAvailable")); installButton.setDisable(true); } })); uninstallButton.setVisible(mapInstalled); mapDescriptionLabel.textProperty().bind(Bindings.createStringBinding(() -> Optional.ofNullable(map.getDescription()) .map(Strings::emptyToNull) .map(FaStrings::removeLocalizationTag) .orElseGet(() -> i18n.get("map.noDescriptionAvailable")), map.descriptionProperty())); ObservableList<MapBean> installedMaps = mapService.getInstalledMaps(); synchronized (installedMaps) { installedMaps.addListener(new WeakListChangeListener<>(installStatusChangeListener)); } setInstalled(mapService.isInstalled(map.getFolderName())); } private void onDeleteReview(Review review) { reviewService.deleteMapVersionReview(review) .thenRun(() -> Platform.runLater(() -> { map.getReviews().remove(review); reviewsController.setOwnReview(Optional.empty()); })) // TODO display error to user .exceptionally(throwable -> { log.warn("Review could not be deleted", throwable); return null; }); } private void onSendReview(Review review) { boolean isNew = review.getId() == null; Player player = playerService.getCurrentPlayer() .orElseThrow(() -> new IllegalStateException("No current player is available")); review.setPlayer(player); reviewService.saveMapVersionReview(review, map.getId()) .thenRun(() -> { if (isNew) { map.getReviews().add(review); } 
reviewsController.setOwnReview(Optional.of(review)); }) // TODO display error to user .exceptionally(throwable -> { log.warn("Review could not be saved", throwable); return null; }); } public void onInstallButtonClicked() { installButton.setVisible(false); mapService.downloadAndInstallMap(map, progressBar.progressProperty(), progressLabel.textProperty()) .thenRun(() -> setInstalled(true)) .exceptionally(throwable -> { notificationService.addNotification(new ImmediateNotification( i18n.get("errorTitle"), i18n.get("mapVault.installationFailed", map.getDisplayName(), throwable.getLocalizedMessage()), Severity.ERROR, throwable, singletonList(new ReportAction(i18n, reportingService, throwable)))); setInstalled(false); return null; }); } public void onUninstallButtonClicked() { progressBar.progressProperty().unbind(); progressBar.setProgress(-1); uninstallButton.setVisible(false); mapService.uninstallMap(map) .thenRun(() -> setInstalled(false)) .exceptionally(throwable -> { notificationService.addNotification(new ImmediateNotification( i18n.get("errorTitle"), i18n.get("mapVault.couldNotDeleteMap", map.getDisplayName(), throwable.getLocalizedMessage()), Severity.ERROR, throwable, singletonList(new ReportAction(i18n, reportingService, throwable)))); setInstalled(true); return null; }); } public void onDimmerClicked() { onCloseButtonClicked(); } public void onContentPaneClicked(MouseEvent event) { event.consume(); } public void onCreateGameButtonClicked() { eventBus.post(new HostGameEvent(map.getFolderName())); } }
/* This file is part of Openrouteservice.
 *
 * Openrouteservice is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 * You should have received a copy of the GNU Lesser General Public License along with this library;
 * if not, see <https://www.gnu.org/licenses/>.
 */
package org.heigit.ors.services.isochrones.requestprocessors.json;

import com.graphhopper.util.Helper;
import com.vividsolutions.jts.geom.Coordinate;
import org.heigit.ors.common.StatusCode;
import org.heigit.ors.common.TravelRangeType;
import org.heigit.ors.common.TravellerInfo;
import org.heigit.ors.exceptions.MissingParameterException;
import org.heigit.ors.exceptions.ParameterValueException;
import org.heigit.ors.exceptions.StatusCodeException;
import org.heigit.ors.exceptions.UnknownParameterValueException;
import org.heigit.ors.isochrones.IsochroneRequest;
import org.heigit.ors.isochrones.IsochronesErrorCodes;
import org.heigit.ors.routing.RoutingProfileType;
import org.heigit.ors.services.isochrones.IsochronesServiceSettings;
import org.heigit.ors.util.CoordTools;
import org.heigit.ors.util.StreamUtility;
import org.json.JSONArray;
import org.json.JSONObject;

import javax.servlet.http.HttpServletRequest;
import java.io.InputStream;
import java.util.Arrays;

/**
 * Parses isochrone requests either from a JSON request body
 * ({@link #parseFromStream(InputStream)}) or from HTTP query parameters
 * ({@link #parseFromRequestParams(HttpServletRequest)}) into an
 * {@link IsochroneRequest}. All validation failures are reported via the
 * ORS exception hierarchy with {@link IsochronesErrorCodes}.
 */
public class JsonIsochroneRequestParser {

	public static final String KEY_TRAVELLERS = "travellers";
	public static final String KEY_PROFILE = "profile";
	public static final String KEY_LOCATION = "location";
	public static final String KEY_LOCATION_TYPE = "location_type";
	public static final String KEY_RANGE = "range";
	public static final String KEY_RANGE_TYPE = "range_type";
	public static final String KEY_OPTIONS = "options";
	public static final String KEY_UNITS = "units";
	public static final String KEY_AREA_UNITS = "area_units";
	public static final String KEY_CALC_METHOD = "calc_method";
	public static final String KEY_ATTRIBUTES = "attributes";
	public static final String KEY_INTERSECTIONS = "intersections";
	public static final String KEY_SMOOTHING = "smoothing";
	public static final String KEY_LOCATIONS = "locations";

	// Utility class — not instantiable.
	private JsonIsochroneRequestParser() {}

	/**
	 * Parses an isochrone request from a JSON document read from the given stream.
	 *
	 * @param stream request body containing a JSON object
	 * @return the populated request
	 * @throws Exception if the body is not valid JSON or a parameter is missing/invalid
	 */
	public static IsochroneRequest parseFromStream(InputStream stream) throws Exception {
		JSONObject json;
		try {
			String body = StreamUtility.readStream(stream);
			json = new JSONObject(body);
		} catch (Exception ex) {
			throw new StatusCodeException(StatusCode.BAD_REQUEST, IsochronesErrorCodes.INVALID_JSON_FORMAT, "Unable to parse JSON document.");
		}

		IsochroneRequest req = new IsochroneRequest();

		String value;

		if (json.has(KEY_TRAVELLERS)) {
			JSONArray jTravellers = json.getJSONArray(KEY_TRAVELLERS);

			if (jTravellers.length() == 0)
				throw new MissingParameterException(IsochronesErrorCodes.INVALID_JSON_FORMAT, "'travellers' array is empty.");

			for (int j = 0; j < jTravellers.length(); ++j) {
				JSONObject jTraveller = jTravellers.getJSONObject(j);

				TravellerInfo travellerInfo = new TravellerInfo();

				// profile is mandatory per traveller
				value = jTraveller.optString(KEY_PROFILE);
				if (!Helper.isEmpty(value)) {
					int profileType = RoutingProfileType.getFromString(value);
					if (profileType == RoutingProfileType.UNKNOWN)
						throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_PROFILE, value);
					travellerInfo.getRouteSearchParameters().setProfileType(profileType);
				} else {
					throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_PROFILE);
				}

				// location is a mandatory [lon, lat] pair
				if (jTraveller.has(KEY_LOCATION)) {
					try {
						JSONArray jLocation = jTraveller.getJSONArray(KEY_LOCATION);
						travellerInfo.setLocation(new Coordinate(jLocation.getDouble(0), jLocation.getDouble(1)));
					} catch (Exception nfex) {
						throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, KEY_LOCATION);
					}
				} else {
					throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_LOCATION);
				}

				value = jTraveller.optString(KEY_LOCATION_TYPE);
				if (!Helper.isEmpty(value)) {
					if (!"start".equalsIgnoreCase(value) && !"destination".equalsIgnoreCase(value))
						throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_LOCATION_TYPE, value);
					travellerInfo.setLocationType(value);
				}

				// range is mandatory; stored sorted ascending
				if (jTraveller.has(KEY_RANGE)) {
					JSONArray jRanges = jTraveller.getJSONArray(KEY_RANGE);
					if (jRanges.length() == 0)
						throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, KEY_RANGE);
					double[] ranges = new double[jRanges.length()];
					try {
						for (int i = 0; i < ranges.length; i++)
							ranges[i] = jRanges.getDouble(i);
					} catch (Exception ex) {
						throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, KEY_RANGE);
					}
					Arrays.sort(ranges);
					travellerInfo.setRanges(ranges);
				} else {
					throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_RANGE);
				}

				value = jTraveller.optString(KEY_RANGE_TYPE);
				if (!Helper.isEmpty(value)) {
					switch (value.toLowerCase()) {
						case "distance":
							travellerInfo.setRangeType(TravelRangeType.DISTANCE);
							break;
						case "time":
							travellerInfo.setRangeType(TravelRangeType.TIME);
							break;
						default:
							throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_RANGE_TYPE, value);
					}
				}

				value = jTraveller.optString(KEY_OPTIONS);
				if (!Helper.isEmpty(value)) {
					try {
						travellerInfo.getRouteSearchParameters().setOptions(value);
					} catch (Exception ex) {
						throw new ParameterValueException(IsochronesErrorCodes.INVALID_JSON_FORMAT, KEY_OPTIONS, value);
					}
				}

				req.addTraveller(travellerInfo);
			}
		} else {
			throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_TRAVELLERS);
		}

		value = json.optString(KEY_UNITS);
		if (!Helper.isEmpty(value)) {
			if (!("m".equals(value) || "km".equals(value) || "mi".equals(value)))
				throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_UNITS, value);
			req.setUnits(value.toLowerCase());
		}

		value = json.optString(KEY_AREA_UNITS);
		if (!Helper.isEmpty(value)) {
			if (!("m".equals(value) || "km".equals(value) || "mi".equals(value)))
				throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_AREA_UNITS, value);
			// FIX: previously called req.setUnits(...) here (copy-paste from the
			// units branch above), silently overwriting the distance units instead
			// of setting the area units. parseFromRequestParams uses setAreaUnits.
			req.setAreaUnits(value.toLowerCase());
		}

		value = json.optString(KEY_CALC_METHOD);
		if (!Helper.isEmpty(value))
			req.setCalcMethod(value);

		value = json.optString(KEY_ATTRIBUTES);
		if (!Helper.isEmpty(value)) {
			String[] values = value.split("\\|");
			for (int i = 0; i < values.length; i++) {
				String attr = values[i];
				if (!(attr.equalsIgnoreCase("area") || attr.equalsIgnoreCase("reachfactor")
						|| IsochronesServiceSettings.isStatsAttributeSupported(attr)))
					throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_ATTRIBUTES, attr);
			}
			req.setAttributes(values);
		}

		value = json.optString(KEY_INTERSECTIONS);
		if (!Helper.isEmpty(value)) {
			try {
				req.setIncludeIntersections(Boolean.parseBoolean(value));
			} catch (Exception ex) {
				throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_INTERSECTIONS, value);
			}
		}

		setIsochroneSmoothing(req, json.optString(KEY_SMOOTHING));

		value = json.optString("id");
		if (!Helper.isEmpty(value))
			req.setId(value);

		return req;
	}

	/**
	 * Parses an isochrone request from HTTP query parameters (GET-style API).
	 *
	 * @param request servlet request carrying the parameters
	 * @return the populated request
	 * @throws Exception if a parameter is missing or invalid
	 */
	public static IsochroneRequest parseFromRequestParams(HttpServletRequest request) throws Exception {
		IsochroneRequest req = new IsochroneRequest();
		TravellerInfo travellerInfo = new TravellerInfo();

		String value = request.getParameter(KEY_PROFILE);
		if (!Helper.isEmpty(value)) {
			int profileType = RoutingProfileType.getFromString(value);
			if (profileType == RoutingProfileType.UNKNOWN)
				throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_PROFILE, value);
			travellerInfo.getRouteSearchParameters().setProfileType(profileType);
		} else {
			throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_PROFILE);
		}

		double rangeValue = -1.0;
		// A comma-separated range list makes the separate "interval" parameter irrelevant.
		boolean skipInterval = false;
		value = request.getParameter(KEY_RANGE);
		if (Helper.isEmpty(value))
			throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_RANGE);
		else {
			String[] rangeValues = value.split(",");

			if (rangeValues.length == 1) {
				try {
					rangeValue = Double.parseDouble(value);
					travellerInfo.setRanges(new double[] { rangeValue });
				} catch (NumberFormatException ex) {
					throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, KEY_RANGE);
				}
			} else {
				double[] ranges = new double[rangeValues.length];
				// FIX: previously parsed without a try/catch, so a malformed value
				// escaped as a raw NumberFormatException instead of the
				// ParameterValueException used by the single-value branch. Also
				// removed an unused maxRange accumulator.
				try {
					for (int i = 0; i < ranges.length; i++)
						ranges[i] = Double.parseDouble(rangeValues[i]);
				} catch (NumberFormatException ex) {
					throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, KEY_RANGE);
				}

				Arrays.sort(ranges);
				travellerInfo.setRanges(ranges);
				skipInterval = true;
			}
		}

		if (!skipInterval) {
			value = request.getParameter("interval");
			if (!Helper.isEmpty(value) && rangeValue != -1) {
				try {
					travellerInfo.setRanges(rangeValue, Double.parseDouble(value));
				} catch (NumberFormatException ex) {
					throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, "interval");
				}
			}
		}

		value = request.getParameter(KEY_RANGE_TYPE);
		if (!Helper.isEmpty(value)) {
			switch (value.toLowerCase()) {
				case "distance":
					travellerInfo.setRangeType(TravelRangeType.DISTANCE);
					break;
				case "time":
					travellerInfo.setRangeType(TravelRangeType.TIME);
					break;
				default:
					throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_RANGE_TYPE, value);
			}
		}

		value = request.getParameter(KEY_AREA_UNITS);
		if (!Helper.isEmpty(value)) {
			if (!("m".equals(value) || "km".equals(value) || "mi".equals(value)))
				throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_AREA_UNITS, value);
			req.setAreaUnits(value.toLowerCase());
		}

		value = request.getParameter(KEY_UNITS);
		if (!Helper.isEmpty(value)) {
			if (!("m".equals(value) || "km".equals(value) || "mi".equals(value)))
				throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_UNITS, value);
			req.setUnits(value.toLowerCase());
		}

		// "latlng" is a legacy alias for "locations" with swapped coordinate order.
		boolean inverseXY = false;
		value = request.getParameter(KEY_LOCATIONS);
		if (Helper.isEmpty(value)) {
			value = request.getParameter("latlng");
			inverseXY = true;
		}

		Coordinate[] coords = null;
		if (!Helper.isEmpty(value)) {
			try {
				coords = CoordTools.parse(value, "\\|", false, inverseXY);
			} catch (NumberFormatException nfex) {
				throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_FORMAT, KEY_LOCATIONS);
			}
		} else {
			throw new MissingParameterException(IsochronesErrorCodes.MISSING_PARAMETER, KEY_LOCATIONS);
		}

		value = request.getParameter(KEY_LOCATION_TYPE);
		if (!Helper.isEmpty(value)) {
			if (!"start".equalsIgnoreCase(value) && !"destination".equalsIgnoreCase(value))
				throw new UnknownParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_LOCATION_TYPE, value);
			travellerInfo.setLocationType(value);
		}

		value = request.getParameter(KEY_CALC_METHOD);
		if (!Helper.isEmpty(value))
			req.setCalcMethod(value);

		value = request.getParameter(KEY_ATTRIBUTES);
		if (!Helper.isEmpty(value)) {
			String[] values = value.split("\\|");
			for (int i = 0; i < values.length; i++) {
				String attr = values[i];
				if (!(attr.equalsIgnoreCase("area") || attr.equalsIgnoreCase("reachfactor")
						|| IsochronesServiceSettings.isStatsAttributeSupported(attr)))
					throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_ATTRIBUTES, attr);
			}
			req.setAttributes(values);
		}

		value = request.getParameter(KEY_INTERSECTIONS);
		if (!Helper.isEmpty(value)) {
			try {
				req.setIncludeIntersections(Boolean.parseBoolean(value));
			} catch (Exception ex) {
				throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_INTERSECTIONS, value);
			}
		}

		value = request.getParameter(KEY_OPTIONS);
		if (!Helper.isEmpty(value)) {
			try {
				travellerInfo.getRouteSearchParameters().setOptions(value);
				// custom options force the slower concave-balls method
				req.setCalcMethod("ConcaveBalls");
			} catch (Exception ex) {
				throw new ParameterValueException(IsochronesErrorCodes.INVALID_JSON_FORMAT, KEY_OPTIONS, value);
			}
		} else
			req.setCalcMethod("FastIsochrone");

		setIsochroneSmoothing(req, request.getParameter(KEY_SMOOTHING));

		// One traveller per coordinate; all share the settings parsed above.
		travellerInfo.setLocation(coords[0]);
		req.addTraveller(travellerInfo);
		for (int i = 1; i < coords.length; i++) {
			TravellerInfo ti = new TravellerInfo(travellerInfo);
			ti.setLocation(coords[i]);
			req.addTraveller(ti);
		}

		value = request.getParameter("id");
		if (!Helper.isEmpty(value))
			req.setId(value);

		return req;
	}

	/**
	 * Validates and applies the smoothing factor (0..100) if present.
	 *
	 * @throws ParameterValueException if the value is not a number in [0, 100]
	 */
	private static void setIsochroneSmoothing(IsochroneRequest isochroneRequest, String requestSmoothingValue)
			throws ParameterValueException {
		if (!Helper.isEmpty(requestSmoothingValue)) {
			float smoothingValue;
			try {
				smoothingValue = Float.parseFloat(requestSmoothingValue);
			} catch (Exception e) {
				throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_SMOOTHING, requestSmoothingValue);
			}

			if (smoothingValue < 0 || smoothingValue > 100)
				throw new ParameterValueException(IsochronesErrorCodes.INVALID_PARAMETER_VALUE, KEY_SMOOTHING, requestSmoothingValue);

			isochroneRequest.setSmoothingFactor(smoothingValue);
		}
	}
}
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/

// This class has been generated, DO NOT EDIT!
// NOTE(review): comments below were added for review only; this wrapper is
// produced by the op generator — regenerate rather than hand-edit the code.

package org.tensorflow.op.core;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.tensorflow.GraphOperation;
import org.tensorflow.Operand;
import org.tensorflow.Operation;
import org.tensorflow.OperationBuilder;
import org.tensorflow.Output;
import org.tensorflow.op.Operands;
import org.tensorflow.op.RawOp;
import org.tensorflow.op.RawOpInputs;
import org.tensorflow.op.Scope;
import org.tensorflow.op.annotation.Endpoint;
import org.tensorflow.op.annotation.OpInputsMetadata;
import org.tensorflow.op.annotation.OpMetadata;
import org.tensorflow.op.annotation.Operator;
import org.tensorflow.proto.framework.DataType;
import org.tensorflow.types.TInt32;
import org.tensorflow.types.TInt64;
import org.tensorflow.types.family.TType;

/**
 * Op removes and returns the values associated with the key
 * from the underlying container.   If the underlying container
 * does not contain this key, the op will block until it does.
 */
@OpMetadata(
    opType = MapUnstage.OP_NAME,
    inputsClass = MapUnstage.Inputs.class
)
@Operator
public final class MapUnstage extends RawOp implements Iterable<Operand<TType>> {
  /**
   * The name of this op, as known by TensorFlow core engine
   */
  public static final String OP_NAME = "MapUnstage";

  private List<Output<?>> values;

  @SuppressWarnings("unchecked")
  public MapUnstage(Operation operation) {
    super(operation, OP_NAME);
    int outputIdx = 0;
    // The op's only output is the variadic "values" list; its length is
    // determined by the "dtypes" attribute at graph-construction time.
    int valuesLength = operation.outputListLength("values");
    values = Arrays.asList(operation.outputList(outputIdx, valuesLength));
    outputIdx += valuesLength;
  }

  /**
   * Factory method to create a class wrapping a new MapUnstage operation.
   *
   * @param scope current scope
   * @param key The key value
   * @param indices The indices value
   * @param dtypes The value of the dtypes attribute
   * @param options carries optional attribute values
   * @return a new instance of MapUnstage
   */
  @Endpoint(
      describeByClass = true
  )
  public static MapUnstage create(Scope scope, Operand<TInt64> key, Operand<TInt32> indices,
      List<Class<? extends TType>> dtypes, Options... options) {
    OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "MapUnstage");
    opBuilder.addInput(key.asOutput());
    opBuilder.addInput(indices.asOutput());
    opBuilder.setAttr("dtypes", Operands.toDataTypes(dtypes));
    if (options != null) {
      // Each Options instance may carry any subset of the optional attributes;
      // only the non-null ones are applied to the builder.
      for (Options opts : options) {
        if (opts.capacity != null) {
          opBuilder.setAttr("capacity", opts.capacity);
        }
        if (opts.memoryLimit != null) {
          opBuilder.setAttr("memory_limit", opts.memoryLimit);
        }
        if (opts.container != null) {
          opBuilder.setAttr("container", opts.container);
        }
        if (opts.sharedName != null) {
          opBuilder.setAttr("shared_name", opts.sharedName);
        }
      }
    }
    return new MapUnstage(opBuilder.build());
  }

  /**
   * Sets the capacity option.
   *
   * @param capacity the capacity option
   * @return this Options instance.
   */
  public static Options capacity(Long capacity) {
    return new Options().capacity(capacity);
  }

  /**
   * Sets the memoryLimit option.
   *
   * @param memoryLimit the memoryLimit option
   * @return this Options instance.
   */
  public static Options memoryLimit(Long memoryLimit) {
    return new Options().memoryLimit(memoryLimit);
  }

  /**
   * Sets the container option.
   *
   * @param container the container option
   * @return this Options instance.
   */
  public static Options container(String container) {
    return new Options().container(container);
  }

  /**
   * Sets the sharedName option.
   *
   * @param sharedName the sharedName option
   * @return this Options instance.
   */
  public static Options sharedName(String sharedName) {
    return new Options().sharedName(sharedName);
  }

  /**
   * Gets values.
   *
   * @return values.
   */
  public List<Output<?>> values() {
    return values;
  }

  @Override
  @SuppressWarnings({"rawtypes", "unchecked"})
  public Iterator<Operand<TType>> iterator() {
    // Raw cast is safe: every element of "values" is an Output<? extends TType>.
    return (Iterator) values.iterator();
  }

  /**
   * Optional attributes for {@link org.tensorflow.op.core.MapUnstage}
   */
  public static class Options {
    private Long capacity;

    private Long memoryLimit;

    private String container;

    private String sharedName;

    private Options() {
    }

    /**
     * Sets the capacity option.
     *
     * @param capacity the capacity option
     * @return this Options instance.
     */
    public Options capacity(Long capacity) {
      this.capacity = capacity;
      return this;
    }

    /**
     * Sets the memoryLimit option.
     *
     * @param memoryLimit the memoryLimit option
     * @return this Options instance.
     */
    public Options memoryLimit(Long memoryLimit) {
      this.memoryLimit = memoryLimit;
      return this;
    }

    /**
     * Sets the container option.
     *
     * @param container the container option
     * @return this Options instance.
     */
    public Options container(String container) {
      this.container = container;
      return this;
    }

    /**
     * Sets the sharedName option.
     *
     * @param sharedName the sharedName option
     * @return this Options instance.
     */
    public Options sharedName(String sharedName) {
      this.sharedName = sharedName;
      return this;
    }
  }

  @OpInputsMetadata(
      outputsClass = MapUnstage.class
  )
  public static class Inputs extends RawOpInputs<MapUnstage> {
    /**
     * The key input
     */
    public final Operand<TInt64> key;

    /**
     * The indices input
     */
    public final Operand<TInt32> indices;

    /**
     * The capacity attribute
     */
    public final long capacity;

    /**
     * The memoryLimit attribute
     */
    public final long memoryLimit;

    /**
     * The dtypes attribute
     */
    public final DataType[] dtypes;

    /**
     * The container attribute
     */
    public final String container;

    /**
     * The sharedName attribute
     */
    public final String sharedName;

    public Inputs(GraphOperation op) {
      super(new MapUnstage(op), op, Arrays.asList("capacity", "memory_limit", "dtypes", "container", "shared_name"));
      int inputIndex = 0;
      key = (Operand<TInt64>) op.input(inputIndex++);
      indices = (Operand<TInt32>) op.input(inputIndex++);
      capacity = op.attributes().getAttrInt("capacity");
      memoryLimit = op.attributes().getAttrInt("memory_limit");
      dtypes = op.attributes().getAttrTypeList("dtypes");
      container = op.attributes().getAttrString("container");
      sharedName = op.attributes().getAttrString("shared_name");
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sysml.runtime.instructions.gpu.context;

import static jcuda.driver.JCudaDriver.cuDeviceGetCount;
import static jcuda.driver.JCudaDriver.cuInit;
import static jcuda.runtime.JCuda.cudaGetDeviceProperties;

import java.util.LinkedList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.utils.GPUStatistics;

import jcuda.driver.JCudaDriver;
import jcuda.jcublas.JCublas2;
import jcuda.jcudnn.JCudnn;
import jcuda.jcusparse.JCusparse;
import jcuda.runtime.JCuda;
import jcuda.runtime.cudaDeviceProp;

/**
 * Process-wide pool of {@link GPUContext}s, one per usable CUDA device.
 * Performs one-time CUDA initialization and exposes reserve/free semantics
 * for the whole set of GPUs.
 */
public class GPUContextPool {

	protected static final Log LOG = LogFactory.getLog(GPUContextPool.class.getName());

	/**
	 * Maximum number of gpus to use, -1 for all
	 */
	public static int PER_PROCESS_MAX_GPUS = -1;

	// Minimum available memory across all pooled GPUs, captured at init time.
	private static long INITIAL_GPU_MEMORY_BUDGET = -1;

	/**
	 * Whether cuda has been initialized
	 */
	static boolean initialized = false;

	/**
	 * The total number of cuda devices on this machine
	 */
	static int deviceCount = -1;

	/**
	 * Stores the cached deviceProperties
	 */
	static cudaDeviceProp[] deviceProperties;

	/**
	 * Set of free GPUContexts
	 */
	static List<GPUContext> pool = new LinkedList<>();

	/**
	 * Whether the pool of GPUs is reserved or not
	 */
	static boolean reserved = false;

	/**
	 * Static initialization of the number of devices.
	 * Also sets behaviour for J{Cuda, Cudnn, Cublas, Cusparse} in case of error.
	 * Initializes the CUDA driver.
	 * All these need be done once, and not per GPU.
	 *
	 * @throws DMLRuntimeException if CUDA initialization fails
	 */
	public static synchronized void initializeGPU() throws DMLRuntimeException {
		// Set eagerly (before init work) so that code invoked below which checks
		// the flag does not re-enter initialization.
		initialized = true;
		GPUContext.LOG.info("Initializing CUDA");
		long start = System.nanoTime();
		JCuda.setExceptionsEnabled(true);
		JCudnn.setExceptionsEnabled(true);
		JCublas2.setExceptionsEnabled(true);
		JCusparse.setExceptionsEnabled(true);
		JCudaDriver.setExceptionsEnabled(true);
		cuInit(0); // Initialize the driver

		int deviceCountArray[] = { 0 };
		cuDeviceGetCount(deviceCountArray); // Obtain the number of devices
		deviceCount = deviceCountArray[0];
		deviceProperties = new cudaDeviceProp[deviceCount];

		// Optionally cap the number of GPUs this process uses.
		if (PER_PROCESS_MAX_GPUS > 0)
			deviceCount = Math.min(PER_PROCESS_MAX_GPUS, deviceCount);

		// Initialize the list of devices
		for (int i = 0; i < deviceCount; i++) {
			cudaDeviceProp properties = new cudaDeviceProp();
			cudaGetDeviceProperties(properties, i);
			deviceProperties[i] = properties;
		}

		// Initialize the pool of GPUContexts
		for (int i = 0; i < deviceCount; i++) {
			GPUContext gCtx = new GPUContext(i);
			pool.add(gCtx);
		}

		// Initialize the initial memory budget.
		// If there are heterogeneous GPUs on the machine (different memory sizes)
		// the initially available memory is set to the GPU with the lowest memory.
		// This is because at runtime we wouldn't know which GPU a certain
		// operation gets scheduled on.
		// FIX: the accumulator was previously seeded with Integer.MAX_VALUE, which
		// silently capped the budget at ~2 GB on machines whose GPUs all have more
		// memory than that (Math.min would never exceed the seed).
		long minAvailableMemory = Long.MAX_VALUE;
		for (GPUContext gCtx : pool) {
			gCtx.initializeThread();
			minAvailableMemory = Math.min(minAvailableMemory, gCtx.getAvailableMemory());
		}
		INITIAL_GPU_MEMORY_BUDGET = minAvailableMemory;

		GPUContext.LOG.info("Total number of GPUs on the machine: " + deviceCount);
		GPUContext.LOG.info("Initial GPU memory: " + initialGPUMemBudget());

		GPUStatistics.cudaInitTime = System.nanoTime() - start;
	}

	/**
	 * Reserves and gets an initialized list of GPUContexts
	 *
	 * @return null if no GPUContexts in pool, otherwise a valid list of GPUContext
	 * @throws DMLRuntimeException if the GPUs are already reserved or init fails
	 */
	public static synchronized List<GPUContext> reserveAllGPUContexts() throws DMLRuntimeException {
		if (reserved)
			throw new DMLRuntimeException("Trying to re-reserve GPUs");
		if (!initialized)
			initializeGPU();
		reserved = true;
		LOG.trace("GPU : Reserved all GPUs");
		return pool;
	}

	/**
	 * Get the number of free GPUContexts
	 *
	 * @return number of free GPUContexts
	 */
	public static synchronized int getAvailableCount() {
		return pool.size();
	}

	/**
	 * Gets the device properties
	 *
	 * @param device the device number (on a machine with more than 1 GPU)
	 * @return the device properties
	 * @throws DMLRuntimeException if there is problem initializing the GPUContexts
	 */
	static cudaDeviceProp getGPUProperties(int device) throws DMLRuntimeException {
		// do once - initialization of GPU
		if (!initialized)
			initializeGPU();
		return deviceProperties[device];
	}

	/**
	 * Number of available devices on this machine
	 *
	 * @return number of available GPUs on this machine
	 * @throws DMLRuntimeException if error
	 */
	public static int getDeviceCount() throws DMLRuntimeException {
		if (!initialized)
			initializeGPU();
		return deviceCount;
	}

	/**
	 * Unreserves all GPUContexts
	 *
	 * @throws DMLRuntimeException if the GPUs were not reserved
	 */
	public static synchronized void freeAllGPUContexts() throws DMLRuntimeException {
		if (!reserved)
			throw new DMLRuntimeException("Trying to free unreserved GPUs");
		reserved = false;
		LOG.trace("GPU : Unreserved all GPUs");
	}

	/**
	 * Gets the initial GPU memory budget. This is the minimum of the
	 * available memories across all the GPUs on the machine(s).
	 *
	 * @return minimum available memory
	 * @throws RuntimeException if error initializing the GPUs
	 */
	public static synchronized long initialGPUMemBudget() throws RuntimeException {
		try {
			if (!initialized)
				initializeGPU();
			return INITIAL_GPU_MEMORY_BUDGET;
		} catch (DMLRuntimeException e) {
			throw new RuntimeException(e);
		}
	}
}
package com.vaadin.data.provider;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

import org.junit.Assert;
import org.junit.Test;

import com.vaadin.server.SerializableComparator;
import com.vaadin.shared.data.sort.SortDirection;

/**
 * Tests for {@link ListDataProvider}: sorting, provider-level filtering, and
 * the {@code filteringBy*} wrapper factories.
 *
 * NOTE(review): expected counts (36 "Foo", 23 "Bar", ids 0..99, etc.) come from
 * the fixture data created by DataProviderTestBase — confirm against that class.
 */
public class ListDataProviderTest extends DataProviderTestBase<ListDataProvider<StrBean>> {

	@Override
	protected ListDataProvider<StrBean> createDataProvider() {
		return DataProvider.ofCollection(data);
	}

	@Test
	public void dataProvider_ofItems_shouldCreateAnEditableDataProvider() {
		ListDataProvider<String> dataProvider = DataProvider.ofItems("0", "1");
		Assert.assertTrue(
				"DataProvider.ofItems should create a list data provider backed an ArrayList allowing edits",
				dataProvider.getItems() instanceof ArrayList);
		List<String> list = (List<String>) dataProvider.getItems();
		// previously the following would explode since Arrays.ArrayList does
		// not support it
		list.add(0, "2");
	}

	@Test
	public void setSortByProperty_ascending() {
		ListDataProvider<StrBean> dataProvider = getDataProvider();
		dataProvider.setSortOrder(StrBean::getId, SortDirection.ASCENDING);
		int[] threeFirstIds = dataProvider.fetch(new Query<>())
				.mapToInt(StrBean::getId).limit(3).toArray();
		assertArrayEquals(new int[] { 0, 1, 2 }, threeFirstIds);
	}

	@Test
	public void setSortByProperty_descending() {
		ListDataProvider<StrBean> dataProvider = getDataProvider();
		dataProvider.setSortOrder(StrBean::getId, SortDirection.DESCENDING);
		int[] threeFirstIds = dataProvider.fetch(new Query<>())
				.mapToInt(StrBean::getId).limit(3).toArray();
		assertArrayEquals(new int[] { 98, 97, 96 }, threeFirstIds);
	}

	@Test
	public void testMultipleSortOrder_firstAddedWins() {
		ListDataProvider<StrBean> dataProvider = getDataProvider();
		// First-added comparator is the primary sort key; later ones break ties.
		dataProvider.addSortOrder(StrBean::getValue, SortDirection.DESCENDING);
		dataProvider.addSortOrder(StrBean::getId, SortDirection.DESCENDING);
		List<StrBean> threeFirstItems = dataProvider.fetch(new Query<>())
				.limit(3).collect(Collectors.toList());
		// First one is Xyz
		assertEquals(new StrBean("Xyz", 10, 100), threeFirstItems.get(0));
		// The following are Foos ordered by id
		assertEquals(new StrBean("Foo", 93, 2), threeFirstItems.get(1));
		assertEquals(new StrBean("Foo", 91, 2), threeFirstItems.get(2));
	}

	@Test
	public void setFilter() {
		dataProvider.setFilter(item -> item.getValue().equals("Foo"));
		assertEquals(36, sizeWithUnfilteredQuery());
		dataProvider.setFilter(item -> !item.getValue().equals("Foo"));
		assertEquals("Previous filter should be reset when setting a new one",
				64, sizeWithUnfilteredQuery());
		dataProvider.setFilter(null);
		assertEquals("Setting filter to null should remove all filters", 100,
				sizeWithUnfilteredQuery());
	}

	@Test
	public void setFilter_valueProvider() {
		dataProvider.setFilter(StrBean::getValue, "Foo"::equals);
		assertEquals(36, sizeWithUnfilteredQuery());
		dataProvider.setFilter(StrBean::getValue,
				value -> !value.equals("Foo"));
		assertEquals("Previous filter should be reset when setting a new one",
				64, sizeWithUnfilteredQuery());
	}

	@Test
	public void setFilterEquals() {
		dataProvider.setFilterByValue(StrBean::getValue, "Foo");
		assertEquals(36, sizeWithUnfilteredQuery());
		dataProvider.setFilterByValue(StrBean::getValue, "Bar");
		assertEquals(23, sizeWithUnfilteredQuery());
	}

	@Test
	public void addFilter_withPreviousFilter() {
		dataProvider.setFilterByValue(StrBean::getValue, "Foo");
		dataProvider.addFilter(item -> item.getId() > 50);
		assertEquals("Both filters should be used", 17,
				sizeWithUnfilteredQuery());
	}

	@Test
	public void addFilter_noPreviousFilter() {
		dataProvider.addFilter(item -> item.getId() > 50);
		assertEquals(48, sizeWithUnfilteredQuery());
	}

	@Test
	public void addFilter_valueProvider() {
		dataProvider.setFilter(item -> item.getId() > 50);
		dataProvider.addFilter(StrBean::getValue, "Foo"::equals);
		assertEquals("Both filters should be used", 17,
				sizeWithUnfilteredQuery());
	}

	@Test
	public void addFilterEquals() {
		dataProvider.setFilter(item -> item.getId() > 50);
		dataProvider.addFilterByValue(StrBean::getValue, "Foo");
		assertEquals("Both filters should be used", 17,
				sizeWithUnfilteredQuery());
	}

	@Test
	public void addFilter_firstAddedUsedFirst() {
		// Filters short-circuit: once one rejects, later ones must not run.
		dataProvider.addFilter(item -> false);
		dataProvider.addFilter(item -> {
			fail("This filter should never be invoked");
			return true;
		});
		assertEquals(0, sizeWithUnfilteredQuery());
	}

	@Test
	public void combineProviderAndQueryFilters() {
		dataProvider.addFilterByValue(StrBean::getValue, "Foo");
		int size = dataProvider.size(new Query<>(item -> item.getId() > 50));
		assertEquals("Both filters should be used", 17, size);
	}

	@Test
	public void providerFilterBeforeQueryFilter() {
		// Provider-level filter is applied before the query filter.
		dataProvider.setFilter(item -> false);
		int size = dataProvider.size(new Query<>(item -> {
			fail("This filter should never be invoked");
			return true;
		}));
		assertEquals(0, size);
	}

	@Test
	public void filteringBy_itemPredicate() {
		DataProvider<StrBean, String> filteringBy = dataProvider.filteringBy(
				(item, filterValue) -> item.getValue().equals(filterValue));
		assertSizeWithFilter(36, filteringBy, "Foo");
	}

	@Test
	public void filteringBy_equals() {
		DataProvider<StrBean, String> filteringBy = dataProvider
				.filteringByEquals(StrBean::getValue);
		assertSizeWithFilter(36, filteringBy, "Foo");
	}

	@Test
	public void filteringBy_propertyValuePredicate() {
		DataProvider<StrBean, Integer> filteringBy = dataProvider.filteringBy(
				StrBean::getId,
				(propertyValue, filterValue) -> propertyValue >= filterValue);
		assertSizeWithFilter(90, filteringBy, 10);
	}

	@Test
	public void filteringBy_caseInsensitiveSubstring() {
		DataProvider<StrBean, String> filteringBy = dataProvider
				.filteringBySubstring(StrBean::getValue, Locale.ENGLISH);
		assertSizeWithFilter(36, filteringBy, "oo");
		assertSizeWithFilter(36, filteringBy, "Oo");
	}

	@Test
	public void filterBy_caseInsensitivePrefix() {
		DataProvider<StrBean, String> filteringBy = dataProvider
				.filteringByPrefix(StrBean::getValue, Locale.ENGLISH);
		assertSizeWithFilter(36, filteringBy, "Fo");
		assertSizeWithFilter(36, filteringBy, "fo");
		// "oo" is a substring but not a prefix of "Foo", so nothing matches.
		assertSizeWithFilter(0, filteringBy, "oo");
	}

	@Override
	protected void setSortOrder(List<QuerySortOrder> sortOrder,
			Comparator<StrBean> comp) {
		SerializableComparator<StrBean> serializableComp = comp::compare;
		getDataProvider().setSortComparator(serializableComp);
	}
}
package net.jiyuu_ni.seiidex.dto.json;

import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import javax.persistence.EntityManager;
import javax.persistence.Query;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import net.jiyuu_ni.seiidex.jpa.PokemonFormGeneration;
import net.jiyuu_ni.seiidex.jpa.VersionGroup;
import net.jiyuu_ni.seiidex.util.FileOperations;

/**
 * JSON DTO for a Generation 6 Pokemon: extends the generic Pokemon fields with
 * Mega Evolution status, abilities, Gen-2+ stats, EVs, per-version-group move
 * lists, and breeding data. Populated from JPA entities, serialized via Jackson.
 */
public class Gen6Pokemon extends GenericPokemon {

	// Generation this DTO represents; used to look up version groups.
	private static final int THIS_GEN = 6;

	private Logger logger = LoggerFactory.getLogger(Gen6Pokemon.class);

	//Is this a Mega Evolution?
	private boolean isMega = false;

	//Abilities (including hidden ability, if one exists)
	private PokemonAbilitiesDTO abilities;

	//Stats (using Generation 2+ system)
	private PokemonStatsGen2PlusDTO stats;

	//Effort Values (EVs)
	private PokemonEffortValuesDTO effortValues;

	//Move set (for Generation 2 and later), keyed by formatted game name
	private Map<String, PokemonMoveListDTO> moves;

	//Breeding statistics
	private PokemonBreedingDTO breeding;

	/*
	 * Default constructor
	 *
	 * Set object to default values:
	 * - Pokemon isn't a Mega Evolution
	 * - Pokemon is not in a special form
	 */
	public Gen6Pokemon() {
		super();
		isMega = false;
		form = "None";
	}

	public boolean isMega() {
		return isMega;
	}

	public void setMega(boolean isMega) {
		this.isMega = isMega;
	}

	public PokemonAbilitiesDTO getAbilities() {
		return abilities;
	}

	public void setAbilities(PokemonAbilitiesDTO abilities) {
		this.abilities = abilities;
	}

	public PokemonStatsGen2PlusDTO getStats() {
		return stats;
	}

	public void setStats(PokemonStatsGen2PlusDTO stats) {
		this.stats = stats;
	}

	public PokemonEffortValuesDTO getEffortValues() {
		return effortValues;
	}

	public void setEffortValues(PokemonEffortValuesDTO effortValues) {
		this.effortValues = effortValues;
	}

	public Map<String, PokemonMoveListDTO> getMoves() {
		return moves;
	}

	public void setMoves(Map<String, PokemonMoveListDTO> moves) {
		this.moves = moves;
	}

	public PokemonBreedingDTO getBreeding() {
		return breeding;
	}

	public void setBreeding(PokemonBreedingDTO breeding) {
		this.breeding = breeding;
	}

	/**
	 * Fills every field of this DTO from the given form/generation entity,
	 * delegating each section to a dedicated populate helper.
	 *
	 * @param formGen JPA entity linking a Pokemon form to a generation
	 * @param em      entity manager used for the version-group query
	 */
	public void populateAllFields(PokemonFormGeneration formGen, EntityManager em) {
		super.populateAllFields(formGen, em);

		String methodName = "populateAllFields";
		logger.debug("Entering " + methodName);

		this.setMega(formGen.getPokemonForm().getIsMega());
		populateAbilitiesFromQuery(formGen);
		populateStatsFromQuery(formGen);
		populateEffortValuesFromQuery(formGen);
		populateMovesFromQuery(formGen, em);
		populateBreedingFromQuery(formGen);

		logger.debug("Exiting " + methodName);
	}

	// Builds and assigns the breeding section from the form/generation entity.
	private void populateBreedingFromQuery(PokemonFormGeneration formGen) {
		String methodName = "populateBreedingFromQuery";
		logger.debug("Entering " + methodName);

		PokemonBreedingDTO pokeBreeding = new PokemonBreedingDTO();
		pokeBreeding.populateAllFields(formGen);
		this.setBreeding(pokeBreeding);

		logger.debug("Exiting " + methodName);
	}

	// Builds one move list per Gen-6 version group, keyed by formatted game name.
	private void populateMovesFromQuery(PokemonFormGeneration formGen, EntityManager em) {
		String methodName = "populateMovesFromQuery";
		logger.debug("Entering " + methodName);

		Query versionGroupQuery = em.createNamedQuery("VersionGroup.findAllByGenerationId")
				.setParameter("genId", THIS_GEN);
		List<VersionGroup> versionGroupList = versionGroupQuery.getResultList();

		// NOTE(review): this comparator never returns 0, so "equal" keys are
		// kept as separate entries instead of overwriting each other. That
		// deliberately violates the Comparator contract and would break
		// get()/remove() for such keys — presumably version-group names are
		// unique and this never triggers; confirm before relying on lookups.
		Map<String, PokemonMoveListDTO> pokeMovesList =
				new TreeMap<String, PokemonMoveListDTO>(new Comparator<String>() {
					@Override
					public int compare(String first, String second) {
						int result = first.compareTo(second);
						if(result == 0) {
							result = 1;
						}
						return result;
					}
				});

		for(VersionGroup groupObj : versionGroupList) {
			String groupName = super.formatGameName(groupObj.getIdentifier());
			PokemonMoveListDTO pokeMoves = new PokemonMoveListDTO();
			pokeMoves.populateAllFields(formGen, groupObj, em);
			pokeMovesList.put(groupName, pokeMoves);
		}

		this.setMoves(pokeMovesList);

		logger.debug("Exiting " + methodName);
	}

	// Builds and assigns the effort-value (EV) section.
	private void populateEffortValuesFromQuery(PokemonFormGeneration formGen) {
		String methodName = "populateEffortValuesFromQuery";
		logger.debug("Entering " + methodName);

		PokemonEffortValuesDTO pokeEVs = new PokemonEffortValuesDTO();
		pokeEVs.populateAllFields(formGen);
		this.setEffortValues(pokeEVs);

		logger.debug("Exiting " + methodName);
	}

	// Builds and assigns the Gen-2+ stats section.
	private void populateStatsFromQuery(PokemonFormGeneration formGen) {
		String methodName = "populateStatsFromQuery";
		logger.debug("Entering " + methodName);

		PokemonStatsGen2PlusDTO pokeStats = new PokemonStatsGen2PlusDTO();
		pokeStats.populateAllFields(formGen);
		this.setStats(pokeStats);

		logger.debug("Exiting " + methodName);
	}

	// Builds and assigns the abilities section (including hidden ability).
	private void populateAbilitiesFromQuery(PokemonFormGeneration formGen) {
		String methodName = "populateAbilitiesFromQuery";
		logger.debug("Entering " + methodName);

		PokemonAbilitiesDTO pokeAbilities = new PokemonAbilitiesDTO();
		pokeAbilities.populateAllFields(formGen);
		this.setAbilities(pokeAbilities);

		logger.debug("Exiting " + methodName);
	}

	/**
	 * Serializes this DTO to a JSON string via Jackson.
	 *
	 * @return JSON representation, or null if serialization failed
	 */
	@Override
	public String toJsonString() {
		String methodName = "toJsonString";
		logger.debug("Entering " + methodName);

		ObjectMapper mapper = new ObjectMapper();
		String result = null;

		try {
			result = mapper.writeValueAsString(this);
		} catch (JsonProcessingException e) {
			// Serialization failure is logged and surfaced as a null result.
			logger.error(e.getLocalizedMessage());
		}

		logger.debug("Exiting " + methodName);
		return result;
	}
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.application.options.schemes;

import com.intellij.ide.IdeBundle;
import com.intellij.openapi.options.Scheme;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.JBColor;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.scale.JBUIScale;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.*;
import java.util.Collection;
import java.util.function.Consumer;

import static com.intellij.openapi.util.text.StringUtil.isEmptyOrSpaces;

/**
 * A scheme selector combo box with an inline rename mode: a CardLayout swaps
 * between the combo (card 1) and a text field (card 2) used to edit the
 * selected scheme's name, with live validation against the schemes model.
 */
public class EditableSchemesCombo<T extends Scheme> {

  public static final int COMBO_WIDTH = 200;

  // endregion
  private SchemesCombo<T> myComboBox;
  private final JPanel myRootPanel;
  private final AbstractSchemesPanel<T, ?> mySchemesPanel;
  private final CardLayout myLayout;
  private final JTextField myNameEditorField;
  // Non-null only while a rename is in progress.
  private @Nullable NameEditData myNameEditData;

  private final static KeyStroke ESC_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false);
  private final static KeyStroke ENTER_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0, false);

  private final static Color MODIFIED_ITEM_FOREGROUND = JBColor.namedColor("ComboBox.modifiedItemForeground", JBColor.BLUE);

  public EditableSchemesCombo(@NotNull AbstractSchemesPanel<T, ?> schemesPanel) {
    mySchemesPanel = schemesPanel;
    myLayout = new CardLayout();
    myRootPanel = new JPanel(myLayout);
    createCombo();
    myRootPanel.add(myComboBox);
    myNameEditorField = createNameEditorField();
    myRootPanel.add(myNameEditorField);
    // Fixed width; height follows the editor field so both cards line up.
    myRootPanel.setPreferredSize(new Dimension(JBUIScale.scale(COMBO_WIDTH), myNameEditorField.getPreferredSize().height));
    myRootPanel.setMaximumSize(new Dimension(JBUIScale.scale(COMBO_WIDTH), Short.MAX_VALUE));
  }

  // Builds the rename field: ESC reverts and cancels, ENTER/focus-loss commit,
  // and typing re-validates the name on every document change.
  private JTextField createNameEditorField() {
    JTextField nameEditorField = new JTextField();
    nameEditorField.registerKeyboardAction(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        revertSchemeName();
        cancelEdit();
      }
    }, ESC_KEY_STROKE, JComponent.WHEN_FOCUSED);
    nameEditorField.registerKeyboardAction(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        stopEdit();
      }
    }, ENTER_KEY_STROKE, JComponent.WHEN_FOCUSED);
    nameEditorField.addFocusListener(new FocusAdapter() {
      @Override
      public void focusLost(FocusEvent e) {
        stopEdit();
      }
    });
    nameEditorField.getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        validateOnTyping();
      }
    });
    return nameEditorField;
  }

  // Shows a validation error while typing, or the editing hint when valid.
  private void validateOnTyping() {
    if (myNameEditData == null) return;
    String currName = myNameEditorField.getText();
    if (!currName.equals(myNameEditData.initialName)) {
      String validationMessage = validateSchemeName(currName, myNameEditData.isProjectScheme);
      if (validationMessage != null) {
        mySchemesPanel.showInfo(validationMessage, MessageType.ERROR);
        return;
      }
    }
    showHint();
  }

  private void showHint() {
    mySchemesPanel.showInfo(IdeBundle.message("hint.scheme.editing"), MessageType.INFO);
  }

  // Restores the field text to the name the edit started with.
  private void revertSchemeName() {
    if (myNameEditData != null) {
      myNameEditorField.setText(myNameEditData.initialName);
    }
  }

  public void updateSelected() {
    myComboBox.repaint();
  }

  // Commits the rename if valid; on validation failure stays in edit mode.
  private void stopEdit() {
    if (myNameEditData == null) {
      cancelEdit();
      return;
    }
    String newName = myNameEditorField.getText();
    String validationMessage = validateSchemeName(newName, myNameEditData.isProjectScheme);
    if (validationMessage != null) {
      mySchemesPanel.showInfo(validationMessage, MessageType.ERROR);
    }
    else {
      myNameEditData.nameConsumer.accept(newName);
      cancelEdit();
    }
  }

  // Leaves edit mode: flips back to the combo card and restores focus.
  public void cancelEdit() {
    mySchemesPanel.clearInfo();
    myLayout.first(myRootPanel);
    myNameEditData = null;
    final IdeFocusManager focusManager = IdeFocusManager.getGlobalInstance();
    focusManager.doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(myRootPanel, true));
  }

  private void createCombo() {
    myComboBox = new SchemesCombo<>() {
      @Override
      protected boolean supportsProjectSchemes() {
        return mySchemesPanel.supportsProjectSchemes();
      }

      @Override
      protected boolean isProjectScheme(@NotNull T scheme) {
        return mySchemesPanel.getModel().isProjectScheme(scheme);
      }

      @Override
      protected int getIndent(@NotNull T scheme) {
        return mySchemesPanel.getIndent(scheme);
      }

      @NotNull
      @Override
      protected SimpleTextAttributes getSchemeAttributes(T scheme) {
        SchemesModel<T> model = mySchemesPanel.getModel();
        // Non-deletable (built-in) schemes may render bold; schemes modified
        // from their default get the "modified" foreground color.
        SimpleTextAttributes baseAttributes = !useBoldForNonRemovableSchemes() || model.canDeleteScheme(scheme)
                                              ? SimpleTextAttributes.REGULAR_ATTRIBUTES
                                              : SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES;
        if (mySchemesPanel.highlightNonDefaultSchemes() && model.canResetScheme(scheme) && model.differsFromDefault(scheme)) {
          return baseAttributes.derive(-1, MODIFIED_ITEM_FOREGROUND, null, null);
        }
        return baseAttributes;
      }
    };
    myComboBox.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        mySchemesPanel.getActions().onSchemeChanged(getSelectedScheme());
      }
    });
  }

  /**
   * Enters rename mode: shows the editor card pre-filled with
   * {@code initialName}; {@code nameConsumer} receives the committed name.
   */
  public void startEdit(@NotNull String initialName,
                        boolean isProjectScheme,
                        @NotNull Consumer<? super String> nameConsumer) {
    showHint();
    myNameEditData = new NameEditData(initialName, nameConsumer, isProjectScheme);
    myNameEditorField.setText(initialName);
    myLayout.last(myRootPanel);
    SwingUtilities.invokeLater(() -> {
      final IdeFocusManager focusManager = IdeFocusManager.getGlobalInstance();
      focusManager.doWhenFocusSettlesDown(() -> focusManager.requestFocus(myNameEditorField, true));
    });
  }

  public void resetSchemes(@NotNull Collection<? extends T> schemes) {
    myComboBox.resetSchemes(schemes);
  }

  @Nullable
  public T getSelectedScheme() {
    return myComboBox.getSelectedScheme();
  }

  public void selectScheme(@Nullable T scheme) {
    myComboBox.selectScheme(scheme);
  }

  public JComponent getComponent() {
    return myRootPanel;
  }

  private boolean useBoldForNonRemovableSchemes() {
    return mySchemesPanel.useBoldForNonRemovableSchemes();
  }

  // Returns a localized error message, or null when the name is acceptable.
  // The unchanged initial name is always considered valid.
  @Nullable
  @Nls
  private String validateSchemeName(@NotNull String name, boolean isProjectScheme) {
    if (myNameEditData != null && name.equals(myNameEditData.initialName)) return null;
    if (isEmptyOrSpaces(name)) {
      return IdeBundle.message("error.empty.name");
    }
    else if (mySchemesPanel.getModel().containsScheme(name, isProjectScheme)) {
      return IdeBundle.message("error.name.already.exists");
    }
    return null;
  }

  // Immutable state of an in-progress rename.
  private static final class NameEditData {
    private @NotNull final String initialName;
    private @NotNull final Consumer<? super String> nameConsumer;
    private final boolean isProjectScheme;

    private NameEditData(@NotNull String name,
                         @NotNull Consumer<? super String> nameConsumer,
                         boolean isProjectScheme) {
      initialName = name;
      this.nameConsumer = nameConsumer;
      this.isProjectScheme = isProjectScheme;
    }
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.service; import org.elasticsearch.Version; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState.Builder; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.LocalNodeMasterListener; import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import 
org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.PrioritizedRunnable; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Executor; import java.util.concurrent.Future; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; /** * */ public class InternalClusterService extends AbstractLifecycleComponent<ClusterService> implements ClusterService { public static final String SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD = "cluster.service.slow_task_logging_threshold"; public static final String 
SETTING_CLUSTER_SERVICE_RECONNECT_INTERVAL = "cluster.service.reconnect_interval"; public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; private final ThreadPool threadPool; private final DiscoveryService discoveryService; private final OperationRouting operationRouting; private final TransportService transportService; private final NodeSettingsService nodeSettingsService; private final DiscoveryNodeService discoveryNodeService; private final Version version; private final TimeValue reconnectInterval; private TimeValue slowTaskLoggingThreshold; private volatile PrioritizedEsThreadPoolExecutor updateTasksExecutor; /** * Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine */ private final Collection<ClusterStateListener> priorityClusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>(); // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API private final Collection<ClusterStateListener> postAppliedListeners = new CopyOnWriteArrayList<>(); private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(priorityClusterStateListeners, clusterStateListeners, lastClusterStateListeners); private final LocalNodeMasterListeners localNodeMasterListeners; private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue(); private volatile ClusterState clusterState; private final ClusterBlocks.Builder initialBlocks; private volatile ScheduledFuture reconnectToNodes; @Inject public InternalClusterService(Settings settings, DiscoveryService discoveryService, OperationRouting operationRouting, TransportService transportService, NodeSettingsService nodeSettingsService, ThreadPool threadPool, ClusterName clusterName, 
DiscoveryNodeService discoveryNodeService, Version version) { super(settings); this.operationRouting = operationRouting; this.transportService = transportService; this.discoveryService = discoveryService; this.threadPool = threadPool; this.nodeSettingsService = nodeSettingsService; this.discoveryNodeService = discoveryNodeService; this.version = version; // will be replaced on doStart. this.clusterState = ClusterState.builder(clusterName).build(); this.nodeSettingsService.setClusterService(this); this.nodeSettingsService.addListener(new ApplySettings()); this.reconnectInterval = this.settings.getAsTime(SETTING_CLUSTER_SERVICE_RECONNECT_INTERVAL, TimeValue.timeValueSeconds(10)); this.slowTaskLoggingThreshold = this.settings.getAsTime(SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, TimeValue.timeValueSeconds(30)); localNodeMasterListeners = new LocalNodeMasterListeners(threadPool); initialBlocks = ClusterBlocks.builder().addGlobalBlock(discoveryService.getNoMasterBlock()); } public NodeSettingsService settingsService() { return this.nodeSettingsService; } @Override public void addInitialStateBlock(ClusterBlock block) throws IllegalStateException { if (lifecycle.started()) { throw new IllegalStateException("can't set initial block when started"); } initialBlocks.addGlobalBlock(block); } @Override public void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException { if (lifecycle.started()) { throw new IllegalStateException("can't set initial block when started"); } initialBlocks.removeGlobalBlock(block); } @Override protected void doStart() { add(localNodeMasterListeners); this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build(); this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME)); this.reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, new ReconnectToNodes()); Map<String, String> nodeAttributes = 
discoveryNodeService.buildAttributes(); // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling final String nodeId = DiscoveryService.generateNodeId(settings); final TransportAddress publishAddress = transportService.boundAddress().publishAddress(); DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, publishAddress, nodeAttributes, version); DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id()); this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build(); this.transportService.setLocalNode(localNode); } @Override protected void doStop() { FutureUtils.cancel(this.reconnectToNodes); for (NotifyTimeout onGoingTimeout : onGoingTimeouts) { onGoingTimeout.cancel(); onGoingTimeout.listener.onClose(); } ThreadPool.terminate(updateTasksExecutor, 10, TimeUnit.SECONDS); remove(localNodeMasterListeners); } @Override protected void doClose() { } @Override public DiscoveryNode localNode() { return clusterState.getNodes().localNode(); } @Override public OperationRouting operationRouting() { return operationRouting; } @Override public ClusterState state() { return this.clusterState; } @Override public void addFirst(ClusterStateListener listener) { priorityClusterStateListeners.add(listener); } @Override public void addLast(ClusterStateListener listener) { lastClusterStateListeners.add(listener); } @Override public void add(ClusterStateListener listener) { clusterStateListeners.add(listener); } @Override public void remove(ClusterStateListener listener) { clusterStateListeners.remove(listener); priorityClusterStateListeners.remove(listener); lastClusterStateListeners.remove(listener); postAppliedListeners.remove(listener); for (Iterator<NotifyTimeout> it = onGoingTimeouts.iterator(); it.hasNext(); ) { NotifyTimeout timeout = it.next(); if (timeout.listener.equals(listener)) { timeout.cancel(); it.remove(); } } } @Override 
public void add(LocalNodeMasterListener listener) {
        localNodeMasterListeners.add(listener);
    }

    @Override
    public void remove(LocalNodeMasterListener listener) {
        localNodeMasterListeners.remove(listener);
    }

    /**
     * Registers a timeout-aware listener. Registration is performed on the
     * cluster-state update thread (HIGH priority) so that postAdded() is called
     * in order with respect to state applications. If a timeout is given, a
     * timeout notification is scheduled on the generic thread pool.
     */
    @Override
    public void add(@Nullable final TimeValue timeout, final TimeoutClusterStateListener listener) {
        if (lifecycle.stoppedOrClosed()) {
            listener.onClose();
            return;
        }
        // call the post added notification on the same event thread
        try {
            updateTasksExecutor.execute(new SourcePrioritizedRunnable(Priority.HIGH, "_add_listener_") {
                @Override
                public void run() {
                    if (timeout != null) {
                        NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
                        notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
                        onGoingTimeouts.add(notifyTimeout);
                    }
                    postAppliedListeners.add(listener);
                    listener.postAdded();
                }
            });
        } catch (EsRejectedExecutionException e) {
            // executor is shutting down; treat as a close, otherwise propagate
            if (lifecycle.stoppedOrClosed()) {
                listener.onClose();
            } else {
                throw e;
            }
        }
    }

    @Override
    public void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask) {
        submitStateUpdateTask(source, Priority.NORMAL, updateTask);
    }

    /**
     * Queues a cluster-state update task on the single prioritized update
     * executor. If the task declares a timeout, expiry is reported via
     * onFailure with a ProcessClusterEventTimeoutException (delivered on the
     * generic pool, not the update thread).
     */
    @Override
    public void submitStateUpdateTask(final String source, Priority priority, final ClusterStateUpdateTask updateTask) {
        if (!lifecycle.started()) {
            return;
        }
        try {
            final UpdateTask task = new UpdateTask(source, priority, updateTask);
            if (updateTask.timeout() != null) {
                updateTasksExecutor.execute(task, threadPool.scheduler(), updateTask.timeout(), new Runnable() {
                    @Override
                    public void run() {
                        threadPool.generic().execute(new Runnable() {
                            @Override
                            public void run() {
                                updateTask.onFailure(task.source(), new ProcessClusterEventTimeoutException(updateTask.timeout(), task.source()));
                            }
                        });
                    }
                });
            } else {
                updateTasksExecutor.execute(task);
            }
        } catch (EsRejectedExecutionException e) {
            // ignore cases where we are shutting down..., there is really nothing interesting
            // to be done here...
            if (!lifecycle.stoppedOrClosed()) {
                throw e;
            }
        }
    }

    /**
     * Snapshot of the tasks currently queued on the update executor, for the
     * pending-tasks API. Entries whose task slot was already nulled (executed)
     * are skipped.
     */
    @Override
    public List<PendingClusterTask> pendingTasks() {
        PrioritizedEsThreadPoolExecutor.Pending[] pendings = updateTasksExecutor.getPending();
        List<PendingClusterTask> pendingClusterTasks = new ArrayList<>(pendings.length);
        for (PrioritizedEsThreadPoolExecutor.Pending pending : pendings) {
            final String source;
            final long timeInQueue;
            // we have to capture the task as it will be nulled after execution and we don't want to change while we check things here.
            final Object task = pending.task;
            if (task == null) {
                continue;
            } else if (task instanceof SourcePrioritizedRunnable) {
                SourcePrioritizedRunnable runnable = (SourcePrioritizedRunnable) task;
                source = runnable.source();
                timeInQueue = runnable.getAgeInMillis();
            } else {
                assert false : "expected SourcePrioritizedRunnable got " + task.getClass();
                source = "unknown [" + task.getClass() + "]";
                timeInQueue = 0;
            }
            pendingClusterTasks.add(new PendingClusterTask(pending.insertionOrder, pending.priority, new StringText(source), timeInQueue, pending.executing));
        }
        return pendingClusterTasks;
    }

    @Override
    public int numberOfPendingTasks() {
        return updateTasksExecutor.getNumberOfPendingTasks();
    }

    @Override
    public TimeValue getMaxTaskWaitTime() {
        return updateTasksExecutor.getMaxTaskWaitTime();
    }

    /** asserts that the current thread is the cluster state update thread */
    public boolean assertClusterStateThread() {
        assert Thread.currentThread().getName().contains(InternalClusterService.UPDATE_THREAD_NAME) : "not called from the cluster state update thread";
        return true;
    }

    // A prioritized runnable that remembers the "source" string describing who
    // submitted it, for logging and the pending-tasks API.
    static abstract class SourcePrioritizedRunnable extends PrioritizedRunnable {
        protected final String source;

        public SourcePrioritizedRunnable(Priority priority, String source) {
            super(priority);
            this.source = source;
        }

        public String source() {
            return source;
        }
    }

    // Wraps a submitted ClusterStateUpdateTask; run() (below) executes the task,
    // applies/publishes the resulting state and notifies listeners.
    class UpdateTask extends SourcePrioritizedRunnable {

        public final ClusterStateUpdateTask updateTask;

        UpdateTask(String source, Priority priority,
ClusterStateUpdateTask updateTask) {
            super(priority, source);
            this.updateTask = updateTask;
        }

        /**
         * Executes one cluster-state update on the single update thread:
         * 1) run the task against the current state;
         * 2) if the state changed and we are master, bump versions and set up acking;
         * 3) connect to added nodes, publish (master only), swap the local state,
         *    notify pre-applied listeners, disconnect removed nodes, notify
         *    post-applied listeners, then ack for the master itself.
         * Failures in execute() are reported via updateTask.onFailure; failures
         * while applying are only logged (see trailing TODO).
         */
        @Override
        public void run() {
            if (!lifecycle.started()) {
                logger.debug("processing [{}]: ignoring, cluster_service not started", source);
                return;
            }
            logger.debug("processing [{}]: execute", source);
            ClusterState previousClusterState = clusterState;
            // master-only tasks are failed fast if we lost mastership while queued
            if (!previousClusterState.nodes().localNodeMaster() && updateTask.runOnlyOnMaster()) {
                logger.debug("failing [{}]: local node is no longer master", source);
                updateTask.onNoLongerMaster(source);
                return;
            }
            ClusterState newClusterState;
            long startTimeNS = System.nanoTime();
            try {
                newClusterState = updateTask.execute(previousClusterState);
            } catch (Throwable e) {
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)));
                if (logger.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder("failed to execute cluster state update in ").append(executionTime).append(", state:\nversion [").append(previousClusterState.version()).append("], source [").append(source).append("]\n");
                    sb.append(previousClusterState.nodes().prettyPrint());
                    sb.append(previousClusterState.routingTable().prettyPrint());
                    sb.append(previousClusterState.getRoutingNodes().prettyPrint());
                    logger.trace(sb.toString(), e);
                }
                warnAboutSlowTaskIfNeeded(executionTime, source);
                updateTask.onFailure(source, e);
                return;
            }

            // identity comparison: tasks return the same instance to signal "no change"
            if (previousClusterState == newClusterState) {
                if (updateTask instanceof AckedClusterStateUpdateTask) {
                    //no need to wait for ack if nothing changed, the update can be counted as acknowledged
                    ((AckedClusterStateUpdateTask) updateTask).onAllNodesAcked(null);
                }
                updateTask.clusterStateProcessed(source, previousClusterState, newClusterState);
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)));
                logger.debug("processing [{}]: took {} no change in cluster_state", source, executionTime);
                warnAboutSlowTaskIfNeeded(executionTime, source);
                return;
            }

            try {
                Discovery.AckListener ackListener = new NoOpAckListener();
                if (newClusterState.nodes().localNodeMaster()) {
                    // only the master controls the version numbers
                    Builder builder = ClusterState.builder(newClusterState).incrementVersion();
                    if (previousClusterState.routingTable() != newClusterState.routingTable()) {
                        builder.routingTable(RoutingTable.builder(newClusterState.routingTable()).version(newClusterState.routingTable().version() + 1).build());
                    }
                    if (previousClusterState.metaData() != newClusterState.metaData()) {
                        builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1));
                    }
                    newClusterState = builder.build();

                    if (updateTask instanceof AckedClusterStateUpdateTask) {
                        final AckedClusterStateUpdateTask ackedUpdateTask = (AckedClusterStateUpdateTask) updateTask;
                        if (ackedUpdateTask.ackTimeout() == null || ackedUpdateTask.ackTimeout().millis() == 0) {
                            ackedUpdateTask.onAckTimeout();
                        } else {
                            try {
                                ackListener = new AckCountDownListener(ackedUpdateTask, newClusterState.version(), newClusterState.nodes(), threadPool);
                            } catch (EsRejectedExecutionException ex) {
                                if (logger.isDebugEnabled()) {
                                    logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex);
                                }
                                //timeout straightaway, otherwise we could wait forever as the timeout thread has not started
                                ackedUpdateTask.onAckTimeout();
                            }
                        }
                    }
                }

                newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED);

                if (logger.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder("cluster state updated, source [").append(source).append("]\n");
                    sb.append(newClusterState.prettyPrint());
                    logger.trace(sb.toString());
                } else if (logger.isDebugEnabled()) {
                    logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), source);
                }

                ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, newClusterState, previousClusterState);
                // new cluster state, notify all listeners
                final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
                if (nodesDelta.hasChanges() && logger.isInfoEnabled()) {
                    String summary = nodesDelta.shortSummary();
                    if (summary.length() > 0) {
                        logger.info("{}, reason: {}", summary, source);
                    }
                }

                // TODO, do this in parallel (and wait)
                for (DiscoveryNode node : nodesDelta.addedNodes()) {
                    if (!nodeRequiresConnection(node)) {
                        continue;
                    }
                    try {
                        transportService.connectToNode(node);
                    } catch (Throwable e) {
                        // the fault detection will detect it as failed as well
                        logger.warn("failed to connect to node [" + node + "]", e);
                    }
                }

                // if we are the master, publish the new state to all nodes
                // we publish here before we send a notification to all the listeners, since if it fails
                // we don't want to notify
                if (newClusterState.nodes().localNodeMaster()) {
                    logger.debug("publishing cluster state version [{}]", newClusterState.version());
                    try {
                        discoveryService.publish(clusterChangedEvent, ackListener);
                    } catch (Discovery.FailedToCommitClusterStateException t) {
                        logger.warn("failing [{}]: failed to commit cluster state version [{}]", t, source, newClusterState.version());
                        updateTask.onFailure(source, t);
                        return;
                    }
                }

                // update the current cluster state
                clusterState = newClusterState;
                logger.debug("set local cluster state to version {}", newClusterState.version());
                for (ClusterStateListener listener : preAppliedListeners) {
                    try {
                        listener.clusterChanged(clusterChangedEvent);
                    } catch (Exception ex) {
                        logger.warn("failed to notify ClusterStateListener", ex);
                    }
                }

                for (DiscoveryNode node : nodesDelta.removedNodes()) {
                    try {
                        transportService.disconnectFromNode(node);
                    } catch (Throwable e) {
                        logger.warn("failed to disconnect to node [" + node + "]", e);
                    }
                }

                newClusterState.status(ClusterState.ClusterStateStatus.APPLIED);

                for (ClusterStateListener listener : postAppliedListeners) {
                    try {
                        listener.clusterChanged(clusterChangedEvent);
                    } catch (Exception ex) {
                        logger.warn("failed to notify ClusterStateListener", ex);
                    }
                }

                //manual ack only from the master at the end of the publish
                if (newClusterState.nodes().localNodeMaster()) {
                    try {
                        ackListener.onNodeAck(newClusterState.nodes().localNode(), null);
                    } catch (Throwable t) {
                        logger.debug("error while processing ack for master node [{}]", t, newClusterState.nodes().localNode());
                    }
                }

                updateTask.clusterStateProcessed(source, previousClusterState, newClusterState);

                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)));
                logger.debug("processing [{}]: took {} done applying updated cluster_state (version: {}, uuid: {})", source, executionTime, newClusterState.version(), newClusterState.stateUUID());
                warnAboutSlowTaskIfNeeded(executionTime, source);
            } catch (Throwable t) {
                TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)));
                StringBuilder sb = new StringBuilder("failed to apply updated cluster state in ").append(executionTime).append(":\nversion [").append(newClusterState.version()).append("], uuid [").append(newClusterState.stateUUID()).append("], source [").append(source).append("]\n");
                sb.append(newClusterState.nodes().prettyPrint());
                sb.append(newClusterState.routingTable().prettyPrint());
                sb.append(newClusterState.getRoutingNodes().prettyPrint());
                logger.warn(sb.toString(), t);
                // TODO: do we want to call updateTask.onFailure here?
}
        }
    }

    // Logs a warning when a cluster-state update task exceeded the configured
    // slow-task threshold (see SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD).
    private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source) {
        if (executionTime.getMillis() > slowTaskLoggingThreshold.getMillis()) {
            logger.warn("cluster state update task [{}] took {} above the warn threshold of {}", source, executionTime, slowTaskLoggingThreshold);
        }
    }

    // Scheduled wrapper that fires a timeout notification for a
    // TimeoutClusterStateListener unless cancelled first.
    class NotifyTimeout implements Runnable {
        final TimeoutClusterStateListener listener;
        final TimeValue timeout;
        volatile ScheduledFuture future;

        NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) {
            this.listener = listener;
            this.timeout = timeout;
        }

        public void cancel() {
            FutureUtils.cancel(future);
        }

        @Override
        public void run() {
            if (future != null && future.isCancelled()) {
                return;
            }
            if (lifecycle.stoppedOrClosed()) {
                listener.onClose();
            } else {
                listener.onTimeout(this.timeout);
            }
            // note, we rely on the listener to remove itself in case of timeout if needed
        }
    }

    /**
     * Periodic task (rescheduled at reconnectInterval) that re-establishes
     * transport connections to known cluster nodes and prunes failure counters
     * for nodes that have left the cluster. Reconnect failures are only logged
     * every 6th consecutive failure to avoid log spam.
     */
    private class ReconnectToNodes implements Runnable {

        private ConcurrentMap<DiscoveryNode, Integer> failureCount = ConcurrentCollections.newConcurrentMap();

        @Override
        public void run() {
            // master node will check against all nodes if its alive with certain discoveries implementations,
            // but we can't rely on that, so we check on it as well
            for (DiscoveryNode node : clusterState.nodes()) {
                if (lifecycle.stoppedOrClosed()) {
                    return;
                }
                if (!nodeRequiresConnection(node)) {
                    continue;
                }
                if (clusterState.nodes().nodeExists(node.id())) { // we double check existence of node since connectToNode might take time...
                    if (!transportService.nodeConnected(node)) {
                        try {
                            transportService.connectToNode(node);
                        } catch (Exception e) {
                            if (lifecycle.stoppedOrClosed()) {
                                return;
                            }
                            if (clusterState.nodes().nodeExists(node.id())) { // double check here as well, maybe its gone?
                                Integer nodeFailureCount = failureCount.get(node);
                                if (nodeFailureCount == null) {
                                    nodeFailureCount = 1;
                                } else {
                                    nodeFailureCount = nodeFailureCount + 1;
                                }
                                // log every 6th failure
                                if ((nodeFailureCount % 6) == 0) {
                                    // reset the failure count...
                                    nodeFailureCount = 0;
                                    logger.warn("failed to reconnect to node {}", e, node);
                                }
                                failureCount.put(node, nodeFailureCount);
                            }
                        }
                    }
                }
            }
            // go over and remove failed nodes that have been removed
            DiscoveryNodes nodes = clusterState.nodes();
            for (Iterator<DiscoveryNode> failedNodesIt = failureCount.keySet().iterator(); failedNodesIt.hasNext(); ) {
                DiscoveryNode failedNode = failedNodesIt.next();
                if (!nodes.nodeExists(failedNode.id())) {
                    failedNodesIt.remove();
                }
            }
            // reschedule ourselves unless the service is shutting down
            if (lifecycle.started()) {
                reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, this);
            }
        }
    }

    private boolean nodeRequiresConnection(DiscoveryNode node) {
        return localNode().shouldConnectTo(node);
    }

    /**
     * Tracks whether the local node is master and dispatches onMaster/offMaster
     * callbacks to registered LocalNodeMasterListeners (each on its listener's
     * declared executor) when mastership changes.
     */
    private static class LocalNodeMasterListeners implements ClusterStateListener {

        private final List<LocalNodeMasterListener> listeners = new CopyOnWriteArrayList<>();
        private final ThreadPool threadPool;
        private volatile boolean master = false;

        private LocalNodeMasterListeners(ThreadPool threadPool) {
            this.threadPool = threadPool;
        }

        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            // transition: non-master -> master
            if (!master && event.localNodeMaster()) {
                master = true;
                for (LocalNodeMasterListener listener : listeners) {
                    Executor executor = threadPool.executor(listener.executorName());
                    executor.execute(new OnMasterRunnable(listener));
                }
                return;
            }

            // transition: master -> non-master
            if (master && !event.localNodeMaster()) {
                master = false;
                for (LocalNodeMasterListener listener : listeners) {
                    Executor executor = threadPool.executor(listener.executorName());
                    executor.execute(new OffMasterRunnable(listener));
                }
            }
        }

        private void add(LocalNodeMasterListener listener) {
            listeners.add(listener);
        }

        private void remove(LocalNodeMasterListener listener) {
            listeners.remove(listener);
        }

        private void clear() {
listeners.clear();
        }
    }

    // Delivers the onMaster callback on the listener's executor.
    private static class OnMasterRunnable implements Runnable {

        private final LocalNodeMasterListener listener;

        private OnMasterRunnable(LocalNodeMasterListener listener) {
            this.listener = listener;
        }

        @Override
        public void run() {
            listener.onMaster();
        }
    }

    // Delivers the offMaster callback on the listener's executor.
    private static class OffMasterRunnable implements Runnable {

        private final LocalNodeMasterListener listener;

        private OffMasterRunnable(LocalNodeMasterListener listener) {
            this.listener = listener;
        }

        @Override
        public void run() {
            listener.offMaster();
        }
    }

    // Used when a task does not require acking: all ack callbacks are no-ops.
    private static class NoOpAckListener implements Discovery.AckListener {
        @Override
        public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) {
        }

        @Override
        public void onTimeout() {
        }
    }

    /**
     * Counts down node acknowledgements for a published cluster state. Expects
     * one ack per node that mustAck(), plus always at least the master's ack.
     * When all expected acks arrive, cancels the timeout and notifies
     * onAllNodesAcked (passing the last failure seen, if any); if the ack
     * timeout fires first, fast-forwards the countdown and calls onAckTimeout.
     */
    private static class AckCountDownListener implements Discovery.AckListener {

        private static final ESLogger logger = Loggers.getLogger(AckCountDownListener.class);

        private final AckedClusterStateUpdateTask ackedUpdateTask;
        private final CountDown countDown;
        private final DiscoveryNodes nodes;
        private final long clusterStateVersion;
        private final Future<?> ackTimeoutCallback;
        private Throwable lastFailure;

        AckCountDownListener(AckedClusterStateUpdateTask ackedUpdateTask, long clusterStateVersion, DiscoveryNodes nodes, ThreadPool threadPool) {
            this.ackedUpdateTask = ackedUpdateTask;
            this.clusterStateVersion = clusterStateVersion;
            this.nodes = nodes;
            int countDown = 0;
            for (DiscoveryNode node : nodes) {
                if (ackedUpdateTask.mustAck(node)) {
                    countDown++;
                }
            }
            //we always wait for at least 1 node (the master)
            countDown = Math.max(1, countDown);
            logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion);
            this.countDown = new CountDown(countDown);
            this.ackTimeoutCallback = threadPool.schedule(ackedUpdateTask.ackTimeout(), ThreadPool.Names.GENERIC, new Runnable() {
                @Override
                public void run() {
                    onTimeout();
                }
            });
        }

        @Override
        public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) {
            if (!ackedUpdateTask.mustAck(node)) {
                //we always wait for the master ack anyway
                if (!node.equals(nodes.masterNode())) {
                    return;
                }
            }
            if (t == null) {
                logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);
            } else {
                this.lastFailure = t;
                logger.debug("ack received from node [{}], cluster_state update (version: {})", t, node, clusterStateVersion);
            }

            if (countDown.countDown()) {
                logger.trace("all expected nodes acknowledged cluster_state update (version: {})", clusterStateVersion);
                FutureUtils.cancel(ackTimeoutCallback);
                ackedUpdateTask.onAllNodesAcked(lastFailure);
            }
        }

        @Override
        public void onTimeout() {
            if (countDown.fastForward()) {
                logger.trace("timeout waiting for acknowledgement for cluster_state update (version: {})", clusterStateVersion);
                ackedUpdateTask.onAckTimeout();
            }
        }
    }

    // Applies dynamic settings refreshes; currently only the slow-task warn threshold.
    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            final TimeValue slowTaskLoggingThreshold = settings.getAsTime(SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD, InternalClusterService.this.slowTaskLoggingThreshold);
            InternalClusterService.this.slowTaskLoggingThreshold = slowTaskLoggingThreshold;
        }
    }
}
package com.capitalone.dashboard.collector;

import com.capitalone.dashboard.misc.HygieiaException;
import com.capitalone.dashboard.model.ChangeOrder;
import com.capitalone.dashboard.model.Cmdb;
import com.capitalone.dashboard.model.HpsmSoapModel;
import com.capitalone.dashboard.model.Incident;
import com.capitalone.dashboard.util.HpsmCollectorConstants;
import com.capitalone.dashboard.util.XmlUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.NamedNodeMap;

import javax.xml.namespace.QName;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPEnvelope;
import javax.xml.soap.SOAPMessage;
import javax.xml.soap.SOAPPart;
import javax.xml.soap.SOAPBody;
import javax.xml.soap.SOAPBodyElement;
import javax.xml.soap.SOAPException;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * HpsmClient implementation that queries HP Service Manager (HPSM) over its
 * SOAP web-service API to fetch CMDB configuration items, change orders, and
 * incidents.
*/
@Component
public class DefaultHpsmClient extends DefaultBaseClient implements HpsmClient {
    private static final Log LOG = LogFactory.getLog(DefaultHpsmClient.class);

    private final HpsmSettings hpsmSettings;

    // Logical item types assigned by getItemType() based on HPSM type/subtype settings.
    private static final String APP_TYPE = "app";
    private static final String COMPONENT_TYPE = "component";
    private static final String ENVIRONMENT_TYPE = "environment";

    // Fallback HPSM query templates (java.text.MessageFormat patterns; the doubled
    // single quotes emit literal quotes around the {0}/{1} date arguments).
    private static final String DEFAULT_CHANGE_QUERY_FORMAT = "(date.entered > ''{0}'' and date.entered < ''{1}'') or (close.time > ''{0}'' and close.time < ''{1}'')";
    private static final String DEFAULT_INCIDENT_QUERY_FORMAT = "(Severity=1 or Severity=2 or Severity=3 or Severity=4) and update.time > ''{0}'' and update.time < ''{1}''";
    private static final String QUERY_DATE_FORMAT = "MM/dd/yyyy HH:mm:ss";
    public static final int MILLISECONDS_IN_DAY = 1000 * 60 * 60 * 24;

    // Collector bookkeeping: last run timestamp and counts from the previous run.
    private long lastExecuted;
    private long incidentCount;
    private long changeCount;

    // Discriminates which "keys" section getSoapMessage() builds.
    private enum SoapRequestType {
        CMDB, CHANGE_ORDER, INCIDENT
    }

    @Override
    public void setLastExecuted(long lastExecuted) {
        this.lastExecuted = lastExecuted;
    };

    @Override
    public long getLastExecuted() {
        return lastExecuted;
    };

    @Override
    public long getIncidentCount() {
        return incidentCount;
    }

    @Override
    public void setIncidentCount(long incidentCount) {
        this.incidentCount = incidentCount;
    }

    @Override
    public long getChangeCount() {
        return changeCount;
    }

    @Override
    public void setChangeCount(long changeCount) {
        this.changeCount = changeCount;
    }

    @Autowired
    public DefaultHpsmClient(HpsmSettings hpsmSettings) {
        super(hpsmSettings);
        this.hpsmSettings = hpsmSettings;
    }

    /**
     * Retrieves all configuration items, querying once per configured app
     * status (or once with no status filter when none is configured).
     *
     * @return Combined List&lt;Cmdb&gt; of APPs, Components and Environments
     */
    @Override
    public List<Cmdb> getApps() throws HygieiaException {
        String limit = hpsmSettings.getCmdbBatchLimit();
        if(limit != null && !limit.isEmpty()) {
            LOG.info("NOTE: Collector run limited to " + limit + " results by property file setting.");
        }
        List<Cmdb> cmdbList = new ArrayList<>();
        String statusString = hpsmSettings.getAppStatus();
        // null entry means "no status filter"; a comma-separated setting yields one query per status
        String[] statusArray = (statusString == null || statusString.isEmpty()) ? new String[]{null} : statusString.split(",");
        for(int i = 0; i < statusArray.length; i++) {
            if(statusArray[i] != null) {
                // this is just for logging what we are doing - it is perfectly valid for this to be null, but will
                // only run once - additional logging is unnecessary.
                LOG.info("Retrieving for status: " + statusArray[i]);
            }
            cmdbList.addAll(getAppList(statusArray[i]));
            cmdbList.addAll(getComponentList(statusArray[i]));
            cmdbList.addAll(getEnvironmentList(statusArray[i]));
        }
        return cmdbList;
    }

    @Override
    public List<ChangeOrder> getChangeOrders() throws HygieiaException{
        List<ChangeOrder> changeOrderList;
        changeOrderList = getChangeOrderList();
        return changeOrderList;
    }

    @Override
    public List<Incident> getIncidents() throws HygieiaException{
        List<Incident> incidentList;
        incidentList = getIncidentList();
        return incidentList;
    }

    /**
     * Returns the CMDB items configured as "apps" (filtered by the app subtype
     * from settings and the given status).
     *
     * @param status optional HPSM status filter; may be null
     * @return List&lt;Cmdb&gt; of Apps
     */
    private List<Cmdb> getAppList(String status) throws HygieiaException{
        List<Cmdb> appList;

        HpsmSoapModel hpsmSoapModel = new HpsmSoapModel();
        hpsmSoapModel.setItemSubType(hpsmSettings.getAppSubType());
        hpsmSoapModel.setRequestTypeName(hpsmSettings.getDetailsRequestType());
        hpsmSoapModel.setSoapAction(hpsmSettings.getDetailsSoapAction());
        hpsmSoapModel.setStatus(status);

        appList = getConfigurationItemList(hpsmSoapModel);

        return appList;
    }

    /**
     * Returns the CMDB items configured as "components".
     *
     * @param status optional HPSM status filter; may be null
     * @return List&lt;Cmdb&gt; of Components
     */
    private List<Cmdb> getComponentList(String status) throws HygieiaException{
        List<Cmdb> componentList;

        HpsmSoapModel hpsmSoapModel = new HpsmSoapModel();
        hpsmSoapModel.setItemSubType(hpsmSettings.getCompSubType());
        hpsmSoapModel.setItemType(hpsmSettings.getCompType());
        hpsmSoapModel.setSoapAction(hpsmSettings.getDetailsSoapAction());
        hpsmSoapModel.setRequestTypeName(hpsmSettings.getDetailsRequestType());
        hpsmSoapModel.setStatus(status);

        componentList = getConfigurationItemList(hpsmSoapModel);

        return componentList;
    }

    /**
     *
     *
Returns List<Cmdb> of Environments
     * @return List<Cmdb>
     */
    private List<Cmdb> getEnvironmentList(String status) throws HygieiaException{
        List<Cmdb> componentList;

        HpsmSoapModel hpsmSoapModel = new HpsmSoapModel();
        hpsmSoapModel.setItemSubType(hpsmSettings.getEnvSubType());
        hpsmSoapModel.setItemType(hpsmSettings.getEnvType());
        hpsmSoapModel.setSoapAction(hpsmSettings.getDetailsSoapAction());
        hpsmSoapModel.setRequestTypeName(hpsmSettings.getDetailsRequestType());
        hpsmSoapModel.setStatus(status);

        componentList = getConfigurationItemList(hpsmSoapModel);

        return componentList;
    }

    /**
     * Takes hpsmSoapModel with settings set. Makes SOAP call and returns List <Cmdb> with details.
     * Pages through results in batches of the configured CMDB batch limit, continuing while the
     * response reports more="1" and status="SUCCESS".
     * NOTE(review): Integer.parseInt(batchLimit) will throw if the cmdbBatchLimit property is
     * unset or non-numeric — confirm the property is always configured.
     * @param hpsmSoapModel request template (type/subtype/status already set)
     * @return all configuration items matching the model
     */
    private List<Cmdb> getConfigurationItemList(HpsmSoapModel hpsmSoapModel) throws HygieiaException{
        List<Cmdb> configurationItemList = new ArrayList<>();
        boolean getMore = true;
        int startValue = 0;

        while(getMore){
            String batchLimit = hpsmSettings.getCmdbBatchLimit();
            int returnLimit = Integer.parseInt(batchLimit);
            String newStart = Integer.toString(startValue);
            String soapString = getSoapMessage(hpsmSoapModel,newStart, batchLimit, SoapRequestType.CMDB);
            String response = makeSoapCall(soapString, hpsmSoapModel);
            Document doc = responseToDoc(response);
            // pagination markers come back as attributes on the response element
            NodeList responseNodeList = doc.getElementsByTagName("RetrieveDeviceListResponse");
            String more = "";
            String status = "";
            for (int i = 0; i < responseNodeList.getLength(); i++) {
                NamedNodeMap instanceChildNodes = responseNodeList.item(i).getAttributes();
                more = instanceChildNodes.getNamedItem("more").getNodeValue();
                status = instanceChildNodes.getNamedItem("status").getNodeValue();
            }
            configurationItemList.addAll(documentToCmdbDetailsList(doc));
            if(more == null || !more.equals("1") || status == null || !status.equals("SUCCESS")){
                getMore = false;
                LOG.info("No more items retrieved. Item count " + configurationItemList.size());
            }
            startValue += returnLimit;
        }

        return configurationItemList;
    }

    // Converts every <instance> element of a CMDB response document into Cmdb items.
    // Parsing errors are logged and result in a partial (possibly empty) list.
    private List <Cmdb> documentToCmdbDetailsList(Document doc) throws HygieiaException{
        List <Cmdb> returnList = new ArrayList<>();
        try {
            for(Node n: XmlUtil.asList(doc.getElementsByTagName("instance"))){
                Map xmlMap = XmlUtil.getElementKeyValue(n.getChildNodes());
                returnList.addAll(getCmdbItemFromXmlMap(xmlMap));
            }
        }catch(Exception e){
            LOG.error(e);
        }
        return returnList;
    }

    /**
     * Retrieves change orders from HPSM (single request, limited by the
     * configured change-order return limit).
     * @return List<ChangeOrder>
     */
    private List<ChangeOrder> getChangeOrderList() throws HygieiaException{
        List<ChangeOrder> changeOrderList;
        String limit = hpsmSettings.getChangeOrderReturnLimit();

        HpsmSoapModel hpsmSoapModel = new HpsmSoapModel();
        hpsmSoapModel.setRequestTypeName(hpsmSettings.getChangeOrderRequestType());
        hpsmSoapModel.setSoapAction(hpsmSettings.getChangeOrderSoapAction());

        String soapString = getSoapMessage(hpsmSoapModel,"",limit, SoapRequestType.CHANGE_ORDER);
        String response = makeSoapCall(soapString, hpsmSoapModel);
        changeOrderList = responseToChangeOrderList(response);

        return changeOrderList;
    }

    // Parses change orders out of a SOAP response: one ChangeOrder per <instance>,
    // built from the instance's "header" section plus the service field (which
    // lives outside the header). Parse errors are logged, not propagated.
    private List <ChangeOrder> responseToChangeOrderList(String response) {
        List <ChangeOrder> returnList = new ArrayList<>();
        try {
            Document doc = responseToDoc(response);
            for(Node n: XmlUtil.asList(doc.getElementsByTagName("instance"))){
                Map headerMap = XmlUtil.getElementKeyValueByTag(n.getChildNodes(), "header");
                Map instanceMap = XmlUtil.getElementKeyValue(n.getChildNodes());
                if(instanceMap.containsKey(HpsmCollectorConstants.CHANGE_SERVICE)){
                    headerMap.put(HpsmCollectorConstants.CHANGE_SERVICE,instanceMap.get(HpsmCollectorConstants.CHANGE_SERVICE));
                }
                if(headerMap != null && !headerMap.isEmpty()){
                    returnList.addAll(getChangeFromXmlMap(headerMap));
                }
            }
        }catch(Exception e){
            LOG.error(e);
        }
        return returnList;
    }

    /**
     * Retrieves incidents from HPSM (single request, limited by the configured
     * incident return limit).
     * @return List<Incident>
     */
    private List<Incident> getIncidentList() throws
HygieiaException{
        List<Incident> incidentList;
        String limit = hpsmSettings.getIncidentReturnLimit();

        HpsmSoapModel hpsmSoapModel = new HpsmSoapModel();
        hpsmSoapModel.setRequestTypeName(hpsmSettings.getIncidentRequestType());
        hpsmSoapModel.setSoapAction(hpsmSettings.getIncidentSoapAction());

        String soapString = getSoapMessage(hpsmSoapModel, "", limit, SoapRequestType.INCIDENT );
        String response = makeSoapCall(soapString, hpsmSoapModel);
        incidentList = responseToIncidentList(response);

        return incidentList;
    }

    /**
     * Returns the type of the configuration item ("app", "component" or
     * "environment"), classifying by subtype only, type only, or both,
     * depending on which of the app type/subtype settings are non-empty.
     * @param cmdb configuration item to classify
     * @return the logical item type, or null if it matches none of the configured types
     */
    private String getItemType(Cmdb cmdb) {
        String itemType = null;
        String subType = cmdb.getConfigurationItemSubType();
        String type = cmdb.getConfigurationItemType();
        String hpsmSettingsSubType = hpsmSettings.getAppSubType();
        String hpsmSettingsType = hpsmSettings.getAppType();

        // NOTE(review): the app type/subtype settings decide the comparison mode
        // for ALL item kinds (components and environments included) — confirm
        // that is the intended configuration contract.
        boolean typeCheck = false;
        boolean subTypeCheck = false;

        if(!"".equals(hpsmSettingsType)){
            typeCheck = true;
        }
        if(!"".equals(hpsmSettingsSubType)){
            subTypeCheck = true;
        }

        if(!typeCheck && subTypeCheck){
            // classify by subtype alone
            if(subType != null && subType.equals(hpsmSettings.getAppSubType())){
                itemType = APP_TYPE;
            } else if(subType != null && subType.equals(hpsmSettings.getCompSubType())){
                itemType = COMPONENT_TYPE;
            } else if(subType != null && subType.equals(hpsmSettings.getEnvSubType())) {
                itemType = ENVIRONMENT_TYPE;
            }
        }else if(typeCheck && !subTypeCheck){
            // classify by type alone
            if(type != null && type.equals(hpsmSettings.getAppType())){
                itemType = APP_TYPE;
            } else if(type != null && type.equals(hpsmSettings.getCompType())){
                itemType = COMPONENT_TYPE;
            } else if(type != null && type.equals(hpsmSettings.getEnvType())) {
                itemType = ENVIRONMENT_TYPE;
            }
        }else{
            // both configured: type AND subtype must match
            if(subType != null && subType.equals(hpsmSettings.getAppSubType())
                    && type != null && type.equals(hpsmSettings.getAppType())){
                itemType = APP_TYPE;
            } else if(subType != null && subType.equals(hpsmSettings.getCompSubType())
                    && type != null && type.equals(hpsmSettings.getCompType())){
                itemType = COMPONENT_TYPE;
            } else if(subType != null && subType.equals(hpsmSettings.getEnvSubType())
                    && type != null && type.equals(hpsmSettings.getEnvType())){
                itemType = ENVIRONMENT_TYPE;
            }
        }
        return itemType;
    }

    /**
     * Builds the SOAP request envelope for the given request type, using the
     * HPSM "SM/7" namespaces. Sets count/start attributes for paging when
     * provided, and delegates the query "keys" section to the per-type handler.
     * Errors while building are logged and an empty string is returned.
     */
    private String getSoapMessage(HpsmSoapModel hpsmSoapModel, String start, String limit, SoapRequestType type){
        String strMsg = "";
        SOAPMessage soapMsg;
        String requestTypeName = hpsmSoapModel.getRequestTypeName();

        try {
            MessageFactory factory = MessageFactory.newInstance();
            soapMsg = factory.createMessage();
            SOAPPart part = soapMsg.getSOAPPart();
            SOAPEnvelope envelope = part.getEnvelope();
            envelope.addNamespaceDeclaration("ns", "http://schemas.hp.com/SM/7");
            envelope.addNamespaceDeclaration("com", "http://schemas.hp.com/SM/7/Common");
            envelope.addNamespaceDeclaration("xm", "http://www.w3.org/2005/05/xmlmime");
            SOAPBody body = envelope.getBody();

            SOAPBodyElement requestType = body.addBodyElement(envelope.createName(requestTypeName,"ns", ""));

            // optional paging attributes
            if(limit != null && !limit.isEmpty()) {
                QName name1 = new QName("count");
                requestType.addAttribute(name1, limit);
            }
            if(start != null && !start.isEmpty()) {
                QName qNameStart = new QName("start");
                requestType.addAttribute(qNameStart, start);
            }

            QName qNameIgnoreEmptyValues = new QName("ignoreEmptyElements");
            requestType.addAttribute(qNameIgnoreEmptyValues, "true");

            SOAPBodyElement modelTag = body.addBodyElement(envelope.createName("model","ns", ""));
            SOAPBodyElement keysTag = body.addBodyElement(envelope.createName("keys","ns", ""));
            // creates instance tag
            body.addBodyElement(envelope.createName("instance", "ns", ""));

            if(type.equals(SoapRequestType.CHANGE_ORDER)){
                handleChangeSoapMessage(keysTag);
            }else if(type.equals(SoapRequestType.INCIDENT)){
                handleIncidentSoapMessage(keysTag);
            }else{
                handleCmdbSoapMessage(hpsmSoapModel, envelope, keysTag);
            }

            modelTag.addChildElement(keysTag);
            requestType.addChildElement(modelTag);

            ByteArrayOutputStream out = new ByteArrayOutputStream();
soapMsg.writeTo(out);
            strMsg = new String(out.toByteArray());
        } catch (SOAPException e) {
            LOG.error("SOAPException: " + e);
        } catch (UnsupportedEncodingException e) {
            LOG.error("UnsupportedEncodingException: " + e);
        } catch (IOException e) {
            LOG.error("IOException: " + e);
        }
        return strMsg;
    }

    /**
     * Populates the "keys" section for a CMDB query with the type, subtype,
     * item name and status filters from the model — each only when non-empty.
     */
    private void handleCmdbSoapMessage(HpsmSoapModel hpsmSoapModel, SOAPEnvelope envelope, SOAPBodyElement keysTag) throws SOAPException{
        String itemType = hpsmSoapModel.getItemType();
        String itemSubType = hpsmSoapModel.getItemSubType();
        String item = hpsmSoapModel.getItem();
        String status = hpsmSoapModel.getStatus();
        SOAPBody body = envelope.getBody();

        if (itemType != null && !itemType.isEmpty()) {
            SOAPBodyElement configItemType = body.addBodyElement(envelope.createName("ConfigurationItemType", "ns", ""));
            configItemType.addTextNode(itemType);
            keysTag.addChildElement(configItemType);
        }
        if (itemSubType != null && !itemSubType.isEmpty()) {
            SOAPBodyElement configItemSubType = body.addBodyElement(envelope.createName("ConfigurationItemSubType", "ns", ""));
            configItemSubType.addTextNode(itemSubType);
            keysTag.addChildElement(configItemSubType);
        }
        if (item != null && !item.isEmpty()) {
            SOAPBodyElement configItem = body.addBodyElement(envelope.createName("ConfigurationItem", "ns", ""));
            configItem.addTextNode(item);
            keysTag.addChildElement(configItem);
        }
        if (status != null && !status.isEmpty()) {
            SOAPBodyElement configItemStatus = body.addBodyElement(envelope.createName("Status", "ns", ""));
            configItemStatus.addTextNode(status);
            keysTag.addChildElement(configItemStatus);
        }
    }

    /**
     * Adds the time-window "query" attribute for an incident request.
     */
    private void handleIncidentSoapMessage(SOAPBodyElement keysTag) throws SOAPException {
        QName query = new QName("query");

        // Incidents can be queried based on time. This code retrieves the incidents since
        // the last time it was run. If that time cannot be determined, it counts backwards
        // the number of days specified in hpsm.properties and retrieves those incidents.
// get the number of days specified in the hpsm.properties file int incidentDays = hpsmSettings.getIncidentDays(); // Get current date/time DateTime nowDate = new DateTime(); DateTimeFormatter formatter = DateTimeFormat.forPattern(QUERY_DATE_FORMAT); String now = nowDate.toString(formatter); String previous = getPreviousDateValue(nowDate, incidentCount, incidentDays, hpsmSettings.getIncidentOffsetMinutes(), formatter); String format = hpsmSettings.getIncidentQuery(); if(format == null || format.isEmpty()){ format = DEFAULT_INCIDENT_QUERY_FORMAT; } Object[] args = new Object[]{ previous, now }; String queryString = MessageFormat.format(format, args); keysTag.addAttribute(query, queryString); } private void handleChangeSoapMessage(SOAPBodyElement keysTag) throws SOAPException{ QName query = new QName("query"); // Changes can be queried based on time. This code retrieves the changes since // the last time it was run. If that time cannot be determined, it counts backwards // the number of days specified in hpsm.properties and retrieves those changes. 
// get the number of days specified in the hpsm.properties file int changeDays = hpsmSettings.getChangeOrderDays(); // Get current date/time DateTime nowDate = new DateTime(); DateTimeFormatter formatter = DateTimeFormat.forPattern(QUERY_DATE_FORMAT); String now = nowDate.toString(formatter); String previous = getPreviousDateValue(nowDate, changeCount, changeDays, hpsmSettings.getChangeOrderOffsetMinutes(), formatter); String format = hpsmSettings.getChangeOrderQuery(); if(format == null || format.isEmpty()) { format = DEFAULT_CHANGE_QUERY_FORMAT; } Object[] args = new Object[]{ previous, now }; String queryString = MessageFormat.format(format, args); keysTag.addAttribute(query, queryString); } public String getPreviousDateValue(DateTime nowDate, long count, int days, int offsetMinutes, DateTimeFormatter formatter) { // Get the last time this collector was run DateTime previousDate = getDate(new DateTime(this.lastExecuted),0, offsetMinutes); // Convert the above times to milliseconds for comparison long nowMillis = nowDate.getMillis(); long previousMillis = previousDate.getMillis(); // calculate the difference in days between the two dates by dividing the difference by the number of milliseconds in a day int diffInDays = (int) (Math.abs((nowMillis - previousMillis)) / MILLISECONDS_IN_DAY); // IF there are no changes in the collection, or the collection does not exist // OR if the times are reversed // OR the number of days since collector last ran is greater than the requested number of days // THEN the last time the collector ran is irrelevant so use the number of days in hpsm.properties if((count < 1) || (previousMillis > nowMillis) || (diffInDays > days)) { previousDate = nowDate.minusDays(days); } return previousDate.toString(formatter); } private List<Cmdb> getCmdbItemFromXmlMap(Map map) { if(map == null || map.isEmpty()) return new ArrayList<>(); if(getStringValueFromMap(map,HpsmCollectorConstants.CONFIGURATION_ITEM).isEmpty()) return new ArrayList<>(); Cmdb 
cmdb = new Cmdb(); cmdb.setConfigurationItem(getStringValueFromMap(map,HpsmCollectorConstants.CONFIGURATION_ITEM)); cmdb.setConfigurationItemSubType(getStringValueFromMap(map,HpsmCollectorConstants.CONFIGURATION_ITEM_SUBTYPE)); cmdb.setConfigurationItemType(getStringValueFromMap(map,HpsmCollectorConstants.CONFIGURATION_ITEM_TYPE)); cmdb.setCommonName(getStringValueFromMap(map,HpsmCollectorConstants.COMMON_NAME)); cmdb.setAssignmentGroup(getStringValueFromMap(map,HpsmCollectorConstants.ASSIGNMENT_GROUP)); cmdb.setOwnerDept(getStringValueFromMap(map,HpsmCollectorConstants.OWNER_DEPT)); cmdb.setAppServiceOwner(getStringValueFromMap(map,HpsmCollectorConstants.APP_SERVICE_OWNER)); cmdb.setBusinessOwner(getStringValueFromMap(map,HpsmCollectorConstants.BUSINESS_OWNER)); cmdb.setSupportOwner(getStringValueFromMap(map,HpsmCollectorConstants.SUPPORT_OWNER)); cmdb.setDevelopmentOwner(getStringValueFromMap(map,HpsmCollectorConstants.DEVELOPMENT_OWNER)); cmdb.setItemType(getItemType(cmdb)); cmdb.setValidConfigItem(true); cmdb.setTimestamp(System.currentTimeMillis()); List<Cmdb> list = new ArrayList<>(); list.add(cmdb); return list; } private List<ChangeOrder> getChangeFromXmlMap(Map map) { if(map == null || map.isEmpty()) return new ArrayList<>(); if(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_ID).isEmpty()) return new ArrayList<>(); ChangeOrder change = new ChangeOrder(); change.setChangeID(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_ID)); change.setCategory(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_CATEGORY)); change.setStatus(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_STATUS)); change.setApprovalStatus(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_APPROVAL_STATUS)); change.setInitiatedBy(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_INITIATED_BY)); change.setAssignedTo(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_ASSIGNED_TO)); 
change.setAssignmentGroup(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_ASSIGNMENT_GROUP)); change.setPlannedStart(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_PLANNED_START)); change.setPlannedEnd(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_PLANNED_END)); change.setReason(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_REASON)); change.setPhase(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_PHASE)); change.setRiskAssessment(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_RISK_ASSESSMENT)); change.setDateEntered(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_DATE_ENTERED)); change.setOpen(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_OPEN)); change.setTitle(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_TITLE)); change.setSubcategory(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_SUBCATEGORY)); change.setChangeModel(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_MODEL)); change.setService(getStringValueFromMap(map,HpsmCollectorConstants.CHANGE_SERVICE)); List<ChangeOrder> list = new ArrayList<>(); list.add(change); return list; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.network.util;

import java.util.Locale;
import java.util.Properties;

import com.google.common.primitives.Ints;

/**
 * A central location that tracks all the settings we expose to users.
 */
public class TransportConf {

  private final String SPARK_NETWORK_IO_MODE_KEY;
  private final String SPARK_NETWORK_IO_PREFERDIRECTBUFS_KEY;
  private final String SPARK_NETWORK_IO_CONNECTIONTIMEOUT_KEY;
  private final String SPARK_NETWORK_IO_BACKLOG_KEY;
  private final String SPARK_NETWORK_IO_NUMCONNECTIONSPERPEER_KEY;
  private final String SPARK_NETWORK_IO_SERVERTHREADS_KEY;
  private final String SPARK_NETWORK_IO_CLIENTTHREADS_KEY;
  private final String SPARK_NETWORK_IO_RECEIVEBUFFER_KEY;
  private final String SPARK_NETWORK_IO_SENDBUFFER_KEY;
  private final String SPARK_NETWORK_SASL_TIMEOUT_KEY;
  private final String SPARK_NETWORK_IO_MAXRETRIES_KEY;
  private final String SPARK_NETWORK_IO_RETRYWAIT_KEY;
  private final String SPARK_NETWORK_IO_LAZYFD_KEY;
  private final String SPARK_NETWORK_VERBOSE_METRICS;

  /** Underlying configuration source all getters delegate to. */
  private final ConfigProvider conf;

  /** Module name (e.g. "shuffle") used as the middle segment of every config key. */
  private final String module;

  /**
   * Creates a conf scoped to one module; keys take the form
   * {@code spark.<module>.<suffix>}.
   *
   * @param module the module name used to build config keys
   * @param conf   the configuration source
   */
  public TransportConf(String module, ConfigProvider conf) {
    this.module = module;
    this.conf = conf;
    SPARK_NETWORK_IO_MODE_KEY = getConfKey("io.mode");
    SPARK_NETWORK_IO_PREFERDIRECTBUFS_KEY = getConfKey("io.preferDirectBufs");
    SPARK_NETWORK_IO_CONNECTIONTIMEOUT_KEY = getConfKey("io.connectionTimeout");
    SPARK_NETWORK_IO_BACKLOG_KEY = getConfKey("io.backLog");
    SPARK_NETWORK_IO_NUMCONNECTIONSPERPEER_KEY = getConfKey("io.numConnectionsPerPeer");
    SPARK_NETWORK_IO_SERVERTHREADS_KEY = getConfKey("io.serverThreads");
    SPARK_NETWORK_IO_CLIENTTHREADS_KEY = getConfKey("io.clientThreads");
    SPARK_NETWORK_IO_RECEIVEBUFFER_KEY = getConfKey("io.receiveBuffer");
    SPARK_NETWORK_IO_SENDBUFFER_KEY = getConfKey("io.sendBuffer");
    SPARK_NETWORK_SASL_TIMEOUT_KEY = getConfKey("sasl.timeout");
    SPARK_NETWORK_IO_MAXRETRIES_KEY = getConfKey("io.maxRetries");
    SPARK_NETWORK_IO_RETRYWAIT_KEY = getConfKey("io.retryWait");
    SPARK_NETWORK_IO_LAZYFD_KEY = getConfKey("io.lazyFD");
    SPARK_NETWORK_VERBOSE_METRICS = getConfKey("io.enableVerboseMetrics");
  }

  /** Returns the int value for {@code name}, or {@code defaultValue} if unset. */
  public int getInt(String name, int defaultValue) {
    return conf.getInt(name, defaultValue);
  }

  /** Returns the string value for {@code name}, or {@code defaultValue} if unset. */
  public String get(String name, String defaultValue) {
    return conf.get(name, defaultValue);
  }

  /** Builds a module-scoped config key: {@code spark.<module>.<suffix>}. */
  private String getConfKey(String suffix) {
    return "spark." + module + "." + suffix;
  }

  /** Returns the module name this conf is scoped to. */
  public String getModuleName() {
    return module;
  }

  /** IO mode: nio or epoll */
  public String ioMode() {
    return conf.get(SPARK_NETWORK_IO_MODE_KEY, "NIO").toUpperCase(Locale.ROOT);
  }

  /** If true, we will prefer allocating off-heap byte buffers within Netty. */
  public boolean preferDirectBufs() {
    return conf.getBoolean(SPARK_NETWORK_IO_PREFERDIRECTBUFS_KEY, true);
  }

  /** Connect timeout in milliseconds. Default 120 secs. */
  public int connectionTimeoutMs() {
    // The module-specific timeout falls back to the global spark.network.timeout.
    long defaultNetworkTimeoutS = JavaUtils.timeStringAsSec(
      conf.get("spark.network.timeout", "120s"));
    long defaultTimeoutMs = JavaUtils.timeStringAsSec(
      conf.get(SPARK_NETWORK_IO_CONNECTIONTIMEOUT_KEY, defaultNetworkTimeoutS + "s")) * 1000;
    return (int) defaultTimeoutMs;
  }

  /** Number of concurrent connections between two nodes for fetching data. */
  public int numConnectionsPerPeer() {
    return conf.getInt(SPARK_NETWORK_IO_NUMCONNECTIONSPERPEER_KEY, 1);
  }

  /** Requested maximum length of the queue of incoming connections. Default -1 for no backlog. */
  public int backLog() { return conf.getInt(SPARK_NETWORK_IO_BACKLOG_KEY, -1); }

  /** Number of threads used in the server thread pool. Default to 0, which is 2x#cores. */
  public int serverThreads() { return conf.getInt(SPARK_NETWORK_IO_SERVERTHREADS_KEY, 0); }

  /** Number of threads used in the client thread pool. Default to 0, which is 2x#cores. */
  public int clientThreads() { return conf.getInt(SPARK_NETWORK_IO_CLIENTTHREADS_KEY, 0); }

  /**
   * Receive buffer size (SO_RCVBUF).
   * Note: the optimal size for receive buffer and send buffer should be
   *  latency * network_bandwidth.
   * Assuming latency = 1ms, network_bandwidth = 10Gbps
   *  buffer size should be ~ 1.25MB
   */
  public int receiveBuf() { return conf.getInt(SPARK_NETWORK_IO_RECEIVEBUFFER_KEY, -1); }

  /** Send buffer size (SO_SNDBUF). */
  public int sendBuf() { return conf.getInt(SPARK_NETWORK_IO_SENDBUFFER_KEY, -1); }

  /** Timeout for a single round trip of auth message exchange, in milliseconds. */
  public int authRTTimeoutMs() {
    return (int) JavaUtils.timeStringAsSec(conf.get("spark.network.auth.rpcTimeout",
      conf.get(SPARK_NETWORK_SASL_TIMEOUT_KEY, "30s"))) * 1000;
  }

  /**
   * Max number of times we will try IO exceptions (such as connection timeouts) per request.
   * If set to 0, we will not do any retries.
   */
  public int maxIORetries() { return conf.getInt(SPARK_NETWORK_IO_MAXRETRIES_KEY, 3); }

  /**
   * Time (in milliseconds) that we will wait in order to perform a retry after an IOException.
   * Only relevant if maxIORetries &gt; 0.
   */
  public int ioRetryWaitTimeMs() {
    return (int) JavaUtils.timeStringAsSec(conf.get(SPARK_NETWORK_IO_RETRYWAIT_KEY, "5s")) * 1000;
  }

  /**
   * Minimum size of a block that we should start using memory map rather than reading in through
   * normal IO operations. This prevents Spark from memory mapping very small blocks. In general,
   * memory mapping has high overhead for blocks close to or below the page size of the OS.
   */
  public int memoryMapBytes() {
    return Ints.checkedCast(JavaUtils.byteStringAsBytes(
      conf.get("spark.storage.memoryMapThreshold", "2m")));
  }

  /**
   * Whether to initialize FileDescriptor lazily or not. If true, file descriptors are
   * created only when data is going to be transferred. This can reduce the number of open files.
   */
  public boolean lazyFileDescriptor() {
    return conf.getBoolean(SPARK_NETWORK_IO_LAZYFD_KEY, true);
  }

  /**
   * Whether to track Netty memory detailed metrics. If true, the detailed metrics of Netty
   * PoolByteBufAllocator will be gotten, otherwise only general memory usage will be tracked.
   */
  public boolean verboseMetrics() {
    return conf.getBoolean(SPARK_NETWORK_VERBOSE_METRICS, false);
  }

  /** Maximum number of retries when binding to a port before giving up. */
  public int portMaxRetries() {
    return conf.getInt("spark.port.maxRetries", 16);
  }

  /** Enables strong encryption. Also enables the new auth protocol, used to negotiate keys. */
  public boolean encryptionEnabled() {
    return conf.getBoolean("spark.network.crypto.enabled", false);
  }

  /** The cipher transformation to use for encrypting session data. */
  public String cipherTransformation() {
    return conf.get("spark.network.crypto.cipher", "AES/CTR/NoPadding");
  }

  /**
   * The key generation algorithm. This should be an algorithm that accepts a "PBEKeySpec"
   * as input. The default value (PBKDF2WithHmacSHA1) is available in Java 7.
   */
  public String keyFactoryAlgorithm() {
    return conf.get("spark.network.crypto.keyFactoryAlgorithm", "PBKDF2WithHmacSHA1");
  }

  /**
   * How many iterations to run when generating keys.
   *
   * See some discussion about this at: http://security.stackexchange.com/q/3959
   * The default value was picked for speed, since it assumes that the secret has good entropy
   * (128 bits by default), which is not generally the case with user passwords.
   */
  public int keyFactoryIterations() {
    // FIX: the key was previously misspelled "spark.networy.crypto.keyFactoryIterations",
    // which caused user-provided values under the documented key to be ignored.
    return conf.getInt("spark.network.crypto.keyFactoryIterations", 1024);
  }

  /** Encryption key length, in bits. */
  public int encryptionKeyLength() {
    return conf.getInt("spark.network.crypto.keyLength", 128);
  }

  /** Initial vector length, in bytes. */
  public int ivLength() {
    return conf.getInt("spark.network.crypto.ivLength", 16);
  }

  /**
   * The algorithm for generated secret keys. Nobody should really need to change this,
   * but configurable just in case.
   */
  public String keyAlgorithm() {
    return conf.get("spark.network.crypto.keyAlgorithm", "AES");
  }

  /**
   * Whether to fall back to SASL if the new auth protocol fails. Enabled by default for
   * backwards compatibility.
   */
  public boolean saslFallback() {
    return conf.getBoolean("spark.network.crypto.saslFallback", true);
  }

  /** Whether to enable SASL-based encryption when authenticating using SASL. */
  public boolean saslEncryption() {
    return conf.getBoolean("spark.authenticate.enableSaslEncryption", false);
  }

  /** Maximum number of bytes to be encrypted at a time when SASL encryption is used. */
  public int maxSaslEncryptedBlockSize() {
    return Ints.checkedCast(JavaUtils.byteStringAsBytes(
      conf.get("spark.network.sasl.maxEncryptedBlockSize", "64k")));
  }

  /** Whether the server should enforce encryption on SASL-authenticated connections. */
  public boolean saslServerAlwaysEncrypt() {
    return conf.getBoolean("spark.network.sasl.serverAlwaysEncrypt", false);
  }

  /** The commons-crypto configuration for the module. */
  public Properties cryptoConf() {
    return CryptoUtils.toCryptoConf("spark.network.crypto.config.", conf.getAll());
  }

  /**
   * The max number of chunks allowed to be transferred at the same time on shuffle service.
   * Note that new incoming connections will be closed when the max number is hit. The client will
   * retry according to the shuffle retry configs (see `spark.shuffle.io.maxRetries` and
   * `spark.shuffle.io.retryWait`), if those limits are reached the task will fail with fetch
   * failure.
   */
  public long maxChunksBeingTransferred() {
    return conf.getLong("spark.shuffle.maxChunksBeingTransferred", Long.MAX_VALUE);
  }
}
/* * Autopsy Forensic Browser * * Copyright 2014-2019 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.timeline; import com.google.common.cache.CacheBuilder; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.eventbus.EventBus; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import javafx.beans.InvalidationListener; import javafx.beans.property.ReadOnlyObjectProperty; import javafx.beans.property.ReadOnlyObjectWrapper; import javafx.collections.FXCollections; import javafx.collections.ObservableMap; import static org.apache.commons.collections4.CollectionUtils.emptyIfNull; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo; import 
org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent; import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent; import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.SqlFilterState; import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; import org.sleuthkit.autopsy.timeline.utils.FilterUtils; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TimelineEvent; import org.sleuthkit.datamodel.TimelineEventType; import org.sleuthkit.datamodel.TimelineFilter; import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter; import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter; import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter; import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter; import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter; import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter; import org.sleuthkit.datamodel.TimelineFilter.RootFilter; import org.sleuthkit.datamodel.TimelineFilter.TagsFilter; import 
org.sleuthkit.datamodel.TimelineFilter.TextFilter; import org.sleuthkit.datamodel.TimelineLevelOfDetail;

/**
 * In the timeline implementation of the MVC pattern, this class acts as the
 * model. The views are the event counts view, the event details view and the
 * events list view.
 *
 * Concurrency Policy: TimelineManager is internally synchronized, so methods
 * that only access the TimelineManager atomically do not need further
 * synchronization. All other member state variables should only be accessed
 * with intrinsic lock of the containing FilteredEventsModel held.
 *
 */
public final class EventsModel {

    private static final Logger logger = Logger.getLogger(EventsModel.class.getName());
    private final EventBus eventbus = new EventBus("EventsModel_EventBus"); //NON-NLS
    private final Case currentCase;
    private final TimelineManager caseDbEventManager;

    /*
     * User-specified parameters for the model exposed as JFX properties. These
     * parameters apply across all of the views of the model and are set using
     * GUI elements such the event filters panel.
     *
     * IMPORTANT: Note that the parameters are exposed both as a set and
     * individually.
     */
    private final ReadOnlyObjectWrapper<EventsModelParams> modelParamsProperty = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<RootFilterState> filterStateProperty = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<Interval> timeRangeProperty = new ReadOnlyObjectWrapper<>();
    private final ReadOnlyObjectWrapper<TimelineEventType.HierarchyLevel> eventTypesHierarchyLevelProperty = new ReadOnlyObjectWrapper<>(TimelineEventType.HierarchyLevel.CATEGORY);
    private final ReadOnlyObjectWrapper<TimelineLevelOfDetail> timelineLODProperty = new ReadOnlyObjectWrapper<>(TimelineLevelOfDetail.LOW);

    /*
     * Caches of model data from the case database.
     */
    private final ObservableMap<Long, String> datasourceIDsToNamesMap = FXCollections.observableHashMap();
    private final LoadingCache<Object, Long> maxEventTimeCache;
    private final LoadingCache<Object, Long> minEventTimeCache;
    private final LoadingCache<Long, TimelineEvent> idsToEventsCache;
    private final LoadingCache<EventsModelParams, Map<TimelineEventType, Long>> eventCountsCache;

    /**
     * Makes a new data source filter from a given entry in the cache of data
     * source object IDs to data source names.
     *
     * @param dataSourceEntry The cache entry.
     *
     * @return A new DataSourceFilter.
     */
    private static DataSourceFilter newDataSourceFilter(Map.Entry<Long, String> dataSourceEntry) {
        return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
    }

    /**
     * Constructs the model in the timeline implementation of the MVC pattern.
     *
     * @param currentCase The current case.
     * @param modelParams The initial state of the model parameters.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public EventsModel(Case currentCase, ReadOnlyObjectProperty<EventsModelParams> modelParams) throws TskCoreException {
        this.currentCase = currentCase;
        this.caseDbEventManager = currentCase.getSleuthkitCase().getTimelineManager();

        /*
         * Set up the caches of model data from the case database. Note that the
         * build() method calls specify the methods used to create default cache
         * entries when a call to get() would otherwise return a cache miss.
         */
        populateDataSourcesCache();
        // Events and per-type counts are bounded, time-expiring caches; the
        // min/max event time caches are single-value and unbounded.
        idsToEventsCache = CacheBuilder.newBuilder()
                .maximumSize(5000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(caseDbEventManager::getEventById));
        eventCountsCache = CacheBuilder.newBuilder()
                .maximumSize(1000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(this::countEventsByType));
        maxEventTimeCache = CacheBuilder.newBuilder()
                .build(new CacheLoaderImpl<>(ignored -> caseDbEventManager.getMaxEventTime()));
        minEventTimeCache = CacheBuilder.newBuilder()
                .build(new CacheLoaderImpl<>(ignored -> caseDbEventManager.getMinEventTime()));

        /*
         * Add a listener to the data sources cache that adds a data source
         * filter to the event filter state model parameter when a data source
         * is added to the cache.
         */
        InvalidationListener dataSourcesMapListener = observable -> {
            RootFilterState rootFilter = filterStateProperty.getReadOnlyProperty().get();
            addDataSourceFilters(rootFilter);
            // Publish a fresh copy so JFX observers see the change.
            filterStateProperty.set(rootFilter.copyOf());
        };
        datasourceIDsToNamesMap.addListener(dataSourcesMapListener);

        /*
         * Initialize the events filter state model parameter with the default
         * events filter.
         */
        filterStateProperty.set(getDefaultEventFilterState());

        /*
         * Add a listener to the model parameters property that updates the
         * properties that expose the individual model parameters when they are
         * changed through the model parameters property.
         */
        modelParamsProperty.addListener(observable -> {
            final EventsModelParams params = modelParamsProperty.get();
            if (params != null) {
                // Update the four individual parameter properties atomically.
                synchronized (EventsModel.this) {
                    eventTypesHierarchyLevelProperty.set(params.getEventTypesHierarchyLevel());
                    filterStateProperty.set(params.getEventFilterState());
                    timeRangeProperty.set(params.getTimeRange());
                    timelineLODProperty.set(params.getTimelineLOD());
                }
            }
        });
        // Bind last, so the listener above is in place before any upstream value arrives.
        modelParamsProperty.bind(modelParams);
    }

    /**
     * Populates the map of data source object IDs to data source names from the
     * data source data in the case database.
     */
    synchronized private void populateDataSourcesCache() throws TskCoreException {
        SleuthkitCase skCase = currentCase.getSleuthkitCase();
        for (DataSource ds : skCase.getDataSources()) {
            // putIfAbsent: never overwrite a name already cached for this ID.
            datasourceIDsToNamesMap.putIfAbsent(ds.getId(), ds.getName());
        }
    }

    /**
     * Adds a data source filter for each data source in the data sources cache
     * to a given root filter state object.
     *
     * @param rootFilterState A root filter state object.
     */
    synchronized void addDataSourceFilters(RootFilterState rootFilterState) {
        datasourceIDsToNamesMap.entrySet().forEach(entry -> rootFilterState.getDataSourcesFilterState().addSubFilterState(new SqlFilterState<>(newDataSourceFilter(entry))));
    }

    /**
     * Gets the count of all events that fit the given model parameters. The
     * counts are organized by event type for the given event types hierarchy
     * level.
     *
     * @param modelParams The model parameters.
     *
     * @return A mapping of event types to event counts at the given event types
     *         hierarchy level.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    private Map<TimelineEventType, Long> countEventsByType(EventsModelParams modelParams) throws TskCoreException {
        if (modelParams.getTimeRange() == null) {
            return Collections.emptyMap();
        } else {
            // The time range is in milliseconds; the events manager works in seconds.
            return caseDbEventManager.countEventsByType(modelParams.getTimeRange().getStartMillis() / 1000,
                    modelParams.getTimeRange().getEndMillis() / 1000,
                    modelParams.getEventFilterState().getActiveFilter(),
                    modelParams.getEventTypesHierarchyLevel());
        }
    }

    /**
     * Gets the case database events manager.
     *
     * @return The case database events manager.
     */
    public TimelineManager getEventManager() {
        return caseDbEventManager;
    }

    /**
     * Gets the case database.
     *
     * @return The case database.
     */
    public SleuthkitCase getSleuthkitCase() {
        return currentCase.getSleuthkitCase();
    }

    /**
     * Gets the model parameters property.
     *
     * @return A read only, observable property for the current model
     *         parameters.
     */
    synchronized public ReadOnlyObjectProperty<EventsModelParams> modelParamsProperty() {
        return modelParamsProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the time range model parameter.
     *
     * @return The time range model parameter property.
     */
    @NbBundle.Messages({
        "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
        "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
    synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
        // Lazily initialize the time range with the full spanning interval.
        if (timeRangeProperty.get() == null) {
            try {
                timeRangeProperty.set(EventsModel.this.getSpanningInterval());
            } catch (TskCoreException timelineCacheException) {
                MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
                        Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
                logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
            }
        }
        return timeRangeProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the timeline level of detail
     * model parameter.
     *
     * @return The timeline level of detail model parameter property.
     */
    synchronized public ReadOnlyObjectProperty<TimelineLevelOfDetail> descriptionLODProperty() {
        return timelineLODProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the event filter model
     * parameter.
     *
     * @return The event filter model parameter property.
     */
    synchronized public ReadOnlyObjectProperty<RootFilterState> eventFilterProperty() {
        return filterStateProperty.getReadOnlyProperty();
    }

    /**
     * Gets a read only, observable property for the event types hierarchy level
     * model parameter.
     *
     * @return The event types hierarchy level model parameter property.
     */
    synchronized public ReadOnlyObjectProperty<TimelineEventType.HierarchyLevel> eventTypesHierarchyLevelProperty() {
        return eventTypesHierarchyLevelProperty.getReadOnlyProperty();
    }

    /**
     * Gets the current model parameters.
     *
     * @return The current model parameters.
     */
    synchronized public EventsModelParams getModelParams() {
        return modelParamsProperty.get();
    }

    /**
     * Gets the time range model parameter.
     *
     * @return The time range model parameter.
     */
    synchronized public Interval getTimeRange() {
        return getModelParams().getTimeRange();
    }

    /**
     * Gets the timeline level of detail model parameter.
     *
     * @return The timeline level of detail model parameter.
     */
    synchronized public TimelineLevelOfDetail getDescriptionLOD() {
        return getModelParams().getTimelineLOD();
    }

    /**
     * Gets the event filter model parameter.
     *
     * @return The event filter model parameter.
     */
    synchronized public RootFilterState getEventFilterState() {
        return getModelParams().getEventFilterState();
    }

    /**
     * Gets the event types hierarchy level model parameter.
     *
     * @return The event types hierarchy level model parameter.
     */
    synchronized public TimelineEventType.HierarchyLevel getEventTypeZoom() {
        return getModelParams().getEventTypesHierarchyLevel();
    }

    /**
     * Gets a new instance of the default event filter state model parameter,
     * with data source filters for every data source currently in the data
     * sources cache.
     *
     * @return An instance of the default filter state model parameter.
     */
    public synchronized RootFilterState getDefaultEventFilterState() {
        /*
         * Construct data source filters for all of the data sources in the data
         * sources cache.
         */
        DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
        datasourceIDsToNamesMap.entrySet().forEach(dataSourceEntry -> dataSourcesFilter.addSubFilter(newDataSourceFilter(dataSourceEntry)));

        /*
         * Make the rest of the event filters and wrap all of the filters with
         * filter state objects for the GUI.
         */
        RootFilterState rootFilterState = new RootFilterState(new RootFilter(
                new HideKnownFilter(),
                new TagsFilter(),
                new HashHitsFilter(),
                new TextFilter(),
                new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE),
                dataSourcesFilter,
                FilterUtils.createDefaultFileTypesFilter(),
                Collections.emptySet()));
        return rootFilterState;
    }

    /**
     * Gets an event given its event ID.
     *
     * @param eventID The event ID.
     *
     * @return The event.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public TimelineEvent getEventById(Long eventID) throws TskCoreException {
        try {
            // Served from the events cache; misses load from the case database.
            return idsToEventsCache.get(eventID);
        } catch (ExecutionException ex) {
            throw new TskCoreException("Error getting cached event from ID", ex);
        }
    }

    /**
     * Gets a set of events given their event IDs.
     *
     * @param eventIDs The event IDs.
     *
     * @return The events.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
        Set<TimelineEvent> events = new HashSet<>();
        for (Long id : eventIDs) {
            events.add(getEventById(id));
        }
        return events;
    }

    /**
     * Gets a list of event IDs for a given time range and a given events
     * filter.
     *
     * @param timeRange   The time range.
     * @param filterState A filter state object for the events filter.
     *
     * @return The events.
     *
     * @throws TskCoreException If there is an error reading model data from the
     *                          case database.
     */
    public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filterState) throws TskCoreException {
        final Interval overlap;
        RootFilter intersection;
        // Snapshot the clamped range and combined filter under the model lock,
        // then query the (internally synchronized) events manager outside it.
        synchronized (this) {
            overlap = EventsModel.this.getSpanningInterval().overlap(timeRange);
            intersection = getEventFilterState().intersect(filterState).getActiveFilter();
        }
        return caseDbEventManager.getEventIDs(overlap, intersection);
    }

    /**
     * Gets a set of event IDs associated with a given file.
     *
     * @param file The file.
* @param includeDerivedArtifacts If true, also gets the event IDs of events * associated with artifacts for which the * file is the source file. * * @return The event IDs. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException { return caseDbEventManager.getEventIDsForContent(file, includeDerivedArtifacts); } /** * Gets a set of event IDs associated with a given artifact. * * @param artifact The artifact. * * @return The event IDs. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException { return caseDbEventManager.getEventIDsForArtifact(artifact); } /** * Gets counts by event type of the events within a given time range. * * @param timeRange The time range. * * @return The event counts by type. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public Map<TimelineEventType, Long> getEventCounts(Interval timeRange) throws TskCoreException { final RootFilterState filter; final TimelineEventType.HierarchyLevel typeZoom; synchronized (this) { filter = getEventFilterState(); typeZoom = getEventTypeZoom(); } try { return eventCountsCache.get(new EventsModelParams(timeRange, typeZoom, filter, null)); } catch (ExecutionException executionException) { throw new TskCoreException("Error getting cached event counts.`1", executionException); } } /** * Gets the spanning interval for the events that fall within the time range * and event filter model parameters, in terms of a given time zone. * * @param timeZone The time zone. * * @return The spanning interval. * * @throws TskCoreException If there is an error reading model data from the * case database. 
*/ public Interval getSpanningInterval(DateTimeZone timeZone) throws TskCoreException { return caseDbEventManager.getSpanningInterval(modelParamsProperty().get().getTimeRange(), getEventFilterState().getActiveFilter(), timeZone); } /** * Gets the spanning interval for all of the events in the case database. * * @return The spanning interval. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public Interval getSpanningInterval() throws TskCoreException { return new Interval(getMinEventTime() * 1000, 1000 + getMaxEventTime() * 1000); } /** * Gets the spanning interval for a collection of events. * * @param eventIDs The event IDs of the events. * * @return The spanning interval. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException { return caseDbEventManager.getSpanningInterval(eventIDs); } /** * Gets the minimum event time in the case database, in seconds since the * UNIX epoch. * * @return The minimum event time. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public Long getMinEventTime() throws TskCoreException { try { return minEventTimeCache.get("min"); // NON-NLS } catch (ExecutionException ex) { throw new TskCoreException("Error getting cached min time.", ex); } } /** * Gets the maximum event time in the case database, in seconds since the * UNIX epoch. * * @return The maximum event time. * * @throws TskCoreException If there is an error reading model data from the * case database. */ public Long getMaxEventTime() throws TskCoreException { try { return maxEventTimeCache.get("max"); // NON-NLS } catch (ExecutionException ex) { throw new TskCoreException("Error getting cached max time.", ex); } } /** * Updates the events model for a content tag added event and publishes a * tag added event via the model's event bus. 
* * @param evt The event. * * @return If a tags added event was published via the model's event bus. * * @throws TskCoreException If there is an error reading model data from the * case database. */ synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException { ContentTag contentTag = evt.getAddedTag(); Content content = contentTag.getContent(); Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForContentTagAdded(content); if (isNotEmpty(updatedEventIDs)) { invalidateCaches(updatedEventIDs); } return postTagsAdded(updatedEventIDs); } /** * Updates the events model for an artifact tag added event and publishes a * tag added event via the model's event bus. * * @param evt The event. * * @return If a tags added event was published via the model's event bus. * * @throws TskCoreException If there is an error reading model data from the * case database. */ synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException { BlackboardArtifactTag artifactTag = evt.getAddedTag(); BlackboardArtifact artifact = artifactTag.getArtifact(); Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForArtifactTagAdded(artifact); if (isNotEmpty(updatedEventIDs)) { invalidateCaches(updatedEventIDs); } return postTagsAdded(updatedEventIDs); } /** * Updates the events model for a content tag deleted event and publishes a * tag deleted event via the model's event bus. * * @param evt The event. * * @return If a tags deleted event was published via the model's event bus. * * @throws TskCoreException If there is an error reading model data from the * case database. 
*/ synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException { DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo(); Content content = currentCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID()); Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForContentTagDeleted(content); if (isNotEmpty(updatedEventIDs)) { invalidateCaches(updatedEventIDs); } return postTagsDeleted(updatedEventIDs); } /** * Updates the events model for a data source added event. * * @throws TskCoreException If there is an error reading model data from the * case database. */ synchronized void handleDataSourceAdded() throws TskCoreException { populateDataSourcesCache(); invalidateCaches(null); } /** * Updates the events model for an artifact tag deleted event and publishes * a tag deleted event via the model's event bus. * * @param evt The event. * * @return If a tags deleted event was published via the model's event bus. * * @throws TskCoreException If there is an error reading model data from the * case database. */ synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException { DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo(); BlackboardArtifact artifact = currentCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID()); Set<Long> updatedEventIDs = caseDbEventManager.updateEventsForArtifactTagDeleted(artifact); if (isNotEmpty(updatedEventIDs)) { invalidateCaches(updatedEventIDs); } return postTagsDeleted(updatedEventIDs); } /** * Post a TagsAddedEvent to all registered subscribers, if the given set of * updated event IDs is not empty. * * @param updatedEventIDs The set of event ids to be included in the * TagsAddedEvent. * * @return True if an event was posted. 
*/ private boolean postTagsAdded(Set<Long> updatedEventIDs) { boolean tagsUpdated = !updatedEventIDs.isEmpty(); if (tagsUpdated) { eventbus.post(new TagsAddedEvent(updatedEventIDs)); } return tagsUpdated; } /** * Post a TagsDeletedEvent to all registered subscribers, if the given set * of updated event IDs is not empty. * * @param updatedEventIDs The set of event ids to be included in the * TagsDeletedEvent. * * @return True if an event was posted. */ private boolean postTagsDeleted(Set<Long> updatedEventIDs) { boolean tagsUpdated = !updatedEventIDs.isEmpty(); if (tagsUpdated) { eventbus.post(new TagsDeletedEvent(updatedEventIDs)); } return tagsUpdated; } /** * Register the given object to receive events. * * @param subscriber The object to register. Must implement public methods * annotated with Subscribe. */ synchronized public void registerForEvents(Object subscriber) { eventbus.register(subscriber); } /** * Un-register the given object, so it no longer receives events. * * @param subscriber The object to un-register. */ synchronized public void unRegisterForEvents(Object subscriber) { eventbus.unregister(subscriber); } /** * Posts a refresh requested event to all registered subscribers. */ public void postRefreshRequest() { eventbus.post(new RefreshRequestedEvent()); } /** * Gets a list of the event types from the case database. * * @return The list of event types. */ public ImmutableList<TimelineEventType> getEventTypes() { return caseDbEventManager.getEventTypes(); } /** * Sets the hash set hits flag for the events associated with the source * files for a collection of hash set hit artifacts. * * @param hashSetHitArtifacts The hash set hit artifacts. * * @return The event IDs of the updated events. * * @throws TskCoreException If there is an error reading model data from or * writing model data to the case database. 
*/ synchronized public Set<Long> updateEventsForHashSetHits(Collection<BlackboardArtifact> hashSetHitArtifacts) throws TskCoreException { Set<Long> updatedEventIDs = new HashSet<>(); for (BlackboardArtifact artifact : hashSetHitArtifacts) { Content content = currentCase.getSleuthkitCase().getContentById(artifact.getObjectID()); updatedEventIDs.addAll(caseDbEventManager.updateEventsForHashSetHit(content)); } if (isNotEmpty(updatedEventIDs)) { invalidateCaches(updatedEventIDs); } return updatedEventIDs; } /** * Invalidates all of the the model caches and publishes a caches * invalidated event. Optionally, a collection of event IDs may be supplied, * in which case only the corresponding entries in the event IDs cache are * invalidated. * * @param updatedEventIDs Either null or a collection of the event IDs. * * @throws TskCoreException */ public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException { minEventTimeCache.invalidateAll(); maxEventTimeCache.invalidateAll(); idsToEventsCache.invalidateAll(emptyIfNull(updatedEventIDs)); eventCountsCache.invalidateAll(); eventbus.post(new CacheInvalidatedEvent()); } /** * Event fired when a cache has been invalidated and the views need to be * refreshed */ public static class CacheInvalidatedEvent { private CacheInvalidatedEvent() { } } }
/**
 *
 * Copyright 2003-2005 Jive Software.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.smackx.jingleold.packet;

import org.jivesoftware.smack.packet.PacketExtension;
import org.jivesoftware.smackx.jingleold.nat.ICECandidate;
import org.jivesoftware.smackx.jingleold.nat.TransportCandidate;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * A jingle transport extension
 *
 * @author Alvaro Saurin <alvaro.saurin@gmail.com>
 */
public class JingleTransport implements PacketExtension {

    // static

    public static final String NODENAME = "transport";

    // non-static

    protected String namespace;

    // All access to this list is guarded by synchronizing on the list itself.
    protected final List<JingleTransportCandidate> candidates = new ArrayList<JingleTransportCandidate>();

    /**
     * Default constructor.
     */
    public JingleTransport() {
        super();
    }

    /**
     * Utility constructor, with a transport candidate element.
     *
     * @param candidate A transport candidate element to add.
     */
    public JingleTransport(final JingleTransportCandidate candidate) {
        super();
        addCandidate(candidate);
    }

    /**
     * Copy constructor.
     *
     * @param tr the other jingle transport.
     */
    public JingleTransport(final JingleTransport tr) {
        if (tr != null) {
            namespace = tr.namespace;
            // Snapshot the other transport's candidates under its lock, so a
            // concurrent addCandidate() on it cannot corrupt the copy. (All
            // other accessors already synchronize on the list.)
            synchronized (tr.candidates) {
                if (tr.candidates.size() > 0) {
                    candidates.addAll(tr.candidates);
                }
            }
        }
    }

    /**
     * Adds a transport candidate.
     *
     * @param candidate the candidate
     */
    public void addCandidate(final JingleTransportCandidate candidate) {
        if (candidate != null) {
            synchronized (candidates) {
                candidates.add(candidate);
            }
        }
    }

    /**
     * Get an iterator for the candidates
     *
     * @return an iterator
     */
    public Iterator<JingleTransportCandidate> getCandidates() {
        return Collections.unmodifiableList(getCandidatesList()).iterator();
    }

    /**
     * Get a copy of the list of candidates.
     *
     * @return The candidates list.
     */
    public List<JingleTransportCandidate> getCandidatesList() {
        ArrayList<JingleTransportCandidate> res = null;
        synchronized (candidates) {
            res = new ArrayList<JingleTransportCandidate>(candidates);
        }
        return res;
    }

    /**
     * Get the number of transport candidates.
     *
     * @return The number of transport candidates contained.
     */
    public int getCandidatesCount() {
        return getCandidatesList().size();
    }

    /**
     * Returns the XML element name of the element.
     *
     * @return the XML element name of the element.
     */
    public String getElementName() {
        return NODENAME;
    }

    /**
     * Set the namespace.
     *
     * @param ns The namespace
     */
    protected void setNamespace(final String ns) {
        namespace = ns;
    }

    /**
     * Get the namespace.
     *
     * @return The namespace
     */
    public String getNamespace() {
        return namespace;
    }

    /**
     * Return the XML representation for this element.
     */
    public String toXML() {
        StringBuilder buf = new StringBuilder();

        buf.append("<").append(getElementName()).append(" xmlns=\"");
        buf.append(getNamespace()).append("\" ");

        synchronized (candidates) {
            if (getCandidatesCount() > 0) {
                buf.append(">");
                Iterator<JingleTransportCandidate> iter = getCandidates();

                while (iter.hasNext()) {
                    JingleTransportCandidate candidate = iter.next();
                    buf.append(candidate.toXML());
                }
                buf.append("</").append(getElementName()).append(">");
            } else {
                buf.append("/>");
            }
        }
        return buf.toString();
    }

    /**
     * Candidate element in the transport. This class acts as a view of the
     * "TransportCandidate" in the Jingle space.
     *
     * @author Alvaro Saurin
     * @see TransportCandidate
     */
    public static abstract class JingleTransportCandidate {

        public static final String NODENAME = "candidate";

        // The transport candidate contained in the element.
        protected TransportCandidate transportCandidate;

        /**
         * Creates a new TransportNegotiator child.
         */
        public JingleTransportCandidate() {
            super();
        }

        /**
         * Creates a new TransportNegotiator child.
         *
         * @param candidate the jmf transport candidate
         */
        public JingleTransportCandidate(final TransportCandidate candidate) {
            super();
            setMediaTransport(candidate);
        }

        /**
         * Returns the XML element name of the element.
         *
         * @return the XML element name of the element.
         */
        public static String getElementName() {
            return NODENAME;
        }

        /**
         * Get the current transportElement candidate.
         *
         * @return the transportElement candidate
         */
        public TransportCandidate getMediaTransport() {
            return transportCandidate;
        }

        /**
         * Set the transportElement candidate.
         *
         * @param cand the transportElement candidate
         */
        public void setMediaTransport(final TransportCandidate cand) {
            if (cand != null) {
                transportCandidate = cand;
            }
        }

        /**
         * Get the list of attributes.
         *
         * @return a string with the list of attributes.
         */
        protected String getChildElements() {
            return null;
        }

        /**
         * Obtain a valid XML representation of a transport candidate
         *
         * @return A string containing the XML dump of the transport candidate.
         */
        public String toXML() {
            StringBuilder buf = new StringBuilder();
            String childElements = getChildElements();

            if (transportCandidate != null && childElements != null) {
                buf.append("<").append(getElementName()).append(" ");
                buf.append(childElements);
                buf.append("/>");
            }

            return buf.toString();
        }
    }

    // Subclasses

    /**
     * RTP-ICE profile
     */
    public static class Ice extends JingleTransport {
        public static final String NAMESPACE = "urn:xmpp:tmp:jingle:transports:ice-udp";

        public Ice() {
            super();
            setNamespace(NAMESPACE);
        }

        /**
         * Add a transport candidate
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#addCandidate(org.jivesoftware.smackx.jingleold.packet.JingleTransport.JingleTransportCandidate)
         */
        public void addCandidate(final JingleTransportCandidate candidate) {
            super.addCandidate(candidate);
        }

        /**
         * Get a copy of the list of candidates. Unlike "raw-udp", an ICE
         * transport may contain any number of candidates, so all of them are
         * returned.
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#getCandidates()
         */
        public List<JingleTransportCandidate> getCandidatesList() {
            return new ArrayList<JingleTransportCandidate>(super.getCandidatesList());
        }

        public static class Candidate extends JingleTransportCandidate {
            /**
             * Default constructor
             */
            public Candidate() {
                super();
            }

            /**
             * Constructor with a transport candidate.
             */
            public Candidate(final TransportCandidate tc) {
                super(tc);
            }

            /**
             * Get the elements of this candidate.
             */
            protected String getChildElements() {
                StringBuilder buf = new StringBuilder();

                if (transportCandidate != null) {// && transportCandidate instanceof ICECandidate) {
                    // NOTE(review): unchecked cast — this throws
                    // ClassCastException if a non-ICE candidate is ever added
                    // to an Ice transport. The instanceof guard was commented
                    // out deliberately upstream; confirm callers only add
                    // ICECandidate instances before restoring it.
                    ICECandidate tci = (ICECandidate) transportCandidate;

                    // We convert the transportElement candidate to XML here...
                    buf.append(" generation=\"").append(tci.getGeneration()).append("\"");
                    buf.append(" ip=\"").append(tci.getIp()).append("\"");
                    buf.append(" port=\"").append(tci.getPort()).append("\"");
                    buf.append(" network=\"").append(tci.getNetwork()).append("\"");
                    buf.append(" username=\"").append(tci.getUsername()).append("\"");
                    buf.append(" password=\"").append(tci.getPassword()).append("\"");
                    buf.append(" preference=\"").append(tci.getPreference()).append("\"");
                    buf.append(" type=\"").append(tci.getType()).append("\"");

                    // Optional elements
                    if (transportCandidate.getName() != null) {
                        buf.append(" name=\"").append(tci.getName()).append("\"");
                    }
                }
                return buf.toString();
            }
        }
    }

    /**
     * Raw UDP profile.
     */
    public static class RawUdp extends JingleTransport {
        public static final String NAMESPACE = "http://www.xmpp.org/extensions/xep-0177.html#ns";

        public RawUdp() {
            super();
            setNamespace(NAMESPACE);
        }

        /**
         * Add a transport candidate. A "raw-udp" transport holds at most one
         * candidate, so any previous candidates are discarded first.
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#addCandidate(org.jivesoftware.smackx.jingleold.packet.JingleTransport.JingleTransportCandidate)
         */
        public void addCandidate(final JingleTransportCandidate candidate) {
            // Clear under the list's lock for consistency with the other
            // accessors (super.addCandidate also synchronizes on it).
            synchronized (candidates) {
                candidates.clear();
            }
            super.addCandidate(candidate);
        }

        /**
         * Get the list of candidates. As a "raw-udp" transport can only contain
         * one candidate, we use the first in the list...
         *
         * @see org.jivesoftware.smackx.jingleold.packet.JingleTransport#getCandidates()
         */
        public List<JingleTransportCandidate> getCandidatesList() {
            List<JingleTransportCandidate> copy = new ArrayList<JingleTransportCandidate>();
            List<JingleTransportCandidate> superCandidatesList = super.getCandidatesList();
            if (superCandidatesList.size() > 0) {
                copy.add(superCandidatesList.get(0));
            }

            return copy;
        }

        /**
         * Raw-udp transport candidate.
         */
        public static class Candidate extends JingleTransportCandidate {
            /**
             * Default constructor
             */
            public Candidate() {
                super();
            }

            /**
             * Constructor with a transport candidate.
             */
            public Candidate(final TransportCandidate tc) {
                super(tc);
            }

            /**
             * Get the elements of this candidate.
             */
            protected String getChildElements() {
                StringBuilder buf = new StringBuilder();

                if (transportCandidate != null && transportCandidate instanceof TransportCandidate.Fixed) {
                    TransportCandidate.Fixed tcf = (TransportCandidate.Fixed) transportCandidate;

                    buf.append(" generation=\"").append(tcf.getGeneration()).append("\"");
                    buf.append(" ip=\"").append(tcf.getIp()).append("\"");
                    buf.append(" port=\"").append(tcf.getPort()).append("\"");

                    // Optional parameters
                    String name = tcf.getName();
                    if (name != null) {
                        buf.append(" name=\"").append(name).append("\"");
                    }
                }
                return buf.toString();
            }
        }
    }
}
package org.apache.maven.plugin.rar;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.maven.archiver.MavenArchiveConfiguration;
import org.apache.maven.archiver.MavenArchiver;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.filter.ScopeArtifactFilter;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.filtering.MavenFilteringException;
import org.apache.maven.shared.filtering.MavenResourcesExecution;
import org.apache.maven.shared.filtering.MavenResourcesFiltering;
import org.codehaus.plexus.archiver.Archiver;
import org.codehaus.plexus.archiver.jar.JarArchiver;
import org.codehaus.plexus.util.FileUtils;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * Builds J2EE Resource Adapter Archive (RAR) files.
 *
 * @author <a href="stephane.nicoll@gmail.com">Stephane Nicoll</a>
 * @version $Id$
 */
@Mojo( name = "rar", threadSafe = true, defaultPhase = LifecyclePhase.PACKAGE,
       requiresDependencyResolution = ResolutionScope.TEST )
public class RarMojo
    extends AbstractMojo
{
    public static final String RA_XML_URI = "META-INF/ra.xml";

    private static final String[] DEFAULT_INCLUDES = { "**/**" };

    /**
     * Single directory for extra files to include in the RAR.
     */
    @Parameter( defaultValue = "${basedir}/src/main/rar", required = true )
    private File rarSourceDirectory;

    /**
     * The location of the ra.xml file to be used within the rar file.
     */
    @Parameter( defaultValue = "${basedir}/src/main/rar/META-INF/ra.xml" )
    private File raXmlFile;

    /**
     * Specify if the generated jar file of this project should be
     * included in the rar file ; default is true.
     */
    @Parameter
    private Boolean includeJar = Boolean.TRUE;

    /**
     * The location of the manifest file to be used within the rar file.
     */
    @Parameter( defaultValue = "${basedir}/src/main/rar/META-INF/MANIFEST.MF" )
    private File manifestFile;

    /**
     * Directory that resources are copied to during the build.
     */
    @Parameter( defaultValue = "${project.build.directory}/${project.build.finalName}", required = true )
    private String workDirectory;

    /**
     * The directory for the generated RAR.
     */
    @Parameter( defaultValue = "${project.build.directory}", required = true )
    private String outputDirectory;

    /**
     * The name of the RAR file to generate.
     */
    @Parameter( alias = "rarName", defaultValue = "${project.build.finalName}", required = true )
    private String finalName;

    /**
     * The maven project.
     */
    @Component
    private MavenProject project;

    /**
     * The Jar archiver.
     */
    @Component( role = Archiver.class, hint = "jar" )
    private JarArchiver jarArchiver;

    /**
     * The archive configuration to use.
     * See <a href="http://maven.apache.org/shared/maven-archiver/index.html">Maven Archiver Reference</a>.
     */
    @Parameter
    private MavenArchiveConfiguration archive = new MavenArchiveConfiguration();

    /**
     * allow filtering of link{rarSourceDirectory}
     *
     * @since 2.3
     */
    @Parameter( property = "rar.filterRarSourceDirectory", defaultValue = "false" )
    private boolean filterRarSourceDirectory;

    /**
     * @since 2.3
     */
    @Component( role = MavenResourcesFiltering.class, hint = "default" )
    protected MavenResourcesFiltering mavenResourcesFiltering;

    /**
     * @since 2.3
     */
    @Parameter( defaultValue = "${session}", required = true, readonly = true )
    protected MavenSession session;

    /**
     * @since 2.3
     */
    @Parameter( property = "encoding", defaultValue = "${project.build.sourceEncoding}" )
    protected String encoding;

    /**
     * Whether to escape backslashes and colons in windows-style paths.
     *
     * @since 2.3
     */
    @Parameter( property = "maven.resources.escapeWindowsPaths", defaultValue = "true" )
    protected boolean escapeWindowsPaths;

    /**
     * Expression preceded with the String won't be interpolated
     * \${foo} will be replaced with ${foo}
     *
     * @since 2.3
     */
    @Parameter( property = "maven.resources.escapeString" )
    protected String escapeString;

    /**
     * Overwrite existing files even if the destination files are newer.
     *
     * @since 2.3
     */
    @Parameter( property = "maven.resources.overwrite", defaultValue = "false" )
    private boolean overwrite;

    /**
     * Copy any empty directories included in the Resources.
     *
     * @since 2.3
     */
    @Parameter( property = "maven.resources.includeEmptyDirs", defaultValue = "false" )
    protected boolean includeEmptyDirs;

    /**
     * stop searching endToken at the end of line
     *
     * @since 2.3
     */
    @Parameter( property = "maven.resources.supportMultiLineFiltering", defaultValue = "false" )
    private boolean supportMultiLineFiltering;

    /**
     * @since 2.3
     */
    @Parameter( defaultValue = "true" )
    protected boolean useDefaultDelimiters;

    /**
     * <p>
     * Set of delimiters for expressions to filter within the resources. These delimiters are specified in the
     * form 'beginToken*endToken'. If no '*' is given, the delimiter is assumed to be the same for start and end.
     * </p><p>
     * So, the default filtering delimiters might be specified as:
     * </p>
     * <pre>
     * &lt;delimiters&gt;
     *   &lt;delimiter&gt;${*}&lt;/delimiter&gt;
     *   &lt;delimiter&gt;@&lt;/delimiter&gt;
     * &lt;/delimiters&gt;
     * </pre>
     * <p>
     * Since the '@' delimiter is the same on both ends, we don't need to specify '@*@' (though we can).
     * </p>
     *
     * @since 2.3
     */
    @Parameter
    protected List<String> delimiters;

    /**
     * The list of extra filter properties files to be used along with System properties,
     * project properties, and filter properties files specified in the POM build/filters section,
     * which should be used for the filtering during the current mojo execution.
     * <br/>
     * Normally, these will be configured from a plugin's execution section, to provide a different
     * set of filters for a particular execution. For instance, starting in Maven 2.2.0, you have the
     * option of configuring executions with the id's <code>default-resources</code> and
     * <code>default-testResources</code> to supply different configurations for the two
     * different types of resources. By supplying <code>extraFilters</code> configurations, you
     * can separate which filters are used for which type of resource.
     *
     * @since 2.3
     */
    @Parameter
    protected List<String> filters;

    /**
     * Additional file extensions to not apply filtering (already defined are : jpg, jpeg, gif, bmp, png)
     *
     * @since 2.3
     */
    @Parameter
    protected List<String> nonFilteredFileExtensions;

    /**
     * extra resource to include in rar archive
     *
     * @since 2.3
     */
    @Parameter
    protected List<RarResource> rarResources;

    /**
     * Whether or not to warn if the <code>ra.xml</code> file is missing. Set to <code>false</code>
     * if you want your RAR built without a <code>ra.xml</code> file.
     * This may be useful if you are building against JCA 1.6 or later.
     *
     * @since 2.3
     */
    @Parameter( property = "warnOnMissingRaXml", defaultValue = "true" )
    protected boolean warnOnMissingRaXml = true;

    // Lazily-created work directory; always access via getBuildDir().
    private File buildDir;

    public void execute()
        throws MojoExecutionException
    {
        getLog().debug( " ======= RarMojo settings =======" );
        getLog().debug( "rarSourceDirectory[" + rarSourceDirectory + "]" );
        getLog().debug( "manifestFile[" + manifestFile + "]" );
        getLog().debug( "raXmlFile[" + raXmlFile + "]" );
        getLog().debug( "workDirectory[" + workDirectory + "]" );
        getLog().debug( "outputDirectory[" + outputDirectory + "]" );
        getLog().debug( "finalName[" + finalName + "]" );

        // Check if jar file is there and if requested, copy it
        try
        {
            if ( includeJar.booleanValue() )
            {
                File generatedJarFile = new File( outputDirectory, finalName + ".jar" );
                if ( generatedJarFile.exists() )
                {
                    getLog().info( "Including generated jar file[" + generatedJarFile.getName() + "]" );
                    FileUtils.copyFileToDirectory( generatedJarFile, getBuildDir() );
                }
            }
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error copying generated Jar file", e );
        }

        // Copy dependencies
        try
        {
            // The scope filter is loop-invariant: create it once, not once per
            // artifact.
            ScopeArtifactFilter filter = new ScopeArtifactFilter( Artifact.SCOPE_RUNTIME );
            Set<Artifact> artifacts = project.getArtifacts();
            for ( Artifact artifact : artifacts )
            {
                if ( !artifact.isOptional() && filter.include( artifact )
                    && artifact.getArtifactHandler().isAddedToClasspath() )
                {
                    getLog().info( "Copying artifact[" + artifact.getGroupId() + ", " + artifact.getId() + ", "
                                       + artifact.getScope() + "]" );
                    FileUtils.copyFileToDirectory( artifact.getFile(), getBuildDir() );
                }
            }
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error copying RAR dependencies", e );
        }

        Resource resource = new Resource();
        resource.setDirectory( rarSourceDirectory.getAbsolutePath() );
        resource.setTargetPath( getBuildDir().getAbsolutePath() );
        resource.setFiltering( filterRarSourceDirectory );

        List<Resource> resources = new ArrayList<Resource>();
        resources.add( resource );

        if ( rarResources != null && !rarResources.isEmpty() )
        {
            resources.addAll( rarResources );
        }

        MavenResourcesExecution mavenResourcesExecution =
            new MavenResourcesExecution( resources, getBuildDir(), project, encoding, filters,
                                         Collections.<String>emptyList(), session );
        mavenResourcesExecution.setEscapeWindowsPaths( escapeWindowsPaths );

        // never include project build filters in this call, since we've already accounted for the POM build filters
        // above, in getCombinedFiltersList().
        mavenResourcesExecution.setInjectProjectBuildFilters( false );

        mavenResourcesExecution.setEscapeString( escapeString );
        mavenResourcesExecution.setOverwrite( overwrite );
        mavenResourcesExecution.setIncludeEmptyDirs( includeEmptyDirs );
        mavenResourcesExecution.setSupportMultiLineFiltering( supportMultiLineFiltering );

        // if these are NOT set, just use the defaults, which are '${*}' and '@'.
        if ( delimiters != null && !delimiters.isEmpty() )
        {
            LinkedHashSet<String> delims = new LinkedHashSet<String>();
            if ( useDefaultDelimiters )
            {
                delims.addAll( mavenResourcesExecution.getDelimiters() );
            }

            for ( String delim : delimiters )
            {
                if ( delim == null )
                {
                    // FIXME: ${filter:*} could also trigger this condition. Need a better long-term solution.
                    delims.add( "${*}" );
                }
                else
                {
                    delims.add( delim );
                }
            }

            mavenResourcesExecution.setDelimiters( delims );
        }

        if ( nonFilteredFileExtensions != null )
        {
            mavenResourcesExecution.setNonFilteredFileExtensions( nonFilteredFileExtensions );
        }

        try
        {
            mavenResourcesFiltering.filterResources( mavenResourcesExecution );
        }
        catch ( MavenFilteringException e )
        {
            throw new MojoExecutionException( "Error copying RAR resources", e );
        }

        // Include custom manifest if necessary
        try
        {
            includeCustomRaXmlFile();
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error copying ra.xml file", e );
        }

        // Check if connector deployment descriptor is there
        File ddFile = new File( getBuildDir(), RA_XML_URI );
        if ( !ddFile.exists() && warnOnMissingRaXml )
        {
            getLog().warn( "Connector deployment descriptor: " + ddFile.getAbsolutePath() + " does not exist." );
        }

        try
        {
            File rarFile = new File( outputDirectory, finalName + ".rar" );
            MavenArchiver archiver = new MavenArchiver();
            archiver.setArchiver( jarArchiver );
            archiver.setOutputFile( rarFile );

            // Include custom manifest if necessary
            includeCustomManifestFile();

            archiver.getArchiver().addDirectory( getBuildDir() );
            archiver.createArchive( session, project, archive );

            project.getArtifact().setFile( rarFile );
        }
        catch ( Exception e )
        {
            throw new MojoExecutionException( "Error assembling RAR", e );
        }
    }

    /**
     * Gets (lazily creating the File object for) the work directory into which
     * the RAR contents are staged.
     *
     * @return the work directory.
     */
    protected File getBuildDir()
    {
        if ( buildDir == null )
        {
            buildDir = new File( workDirectory );
        }
        return buildDir;
    }

    /**
     * Copies the custom manifest file into the staging area's META-INF
     * directory and registers it with the archive configuration, if it exists.
     *
     * @throws IOException if the manifest cannot be copied.
     */
    private void includeCustomManifestFile()
        throws IOException
    {
        File customManifestFile = manifestFile;
        if ( !customManifestFile.exists() )
        {
            getLog().info( "Could not find manifest file: " + manifestFile + " - Generating one" );
        }
        else
        {
            getLog().info( "Including custom manifest file[" + customManifestFile + "]" );
            archive.setManifestFile( customManifestFile );
            File metaInfDir = new File( getBuildDir(), "META-INF" );
            FileUtils.copyFileToDirectory( customManifestFile, metaInfDir );
        }
    }

    /**
     * Copies the configured ra.xml into the staging area's META-INF directory,
     * if one is configured and exists.
     *
     * @throws IOException if the ra.xml cannot be copied.
     */
    private void includeCustomRaXmlFile()
        throws IOException
    {
        if ( raXmlFile == null )
        {
            // No ra.xml configured: nothing to copy. (The original empty guard
            // fell through, which would have NPE'd on raXml.exists() below.)
            return;
        }
        File raXml = raXmlFile;
        if ( raXml.exists() )
        {
            getLog().info( "Using ra.xml " + raXmlFile );
            File metaInfDir = new File( getBuildDir(), "META-INF" );
            FileUtils.copyFileToDirectory( raXml, metaInfDir );
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.service;

import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.gms.*;
import org.apache.cassandra.net.IVerbHandler;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.utils.FBUtilities;

/*
 * The load balancing algorithm here is an implementation of
 * the algorithm as described in the paper "Scalable range query
 * processing for large-scale distributed database applications".
 * This class keeps track of load information across the system.
 * It registers itself with the Gossiper for ApplicationState namely
 * load information i.e number of requests processed w.r.t distinct
 * keys at an Endpoint. Monitor load information for a 5 minute
 * interval and then do load balancing operations if necessary.
 */
public class StorageLoadBalancer implements IEndpointStateChangeSubscriber
{
    // Runnable intended to be scheduled when this node is heavy; the actual
    // relocation logic below is currently disabled (commented out).
    class LoadBalancer implements Runnable
    {
        LoadBalancer()
        {
            /* Copy the entries in loadInfo_ into loadInfo2_ and use it for all calculations */
            loadInfo2_.putAll(loadInfo_);
        }

        /**
         * Obtain a node which is a potential target. Start with
         * the neighbours i.e either successor or predecessor.
         * Send the target a MoveMessage. If the node cannot be
         * relocated on the ring then we pick another candidate for
         * relocation.
         */
        public void run()
        {
            // NOTE(review): the whole relocation procedure is commented out,
            // so scheduling this runnable is currently a no-op.
            /*
            int threshold = (int)(StorageLoadBalancer.TOPHEAVY_RATIO * averageSystemLoad());
            int myLoad = localLoad();
            InetAddress predecessor = StorageService.instance.getPredecessor(StorageService.getLocalStorageEndpoint());
            if (logger_.isDebugEnabled())
                logger_.debug("Trying to relocate the predecessor {}", predecessor);
            boolean value = tryThisNode(myLoad, threshold, predecessor);
            if ( !value )
            {
                loadInfo2_.remove(predecessor);
                InetAddress successor = StorageService.instance.getSuccessor(StorageService.getLocalStorageEndpoint());
                if (logger_.isDebugEnabled())
                    logger_.debug("Trying to relocate the successor {}", successor);
                value = tryThisNode(myLoad, threshold, successor);
                if ( !value )
                {
                    loadInfo2_.remove(successor);
                    while ( !loadInfo2_.isEmpty() )
                    {
                        InetAddress target = findARandomLightNode();
                        if ( target != null )
                        {
                            if (logger_.isDebugEnabled())
                                logger_.debug("Trying to relocate the random node {}", target);
                            value = tryThisNode(myLoad, threshold, target);
                            if ( !value )
                            {
                                loadInfo2_.remove(target);
                            }
                            else
                            {
                                break;
                            }
                        }
                        else
                        {
                            // No light nodes available - this is NOT good.
                            logger_.warn("Not even a single lightly loaded node is available ...");
                            break;
                        }
                    }

                    loadInfo2_.clear();
                    // If we are here and no node was available to
                    // perform load balance with we need to report and bail.
                    if ( !value )
                    {
                        logger_.warn("Load Balancing operations weren't performed for this node");
                    }
                }
            }
            */
        }

        /*
        private boolean tryThisNode(int myLoad, int threshold, InetAddress target)
        {
            boolean value = false;
            LoadInfo li = loadInfo2_.get(target);
            int pLoad = li.count();
            if ( ((myLoad + pLoad) >> 1) <= threshold )
            {
                //calculate the number of keys to be transferred
                int keyCount = ( (myLoad - pLoad) >> 1 );
                if (logger_.isDebugEnabled())
                    logger_.debug("Number of keys we attempt to transfer to " + target + " " + keyCount);
                // Determine the token that the target should join at.
                BigInteger targetToken = BootstrapAndLbHelper.getTokenBasedOnPrimaryCount(keyCount);
                // Send a MoveMessage and see if this node is relocateable
                MoveMessage moveMessage = new MoveMessage(targetToken);
                Message message = new Message(StorageService.getLocalStorageEndpoint(), StorageLoadBalancer.lbStage_, StorageLoadBalancer.moveMessageVerbHandler_, new Object[]{moveMessage});
                if (logger_.isDebugEnabled())
                    logger_.debug("Sending a move message to {}", target);
                IAsyncResult result = MessagingService.getMessagingInstance().sendRR(message, target);
                value = (Boolean)result.get()[0];
                if (logger_.isDebugEnabled())
                    logger_.debug("Response for query to relocate " + target + " is " + value);
            }
            return value;
        }
        */
    }

    // Interval (ms) between load broadcasts via gossip.
    private static final int BROADCAST_INTERVAL = 60 * 1000;

    public static final StorageLoadBalancer instance = new StorageLoadBalancer();

    private static final Logger logger_ = LoggerFactory.getLogger(StorageLoadBalancer.class);
    /* time to delay in minutes the actual load balance procedure if heavily loaded */
    private static final int delay_ = 5;
    /* If a node's load is this factor more than the average, it is considered Heavy */
    private static final double TOPHEAVY_RATIO = 1.5;

    /* this indicates whether this node is already helping someone else */
    private AtomicBoolean isMoveable_ = new AtomicBoolean(false);
    // Latest load value gossiped by each endpoint (including this one).
    private Map<InetAddress, Double> loadInfo_ = new HashMap<InetAddress, Double>();
    /* This map is a clone of the one above and is used for various calculations during LB operation */
    private Map<InetAddress, Double> loadInfo2_ = new HashMap<InetAddress, Double>();

    private StorageLoadBalancer()
    {
        // Subscribe to endpoint state changes so onChange/onJoin fire.
        Gossiper.instance.register(this);
    }

    /**
     * Records the gossiped LOAD value for {@code endpoint}; all other
     * application states are ignored.
     */
    public void onChange(InetAddress endpoint, ApplicationState state, VersionedValue value)
    {
        if (state != ApplicationState.LOAD)
            return;
        loadInfo_.put(endpoint, Double.valueOf(value.value));

        /*
        // clone load information to perform calculations
        loadInfo2_.putAll(loadInfo_);
        // Perform the analysis for load balance operations
        if ( isHeavyNode() )
        {
            if (logger_.isDebugEnabled())
                logger_.debug(StorageService.getLocalStorageEndpoint() + " is a heavy node with load " + localLoad());
            // lb_.schedule( new LoadBalancer(), StorageLoadBalancer.delay_, TimeUnit.MINUTES );
        }
        */
    }

    /** Seeds load info from a newly joined endpoint's gossiped state, if present. */
    public void onJoin(InetAddress endpoint, EndpointState epState)
    {
        VersionedValue localValue = epState.getApplicationState(ApplicationState.LOAD);
        if (localValue != null)
        {
            onChange(endpoint, ApplicationState.LOAD, localValue);
        }
    }

    public void onAlive(InetAddress endpoint, EndpointState state) {}

    public void onDead(InetAddress endpoint, EndpointState state) {}

    public void onRestart(InetAddress endpoint, EndpointState state) { }

    public void onRemove(InetAddress endpoint) {}

    /*
    private boolean isMoveable()
    {
        if ( !isMoveable_.get() )
            return false;
        int myload = localLoad();
        InetAddress successor = StorageService.instance.getSuccessor(StorageService.getLocalStorageEndpoint());
        LoadInfo li = loadInfo2_.get(successor);
        // "load" is NULL means that the successor node has not
        // yet gossiped its load information. We should return
        // false in this case since we want to err on the side
        // of caution.
        if ( li == null )
            return false;
        else
        {
            return ( ( myload + li.count() ) <= StorageLoadBalancer.TOPHEAVY_RATIO*averageSystemLoad() );
        }
    }
    */

    /** Load reported for the local node, or 0 if it has not been recorded yet. */
    private double localLoad()
    {
        Double load = loadInfo2_.get(FBUtilities.getLocalAddress());
        return load == null ? 0 : load;
    }

    /** Mean of all recorded loads; 0 when no load info has been collected. */
    private double averageSystemLoad()
    {
        int nodeCount = loadInfo2_.size();
        Set<InetAddress> nodes = loadInfo2_.keySet();

        double systemLoad = 0;
        for (InetAddress node : nodes)
        {
            systemLoad += loadInfo2_.get(node);
        }
        double averageLoad = (nodeCount > 0) ? (systemLoad / nodeCount) : 0;
        if (logger_.isDebugEnabled())
            logger_.debug("Average system load is {}", averageLoad);
        return averageLoad;
    }

    /** True when the local load exceeds TOPHEAVY_RATIO times the system average. */
    private boolean isHeavyNode()
    {
        return ( localLoad() > ( StorageLoadBalancer.TOPHEAVY_RATIO * averageSystemLoad() ) );
    }

    /**
     * Whether {@code target} could absorb part of this node's load without
     * itself becoming heavy.
     */
    private boolean isMoveable(InetAddress target)
    {
        double threshold = StorageLoadBalancer.TOPHEAVY_RATIO * averageSystemLoad();
        if (isANeighbour(target))
        {
            // If the target is a neighbour then it is
            // moveable if its
            Double load = loadInfo2_.get(target);
            if (load == null)
            {
                return false;
            }
            else
            {
                double myload = localLoad();
                double avgLoad = (load + myload) / 2;
                return avgLoad <= threshold;
            }
        }
        else
        {
            // NOTE(review): unlike the neighbour branch, these lookups are not
            // null-checked; auto-unboxing will throw NullPointerException if
            // the successor or target has not gossiped load yet — confirm
            // callers guarantee presence, otherwise mirror the null check above.
            InetAddress successor = StorageService.instance.getSuccessor(target);
            double sLoad = loadInfo2_.get(successor);
            double targetLoad = loadInfo2_.get(target);
            return (sLoad + targetLoad) <= threshold;
        }
    }

    /** True when {@code neighbour} is the local node's ring predecessor or successor. */
    private boolean isANeighbour(InetAddress neighbour)
    {
        InetAddress predecessor = StorageService.instance.getPredecessor(FBUtilities.getLocalAddress());
        if ( predecessor.equals(neighbour) )
            return true;

        InetAddress successor = StorageService.instance.getSuccessor(FBUtilities.getLocalAddress());
        if ( successor.equals(neighbour) )
            return true;

        return false;
    }

    /*
     * Determine the nodes that are lightly loaded. Choose at
     * random one of the lightly loaded nodes and use them as
     * a potential target for load balance.
     */
    private InetAddress findARandomLightNode()
    {
        List<InetAddress> potentialCandidates = new ArrayList<InetAddress>();
        Set<InetAddress> allTargets = loadInfo2_.keySet();
        double avgLoad = averageSystemLoad();

        for (InetAddress target : allTargets)
        {
            double load = loadInfo2_.get(target);
            if (load < avgLoad)
            {
                potentialCandidates.add(target);
            }
        }

        if (potentialCandidates.size() > 0)
        {
            Random random = new Random();
            int index = random.nextInt(potentialCandidates.size());
            return potentialCandidates.get(index);
        }
        return null;
    }

    /** Snapshot view of the per-endpoint load map (live, not a copy). */
    public Map<InetAddress, Double> getLoadInfo()
    {
        return loadInfo_;
    }

    /** Starts periodically gossiping this node's load to the cluster. */
    public void startBroadcasting()
    {
        // send the first broadcast "right away" (i.e., in 2 gossip heartbeats, when we should have someone to talk to);
        // after that send every BROADCAST_INTERVAL.
        Runnable runnable = new Runnable()
        {
            public void run()
            {
                if (logger_.isDebugEnabled())
                    logger_.debug("Disseminating load info ...");
                Gossiper.instance.addLocalApplicationState(ApplicationState.LOAD,
                                                           StorageService.instance.valueFactory.load(StorageService.instance.getLoad()));
            }
        };
        StorageService.scheduledTasks.scheduleWithFixedDelay(runnable, 2 * Gossiper.intervalInMillis, BROADCAST_INTERVAL, TimeUnit.MILLISECONDS);
    }

    /**
     * Wait for at least BROADCAST_INTERVAL ms, to give all nodes enough time to
     * report in.
     */
    public void waitForLoadInfo()
    {
        int duration = BROADCAST_INTERVAL + StorageService.RING_DELAY;
        try
        {
            logger_.info("Sleeping {} ms to wait for load information...", duration);
            Thread.sleep(duration);
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
    }
}
/*
Copyright (c) 2013, Washington University in St.Louis
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.wustl.xipHost.dicom;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import com.pixelmed.dicom.Attribute;
import com.pixelmed.dicom.AttributeList;
import com.pixelmed.dicom.AttributeTag;
import com.pixelmed.dicom.DicomException;
import com.pixelmed.dicom.TagFromName;

/**
 * <font face="Tahoma" size="2">
 * Parses DICOM file and retrieves commonly used attribues.<br></br>
 * @version January 2008
 * @author Jaroslaw Krych
 * </font>
 */
public class BasicDicomParser2 {
	String patientName;
	String patientID;
	String patientBirthDate;
	String studyDate;
	String studyID;
	String studyDesc;
	String studyInstanceUID;
	String seriesNumber;
	String modality;
	String seriesDesc;
	String seriesInstanceUID;
	String sopClassUID;
	String sopInstanceUID;
	String transferSyntaxUID;
	AttributeList list = new AttributeList();

	/**
	 * Reads the given DICOM file into this parser's attribute list.
	 * The same list is reused across calls, so each parse replaces or
	 * augments previously read attributes.
	 *
	 * @param dicomFile the DICOM file to read
	 * @return the populated attribute list (also retained internally)
	 */
	public AttributeList parse(File dicomFile){
		try {
			list.read(dicomFile.getAbsolutePath() , null, true, true);
			/*Iterator iter = list.getDictionary().getTagIterator();
			while(iter.hasNext()){
				AttributeTag attTag = (AttributeTag)iter.next();
				String strAtt = attTag.toString();
				String attValue = Attribute.getSingleStringValueOrEmptyString(list, attTag);
				System.out.println(strAtt + " " + attValue);
			}*/
		} catch (IOException e) {
			// Previously swallowed silently; surface it like the DicomException
			// branch so read failures are at least visible.
			e.printStackTrace();
		} catch (DicomException e) {
			e.printStackTrace();
		}
		return list;
	}

	/**
	 * Returns the delimited string value of the given tag from the parsed
	 * attribute list, or an empty string when the attribute is absent.
	 */
	private String valueOf(AttributeTag tag){
		Attribute att = list.get(tag);
		return (att != null) ? att.getDelimitedStringValuesOrEmptyString() : "";
	}

	public String getPatientName(){
		patientName = valueOf(TagFromName.PatientName);
		return patientName;
	}

	public String getPatientID(){
		patientID = valueOf(TagFromName.PatientID);
		return patientID;
	}

	public String getPatientBirthDate(){
		patientBirthDate = valueOf(TagFromName.PatientBirthDate);
		// BUG FIX: previously returned patientID (copy-paste error), so callers
		// received the patient ID instead of the birth date.
		return patientBirthDate;
	}

	public String getStudyDate(){
		studyDate = valueOf(TagFromName.StudyDate);
		return studyDate;
	}

	public String getStudyID(){
		studyID = valueOf(TagFromName.StudyID);
		return studyID;
	}

	public String getStudyDescription(){
		studyDesc = valueOf(TagFromName.StudyDescription);
		return studyDesc;
	}

	public String getStudyInstanceUID(){
		studyInstanceUID = valueOf(TagFromName.StudyInstanceUID);
		return studyInstanceUID;
	}

	// NOTE: method name keeps its historical misspelling ("Numer") because
	// external callers depend on it.
	public String getSeriesNumer(){
		seriesNumber = valueOf(TagFromName.SeriesNumber);
		return seriesNumber;
	}

	public String getModality(){
		modality = valueOf(TagFromName.Modality);
		return modality;
	}

	public String getSeriesDescription(){
		seriesDesc = valueOf(TagFromName.SeriesDescription);
		return seriesDesc;
	}

	public String getSeriesInstanceUID(){
		seriesInstanceUID = valueOf(TagFromName.SeriesInstanceUID);
		return seriesInstanceUID;
	}

	public String getSOPClassUID(){
		sopClassUID = valueOf(TagFromName.SOPClassUID);
		return sopClassUID;
	}

	public String getSOPInstanceUID(){
		sopInstanceUID = valueOf(TagFromName.SOPInstanceUID);
		return sopInstanceUID;
	}

	public String getTransferSyntaxUID(){
		transferSyntaxUID = valueOf(TagFromName.TransferSyntaxUID);
		return transferSyntaxUID;
	}

	/**
	 * Builds a 15x2 table of display label / value pairs summarising the
	 * most commonly shown DICOM header attributes for the parsed file.
	 *
	 * @param item location of the DICOM object; rendered as a URL in the
	 *             "File location" row (empty string when malformed)
	 * @return a [15][2] array of {label, value} rows
	 */
	public String[][] getShortDicomHeader(URI item){
		String[][] map = new String[15][2];
		map[0][0] = "Patient name";
		map[1][0] = "Patient ID";
		map[2][0] = "Birth date";
		map[3][0] = "Study date";
		map[4][0] = "Study ID";
		map[5][0] = "Study description";
		map[6][0] = "Series number";
		map[7][0] = "Modality";
		map[8][0] = "Series description";
		map[9][0] = "File location";
		map[10][0] = "StudyInstanceUID";
		map[11][0] = "SeriesInstanceUID";
		map[12][0] = "TransferSyntaxUID";
		map[13][0] = "SOPInstanceUID";
		map[14][0] = "SOPClassUID";
		map[0][1] = getPatientName();
		map[1][1] = getPatientID();
		map[2][1] = getPatientBirthDate();
		map[3][1] = getStudyDate();
		map[4][1] = getStudyID();
		map[5][1] = getStudyDescription();
		map[6][1] = getSeriesNumer();
		map[7][1] = getModality();
		map[8][1] = getSeriesDescription();
		try {
			map[9][1] = item.toURL().toExternalForm();
		} catch (MalformedURLException e) {
			map[9][1] = "";
		}
		map[10][1] = getStudyInstanceUID();
		map[11][1] = getSeriesInstanceUID();
		map[12][1] = getTransferSyntaxUID();
		map[13][1] = getSOPInstanceUID();
		map[14][1] = getSOPClassUID();
		return map;
	}

	public static void main (String [] args){
		BasicDicomParser2 parser = new BasicDicomParser2();
		parser.parse(new File("C:/WUSTL/Tmp/IN000349"));
		System.out.println(parser.getStudyInstanceUID());
		System.out.println(parser.getSeriesInstanceUID());
	}
}
/*
 * Copyright 2001-2005 Stephen Colebourne
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.joda.time;

import java.util.Locale;
import java.util.TimeZone;

import junit.framework.TestCase;
import junit.framework.TestSuite;

import org.joda.time.chrono.BuddhistChronology;
import org.joda.time.chrono.GJChronology;
import org.joda.time.chrono.ISOChronology;
import org.joda.time.convert.ConverterManager;
import org.joda.time.convert.IntervalConverter;

/**
 * This class is a JUnit test for Interval.
 *
 * @author Stephen Colebourne
 */
public class TestMutableInterval_Constructors extends TestCase {
    // Test in 2002/03 as time zones are more well known
    // (before the late 90's they were all over the place)
    private static final DateTimeZone PARIS = DateTimeZone.forID("Europe/Paris");
    private static final DateTimeZone LONDON = DateTimeZone.forID("Europe/London");

    // Whole days elapsed from 1970-01-01 to the start of 2002 and 2003.
    long y2002days = 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365;
    long y2003days = 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365 + 365 + 366 + 365 + 365;

    // 2002-06-09
    private long TEST_TIME_NOW = (y2002days + 31L + 28L + 31L + 30L + 31L + 9L -1L) * DateTimeConstants.MILLIS_PER_DAY;

    // 2002-04-05
    private long TEST_TIME1 = (y2002days + 31L + 28L + 31L + 5L -1L) * DateTimeConstants.MILLIS_PER_DAY + 12L * DateTimeConstants.MILLIS_PER_HOUR + 24L * DateTimeConstants.MILLIS_PER_MINUTE;

    // 2003-05-06
    private long TEST_TIME2 = (y2003days + 31L + 28L + 31L + 30L + 6L -1L) * DateTimeConstants.MILLIS_PER_DAY + 14L * DateTimeConstants.MILLIS_PER_HOUR + 28L * DateTimeConstants.MILLIS_PER_MINUTE;

    // Saved global defaults, restored in tearDown so tests don't leak state.
    private DateTimeZone originalDateTimeZone = null;
    private TimeZone originalTimeZone = null;
    private Locale originalLocale = null;

    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }

    public static TestSuite suite() {
        return new TestSuite(TestMutableInterval_Constructors.class);
    }

    public TestMutableInterval_Constructors(String name) {
        super(name);
    }

    // Pin "now", the default zones and the locale so results are deterministic.
    protected void setUp() throws Exception {
        DateTimeUtils.setCurrentMillisFixed(TEST_TIME_NOW);
        originalDateTimeZone = DateTimeZone.getDefault();
        originalTimeZone = TimeZone.getDefault();
        originalLocale = Locale.getDefault();
        DateTimeZone.setDefault(PARIS);
        TimeZone.setDefault(TimeZone.getTimeZone("Europe/London"));
        Locale.setDefault(Locale.UK);
    }

    protected void tearDown() throws Exception {
        DateTimeUtils.setCurrentMillisSystem();
        DateTimeZone.setDefault(originalDateTimeZone);
        TimeZone.setDefault(originalTimeZone);
        Locale.setDefault(originalLocale);
        originalDateTimeZone = null;
        originalTimeZone = null;
        originalLocale = null;
    }

    //-----------------------------------------------------------------------
    // Sanity check that the hand-computed test instants are the intended dates.
    public void testTest() {
        assertEquals("2002-06-09T00:00:00.000Z", new Instant(TEST_TIME_NOW).toString());
        assertEquals("2002-04-05T12:24:00.000Z", new Instant(TEST_TIME1).toString());
        assertEquals("2003-05-06T14:28:00.000Z", new Instant(TEST_TIME2).toString());
    }

    //-----------------------------------------------------------------------
    public void testParse_noFormatter() throws Throwable {
        DateTime start = new DateTime(2010, 6, 30, 12, 30, ISOChronology.getInstance(PARIS));
        DateTime end = new DateTime(2010, 7, 1, 14, 30, ISOChronology.getInstance(PARIS));
        assertEquals(new MutableInterval(start, end), MutableInterval.parse("2010-06-30T12:30/2010-07-01T14:30"));
        assertEquals(new MutableInterval(start, end), MutableInterval.parse("2010-06-30T12:30/P1DT2H"));
        assertEquals(new MutableInterval(start, end), MutableInterval.parse("P1DT2H/2010-07-01T14:30"));
    }

    //-----------------------------------------------------------------------
    public void testConstructor() throws Throwable {
        MutableInterval test = new MutableInterval();
        assertEquals(0L, test.getStartMillis());
        assertEquals(0L, test.getEndMillis());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_long_long1() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        MutableInterval test = new MutableInterval(dt1.getMillis(), dt2.getMillis());
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
        assertEquals(ISOChronology.getInstance(), test.getChronology());
    }

    public void testConstructor_long_long2() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        MutableInterval test = new MutableInterval(dt1.getMillis(), dt1.getMillis());
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt1.getMillis(), test.getEndMillis());
        assertEquals(ISOChronology.getInstance(), test.getChronology());
    }

    public void testConstructor_long_long3() throws Throwable {
        DateTime dt1 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        DateTime dt2 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        // start after end must be rejected
        try {
            new MutableInterval(dt1.getMillis(), dt2.getMillis());
            fail();
        } catch (IllegalArgumentException ex) {}
    }

    //-----------------------------------------------------------------------
    public void testConstructor_long_long_Chronology1() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        MutableInterval test = new MutableInterval(dt1.getMillis(), dt2.getMillis(), GJChronology.getInstance());
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
        assertEquals(GJChronology.getInstance(), test.getChronology());
    }

    public void testConstructor_long_long_Chronology2() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        // null chronology falls back to ISO in the default zone
        MutableInterval test = new MutableInterval(dt1.getMillis(), dt2.getMillis(), null);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
        assertEquals(ISOChronology.getInstance(), test.getChronology());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_RI_RI1() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        MutableInterval test = new MutableInterval(dt1, dt2);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RI_RI2() throws Throwable {
        Instant dt1 = new Instant(new DateTime(2004, 6, 9, 0, 0, 0, 0));
        Instant dt2 = new Instant(new DateTime(2005, 7, 10, 1, 1, 1, 1));
        MutableInterval test = new MutableInterval(dt1, dt2);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RI_RI3() throws Throwable {
        // both null means "now"/"now" (current millis fixed in setUp)
        MutableInterval test = new MutableInterval((ReadableInstant) null, (ReadableInstant) null);
        assertEquals(TEST_TIME_NOW, test.getStartMillis());
        assertEquals(TEST_TIME_NOW, test.getEndMillis());
    }

    public void testConstructor_RI_RI4() throws Throwable {
        DateTime dt1 = new DateTime(2000, 6, 9, 0, 0, 0, 0);
        MutableInterval test = new MutableInterval(dt1, (ReadableInstant) null);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(TEST_TIME_NOW, test.getEndMillis());
    }

    public void testConstructor_RI_RI5() throws Throwable {
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        MutableInterval test = new MutableInterval((ReadableInstant) null, dt2);
        assertEquals(TEST_TIME_NOW, test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RI_RI6() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        MutableInterval test = new MutableInterval(dt1, dt1);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt1.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RI_RI7() throws Throwable {
        DateTime dt1 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        DateTime dt2 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        try {
            new MutableInterval(dt1, dt2);
            fail();
        } catch (IllegalArgumentException ex) {}
    }

    public void testConstructor_RI_RI8() throws Throwable {
        // chronology is taken from the start instant
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0, GJChronology.getInstance());
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1);
        MutableInterval test = new MutableInterval(dt1, dt2);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
        assertEquals(GJChronology.getInstance(), test.getChronology());
    }

    public void testConstructor_RI_RI9() throws Throwable {
        DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0);
        DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1, GJChronology.getInstance());
        MutableInterval test = new MutableInterval(dt1, dt2);
        assertEquals(dt1.getMillis(), test.getStartMillis());
        assertEquals(dt2.getMillis(), test.getEndMillis());
        assertEquals(ISOChronology.getInstance(), test.getChronology());
    }

    //-----------------------------------------------------------------------
    public void testConstructor_RI_RP1() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        Period dur = new Period(0, 6, 0, 0, 1, 0, 0, 0);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().months().add(result, 6);
        result = ISOChronology.getInstance().hours().add(result, 1);

        MutableInterval test = new MutableInterval(dt, dur);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(result, test.getEndMillis());
    }

    public void testConstructor_RI_RP2() throws Throwable {
        Instant dt = new Instant(new DateTime(TEST_TIME_NOW));
        Period dur = new Period(0, 6, 0, 3, 1, 0, 0, 0);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstanceUTC().months().add(result, 6);
        result = ISOChronology.getInstanceUTC().days().add(result, 3);
        result = ISOChronology.getInstanceUTC().hours().add(result, 1);

        MutableInterval test = new MutableInterval(dt, dur);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(result, test.getEndMillis());
    }

    public void testConstructor_RI_RP3() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW, ISOChronology.getInstanceUTC());
        Period dur = new Period(0, 6, 0, 3, 1, 0, 0, 0, PeriodType.standard());
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstanceUTC().months().add(result, 6);
        result = ISOChronology.getInstanceUTC().days().add(result, 3);
        result = ISOChronology.getInstanceUTC().hours().add(result, 1);

        MutableInterval test = new MutableInterval(dt, dur);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(result, test.getEndMillis());
    }

    public void testConstructor_RI_RP4() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        Period dur = new Period(1 * DateTimeConstants.MILLIS_PER_HOUR + 23L);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().hours().add(result, 1);
        result = ISOChronology.getInstance().millis().add(result, 23);

        MutableInterval test = new MutableInterval(dt, dur);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(result, test.getEndMillis());
    }

    public void testConstructor_RI_RP5() throws Throwable {
        MutableInterval test = new MutableInterval((ReadableInstant) null, (ReadablePeriod) null);
        assertEquals(TEST_TIME_NOW, test.getStartMillis());
        assertEquals(TEST_TIME_NOW, test.getEndMillis());
    }

    public void testConstructor_RI_RP6() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        MutableInterval test = new MutableInterval(dt, (ReadablePeriod) null);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(dt.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RI_RP7() throws Throwable {
        Period dur = new Period(0, 6, 0, 0, 1, 0, 0, 0);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().monthOfYear().add(result, 6);
        result = ISOChronology.getInstance().hourOfDay().add(result, 1);

        MutableInterval test = new MutableInterval((ReadableInstant) null, dur);
        assertEquals(TEST_TIME_NOW, test.getStartMillis());
        assertEquals(result, test.getEndMillis());
    }

    public void testConstructor_RI_RP8() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        Period dur = new Period(0, 0, 0, 0, 0, 0, 0, -1);
        // negative period would put end before start - must be rejected
        try {
            new MutableInterval(dt, dur);
            fail();
        } catch (IllegalArgumentException ex) {}
    }

    //-----------------------------------------------------------------------
    public void testConstructor_RP_RI1() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        Period dur = new Period(0, 6, 0, 0, 1, 0, 0, 0);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().months().add(result, -6);
        result = ISOChronology.getInstance().hours().add(result, -1);

        MutableInterval test = new MutableInterval(dur, dt);
        assertEquals(result, test.getStartMillis());
        assertEquals(dt.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RP_RI2() throws Throwable {
        Instant dt = new Instant(new DateTime(TEST_TIME_NOW));
        Period dur = new Period(0, 6, 0, 3, 1, 0, 0, 0);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstanceUTC().months().add(result, -6);
        result = ISOChronology.getInstanceUTC().days().add(result, -3);
        result = ISOChronology.getInstanceUTC().hours().add(result, -1);

        MutableInterval test = new MutableInterval(dur, dt);
        assertEquals(result, test.getStartMillis());
        assertEquals(dt.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RP_RI3() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW, ISOChronology.getInstanceUTC());
        Period dur = new Period(0, 6, 0, 3, 1, 0, 0, 0, PeriodType.standard());
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstanceUTC().months().add(result, -6);
        result = ISOChronology.getInstanceUTC().days().add(result, -3);
        result = ISOChronology.getInstanceUTC().hours().add(result, -1);

        MutableInterval test = new MutableInterval(dur, dt);
        assertEquals(result, test.getStartMillis());
        assertEquals(dt.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RP_RI4() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        Period dur = new Period(1 * DateTimeConstants.MILLIS_PER_HOUR + 23L);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().hours().add(result, -1);
        result = ISOChronology.getInstance().millis().add(result, -23);

        MutableInterval test = new MutableInterval(dur, dt);
        assertEquals(result, test.getStartMillis());
        assertEquals(dt.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RP_RI5() throws Throwable {
        MutableInterval test = new MutableInterval((ReadablePeriod) null, (ReadableInstant) null);
        assertEquals(TEST_TIME_NOW, test.getStartMillis());
        assertEquals(TEST_TIME_NOW, test.getEndMillis());
    }

    public void testConstructor_RP_RI6() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        MutableInterval test = new MutableInterval((ReadablePeriod) null, dt);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(dt.getMillis(), test.getEndMillis());
    }

    public void testConstructor_RP_RI7() throws Throwable {
        Period dur = new Period(0, 6, 0, 0, 1, 0, 0, 0);
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().monthOfYear().add(result, -6);
        result = ISOChronology.getInstance().hourOfDay().add(result, -1);

        MutableInterval test = new MutableInterval(dur, (ReadableInstant) null);
        assertEquals(result, test.getStartMillis());
        assertEquals(TEST_TIME_NOW, test.getEndMillis());
    }

    public void testConstructor_RP_RI8() throws Throwable {
        DateTime dt = new DateTime(TEST_TIME_NOW);
        Period dur = new Period(0, 0, 0, 0, 0, 0, 0, -1);
        try {
            new MutableInterval(dur, dt);
            fail();
        } catch (IllegalArgumentException ex) {}
    }

    //-----------------------------------------------------------------------
    public void testConstructor_RI_RD1() throws Throwable {
        long result = TEST_TIME_NOW;
        result = ISOChronology.getInstance().months().add(result, 6);
        result = ISOChronology.getInstance().hours().add(result, 1);

        DateTime dt = new DateTime(TEST_TIME_NOW);
        Duration dur = new Duration(result - TEST_TIME_NOW);

        MutableInterval test = new MutableInterval(dt, dur);
        assertEquals(dt.getMillis(), test.getStartMillis());
        assertEquals(result, test.getEndMillis());
    }

    public void testConstructor_RI_RD2() throws Throwable {
        MutableInterval test = new MutableInterval((ReadableInstant) null, (ReadableDuration)
null); assertEquals(TEST_TIME_NOW, test.getStartMillis()); assertEquals(TEST_TIME_NOW, test.getEndMillis()); } public void testConstructor_RI_RD3() throws Throwable { DateTime dt = new DateTime(TEST_TIME_NOW); MutableInterval test = new MutableInterval(dt, (ReadableDuration) null); assertEquals(dt.getMillis(), test.getStartMillis()); assertEquals(dt.getMillis(), test.getEndMillis()); } public void testConstructor_RI_RD4() throws Throwable { long result = TEST_TIME_NOW; result = ISOChronology.getInstance().monthOfYear().add(result, 6); result = ISOChronology.getInstance().hourOfDay().add(result, 1); Duration dur = new Duration(result - TEST_TIME_NOW); MutableInterval test = new MutableInterval((ReadableInstant) null, dur); assertEquals(TEST_TIME_NOW, test.getStartMillis()); assertEquals(result, test.getEndMillis()); } public void testConstructor_RI_RD5() throws Throwable { DateTime dt = new DateTime(TEST_TIME_NOW); Duration dur = new Duration(-1); try { new MutableInterval(dt, dur); fail(); } catch (IllegalArgumentException ex) {} } //----------------------------------------------------------------------- public void testConstructor_RD_RI1() throws Throwable { long result = TEST_TIME_NOW; result = ISOChronology.getInstance().months().add(result, -6); result = ISOChronology.getInstance().hours().add(result, -1); DateTime dt = new DateTime(TEST_TIME_NOW); Duration dur = new Duration(TEST_TIME_NOW - result); MutableInterval test = new MutableInterval(dur, dt); assertEquals(result, test.getStartMillis()); assertEquals(dt.getMillis(), test.getEndMillis()); } public void testConstructor_RD_RI2() throws Throwable { MutableInterval test = new MutableInterval((ReadableDuration) null, (ReadableInstant) null); assertEquals(TEST_TIME_NOW, test.getStartMillis()); assertEquals(TEST_TIME_NOW, test.getEndMillis()); } public void testConstructor_RD_RI3() throws Throwable { DateTime dt = new DateTime(TEST_TIME_NOW); MutableInterval test = new MutableInterval((ReadableDuration) null, 
dt); assertEquals(dt.getMillis(), test.getStartMillis()); assertEquals(dt.getMillis(), test.getEndMillis()); } public void testConstructor_RD_RI4() throws Throwable { long result = TEST_TIME_NOW; result = ISOChronology.getInstance().monthOfYear().add(result, -6); result = ISOChronology.getInstance().hourOfDay().add(result, -1); Duration dur = new Duration(TEST_TIME_NOW - result); MutableInterval test = new MutableInterval(dur, (ReadableInstant) null); assertEquals(result, test.getStartMillis()); assertEquals(TEST_TIME_NOW, test.getEndMillis()); } public void testConstructor_RD_RI5() throws Throwable { DateTime dt = new DateTime(TEST_TIME_NOW); Duration dur = new Duration(-1); try { new MutableInterval(dur, dt); fail(); } catch (IllegalArgumentException ex) {} } //----------------------------------------------------------------------- public void testConstructor_Object1() throws Throwable { DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0); DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1); MutableInterval test = new MutableInterval(dt1.toString() + '/' + dt2.toString()); assertEquals(dt1.getMillis(), test.getStartMillis()); assertEquals(dt2.getMillis(), test.getEndMillis()); } public void testConstructor_Object2() throws Throwable { DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0); DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1); MutableInterval base = new MutableInterval(dt1, dt2); MutableInterval test = new MutableInterval(base); assertEquals(dt1.getMillis(), test.getStartMillis()); assertEquals(dt2.getMillis(), test.getEndMillis()); } public void testConstructor_Object3() throws Throwable { DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0); DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1); Interval base = new Interval(dt1, dt2); MutableInterval test = new MutableInterval(base); assertEquals(dt1.getMillis(), test.getStartMillis()); assertEquals(dt2.getMillis(), test.getEndMillis()); } public void testConstructor_Object4() throws Throwable { 
MockInterval base = new MockInterval(); MutableInterval test = new MutableInterval(base); assertEquals(base.getStartMillis(), test.getStartMillis()); assertEquals(base.getEndMillis(), test.getEndMillis()); } public void testConstructor_Object5() throws Throwable { IntervalConverter oldConv = ConverterManager.getInstance().getIntervalConverter(""); IntervalConverter conv = new IntervalConverter() { public boolean isReadableInterval(Object object, Chronology chrono) { return false; } public void setInto(ReadWritableInterval interval, Object object, Chronology chrono) { interval.setChronology(chrono); interval.setInterval(1234L, 5678L); } public Class<?> getSupportedType() { return String.class; } }; try { ConverterManager.getInstance().addIntervalConverter(conv); DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0); DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1); MutableInterval test = new MutableInterval(dt1.toString() + '/' + dt2.toString()); assertEquals(1234L, test.getStartMillis()); assertEquals(5678L, test.getEndMillis()); } finally { ConverterManager.getInstance().addIntervalConverter(oldConv); } } public void testConstructor_Object6() throws Throwable { IntervalConverter oldConv = ConverterManager.getInstance().getIntervalConverter(new MutableInterval(0L, 0L)); IntervalConverter conv = new IntervalConverter() { public boolean isReadableInterval(Object object, Chronology chrono) { return false; } public void setInto(ReadWritableInterval interval, Object object, Chronology chrono) { interval.setChronology(chrono); interval.setInterval(1234L, 5678L); } public Class<?> getSupportedType() { return ReadableInterval.class; } }; try { ConverterManager.getInstance().addIntervalConverter(conv); Interval base = new Interval(-1000L, 1000L); MutableInterval test = new MutableInterval(base); assertEquals(1234L, test.getStartMillis()); assertEquals(5678L, test.getEndMillis()); } finally { ConverterManager.getInstance().addIntervalConverter(oldConv); } } class 
MockInterval implements ReadableInterval { public Chronology getChronology() { return ISOChronology.getInstance(); } public long getStartMillis() { return 1234L; } public DateTime getStart() { return new DateTime(1234L); } public long getEndMillis() { return 5678L; } public DateTime getEnd() { return new DateTime(5678L); } public long toDurationMillis() { return (5678L - 1234L); } public Duration toDuration() { return new Duration(5678L - 1234L); } public boolean contains(long millisInstant) { return false; } public boolean containsNow() { return false; } public boolean contains(ReadableInstant instant) { return false; } public boolean contains(ReadableInterval interval) { return false; } public boolean overlaps(ReadableInterval interval) { return false; } public boolean isBefore(ReadableInstant instant) { return false; } public boolean isBefore(ReadableInterval interval) { return false; } public boolean isAfter(ReadableInstant instant) { return false; } public boolean isAfter(ReadableInterval interval) { return false; } public Interval toInterval() { return null; } public MutableInterval toMutableInterval() { return null; } public Period toPeriod() { return null; } public Period toPeriod(PeriodType type) { return null; } } //----------------------------------------------------------------------- public void testConstructor_Object_Chronology1() throws Throwable { DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0); DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1); Interval base = new Interval(dt1, dt2); MutableInterval test = new MutableInterval(base, BuddhistChronology.getInstance()); assertEquals(dt1.getMillis(), test.getStartMillis()); assertEquals(dt2.getMillis(), test.getEndMillis()); assertEquals(BuddhistChronology.getInstance(), test.getChronology()); } public void testConstructor_Object_Chronology2() throws Throwable { DateTime dt1 = new DateTime(2004, 6, 9, 0, 0, 0, 0); DateTime dt2 = new DateTime(2005, 7, 10, 1, 1, 1, 1); Interval base = new 
Interval(dt1, dt2); MutableInterval test = new MutableInterval(base, null); assertEquals(dt1.getMillis(), test.getStartMillis()); assertEquals(dt2.getMillis(), test.getEndMillis()); assertEquals(ISOChronology.getInstance(), test.getChronology()); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.filters; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Set; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import org.apache.tomcat.util.res.StringManager; /** * <p> * A {@link Filter} that enable client-side cross-origin requests by * implementing W3C's CORS (<b>C</b>ross-<b>O</b>rigin <b>R</b>esource * <b>S</b>haring) specification for resources. Each {@link HttpServletRequest} * request is inspected as per specification, and appropriate response headers * are added to {@link HttpServletResponse}. * </p> * * <p> * By default, it also sets following request attributes, that help to * determine the nature of the request downstream. 
* <ul> * <li><b>cors.isCorsRequest:</b> Flag to determine if the request is a CORS * request. Set to <code>true</code> if a CORS request; <code>false</code> * otherwise.</li> * <li><b>cors.request.origin:</b> The Origin URL, i.e. the URL of the page from * where the request is originated.</li> * <li> * <b>cors.request.type:</b> Type of request. Possible values: * <ul> * <li>SIMPLE: A request which is not preceded by a pre-flight request.</li> * <li>ACTUAL: A request which is preceded by a pre-flight request.</li> * <li>PRE_FLIGHT: A pre-flight request.</li> * <li>NOT_CORS: A normal same-origin request.</li> * <li>INVALID_CORS: A cross-origin request which is invalid.</li> * </ul> * </li> * <li><b>cors.request.headers:</b> Request headers sent as * 'Access-Control-Request-Headers' header, for pre-flight request.</li> * </ul> * </p> * * @see <a href="http://www.w3.org/TR/cors/">CORS specification</a> * */ public class CorsFilter implements Filter { private static final Log log = LogFactory.getLog(CorsFilter.class); private static final StringManager sm = StringManager.getManager(Constants.Package); /** * A {@link Collection} of origins consisting of zero or more origins that * are allowed access to the resource. */ private final Collection<String> allowedOrigins; /** * Determines if any origin is allowed to make request. */ private boolean anyOriginAllowed; /** * A {@link Collection} of methods consisting of zero or more methods that * are supported by the resource. */ private final Collection<String> allowedHttpMethods; /** * A {@link Collection} of headers consisting of zero or more header field * names that are supported by the resource. */ private final Collection<String> allowedHttpHeaders; /** * A {@link Collection} of exposed headers consisting of zero or more header * field names of headers other than the simple response headers that the * resource might use and can be exposed. 
*/
    private final Collection<String> exposedHeaders;

    /**
     * A supports credentials flag that indicates whether the resource supports
     * user credentials in the request. It is true when the resource does and
     * false otherwise.
     */
    private boolean supportsCredentials;

    /**
     * Indicates (in seconds) how long the results of a pre-flight request can
     * be cached in a pre-flight result cache.
     */
    private long preflightMaxAge;

    /**
     * Determines if the request should be decorated or not.
     */
    private boolean decorateRequest;

    // The collections are final and are mutated in place by parseAndStore(),
    // so init() can be invoked again without replacing the references.
    public CorsFilter() {
        this.allowedOrigins = new HashSet<String>();
        this.allowedHttpMethods = new HashSet<String>();
        this.allowedHttpHeaders = new HashSet<String>();
        this.exposedHeaders = new HashSet<String>();
    }

    @Override
    public void doFilter(final ServletRequest servletRequest,
            final ServletResponse servletResponse,
            final FilterChain filterChain)
            throws IOException, ServletException {
        // This filter is only meaningful for HTTP; reject anything else early.
        if (!(servletRequest instanceof HttpServletRequest) ||
                !(servletResponse instanceof HttpServletResponse)) {
            throw new ServletException(sm.getString("corsFilter.onlyHttp"));
        }

        // Safe to downcast at this point.
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        HttpServletResponse response = (HttpServletResponse) servletResponse;

        // Determines the CORS request type.
        CorsFilter.CORSRequestType requestType = checkRequestType(request);

        // Adds CORS specific attributes to request.
        if (decorateRequest) {
            CorsFilter.decorateCORSProperties(request, requestType);
        }
        // Dispatch to the handler for the detected request type.
        switch (requestType) {
        case SIMPLE:
            // Handles a Simple CORS request.
            this.handleSimpleCORS(request, response, filterChain);
            break;
        case ACTUAL:
            // Handles an Actual CORS request.
            // NOTE: SIMPLE and ACTUAL deliberately share a handler;
            // handleSimpleCORS() accepts both request types.
            this.handleSimpleCORS(request, response, filterChain);
            break;
        case PRE_FLIGHT:
            // Handles a Pre-flight CORS request.
            this.handlePreflightCORS(request, response, filterChain);
            break;
        case NOT_CORS:
            // Handles a Normal request that is not a cross-origin request.
this.handleNonCORS(request, response, filterChain);
            break;
        default:
            // Handles a CORS request that violates specification.
            this.handleInvalidCORS(request, response, filterChain);
            break;
        }
    }

    @Override
    public void init(final FilterConfig filterConfig) throws ServletException {
        // Initialize defaults first so the filter is fully configured even
        // with no init-params; any param supplied below overrides its default.
        parseAndStore(DEFAULT_ALLOWED_ORIGINS, DEFAULT_ALLOWED_HTTP_METHODS,
                DEFAULT_ALLOWED_HTTP_HEADERS, DEFAULT_EXPOSED_HEADERS,
                DEFAULT_SUPPORTS_CREDENTIALS, DEFAULT_PREFLIGHT_MAXAGE,
                DEFAULT_DECORATE_REQUEST);

        if (filterConfig != null) {
            String configAllowedOrigins = filterConfig
                    .getInitParameter(PARAM_CORS_ALLOWED_ORIGINS);
            String configAllowedHttpMethods = filterConfig
                    .getInitParameter(PARAM_CORS_ALLOWED_METHODS);
            String configAllowedHttpHeaders = filterConfig
                    .getInitParameter(PARAM_CORS_ALLOWED_HEADERS);
            String configExposedHeaders = filterConfig
                    .getInitParameter(PARAM_CORS_EXPOSED_HEADERS);
            String configSupportsCredentials = filterConfig
                    .getInitParameter(PARAM_CORS_SUPPORT_CREDENTIALS);
            String configPreflightMaxAge = filterConfig
                    .getInitParameter(PARAM_CORS_PREFLIGHT_MAXAGE);
            String configDecorateRequest = filterConfig
                    .getInitParameter(PARAM_CORS_REQUEST_DECORATE);

            // A null init-param leaves the corresponding default untouched
            // (parseAndStore skips null arguments).
            parseAndStore(configAllowedOrigins, configAllowedHttpMethods,
                    configAllowedHttpHeaders, configExposedHeaders,
                    configSupportsCredentials, configPreflightMaxAge,
                    configDecorateRequest);
        }
    }

    /**
     * Handles a CORS request of type {@link CORSRequestType}.SIMPLE.
     *
     * @param request
     *            The {@link HttpServletRequest} object.
     * @param response
     *            The {@link HttpServletResponse} object.
     * @param filterChain
     *            The {@link FilterChain} object.
* @throws IOException
     * @throws ServletException
     * @see <a href="http://www.w3.org/TR/cors/#resource-requests">Simple
     *      Cross-Origin Request, Actual Request, and Redirects</a>
     */
    protected void handleSimpleCORS(final HttpServletRequest request,
            final HttpServletResponse response, final FilterChain filterChain)
            throws IOException, ServletException {
        // Guard: this handler only accepts SIMPLE and ACTUAL request types.
        CorsFilter.CORSRequestType requestType = checkRequestType(request);
        if (!(requestType == CorsFilter.CORSRequestType.SIMPLE ||
                requestType == CorsFilter.CORSRequestType.ACTUAL)) {
            throw new IllegalArgumentException(
                    sm.getString("corsFilter.wrongType2",
                            CorsFilter.CORSRequestType.SIMPLE,
                            CorsFilter.CORSRequestType.ACTUAL));
        }

        final String origin = request
                .getHeader(CorsFilter.REQUEST_HEADER_ORIGIN);
        final String method = request.getMethod();

        // Section 6.1.2
        // Reject requests from origins or with methods that are not allowed.
        if (!isOriginAllowed(origin)) {
            handleInvalidCORS(request, response, filterChain);
            return;
        }

        if (!allowedHttpMethods.contains(method)) {
            handleInvalidCORS(request, response, filterChain);
            return;
        }

        // Section 6.1.3
        // Add a single Access-Control-Allow-Origin header.
        if (anyOriginAllowed && !supportsCredentials) {
            // If resource doesn't support credentials and if any origin is
            // allowed
            // to make CORS request, return header with '*'.
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN,
                    "*");
        } else {
            // If the resource supports credentials add a single
            // Access-Control-Allow-Origin header, with the value of the Origin
            // header as value.
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN,
                    origin);
        }

        // Section 6.1.3
        // If the resource supports credentials, add a single
        // Access-Control-Allow-Credentials header with the case-sensitive
        // string "true" as value.
if (supportsCredentials) {
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_CREDENTIALS,
                    "true");
        }

        // Section 6.1.4
        // If the list of exposed headers is not empty add one or more
        // Access-Control-Expose-Headers headers, with as values the header
        // field names given in the list of exposed headers.
        if ((exposedHeaders != null) && (exposedHeaders.size() > 0)) {
            String exposedHeadersString = join(exposedHeaders, ",");
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_EXPOSE_HEADERS,
                    exposedHeadersString);
        }

        // Indicate the response depends on the origin
        response.addHeader(CorsFilter.REQUEST_HEADER_VARY,
                CorsFilter.REQUEST_HEADER_ORIGIN);

        // Forward the request down the filter chain.
        filterChain.doFilter(request, response);
    }

    /**
     * Handles CORS pre-flight request.
     *
     * @param request
     *            The {@link HttpServletRequest} object.
     * @param response
     *            The {@link HttpServletResponse} object.
     * @param filterChain
     *            The {@link FilterChain} object.
     * @throws IOException
     * @throws ServletException
     */
    protected void handlePreflightCORS(final HttpServletRequest request,
            final HttpServletResponse response, final FilterChain filterChain)
            throws IOException, ServletException {
        // Guard: this handler only accepts PRE_FLIGHT requests.
        CORSRequestType requestType = checkRequestType(request);
        if (requestType != CORSRequestType.PRE_FLIGHT) {
            throw new IllegalArgumentException(sm.getString("corsFilter.wrongType1",
                    CORSRequestType.PRE_FLIGHT.name().toLowerCase(Locale.ENGLISH)));
        }

        final String origin = request
                .getHeader(CorsFilter.REQUEST_HEADER_ORIGIN);

        // Section 6.2.2
        if (!isOriginAllowed(origin)) {
            handleInvalidCORS(request, response, filterChain);
            return;
        }

        // Section 6.2.3
        // A pre-flight request must carry Access-Control-Request-Method.
        String accessControlRequestMethod = request.getHeader(
                CorsFilter.REQUEST_HEADER_ACCESS_CONTROL_REQUEST_METHOD);
        if (accessControlRequestMethod == null) {
            handleInvalidCORS(request, response, filterChain);
            return;
        } else {
            accessControlRequestMethod = accessControlRequestMethod.trim();
        }

        // Section 6.2.4
        String
accessControlRequestHeadersHeader = request.getHeader(
                CorsFilter.REQUEST_HEADER_ACCESS_CONTROL_REQUEST_HEADERS);
        List<String> accessControlRequestHeaders = new LinkedList<String>();
        if (accessControlRequestHeadersHeader != null &&
                !accessControlRequestHeadersHeader.trim().isEmpty()) {
            // Requested header names are lower-cased so they can be compared
            // against the (lower-cased) allowedHttpHeaders collection.
            String[] headers = accessControlRequestHeadersHeader.trim().split(
                    ",");
            for (String header : headers) {
                accessControlRequestHeaders.add(header.trim().toLowerCase(Locale.ENGLISH));
            }
        }

        // Section 6.2.5
        if (!allowedHttpMethods.contains(accessControlRequestMethod)) {
            handleInvalidCORS(request, response, filterChain);
            return;
        }

        // Section 6.2.6
        // Every requested header must be allowed, otherwise the whole
        // pre-flight is rejected.
        if (!accessControlRequestHeaders.isEmpty()) {
            for (String header : accessControlRequestHeaders) {
                if (!allowedHttpHeaders.contains(header)) {
                    handleInvalidCORS(request, response, filterChain);
                    return;
                }
            }
        }

        // Section 6.2.7
        if (supportsCredentials) {
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN,
                    origin);
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_CREDENTIALS,
                    "true");
        } else {
            if (anyOriginAllowed) {
                response.addHeader(
                        CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN,
                        "*");
            } else {
                response.addHeader(
                        CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN,
                        origin);
            }
        }

        // Section 6.2.8
        if (preflightMaxAge > 0) {
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_MAX_AGE,
                    String.valueOf(preflightMaxAge));
        }

        // Section 6.2.9
        response.addHeader(
                CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_METHODS,
                accessControlRequestMethod);

        // Section 6.2.10
        if ((allowedHttpHeaders != null) && (!allowedHttpHeaders.isEmpty())) {
            response.addHeader(
                    CorsFilter.RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_HEADERS,
                    join(allowedHttpHeaders, ","));
        }

        // Do not forward the request down the filter chain.
    }

    /**
     * Handles a request, that's not a CORS request, but is a valid request i.e.
     * it is not a cross-origin request. This implementation, just forwards the
     * request down the filter chain.
*
     * @param request
     *            The {@link HttpServletRequest} object.
     * @param response
     *            The {@link HttpServletResponse} object.
     * @param filterChain
     *            The {@link FilterChain} object.
     * @throws IOException
     * @throws ServletException
     */
    private void handleNonCORS(final HttpServletRequest request,
            final HttpServletResponse response, final FilterChain filterChain)
            throws IOException, ServletException {
        // Let request pass.
        filterChain.doFilter(request, response);
    }

    /**
     * Handles a CORS request that violates specification.
     *
     * @param request
     *            The {@link HttpServletRequest} object.
     * @param response
     *            The {@link HttpServletResponse} object.
     * @param filterChain
     *            The {@link FilterChain} object.
     */
    private void handleInvalidCORS(final HttpServletRequest request,
            final HttpServletResponse response, final FilterChain filterChain) {
        String origin = request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN);
        String method = request.getMethod();
        String accessControlRequestHeaders = request.getHeader(
                REQUEST_HEADER_ACCESS_CONTROL_REQUEST_HEADERS);

        // Respond 403 with an empty body; the filter chain is deliberately
        // not continued for an invalid CORS request.
        response.setContentType("text/plain");
        response.setStatus(HttpServletResponse.SC_FORBIDDEN);
        response.resetBuffer();

        if (log.isDebugEnabled()) {
            // Debug so no need for i18n
            StringBuilder message =
                    new StringBuilder("Invalid CORS request; Origin=");
            message.append(origin);
            message.append(";Method=");
            message.append(method);
            if (accessControlRequestHeaders != null) {
                message.append(";Access-Control-Request-Headers=");
                message.append(accessControlRequestHeaders);
            }
            log.debug(message.toString());
        }
    }

    @Override
    public void destroy() {
        // NOOP
    }

    /**
     * Decorates the {@link HttpServletRequest}, with CORS attributes.
     * <ul>
     * <li><b>cors.isCorsRequest:</b> Flag to determine if request is a CORS
     * request. Set to <code>true</code> if CORS request; <code>false</code>
     * otherwise.</li>
     * <li><b>cors.request.origin:</b> The Origin URL.</li>
     * <li><b>cors.request.type:</b> Type of request.
Values:
     * <code>simple</code> or <code>preflight</code> or <code>not_cors</code> or
     * <code>invalid_cors</code></li>
     * <li><b>cors.request.headers:</b> Request headers sent as
     * 'Access-Control-Request-Headers' header, for pre-flight request.</li>
     * </ul>
     *
     * @param request
     *            The {@link HttpServletRequest} object.
     * @param corsRequestType
     *            The {@link CORSRequestType} object.
     */
    protected static void decorateCORSProperties(
            final HttpServletRequest request,
            final CORSRequestType corsRequestType) {
        if (request == null) {
            throw new IllegalArgumentException(
                    sm.getString("corsFilter.nullRequest"));
        }

        if (corsRequestType == null) {
            throw new IllegalArgumentException(
                    sm.getString("corsFilter.nullRequestType"));
        }
        // SIMPLE, ACTUAL and PRE_FLIGHT all set the same three attributes;
        // PRE_FLIGHT additionally records the requested headers.
        switch (corsRequestType) {
        case SIMPLE:
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST,
                    Boolean.TRUE);
            request.setAttribute(CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN,
                    request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN));
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE,
                    corsRequestType.name().toLowerCase(Locale.ENGLISH));
            break;
        case ACTUAL:
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST,
                    Boolean.TRUE);
            request.setAttribute(CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN,
                    request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN));
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE,
                    corsRequestType.name().toLowerCase(Locale.ENGLISH));
            break;
        case PRE_FLIGHT:
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST,
                    Boolean.TRUE);
            request.setAttribute(CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN,
                    request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN));
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE,
                    corsRequestType.name().toLowerCase(Locale.ENGLISH));
            String headers = request.getHeader(
                    REQUEST_HEADER_ACCESS_CONTROL_REQUEST_HEADERS);
            if (headers == null) {
                // Empty string rather than null keeps the attribute present
                // for downstream consumers.
                headers = "";
            }
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_HEADERS,
                    headers);
            break;
case NOT_CORS:
            request.setAttribute(
                    CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST,
                    Boolean.FALSE);
            break;
        default:
            // Don't set any attributes
            break;
        }
    }

    /**
     * Joins elements of a {@link Collection} into a string, where each element
     * is separated by the provided separator.
     *
     * @param elements
     *            The {@link Collection} containing elements to join together.
     * @param joinSeparator
     *            The string to be used for separating elements; a comma is
     *            used when <code>null</code> is supplied.
     * @return The joined {@link String}; <code>null</code> if the elements
     *         {@link Collection} is null. A <code>null</code> element is
     *         appended as an empty string (its separator is still emitted).
     */
    protected static String join(final Collection<String> elements,
            final String joinSeparator) {
        String separator = ",";
        if (elements == null) {
            return null;
        }
        if (joinSeparator != null) {
            separator = joinSeparator;
        }
        StringBuilder buffer = new StringBuilder();
        boolean isFirst = true;
        for (String element : elements) {
            if (!isFirst) {
                buffer.append(separator);
            } else {
                isFirst = false;
            }
            if (element != null) {
                buffer.append(element);
            }
        }
        return buffer.toString();
    }

    /**
     * Determines the request type.
*
     * @param request
     *            The {@link HttpServletRequest} to inspect; must not be null.
     * @return the detected {@link CORSRequestType}
     */
    protected CORSRequestType checkRequestType(final HttpServletRequest request) {
        CORSRequestType requestType = CORSRequestType.INVALID_CORS;
        if (request == null) {
            throw new IllegalArgumentException(
                    sm.getString("corsFilter.nullRequest"));
        }
        String originHeader = request.getHeader(REQUEST_HEADER_ORIGIN);
        // Section 6.1.1 and Section 6.2.1
        if (originHeader != null) {
            if (originHeader.isEmpty()) {
                requestType = CORSRequestType.INVALID_CORS;
            } else if (!isValidOrigin(originHeader)) {
                requestType = CORSRequestType.INVALID_CORS;
            } else if (isLocalOrigin(request, originHeader)) {
                // Origin matches this server: a same-origin request, so CORS
                // processing does not apply.
                return CORSRequestType.NOT_CORS;
            } else {
                String method = request.getMethod();
                if (method != null) {
                    if ("OPTIONS".equals(method)) {
                        String accessControlRequestMethodHeader =
                                request.getHeader(
                                        REQUEST_HEADER_ACCESS_CONTROL_REQUEST_METHOD);
                        if (accessControlRequestMethodHeader != null &&
                                !accessControlRequestMethodHeader.isEmpty()) {
                            // OPTIONS + Access-Control-Request-Method is the
                            // signature of a pre-flight request.
                            requestType = CORSRequestType.PRE_FLIGHT;
                        } else if (accessControlRequestMethodHeader != null &&
                                accessControlRequestMethodHeader.isEmpty()) {
                            // Header present but empty: malformed pre-flight.
                            requestType = CORSRequestType.INVALID_CORS;
                        } else {
                            // Plain cross-origin OPTIONS without the header.
                            requestType = CORSRequestType.ACTUAL;
                        }
                    } else if ("GET".equals(method) || "HEAD".equals(method)) {
                        requestType = CORSRequestType.SIMPLE;
                    } else if ("POST".equals(method)) {
                        String mediaType = getMediaType(request.getContentType());
                        if (mediaType != null) {
                            if (SIMPLE_HTTP_REQUEST_CONTENT_TYPE_VALUES
                                    .contains(mediaType)) {
                                requestType = CORSRequestType.SIMPLE;
                            } else {
                                requestType = CORSRequestType.ACTUAL;
                            }
                        }
                        // NOTE(review): a cross-origin POST with no
                        // Content-Type header falls through and is reported as
                        // INVALID_CORS — confirm this is the intended policy.
                    } else {
                        requestType = CORSRequestType.ACTUAL;
                    }
                }
            }
        } else {
            // No Origin header at all: a normal same-origin request.
            requestType = CORSRequestType.NOT_CORS;
        }
        return requestType;
    }

    private boolean isLocalOrigin(HttpServletRequest request, String origin) {

        // Build scheme://host:port from request
        StringBuilder target = new StringBuilder();
        String scheme = request.getScheme();
        if (scheme == null) {
            return false;
        } else {
            scheme = scheme.toLowerCase(Locale.ENGLISH);
        }
        target.append(scheme);
target.append("://"); String host = request.getServerName(); if (host == null) { return false; } target.append(host); int port = request.getServerPort(); if ("http".equals(scheme) && port != 80 || "https".equals(scheme) && port != 443) { target.append(':'); target.append(port); } return origin.equalsIgnoreCase(target.toString()); } /* * Return the lower case, trimmed value of the media type from the content * type. */ private String getMediaType(String contentType) { if (contentType == null) { return null; } String result = contentType.toLowerCase(Locale.ENGLISH); int firstSemiColonIndex = result.indexOf(';'); if (firstSemiColonIndex > -1) { result = result.substring(0, firstSemiColonIndex); } result = result.trim(); return result; } /** * Checks if the Origin is allowed to make a CORS request. * * @param origin * The Origin. * @return <code>true</code> if origin is allowed; <code>false</code> * otherwise. */ private boolean isOriginAllowed(final String origin) { if (anyOriginAllowed) { return true; } // If 'Origin' header is a case-sensitive match of any of allowed // origins, then return true, else return false. return allowedOrigins.contains(origin); } /** * Parses each param-value and populates configuration variables. If a param * is provided, it overrides the default. * * @param allowedOrigins * A {@link String} of comma separated origins. * @param allowedHttpMethods * A {@link String} of comma separated HTTP methods. * @param allowedHttpHeaders * A {@link String} of comma separated HTTP headers. * @param exposedHeaders * A {@link String} of comma separated headers that needs to be * exposed. * @param supportsCredentials * "true" if support credentials needs to be enabled. * @param preflightMaxAge * The amount of seconds the user agent is allowed to cache the * result of the pre-flight request. 
* @throws ServletException */ private void parseAndStore(final String allowedOrigins, final String allowedHttpMethods, final String allowedHttpHeaders, final String exposedHeaders, final String supportsCredentials, final String preflightMaxAge, final String decorateRequest) throws ServletException { if (allowedOrigins != null) { if (allowedOrigins.trim().equals("*")) { this.anyOriginAllowed = true; } else { this.anyOriginAllowed = false; Set<String> setAllowedOrigins = parseStringToSet(allowedOrigins); this.allowedOrigins.clear(); this.allowedOrigins.addAll(setAllowedOrigins); } } if (allowedHttpMethods != null) { Set<String> setAllowedHttpMethods = parseStringToSet(allowedHttpMethods); this.allowedHttpMethods.clear(); this.allowedHttpMethods.addAll(setAllowedHttpMethods); } if (allowedHttpHeaders != null) { Set<String> setAllowedHttpHeaders = parseStringToSet(allowedHttpHeaders); Set<String> lowerCaseHeaders = new HashSet<String>(); for (String header : setAllowedHttpHeaders) { String lowerCase = header.toLowerCase(Locale.ENGLISH); lowerCaseHeaders.add(lowerCase); } this.allowedHttpHeaders.clear(); this.allowedHttpHeaders.addAll(lowerCaseHeaders); } if (exposedHeaders != null) { Set<String> setExposedHeaders = parseStringToSet(exposedHeaders); this.exposedHeaders.clear(); this.exposedHeaders.addAll(setExposedHeaders); } if (supportsCredentials != null) { // For any value other then 'true' this will be false. this.supportsCredentials = Boolean .parseBoolean(supportsCredentials); } if (preflightMaxAge != null) { try { if (!preflightMaxAge.isEmpty()) { this.preflightMaxAge = Long.parseLong(preflightMaxAge); } else { this.preflightMaxAge = 0L; } } catch (NumberFormatException e) { throw new ServletException( sm.getString("corsFilter.invalidPreflightMaxAge"), e); } } if (decorateRequest != null) { // For any value other then 'true' this will be false. 
this.decorateRequest = Boolean.parseBoolean(decorateRequest); } } /** * Takes a comma separated list and returns a Set<String>. * * @param data * A comma separated list of strings. * @return Set<String> */ private Set<String> parseStringToSet(final String data) { String[] splits; if (data != null && data.length() > 0) { splits = data.split(","); } else { splits = new String[] {}; } Set<String> set = new HashSet<String>(); if (splits.length > 0) { for (String split : splits) { set.add(split.trim()); } } return set; } /** * Checks if a given origin is valid or not. Criteria: * <ul> * <li>If an encoded character is present in origin, it's not valid.</li> * <li>If origin is "null", it's valid.</li> * <li>Origin should be a valid {@link URI}</li> * </ul> * * @param origin * @see <a href="http://tools.ietf.org/html/rfc952">RFC952</a> */ protected static boolean isValidOrigin(String origin) { // Checks for encoded characters. Helps prevent CRLF injection. if (origin.contains("%")) { return false; } // "null" is a valid origin if ("null".equals(origin)) { return true; } // RFC6454, section 4. "If uri-scheme is file, the implementation MAY // return an implementation-defined value.". No limits are placed on // that value so treat all file URIs as valid origins. if (origin.startsWith("file://")) { return true; } URI originURI; try { originURI = new URI(origin); } catch (URISyntaxException e) { return false; } // If scheme for URI is null, return false. Return true otherwise. return originURI.getScheme() != null; } /** * Determines if any origin is allowed to make CORS request. * * @return <code>true</code> if it's enabled; false otherwise. */ public boolean isAnyOriginAllowed() { return anyOriginAllowed; } /** * Returns a {@link Set} of headers that should be exposed by browser. */ public Collection<String> getExposedHeaders() { return exposedHeaders; } /** * Determines is supports credentials is enabled. 
 */
public boolean isSupportsCredentials() {
    return supportsCredentials;
}

/**
 * Returns the preflight response cache time in seconds.
 *
 * @return Time to cache in seconds.
 */
public long getPreflightMaxAge() {
    return preflightMaxAge;
}

/**
 * Returns the {@link Set} of allowed origins that are allowed to make
 * requests.
 *
 * @return {@link Set}
 */
public Collection<String> getAllowedOrigins() {
    return allowedOrigins;
}

/**
 * Returns a {@link Set} of HTTP methods that are allowed to make requests.
 *
 * @return {@link Set}
 */
public Collection<String> getAllowedHttpMethods() {
    return allowedHttpMethods;
}

/**
 * Returns a {@link Set} of headers supported by the resource.
 *
 * @return {@link Set}
 */
public Collection<String> getAllowedHttpHeaders() {
    return allowedHttpHeaders;
}


// -------------------------------------------------- CORS Response Headers

/**
 * The Access-Control-Allow-Origin header indicates whether a resource can
 * be shared, based on returning the value of the Origin request header in
 * the response.
 */
public static final String RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN =
        "Access-Control-Allow-Origin";

/**
 * The Access-Control-Allow-Credentials header indicates whether the
 * response to request can be exposed when the omit credentials flag is
 * unset. When part of the response to a preflight request it indicates that
 * the actual request can include user credentials.
 */
public static final String RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_CREDENTIALS =
        "Access-Control-Allow-Credentials";

/**
 * The Access-Control-Expose-Headers header indicates which headers are safe
 * to expose to the API of a CORS API specification
 */
public static final String RESPONSE_HEADER_ACCESS_CONTROL_EXPOSE_HEADERS =
        "Access-Control-Expose-Headers";

/**
 * The Access-Control-Max-Age header indicates how long the results of a
 * preflight request can be cached in a preflight result cache.
 */
public static final String RESPONSE_HEADER_ACCESS_CONTROL_MAX_AGE =
        "Access-Control-Max-Age";

/**
 * The Access-Control-Allow-Methods header indicates, as part of the
 * response to a preflight request, which methods can be used during the
 * actual request.
 */
public static final String RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_METHODS =
        "Access-Control-Allow-Methods";

/**
 * The Access-Control-Allow-Headers header indicates, as part of the
 * response to a preflight request, which header field names can be used
 * during the actual request.
 */
public static final String RESPONSE_HEADER_ACCESS_CONTROL_ALLOW_HEADERS =
        "Access-Control-Allow-Headers";

// -------------------------------------------------- CORS Request Headers

/**
 * The Vary header allows disabling proxy caching by indicating that the
 * response depends on the origin.
 * NOTE(review): Vary is a response header, though it is grouped here under
 * request headers — confirm the grouping is just historical.
 */
public static final String REQUEST_HEADER_VARY = "Vary";

/**
 * The Origin header indicates where the cross-origin request or preflight
 * request originates from.
 */
public static final String REQUEST_HEADER_ORIGIN = "Origin";

/**
 * The Access-Control-Request-Method header indicates which method will be
 * used in the actual request as part of the preflight request.
 */
public static final String REQUEST_HEADER_ACCESS_CONTROL_REQUEST_METHOD =
        "Access-Control-Request-Method";

/**
 * The Access-Control-Request-Headers header indicates which headers will be
 * used in the actual request as part of the preflight request.
 */
public static final String REQUEST_HEADER_ACCESS_CONTROL_REQUEST_HEADERS =
        "Access-Control-Request-Headers";

// ----------------------------------------------------- Request attributes

/**
 * The prefix to a CORS request attribute.
 */
public static final String HTTP_REQUEST_ATTRIBUTE_PREFIX = "cors.";

/**
 * Attribute that contains the origin of the request.
 */
public static final String HTTP_REQUEST_ATTRIBUTE_ORIGIN =
        HTTP_REQUEST_ATTRIBUTE_PREFIX + "request.origin";

/**
 * Boolean value, suggesting if the request is a CORS request or not.
 */
public static final String HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST =
        HTTP_REQUEST_ATTRIBUTE_PREFIX + "isCorsRequest";

/**
 * Type of CORS request, of type {@link CORSRequestType}.
 */
public static final String HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE =
        HTTP_REQUEST_ATTRIBUTE_PREFIX + "request.type";

/**
 * Request headers sent as 'Access-Control-Request-Headers' header, for
 * pre-flight request.
 */
public static final String HTTP_REQUEST_ATTRIBUTE_REQUEST_HEADERS =
        HTTP_REQUEST_ATTRIBUTE_PREFIX + "request.headers";

// -------------------------------------------------------------- Constants

/**
 * Enumerates varies types of CORS requests. Also, provides utility methods
 * to determine the request type.
 */
protected static enum CORSRequestType {
    /**
     * A simple HTTP request, i.e. it shouldn't be pre-flighted.
     */
    SIMPLE,
    /**
     * A HTTP request that needs to be pre-flighted (a non-simple actual
     * request).
     */
    ACTUAL,
    /**
     * A pre-flight CORS request, to get meta information, before a
     * non-simple HTTP request is sent.
     */
    PRE_FLIGHT,
    /**
     * Not a CORS request, but a normal request.
     */
    NOT_CORS,
    /**
     * An invalid CORS request, i.e. it qualifies to be a CORS request, but
     * fails to be a valid one.
     */
    INVALID_CORS
}

/**
 * {@link Collection} of HTTP methods. Case sensitive.
 * @deprecated Not used. Will be removed in Tomcat 9.0.x onwards.
 */
@Deprecated
public static final Collection<String> HTTP_METHODS =
        new HashSet<String>(Arrays.asList("OPTIONS", "GET", "HEAD", "POST",
                "PUT", "DELETE", "TRACE", "CONNECT"));
/**
 * {@link Collection} of non-simple HTTP methods. Case sensitive.
 * @deprecated Not used. Will be removed in Tomcat 9.0.x onwards. All HTTP
 *             methods not in {@link #SIMPLE_HTTP_METHODS} are assumed to be
 *             non-simple.
 */
@Deprecated
public static final Collection<String> COMPLEX_HTTP_METHODS =
        new HashSet<String>(Arrays.asList("PUT", "DELETE", "TRACE",
                "CONNECT"));
/**
 * {@link Collection} of Simple HTTP methods. Case sensitive.
 *
 * @see  <a href="http://www.w3.org/TR/cors/#terminology"
 *       >http://www.w3.org/TR/cors/#terminology</a>
 */
public static final Collection<String> SIMPLE_HTTP_METHODS =
        new HashSet<String>(Arrays.asList("GET", "POST", "HEAD"));

/**
 * {@link Collection} of Simple HTTP request headers. Case in-sensitive.
 *
 * @see  <a href="http://www.w3.org/TR/cors/#terminology"
 *       >http://www.w3.org/TR/cors/#terminology</a>
 */
public static final Collection<String> SIMPLE_HTTP_REQUEST_HEADERS =
        new HashSet<String>(Arrays.asList("Accept", "Accept-Language",
                "Content-Language"));

/**
 * {@link Collection} of Simple HTTP response headers. Case in-sensitive.
 *
 * @see  <a href="http://www.w3.org/TR/cors/#terminology"
 *       >http://www.w3.org/TR/cors/#terminology</a>
 */
public static final Collection<String> SIMPLE_HTTP_RESPONSE_HEADERS =
        new HashSet<String>(Arrays.asList("Cache-Control",
                "Content-Language", "Content-Type", "Expires",
                "Last-Modified", "Pragma"));

/**
 * {@link Collection} of media type values for the Content-Type header that
 * will be treated as 'simple'. Note media-type values are compared ignoring
 * parameters and in a case-insensitive manner.
 *
 * @see  <a href="http://www.w3.org/TR/cors/#terminology"
 *       >http://www.w3.org/TR/cors/#terminology</a>
 */
public static final Collection<String> SIMPLE_HTTP_REQUEST_CONTENT_TYPE_VALUES =
        new HashSet<String>(Arrays.asList(
                "application/x-www-form-urlencoded", "multipart/form-data",
                "text/plain"));

// ------------------------------------------------ Configuration Defaults

/**
 * By default, all origins are allowed to make requests.
 */
public static final String DEFAULT_ALLOWED_ORIGINS = "*";

/**
 * By default, following methods are supported: GET, POST, HEAD and OPTIONS.
 */
public static final String DEFAULT_ALLOWED_HTTP_METHODS =
        "GET,POST,HEAD,OPTIONS";

/**
 * By default, time duration to cache pre-flight response is 30 mins.
 */
public static final String DEFAULT_PREFLIGHT_MAXAGE = "1800";

/**
 * By default, support credentials is turned on.
 */
public static final String DEFAULT_SUPPORTS_CREDENTIALS = "true";

/**
 * By default, following headers are supported:
 * Origin,Accept,X-Requested-With, Content-Type,
 * Access-Control-Request-Method, and Access-Control-Request-Headers.
 */
public static final String DEFAULT_ALLOWED_HTTP_HEADERS =
        "Origin,Accept,X-Requested-With,Content-Type," +
        "Access-Control-Request-Method,Access-Control-Request-Headers";

/**
 * By default, none of the headers are exposed in response.
 */
public static final String DEFAULT_EXPOSED_HEADERS = "";

/**
 * By default, request is decorated with CORS attributes.
 */
public static final String DEFAULT_DECORATE_REQUEST = "true";

// ----------------------------------------Filter Config Init param-name(s)

/**
 * Key to retrieve allowed origins from {@link FilterConfig}.
 */
public static final String PARAM_CORS_ALLOWED_ORIGINS =
        "cors.allowed.origins";

/**
 * Key to retrieve support credentials from {@link FilterConfig}.
 */
public static final String PARAM_CORS_SUPPORT_CREDENTIALS =
        "cors.support.credentials";

/**
 * Key to retrieve exposed headers from {@link FilterConfig}.
 */
public static final String PARAM_CORS_EXPOSED_HEADERS =
        "cors.exposed.headers";

/**
 * Key to retrieve allowed headers from {@link FilterConfig}.
 */
public static final String PARAM_CORS_ALLOWED_HEADERS =
        "cors.allowed.headers";

/**
 * Key to retrieve allowed methods from {@link FilterConfig}.
 */
public static final String PARAM_CORS_ALLOWED_METHODS =
        "cors.allowed.methods";

/**
 * Key to retrieve preflight max age from {@link FilterConfig}.
 */
public static final String PARAM_CORS_PREFLIGHT_MAXAGE =
        "cors.preflight.maxage";

/**
 * Key to determine if request should be decorated.
 */
public static final String PARAM_CORS_REQUEST_DECORATE =
        "cors.request.decorate";
}
/* * Copyright 2011 Google Inc. * Copyright 2014 Andreas Schildbach * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.matthewmitchell.nubitsj.core; import com.google.common.collect.*; import com.google.common.util.concurrent.*; import com.matthewmitchell.nubitsj.utils.*; import javax.annotation.*; import java.io.*; import java.util.*; import java.util.concurrent.*; import static com.google.common.base.Preconditions.*; // TODO: Modify the getDepthInBlocks method to require the chain height to be specified, in preparation for ceasing to touch every tx on every block. /** * <p>A TransactionConfidence object tracks data you can use to make a confidence decision about a transaction. * It also contains some pre-canned rules for common scenarios: if you aren't really sure what level of confidence * you need, these should prove useful. You can get a confidence object using {@link Transaction#getConfidence()}. * They cannot be constructed directly.</p> * * <p>Confidence in a transaction can come in multiple ways:</p> * * <ul> * <li>Because you created it yourself and only you have the necessary keys.</li> * <li>Receiving it from a fully validating peer you know is trustworthy, for instance, because it's run by yourself.</li> * <li>Receiving it from a peer on the network you randomly chose. 
If your network connection is not being
 * intercepted, you have a pretty good chance of connecting to a node that is following the rules.</li>
 * <li>Receiving it from multiple peers on the network. If your network connection is not being intercepted,
 * hearing about a transaction from multiple peers indicates the network has accepted the transaction and
 * thus miners likely have too (miners have the final say in whether a transaction becomes valid or not).</li>
 * <li>Seeing the transaction appear in a block on the main chain. Your confidence increases as the transaction
 * becomes further buried under work. Work can be measured either in blocks (roughly, units of time), or
 * amount of work done.</li>
 * </ul>
 *
 * <p>Alternatively, you may know that the transaction is "dead", that is, one or more of its inputs have
 * been double spent and will never confirm unless there is another re-org.</p>
 *
 * <p>TransactionConfidence is updated via the {@link com.matthewmitchell.nubitsj.core.TransactionConfidence#incrementDepthInBlocks()}
 * method to ensure the block depth is up to date.</p>
 * To make a copy that won't be changed, use {@link com.matthewmitchell.nubitsj.core.TransactionConfidence#duplicate()}.
 */
public class TransactionConfidence implements Serializable {
    private static final long serialVersionUID = 4577920141400556444L;

    /**
     * The peers that have announced the transaction to us. Network nodes don't have stable identities, so we use
     * IP address as an approximation. It's obviously vulnerable to being gamed if we allow arbitrary people to connect
     * to us, so only peers we explicitly connected to should go here.
     */
    private CopyOnWriteArrayList<PeerAddress> broadcastBy;
    /** The Transaction that this confidence object is associated with. */
    private final Sha256Hash hash;
    // Lazily created listeners array. Transient: recreated by readObject() after deserialization.
    private transient CopyOnWriteArrayList<ListenerRegistration<Listener>> listeners;

    // The depth of the transaction on the best chain in blocks. An unconfirmed transaction has depth 0.
    private int depth;

    /** Describes the state of the transaction in general terms. Properties can be read to learn specifics. */
    public enum ConfidenceType {
        /** If BUILDING, then the transaction is included in the best chain and your confidence in it is increasing. */
        BUILDING(1),

        /**
         * If PENDING, then the transaction is unconfirmed and should be included shortly, as long as it is being
         * announced and is considered valid by the network. A pending transaction will be announced if the containing
         * wallet has been attached to a live {@link PeerGroup} using {@link PeerGroup#addWallet(Wallet)}.
         * You can estimate how likely the transaction is to be included by connecting to a bunch of nodes then measuring
         * how many announce it, using {@link com.matthewmitchell.nubitsj.core.TransactionConfidence#numBroadcastPeers()}.
         * Or if you saw it from a trusted peer, you can assume it's valid and will get mined sooner or later as well.
         */
        PENDING(2),

        /**
         * If DEAD, then it means the transaction won't confirm unless there is another re-org,
         * because some other transaction is spending one of its inputs. Such transactions should be alerted to the user
         * so they can take action, eg, suspending shipment of goods if they are a merchant.
         * It can also mean that a coinbase transaction has been made dead from it being moved onto a side chain.
         */
        DEAD(4),

        /**
         * If a transaction hasn't been broadcast yet, or there's no record of it, its confidence is UNKNOWN.
         */
        UNKNOWN(0);

        private int value;
        ConfidenceType(int value) {
            this.value = value;
        }

        public int getValue() {
            return value;
        }
    }

    private ConfidenceType confidenceType = ConfidenceType.UNKNOWN;
    private int appearedAtChainHeight = -1;
    // The transaction that double spent this one, if any. Only meaningful while confidenceType == DEAD.
    private Transaction overridingTransaction;

    /**
     * Information about where the transaction was first seen (network, sent direct from peer, created by ourselves).
     * Useful for risk analyzing pending transactions. Probably not that useful after a tx is included in the chain,
     * unless re-org double spends start happening frequently.
     */
    public enum Source {
        /** We don't know where the transaction came from. */
        UNKNOWN,
        /** We got this transaction from a network peer. */
        NETWORK,
        /** This transaction was created by our own wallet, so we know it's not a double spend. */
        SELF
    }
    private Source source = Source.UNKNOWN;

    public TransactionConfidence(Sha256Hash hash) {
        // Assume a default number of peers for our set.
        broadcastBy = new CopyOnWriteArrayList<PeerAddress>();
        listeners = new CopyOnWriteArrayList<ListenerRegistration<Listener>>();
        this.hash = hash;
    }

    /**
     * In case the class gets created from a serialised version, we need to recreate the listeners object as it is set
     * as transient and only created in the constructor.
     */
    private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        listeners = new CopyOnWriteArrayList<ListenerRegistration<Listener>>();
    }

    /**
     * <p>A confidence listener is informed when the level of {@link TransactionConfidence} is updated by something, like
     * for example a {@link Wallet}. You can add listeners to update your user interface or manage your order tracking
     * system when confidence levels pass a certain threshold. <b>Note that confidence can go down as well as up.</b>
     * For example, this can happen if somebody is doing a double-spend attack against you. Whilst it's unlikely, your
     * code should be able to handle that in order to be correct.</p>
     *
     * <p>During listener execution, it's safe to remove the current listener but not others.</p>
     */
    public interface Listener {
        /** An enum that describes why a transaction confidence listener is being invoked (i.e. the class of change). */
        enum ChangeReason {
            /**
             * Occurs when the type returned by {@link com.matthewmitchell.nubitsj.core.TransactionConfidence#getConfidenceType()}
             * has changed. For example, if a PENDING transaction changes to BUILDING or DEAD, then this reason will
             * be given. It's a high level summary.
             */
            TYPE,

            /**
             * Occurs when a transaction that is in the best known block chain gets buried by another block. If you're
             * waiting for a certain number of confirmations, this is the reason to watch out for.
             */
            DEPTH,

            /**
             * Occurs when a pending transaction (not in the chain) was announced by another connected peer. By
             * watching the number of peers that announced a transaction go up, you can see whether it's being
             * accepted by the network or not. If all your peers announce, it's a pretty good bet the transaction
             * is considered relayable and has thus reached the miners.
             */
            SEEN_PEERS,
        }
        void onConfidenceChanged(TransactionConfidence confidence, ChangeReason reason);
    }

    // This is used to ensure that confidence objects which aren't referenced from anywhere but which have an event
    // listener set on them don't become eligible for garbage collection. Otherwise the TxConfidenceTable, which only
    // has weak references to these objects, would not be enough to keep the event listeners working as transactions
    // propagate around the network - it cannot know directly if the API user is interested in the object, so it uses
    // heap reachability as a proxy for interest.
    //
    // We add ourselves to this set when a listener is added and remove ourselves when the listener list is empty.
    private static final Set<TransactionConfidence> pinnedConfidenceObjects =
            Collections.synchronizedSet(new HashSet<TransactionConfidence>());

    /**
     * <p>Adds an event listener that will be run when this confidence object is updated. The listener will be locked and
     * is likely to be invoked on a peer thread.</p>
     *
     * <p>Note that this is NOT called when every block arrives. Instead it is called when the transaction
     * transitions between confidence states, ie, from not being seen in the chain to being seen (not necessarily in
     * the best chain). If you want to know when the transaction gets buried under another block, consider using
     * a future from {@link #getDepthFuture(int)}.</p>
     */
    public void addEventListener(Listener listener, Executor executor) {
        checkNotNull(listener);
        // addIfAbsent: registering the same listener twice is a no-op.
        listeners.addIfAbsent(new ListenerRegistration<Listener>(listener, executor));
        pinnedConfidenceObjects.add(this);
    }

    /**
     * <p>Adds an event listener that will be run when this confidence object is updated. The listener will be locked and
     * is likely to be invoked on a peer thread.</p>
     *
     * <p>Note that this is NOT called when every block arrives. Instead it is called when the transaction
     * transitions between confidence states, ie, from not being seen in the chain to being seen (not necessarily in
     * the best chain). If you want to know when the transaction gets buried under another block, implement a
     * {@link BlockChainListener}, attach it to a {@link BlockChain} and then use the getters on the
     * confidence object to determine the new depth.</p>
     */
    public void addEventListener(Listener listener) {
        addEventListener(listener, Threading.USER_THREAD);
    }

    public boolean removeEventListener(Listener listener) {
        checkNotNull(listener);
        boolean removed = ListenerRegistration.removeFromList(listener, listeners);
        // Once no listeners remain, allow this object to be garbage collected again (see pinnedConfidenceObjects).
        if (listeners.isEmpty())
            pinnedConfidenceObjects.remove(this);
        return removed;
    }

    /**
     * Returns the chain height at which the transaction appeared if confidence type is BUILDING.
     * @throws IllegalStateException if the confidence type is not BUILDING.
     */
    public synchronized int getAppearedAtChainHeight() {
        if (getConfidenceType() != ConfidenceType.BUILDING)
            throw new IllegalStateException("Confidence type is " + getConfidenceType() + ", not BUILDING");
        return appearedAtChainHeight;
    }

    /**
     * The chain height at which the transaction appeared, if it has been seen in the best chain. Automatically sets
     * the current type to {@link ConfidenceType#BUILDING} and depth to one.
     */
    public synchronized void setAppearedAtChainHeight(int appearedAtChainHeight) {
        if (appearedAtChainHeight < 0)
            throw new IllegalArgumentException("appearedAtChainHeight out of range");
        this.appearedAtChainHeight = appearedAtChainHeight;
        this.depth = 1;
        setConfidenceType(ConfidenceType.BUILDING);
    }

    /**
     * Returns a general statement of the level of confidence you can have in this transaction.
     */
    public synchronized ConfidenceType getConfidenceType() {
        return confidenceType;
    }

    /**
     * Called by other objects in the system, like a {@link Wallet}, when new information about the confidence of a
     * transaction becomes available.
     */
    public synchronized void setConfidenceType(ConfidenceType confidenceType) {
        if (confidenceType == this.confidenceType)
            return;
        this.confidenceType = confidenceType;
        // The overriding (double-spending) transaction is only meaningful while DEAD.
        if (confidenceType != ConfidenceType.DEAD) {
            overridingTransaction = null;
        }
        // Dropping back to PENDING (e.g. after a re-org) resets the chain placement info.
        if (confidenceType == ConfidenceType.PENDING) {
            depth = 0;
            appearedAtChainHeight = -1;
        }
    }

    /**
     * Called by a {@link Peer} when a transaction is pending and announced by a peer. The more peers announce the
     * transaction, the more peers have validated it (assuming your internet connection is not being intercepted).
     * If confidence is currently unknown, sets it to {@link ConfidenceType#PENDING}. Does not run listeners.
     *
     * @param address IP address of the peer, used as a proxy for identity.
     * @return true if marked, false if this address was already seen (no-op)
     */
    public boolean markBroadcastBy(PeerAddress address) {
        if (!broadcastBy.addIfAbsent(address))
            return false;  // Duplicate.
        synchronized (this) {
            if (getConfidenceType() == ConfidenceType.UNKNOWN) {
                this.confidenceType = ConfidenceType.PENDING;
            }
        }
        return true;
    }

    /**
     * Returns how many peers have been passed to {@link TransactionConfidence#markBroadcastBy}.
     */
    public int numBroadcastPeers() {
        return broadcastBy.size();
    }

    /**
     * Returns a snapshot of {@link PeerAddress}es that announced the transaction.
     */
    public Set<PeerAddress> getBroadcastBy() {
        ListIterator<PeerAddress> iterator = broadcastBy.listIterator();
        return Sets.newHashSet(iterator);
    }

    /** Returns true if the given address has been seen via markBroadcastBy() */
    public boolean wasBroadcastBy(PeerAddress address) {
        return broadcastBy.contains(address);
    }

    @Override
    public synchronized String toString() {
        StringBuilder builder = new StringBuilder();
        int peers = numBroadcastPeers();
        if (peers > 0) {
            builder.append("Seen by ").append(peers).append(peers > 1 ? " peers. " : " peer. ");
        }
        switch (getConfidenceType()) {
            case UNKNOWN:
                builder.append("Unknown confidence level.");
                break;
            case DEAD:
                builder.append("Dead: overridden by double spend and will not confirm.");
                break;
            case PENDING:
                builder.append("Pending/unconfirmed.");
                break;
            case BUILDING:
                builder.append(String.format(Locale.US, "Appeared in best chain at height %d, depth %d.",
                        getAppearedAtChainHeight(), getDepthInBlocks()));
                break;
        }
        return builder.toString();
    }

    /**
     * Called by the wallet when the tx appears on the best chain and a new block is added to the top. Updates the
     * internal counter that tracks how deeply buried the block is.
     *
     * @return the new depth
     */
    public synchronized int incrementDepthInBlocks() {
        return ++this.depth;
    }

    /**
     * <p>Depth in the chain is an approximation of how much time has elapsed since the transaction has been confirmed.
     * On average there is supposed to be a new block every 10 minutes, but the actual rate may vary. The reference
     * (Satoshi) implementation considers a transaction impractical to reverse after 6 blocks, but as of EOY 2011 network
     * security is high enough that often only one block is considered enough even for high value transactions. For low
     * value transactions like songs, or other cheap items, no blocks at all may be necessary.</p>
     *
     * <p>If the transaction appears in the top block, the depth is one. If it's anything else (pending, dead, unknown)
     * the depth is zero.</p>
     */
    public synchronized int getDepthInBlocks() {
        return depth;
    }

    /**
     * Set the depth in blocks. Having one block confirmation is a depth of one.
     */
    public synchronized void setDepthInBlocks(int depth) {
        this.depth = depth;
    }

    /**
     * Erases the set of broadcast/seen peers. This cannot be called whilst the confidence is PENDING. It is useful
     * for saving memory and wallet space once a tx is buried so deep it doesn't seem likely to go pending again.
     */
    public void clearBroadcastBy() {
        checkState(getConfidenceType() != ConfidenceType.PENDING);
        broadcastBy.clear();
    }

    /**
     * If this transaction has been overridden by a double spend (is dead), this call returns the overriding transaction.
     * Note that this call <b>can return null</b> if you have migrated an old wallet, as pre-Jan 2012 wallets did not
     * store this information.
     *
     * @return the transaction that double spent this one
     * @throws IllegalStateException if confidence type is not DEAD.
     */
    public synchronized Transaction getOverridingTransaction() {
        if (getConfidenceType() != ConfidenceType.DEAD)
            // NOTE(review): the exception message refers to OVERRIDDEN_BY_DOUBLE_SPEND, an older name for what is
            // now ConfidenceType.DEAD — confirm whether the message text should be updated.
            throw new IllegalStateException("Confidence type is " + getConfidenceType() +
                                            ", not OVERRIDDEN_BY_DOUBLE_SPEND");
        return overridingTransaction;
    }

    /**
     * Called when the transaction becomes newly dead, that is, we learn that one of its inputs has already been spent
     * in such a way that the double-spending transaction takes precedence over this one. It will not become valid now
     * unless there is a re-org. Automatically sets the confidence type to DEAD. The overriding transaction may not
     * directly double spend this one, but could also have double spent a dependency of this tx.
     */
    public synchronized void setOverridingTransaction(@Nullable Transaction overridingTransaction) {
        this.overridingTransaction = overridingTransaction;
        setConfidenceType(ConfidenceType.DEAD);
    }

    /**
     * Returns a copy of this object. Event listeners are not duplicated.
     * NOTE(review): depth and source are not copied either — a duplicate of a BUILDING confidence reports depth 0
     * and Source.UNKNOWN. Confirm whether that is intentional.
     */
    public TransactionConfidence duplicate() {
        TransactionConfidence c = new TransactionConfidence(hash);
        c.broadcastBy.addAll(broadcastBy);
        synchronized (this) {
            c.confidenceType = confidenceType;
            c.overridingTransaction = overridingTransaction;
            c.appearedAtChainHeight = appearedAtChainHeight;
        }
        return c;
    }

    /**
     * Call this after adjusting the confidence, for cases where listeners should be notified. This has to be done
     * explicitly rather than being done automatically because sometimes complex changes to transaction states can
     * result in a series of confidence changes that are not really useful to see separately. By invoking listeners
     * explicitly, more precise control is available. Note that this will run the listeners on the user code thread.
     */
    public void queueListeners(final Listener.ChangeReason reason) {
        for (final ListenerRegistration<Listener> registration : listeners) {
            registration.executor.execute(new Runnable() {
                @Override
                public void run() {
                    registration.listener.onConfidenceChanged(TransactionConfidence.this, reason);
                }
            });
        }
    }

    /**
     * The source of a transaction tries to identify where it came from originally. For instance, did we download it
     * from the peer to peer network, or make it ourselves, or receive it via Bluetooth, or import it from another app,
     * and so on. This information is useful for {@link com.matthewmitchell.nubitsj.wallet.CoinSelector} implementations to risk analyze
     * transactions and decide when to spend them.
     */
    public synchronized Source getSource() {
        return source;
    }

    /**
     * The source of a transaction tries to identify where it came from originally. For instance, did we download it
     * from the peer to peer network, or make it ourselves, or receive it via Bluetooth, or import it from another app,
     * and so on. This information is useful for {@link com.matthewmitchell.nubitsj.wallet.CoinSelector} implementations to risk analyze
     * transactions and decide when to spend them.
     */
    public synchronized void setSource(Source source) {
        this.source = source;
    }

    /**
     * Returns a future that completes when the transaction has been confirmed by "depth" blocks. For instance setting
     * depth to one will wait until it appears in a block on the best chain, and zero will wait until it has been seen
     * on the network.
     */
    public synchronized ListenableFuture<TransactionConfidence> getDepthFuture(final int depth, Executor executor) {
        final SettableFuture<TransactionConfidence> result = SettableFuture.create();
        if (getDepthInBlocks() >= depth) {
            result.set(this);
        }
        // NOTE(review): when the requested depth is already satisfied, the listener below is still registered and
        // only unregisters itself on the next confidence change (its later result.set() is then a no-op). Confirm
        // this lingering registration is intentional.
        addEventListener(new Listener() {
            @Override public void onConfidenceChanged(TransactionConfidence confidence, ChangeReason reason) {
                if (getDepthInBlocks() >= depth) {
                    removeEventListener(this);
                    result.set(confidence);
                }
            }
        }, executor);
        return result;
    }

    public synchronized ListenableFuture<TransactionConfidence> getDepthFuture(final int depth) {
        return getDepthFuture(depth, Threading.USER_THREAD);
    }

    public Sha256Hash getTransactionHash() {
        return hash;
    }
}
package com.brilapps.etl.target; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.apache.log4j.Logger; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import com.brilapps.etl.ETLUtil; import com.brilapps.etl.ProjectConstants; import com.brilapps.etl.source.SourceNetworkActivityColumnHeaders; import com.brilapps.etl.source.SourceNetworkHeaderColumnHeaders; public class NetworkActivityGenerator { static Logger logger = Logger.getLogger(NetworkActivityGenerator.class); public final static HashMap<TargetNetworkActivityColumnHeaders, String> destinationConstants = new HashMap<TargetNetworkActivityColumnHeaders, String>(); private static List<SourceNetworkHeaderColumnHeaders> UNIQUE_KEYS_NETWORK_ACTIVITY = new ArrayList<SourceNetworkHeaderColumnHeaders>(); static { UNIQUE_KEYS_NETWORK_ACTIVITY.add(SourceNetworkHeaderColumnHeaders.PROJECTNO); UNIQUE_KEYS_NETWORK_ACTIVITY.add(SourceNetworkHeaderColumnHeaders.ALT_TASKNO); UNIQUE_KEYS_NETWORK_ACTIVITY.add(SourceNetworkHeaderColumnHeaders.COST_TYPE); } static { // Constants Add all the constants columns here so that they will be // directly added to target WBS file. destinationConstants.put(TargetNetworkActivityColumnHeaders.DAUNE, "D"); destinationConstants.put(TargetNetworkActivityColumnHeaders.SLWID, "Z000003"); // Blank Space Constants // Constants Add all the Blank Spaces columns here so that they will be // directly added to target WBS file. 
// Blank Space Constants destinationConstants.put(TargetNetworkActivityColumnHeaders.ARBEI, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.ARBEH, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.INDET, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.LARNT, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.NPRIO, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.MLSTN, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.CLASF, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.VERTL, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.DAUNO, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.PREIS, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.WAERS, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.LOSVG, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.LOSME, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.AUDISP_ACTIVITY, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.EKORG, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.EKGRP, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.MATKL, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.EINSA, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.NTANF, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.EINSE, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.NTEND, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.FRSP, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.AENNR, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.RFPNT, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.TXJCD_ACTIVITY, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USR00, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USR01, ""); 
destinationConstants.put(TargetNetworkActivityColumnHeaders.USR02, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USR04, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USE04, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USR05, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USE05, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USR06, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USE06, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USR07, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.USE07, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.VERSN_EV, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.EVMET_TXT_P, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.EVMET_TXT_A, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.SWRT10, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.PRKST, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.ANFKO, ""); destinationConstants.put(TargetNetworkActivityColumnHeaders.SWRT11, ""); } public void writeHeaderColumns(final Sheet sheet) { logger.debug(" entering writeHeaderColumns() "); Row row = sheet.createRow(0); int colNum = 0; for (TargetNetworkActivityColumnHeaders field : TargetNetworkActivityColumnHeaders.getColumnHeadersByIndex()) { Cell cell = row.createCell(colNum++); cell.setCellValue(field.getColumnHeader()); } logger.debug(" exiting writeHeaderColumns() "); } public ArrayList<String> getColumnHeaders(final Sheet sheet) { if (logger.isDebugEnabled()) { logger.debug(" entering getColumnHeaders() "); } ArrayList<String> headers = new ArrayList<String>(); Iterator<Row> iterator = sheet.iterator(); while (iterator.hasNext()) { Row currentRow = iterator.next(); Iterator<Cell> cellIterator = currentRow.iterator(); while (cellIterator.hasNext()) { Cell currentCell = cellIterator.next(); 
headers.add(ETLUtil.getCellValueAsString(currentCell, logger)); } break; } logger.debug(" exiting getColumnHeaders() "); return headers; } public void deleteDuplicateRowsAndGenerateTargetForNetworkActivity(final Sheet sourceNetworkHeaderSheet, final Sheet destinationNetworkActivitySheet, final Map<String, Integer> networkHeaderSourceHeaderColumnIndexMap) { logger.debug(" entering deleteDuplicateRowsAndGenerateTargetForNetworkActivity "); ArrayList<String> headers = getColumnHeaders(sourceNetworkHeaderSheet); ArrayList<Integer> uniqueKeyIndexes = new ArrayList<Integer>(); int indexCount = 0; int costTypeIndex = -1; for (String headerName : headers) { for (SourceNetworkHeaderColumnHeaders sourceNetworkHeaderColumnHeader : UNIQUE_KEYS_NETWORK_ACTIVITY) { if (headerName.equals(sourceNetworkHeaderColumnHeader.getColumnHeader())) { uniqueKeyIndexes.add(indexCount); } if (costTypeIndex != -1 && SourceNetworkHeaderColumnHeaders.COST_TYPE.toString() .equals(sourceNetworkHeaderColumnHeader.getColumnHeader())) { costTypeIndex = indexCount; } } indexCount++; } logger.debug(" in deleteDuplicateRowsAndGenerateTargetForNetworkActivity uniqueKeyIndexes " + uniqueKeyIndexes); Set<String> uniqueKeys = new HashSet<String>(); Set<Integer> uniqueRows = new TreeSet<Integer>(); // writeHeaderColumns Row networkActivityRow = destinationNetworkActivitySheet.createRow(0); int networkActivityColNum = 0; for (SourceNetworkActivityColumnHeaders header : SourceNetworkActivityColumnHeaders.getColumnHeadersByIndex()) { Cell cell = networkActivityRow.createCell(networkActivityColNum++); cell.setCellValue(header.getColumnHeader()); } Iterator<Row> iterator = sourceNetworkHeaderSheet.iterator(); iterator.next(); while (iterator.hasNext()) { Row currentRow = iterator.next(); StringBuffer uniqueKey = new StringBuffer(""); for (Integer uniqueKeyIndex : uniqueKeyIndexes) { if (costTypeIndex == uniqueKeyIndex) { uniqueKey.append(ETLUtil.getCellValueAsString(currentRow.getCell(uniqueKeyIndex), logger)); } 
else { uniqueKey.append(ETLUtil.getCellValueAsString(currentRow.getCell(uniqueKeyIndex), logger)); } } if (!uniqueKeys.contains(uniqueKey.toString())) { logger.debug(" in deleteDuplicateRowsAndGenerateTargetForNetworkActivity adding unique key record " + uniqueKey); uniqueKeys.add(uniqueKey.toString()); uniqueRows.add(currentRow.getRowNum()); } } logger.debug(" in deleteDuplicateRowsAndGenerateTargetForNetworkActivity entering uniqueRowsloop"); List<String> projectTaskNos = new ArrayList<String>(); List<String> projectAltTaskNosAndActDesc = new ArrayList<String>(); for (Integer uniqueRow : uniqueRows) { logger.debug(" in deleteDuplicateRowsAndGenerateTargetForNetworkActivity processing uniqueRow with number " + uniqueRow); Row currentRow = sourceNetworkHeaderSheet.getRow(uniqueRow); Cell costTypeCell = currentRow.getCell( networkHeaderSourceHeaderColumnIndexMap.get(SourceNetworkHeaderColumnHeaders.COST_TYPE.toString())); Object costType = ETLUtil.getCellValue(costTypeCell, logger); CostTypeReferenceTable costTypeReference = null; String projectNo = ETLUtil.getCellValueAsString(currentRow.getCell( networkHeaderSourceHeaderColumnIndexMap.get(SourceNetworkHeaderColumnHeaders.PROJECTNO.toString())), logger); String taskNo = ETLUtil.getCellValueAsString(currentRow.getCell( networkHeaderSourceHeaderColumnIndexMap.get(SourceNetworkHeaderColumnHeaders.TASKNO.toString())), logger); String altTaskNo = ETLUtil.getCellValueAsString(currentRow.getCell(networkHeaderSourceHeaderColumnIndexMap .get(SourceNetworkHeaderColumnHeaders.ALT_TASKNO.toString())), logger); if (costType != null) { costTypeReference = ETLUtil.getCostTypeReferenceTableByCostType().get(costType.toString()); if (costTypeReference == null || costTypeReference.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_NA.toUpperCase())) { continue; } if (!projectAltTaskNosAndActDesc .contains(projectNo + altTaskNo + costTypeReference.getCostTypeActualDescription())) { 
projectAltTaskNosAndActDesc .add(projectNo + altTaskNo + costTypeReference.getCostTypeActualDescription()); } else { continue; } // Check if for same project no and alt task no the // ActualDescription is added earlier if yes then do not add if (costTypeReference.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_NA.toUpperCase())) { continue; } } // if there is no record in network header for the project no and // alt task no then skip the record in network activity. NetworkHeaderActivityReferenceTable networkHeaderActivityReferenceTable = ETLUtil .getNetworkHeaderActivityReferenceTableByProjectTaskNo().get(projectNo + altTaskNo); if (networkHeaderActivityReferenceTable == null) { continue; } NetworkActivityCostTypeReferenceTable networkActivityCostTypeReferenceTable = new NetworkActivityCostTypeReferenceTable(); networkActivityCostTypeReferenceTable.setProjectNo(projectNo); networkActivityCostTypeReferenceTable.setTaskNo(taskNo); networkActivityCostTypeReferenceTable.setAltTaskNo(altTaskNo); networkActivityCostTypeReferenceTable .setCostType(ETLUtil.getCellValueAsString(currentRow .getCell(networkHeaderSourceHeaderColumnIndexMap .get(SourceNetworkHeaderColumnHeaders.COST_TYPE.toString())), logger)); networkActivityCostTypeReferenceTable.setCostTypeDescription(costTypeReference.getCostTypeDescription()); networkActivityCostTypeReferenceTable .setCostTypeActualDescription(costTypeReference.getCostTypeActualDescription()); if (!projectTaskNos.contains(projectNo + altTaskNo)) { projectTaskNos.add(projectNo + altTaskNo); } if (ETLUtil.getNetworkActivityCostTypeTableListByProjectTaskNo().get(projectNo + altTaskNo) != null) { ETLUtil.getNetworkActivityCostTypeTableListByProjectTaskNo().get(projectNo + altTaskNo) .add(networkActivityCostTypeReferenceTable); } else { List<NetworkActivityCostTypeReferenceTable> networkHeaderActivityReferences = new ArrayList<NetworkActivityCostTypeReferenceTable>(); 
networkHeaderActivityReferences.add(networkActivityCostTypeReferenceTable); ETLUtil.getNetworkActivityCostTypeTableListByProjectTaskNo().put(projectNo + altTaskNo, networkHeaderActivityReferences); } } for (String projectTaskNo : projectTaskNos) { List<NetworkActivityCostTypeReferenceTable> networkHeaderActivityReferences = ETLUtil .getNetworkActivityCostTypeTableListByProjectTaskNo().get(projectTaskNo); NetworkActivityCostTypeReferenceTable[] sortedNetworkHeaderActivityReferenceArray = new NetworkActivityCostTypeReferenceTable[6]; for (NetworkActivityCostTypeReferenceTable networkHeaderActivityReferenceTable : networkHeaderActivityReferences) { if (networkHeaderActivityReferenceTable.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_MATERIAL.toUpperCase())) { sortedNetworkHeaderActivityReferenceArray[0] = networkHeaderActivityReferenceTable; } else if (networkHeaderActivityReferenceTable.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_LABOUR_ENG.toUpperCase())) { sortedNetworkHeaderActivityReferenceArray[1] = networkHeaderActivityReferenceTable; } else if (networkHeaderActivityReferenceTable.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_LABOUR_MFG.toUpperCase())) { sortedNetworkHeaderActivityReferenceArray[2] = networkHeaderActivityReferenceTable; } else if (networkHeaderActivityReferenceTable.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_ODC.toUpperCase())) { sortedNetworkHeaderActivityReferenceArray[3] = networkHeaderActivityReferenceTable; } else if (networkHeaderActivityReferenceTable.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_TRAVEL.toUpperCase())) { sortedNetworkHeaderActivityReferenceArray[4] = networkHeaderActivityReferenceTable; } else if (networkHeaderActivityReferenceTable.getCostTypeActualDescription().toUpperCase() .equals(ProjectConstants.COST_TYPE_MGMT_RES.toUpperCase())) { 
sortedNetworkHeaderActivityReferenceArray[5] = networkHeaderActivityReferenceTable; } } List<NetworkActivityCostTypeReferenceTable> sortedNetworkHeaderActivityReferences = new ArrayList<NetworkActivityCostTypeReferenceTable>(); for (NetworkActivityCostTypeReferenceTable networkHeaderActivityReferenceTable : sortedNetworkHeaderActivityReferenceArray) { if (networkHeaderActivityReferenceTable != null) { sortedNetworkHeaderActivityReferences.add(networkHeaderActivityReferenceTable); } } ETLUtil.getNetworkActivityCostTypeTableListByProjectTaskNo().put(projectTaskNo, sortedNetworkHeaderActivityReferences); } int rowCount = 1; for (String projectTaskNo : projectTaskNos) { List<NetworkActivityCostTypeReferenceTable> networkHeaderCostTypeReferences = ETLUtil .getNetworkActivityCostTypeTableListByProjectTaskNo().get(projectTaskNo); long vornr = 0; for (NetworkActivityCostTypeReferenceTable networkHeaderCostTypeReferenceTable : networkHeaderCostTypeReferences) { NetworkHeaderActivityReferenceTable networkHeaderActivityReferenceTable = ETLUtil .getNetworkHeaderActivityReferenceTableByProjectTaskNo() .get(networkHeaderCostTypeReferenceTable.getProjectNo() + networkHeaderCostTypeReferenceTable.getAltTaskNo()); if (networkHeaderActivityReferenceTable != null) { Row row = destinationNetworkActivitySheet.createRow(rowCount); Cell desCell = row.createCell(0); ETLUtil.setCellValue(desCell, networkHeaderCostTypeReferenceTable.getProjectNo(), logger); desCell = row.createCell(1); ETLUtil.setCellValue(desCell, networkHeaderCostTypeReferenceTable.getTaskNo(), logger); desCell = row.createCell(2); ETLUtil.setCellValue(desCell, networkHeaderCostTypeReferenceTable.getAltTaskNo(), logger); desCell = row.createCell(3); ETLUtil.setCellValue(desCell, networkHeaderActivityReferenceTable.getSerialNo(), logger); desCell = row.createCell(4); ETLUtil.setCellValue(desCell, networkHeaderCostTypeReferenceTable.getCostType(), logger); desCell = row.createCell(5); ETLUtil.setCellValue(desCell, 
networkHeaderCostTypeReferenceTable.getCostTypeDescription(), logger); desCell = row.createCell(6); ETLUtil.setCellValue(desCell, networkHeaderCostTypeReferenceTable.getCostTypeActualDescription(), logger); vornr = vornr + 10; desCell = row.createCell(7); ETLUtil.setCellValue(desCell, "00" + vornr, logger); rowCount++; } } } logger.debug(" exiting deleteDuplicateRowsAndGenerateTargetForNetworkActivity "); } public void generateNetworkActivityTargetFile(final File projectDefinitionDestinationFile, final File networkHeaderSourceFile, final File networkActivityNonDuplicateFile, final File destinationNetworkActivityFile) throws Exception { logger.debug("entering generateNetworkActivityTargetFile "); try { // WBS source file with duplicates FileInputStream networkHeaderSourceFileInputStream = new FileInputStream(networkHeaderSourceFile); // Create Workbook instance holding reference to .xlsx file Workbook networkHeaderSourceWorkbook = null; if (networkHeaderSourceFile.getName().endsWith(".xls")) { networkHeaderSourceWorkbook = new HSSFWorkbook(networkHeaderSourceFileInputStream); } else { networkHeaderSourceWorkbook = new XSSFWorkbook(networkHeaderSourceFileInputStream); } Sheet networkHeaderSourceSheet = networkHeaderSourceWorkbook.getSheetAt(0); logger.debug("in generateNetworkActivityTargetFile before validateColumnHeaders"); validateColumnHeaders(networkHeaderSourceSheet); logger.debug("in generateNetworkActivityTargetFile after validateColumnHeaders"); // writeHeaderColumns int colNum = 0; Map<String, Integer> networkHeaderSourceHeaderColumnIndexMap = new HashMap<String, Integer>(); for (String header : getColumnHeaders(networkHeaderSourceSheet)) { networkHeaderSourceHeaderColumnIndexMap.put(header, colNum); colNum++; } Workbook networkActivityNonDuplicateWorkbook = new HSSFWorkbook(); Sheet networkActivityNonDuplicateTargetSheet = networkActivityNonDuplicateWorkbook .createSheet(networkHeaderSourceSheet.getSheetName()); logger.debug("in 
generateNetworkActivityTargetFile before deleting duplicate records for network activity"); deleteDuplicateRowsAndGenerateTargetForNetworkActivity(networkHeaderSourceSheet, networkActivityNonDuplicateTargetSheet, networkHeaderSourceHeaderColumnIndexMap); logger.debug("in generateNetworkActivityTargetFile after deleting duplicate records for network activity"); FileOutputStream netWorkActivityOutputStream = new FileOutputStream(networkActivityNonDuplicateFile); networkActivityNonDuplicateWorkbook.write(netWorkActivityOutputStream); netWorkActivityOutputStream.close(); networkHeaderSourceFileInputStream.close(); // Generate Destination file Workbook targetNetworkTargetWorkbook = new HSSFWorkbook(); Sheet targetNetworkTargetSheet = targetNetworkTargetWorkbook.createSheet("NetworkTargetLoader"); writeHeaderColumns(targetNetworkTargetSheet); Sheet networkActivityNonDuplicateSheet = networkActivityNonDuplicateWorkbook.getSheetAt(0); Map<String, Integer> networkActivityNoDuplicateHeaderColumnIndexMap = new HashMap<String, Integer>(); colNum = 0; for (String header : getColumnHeaders(networkActivityNonDuplicateSheet)) { networkActivityNoDuplicateHeaderColumnIndexMap.put(header, colNum); colNum++; } Iterator<Row> networkTargetNonDuplicateSheetIterator = networkActivityNonDuplicateSheet.iterator(); // iterate header row firstand process remaining rows. 
networkTargetNonDuplicateSheetIterator.next(); int targetRowCount = 1; while (networkTargetNonDuplicateSheetIterator.hasNext()) { Row networkTargetNonDuplicateCurrentRow = networkTargetNonDuplicateSheetIterator.next(); String projectNo = ETLUtil.getCellValueAsString(networkTargetNonDuplicateCurrentRow .getCell(networkActivityNoDuplicateHeaderColumnIndexMap .get(SourceNetworkActivityColumnHeaders.PROJECTNO.toString())), logger); /*String taskNo = networkTargetNonDuplicateCurrentRow .getCell(networkActivityNoDuplicateHeaderColumnIndexMap .get(SourceNetworkActivityColumnHeaders.TASKNO.toString())).getStringCellValue();*/ String altTaskNo = ETLUtil.getCellValueAsString(networkTargetNonDuplicateCurrentRow .getCell(networkActivityNoDuplicateHeaderColumnIndexMap .get(SourceNetworkActivityColumnHeaders.ALT_TASKNO.toString())), logger); NetworkHeaderActivityReferenceTable networkHeaderActivityReferenceTable = ETLUtil .getNetworkHeaderActivityReferenceTableByProjectTaskNo().get(projectNo + altTaskNo); Row targetNetworkActivityRow = targetNetworkTargetSheet.createRow(targetRowCount); for (TargetNetworkActivityColumnHeaders targetNetworkActivityColumnHeader : TargetNetworkActivityColumnHeaders .getColumnHeadersByIndex()) { String actualCostType = ETLUtil.getCellValueAsString(networkTargetNonDuplicateCurrentRow .getCell(networkActivityNoDuplicateHeaderColumnIndexMap .get(SourceNetworkActivityColumnHeaders.COST_TYPE_ACTUAL_DESCRIPTION.toString())), logger); if (TargetNetworkActivityColumnHeaders.SERIAL == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkTargetNonDuplicateCurrentRow .getCell(networkActivityNoDuplicateHeaderColumnIndexMap .get(SourceNetworkActivityColumnHeaders.SERIAL_NO.toString())) .getNumericCellValue(), logger); } else if (TargetNetworkActivityColumnHeaders.VORNR == targetNetworkActivityColumnHeader) { Cell cell = 
targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, ETLUtil.getCellValueAsString( networkTargetNonDuplicateCurrentRow .getCell(networkActivityNoDuplicateHeaderColumnIndexMap .get(SourceNetworkActivityColumnHeaders.VORNR.toString())), logger), logger); } else if (TargetNetworkActivityColumnHeaders.LTXA1 == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, actualCostType, logger); } else if (TargetNetworkActivityColumnHeaders.PROJN_ACTIVITY == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getIdent(), logger); } else if (TargetNetworkActivityColumnHeaders.ACT_TYPE == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); String actType = "C"; if (actualCostType.toUpperCase().equals(ProjectConstants.COST_TYPE_LABOUR_ENG.toUpperCase()) || actualCostType.toUpperCase() .equals(ProjectConstants.COST_TYPE_LABOUR_MFG.toUpperCase())) { actType = "I"; } ETLUtil.setCellValue(cell, actType, logger); } else if (TargetNetworkActivityColumnHeaders.STEUS == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); String steus = "PS03"; if (actualCostType.toUpperCase().equals(ProjectConstants.COST_TYPE_LABOUR_ENG.toUpperCase()) || actualCostType .toUpperCase().equals(ProjectConstants.COST_TYPE_LABOUR_MFG.toUpperCase())) { steus = "PS01"; } ETLUtil.setCellValue(cell, steus, logger); } else if (TargetNetworkActivityColumnHeaders.ARBPL == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() 
- 1); String arbpl = ""; if (actualCostType.toUpperCase().equals(ProjectConstants.COST_TYPE_LABOUR_ENG.toUpperCase())) { arbpl = "TIMECHRG"; } else if (actualCostType.toUpperCase() .equals(ProjectConstants.COST_TYPE_LABOUR_MFG.toUpperCase())) { arbpl = "ETPRZZXX"; } ETLUtil.setCellValue(cell, arbpl, logger); } else if (TargetNetworkActivityColumnHeaders.WERKS_ACTIVITY == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getWerks(), logger); } else if (TargetNetworkActivityColumnHeaders.KALID == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getKalid(), logger); } else if (TargetNetworkActivityColumnHeaders.PRCTR_ACTIVITY == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getPrctr(), logger); } else if (TargetNetworkActivityColumnHeaders.KALSM_ACTIVITY == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getKalsm(), logger); } else if (TargetNetworkActivityColumnHeaders.ZSCHL_ACTIVITY == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getZschl(), logger); } else if (TargetNetworkActivityColumnHeaders.SCOPE_ACTIVITY == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, 
networkHeaderActivityReferenceTable.getScope(), logger); } else if (TargetNetworkActivityColumnHeaders.USR03 == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(cell, networkHeaderActivityReferenceTable.getUser03(), logger); } else if (TargetNetworkActivityColumnHeaders.SAKTO == targetNetworkActivityColumnHeader) { Cell cell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); String sakto = ""; if (actualCostType.toUpperCase().equals(ProjectConstants.COST_TYPE_MATERIAL.toUpperCase())) { sakto = "9000000000"; } else if (actualCostType.toUpperCase().equals(ProjectConstants.COST_TYPE_ODC.toUpperCase())) { sakto = "7780250000"; } else if (actualCostType.toUpperCase() .equals(ProjectConstants.COST_TYPE_TRAVEL.toUpperCase())) { sakto = "7350000000"; } else if (actualCostType.toUpperCase() .equals(ProjectConstants.COST_TYPE_MGMT_RES.toUpperCase())) { sakto = "9000000001"; } ETLUtil.setCellValue(cell, sakto, logger); } else if (destinationConstants.get(targetNetworkActivityColumnHeader) != null) { Cell desCell = targetNetworkActivityRow .createCell(targetNetworkActivityColumnHeader.getColumnIndex() - 1); ETLUtil.setCellValue(desCell, destinationConstants.get(targetNetworkActivityColumnHeader), logger); logger.debug("in generateWBStargetFile adding constant column " + targetNetworkActivityColumnHeader.getColumnHeader() + ETLUtil.getCellValueAsString(desCell, logger)); } } targetRowCount++; } FileOutputStream targetOutputStream = new FileOutputStream(destinationNetworkActivityFile); targetNetworkTargetWorkbook.write(targetOutputStream); targetOutputStream.close(); } catch (Exception e) { logger.error(" in generateNetworkActivityTargetFile() ", e); throw new Exception(e); } logger.debug("exiting generateNetworkActivityTargetFile "); } public ArrayList<String> validateColumnHeaders(final Sheet sheet) throws Exception { 
ArrayList<String> headers = getColumnHeaders(sheet); SourceNetworkHeaderColumnHeaders[] sourceNetworkHeaderColumnHeaders = SourceNetworkHeaderColumnHeaders.values(); for (SourceNetworkHeaderColumnHeaders sourceNetworkHeaderColumnHeader : sourceNetworkHeaderColumnHeaders) { if (!headers.contains(sourceNetworkHeaderColumnHeader.getColumnHeader())) { logger.error(" in validateColumnHeaders() ", new Exception("Column " + sourceNetworkHeaderColumnHeader.getColumnHeader() + " missing in source Network Header file.")); throw new Exception("Column " + sourceNetworkHeaderColumnHeader.getColumnHeader() + " missing in source Network Header file."); } } return headers; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.startup; import java.lang.reflect.Field; import java.lang.reflect.Method; import jakarta.annotation.Resource; import jakarta.annotation.Resources; import jakarta.annotation.security.DeclareRoles; import jakarta.annotation.security.RunAs; import jakarta.servlet.ServletSecurityElement; import jakarta.servlet.annotation.ServletSecurity; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Wrapper; import org.apache.catalina.core.ApplicationServletRegistration; import org.apache.catalina.util.Introspection; import org.apache.tomcat.util.descriptor.web.ContextEnvironment; import org.apache.tomcat.util.descriptor.web.ContextResource; import org.apache.tomcat.util.descriptor.web.ContextResourceEnvRef; import org.apache.tomcat.util.descriptor.web.ContextService; import org.apache.tomcat.util.descriptor.web.FilterDef; import org.apache.tomcat.util.descriptor.web.MessageDestinationRef; import org.apache.tomcat.util.res.StringManager; /** * <strong>AnnotationSet</strong> for processing the annotations of the web * application classes (<code>/WEB-INF/classes</code> and * <code>/WEB-INF/lib</code>). 
*/
public class WebAnnotationSet {

    // Separator used when building the default JNDI name: <class name> + "/" + <member name>.
    private static final String SEPARATOR = "/";

    // Property key under which the annotation's mappedName() is stored on naming resources.
    private static final String MAPPED_NAME_PROPERTY = "mappedName";

    /**
     * The string resources for this package.
     */
    protected static final StringManager sm = StringManager.getManager(Constants.Package);


    // ---------------------------------------------------------- Public Methods

    /**
     * Process the annotations on a context.
     * <p>
     * Entry point: scans listeners, filters and servlets registered on the
     * context and translates their JSR 250 style annotations into the
     * equivalent deployment-descriptor entries on the context.
     *
     * @param context The context which will have its annotations processed
     */
    public static void loadApplicationAnnotations(Context context) {
        loadApplicationListenerAnnotations(context);
        loadApplicationFilterAnnotations(context);
        loadApplicationServletAnnotations(context);
    }


    // ------------------------------------------------------- Protected Methods

    /**
     * Process the annotations for the listeners.
     *
     * @param context The context which will have its annotations processed
     */
    protected static void loadApplicationListenerAnnotations(Context context) {
        String[] applicationListeners = context.findApplicationListeners();
        for (String className : applicationListeners) {
            Class<?> clazz = Introspection.loadClass(context, className);
            if (clazz == null) {
                // Class could not be loaded; skip it rather than fail the scan
                continue;
            }

            loadClassAnnotation(context, clazz);
            loadFieldsAnnotation(context, clazz);
            loadMethodsAnnotation(context, clazz);
        }
    }

    /**
     * Process the annotations for the filters.
     *
     * @param context The context which will have its annotations processed
     */
    protected static void loadApplicationFilterAnnotations(Context context) {
        FilterDef[] filterDefs = context.findFilterDefs();
        for (FilterDef filterDef : filterDefs) {
            Class<?> clazz = Introspection.loadClass(context, filterDef.getFilterClass());
            if (clazz == null) {
                continue;
            }

            loadClassAnnotation(context, clazz);
            loadFieldsAnnotation(context, clazz);
            loadMethodsAnnotation(context, clazz);
        }
    }

    /**
     * Process the annotations for the servlets.
     *
     * @param context The context which will have its annotations processed
     */
    protected static void loadApplicationServletAnnotations(Context context) {
        Container[] children = context.findChildren();
        for (Container child : children) {
            if (child instanceof Wrapper) {
                Wrapper wrapper = (Wrapper) child;
                if (wrapper.getServletClass() == null) {
                    // No servlet class configured (e.g. JSP-file servlet) - nothing to scan
                    continue;
                }

                Class<?> clazz = Introspection.loadClass(context, wrapper.getServletClass());
                if (clazz == null) {
                    continue;
                }

                loadClassAnnotation(context, clazz);
                loadFieldsAnnotation(context, clazz);
                loadMethodsAnnotation(context, clazz);

                /* Process RunAs annotation which can be only on servlets.
                 * Ref JSR 250, equivalent to the run-as element in
                 * the deployment descriptor
                 */
                RunAs runAs = clazz.getAnnotation(RunAs.class);
                if (runAs != null) {
                    wrapper.setRunAs(runAs.value());
                }

                // Process ServletSecurity annotation
                ServletSecurity servletSecurity = clazz.getAnnotation(ServletSecurity.class);
                if (servletSecurity != null) {
                    context.addServletSecurity(
                            new ApplicationServletRegistration(wrapper, context),
                            new ServletSecurityElement(servletSecurity));
                }
            }
        }
    }

    /**
     * Process the annotations on a context for a given className.
     *
     * @param context The context which will have its annotations processed
     * @param clazz The class to examine for Servlet annotations
     */
    protected static void loadClassAnnotation(Context context, Class<?> clazz) {
        /* Process Resource annotation.
         * Ref JSR 250
         */
        Resource resourceAnnotation = clazz.getAnnotation(Resource.class);
        if (resourceAnnotation != null) {
            addResource(context, resourceAnnotation);
        }

        /* Process Resources annotation.
         * Ref JSR 250
         */
        Resources resourcesAnnotation = clazz.getAnnotation(Resources.class);
        if (resourcesAnnotation != null && resourcesAnnotation.value() != null) {
            for (Resource resource : resourcesAnnotation.value()) {
                addResource(context, resource);
            }
        }

        /* Process EJB annotation.
         * Ref JSR 224, equivalent to the ejb-ref or ejb-local-ref
         * element in the deployment descriptor.
         * NOTE: intentionally disabled - the ejb-ref registration is not implemented.
        {
            EJB annotation = clazz.getAnnotation(EJB.class);
            if (annotation != null) {
                if ((annotation.mappedName().length() == 0) ||
                        annotation.mappedName().equals("Local")) {
                    ContextLocalEjb ejb = new ContextLocalEjb();
                    ejb.setName(annotation.name());
                    ejb.setType(annotation.beanInterface().getCanonicalName());
                    ejb.setDescription(annotation.description());
                    ejb.setHome(annotation.beanName());
                    context.getNamingResources().addLocalEjb(ejb);
                } else if (annotation.mappedName().equals("Remote")) {
                    ContextEjb ejb = new ContextEjb();
                    ejb.setName(annotation.name());
                    ejb.setType(annotation.beanInterface().getCanonicalName());
                    ejb.setDescription(annotation.description());
                    ejb.setHome(annotation.beanName());
                    context.getNamingResources().addEjb(ejb);
                }
            }
        }
        */

        /* Process WebServiceRef annotation.
         * Ref JSR 224, equivalent to the service-ref element in
         * the deployment descriptor.
         * The service-ref registration is not implemented
        {
            WebServiceRef annotation = clazz
                    .getAnnotation(WebServiceRef.class);
            if (annotation != null) {
                ContextService service = new ContextService();
                service.setName(annotation.name());
                service.setWsdlfile(annotation.wsdlLocation());
                service.setType(annotation.type().getCanonicalName());
                if (annotation.value() == null)
                    service.setServiceinterface(annotation.type()
                            .getCanonicalName());
                if (annotation.type().getCanonicalName().equals("Service"))
                    service.setServiceinterface(annotation.type()
                            .getCanonicalName());
                if (annotation.value().getCanonicalName().equals("Endpoint"))
                    service.setServiceendpoint(annotation.type()
                            .getCanonicalName());
                service.setPortlink(annotation.type().getCanonicalName());
                context.getNamingResources().addService(service);
            }
        }
        */

        /* Process DeclareRoles annotation.
         * Ref JSR 250, equivalent to the security-role element in
         * the deployment descriptor
         */
        DeclareRoles declareRolesAnnotation = clazz.getAnnotation(DeclareRoles.class);
        if (declareRolesAnnotation != null && declareRolesAnnotation.value() != null) {
            for (String role : declareRolesAnnotation.value()) {
                context.addSecurityRole(role);
            }
        }
    }

    /**
     * Process {@code @Resource} annotations on the declared fields of the
     * given class and register a matching naming resource for each.
     *
     * @param context The context which will have its annotations processed
     * @param clazz The class whose fields are examined
     */
    protected static void loadFieldsAnnotation(Context context, Class<?> clazz) {
        // Initialize the annotations
        Field[] fields = Introspection.getDeclaredFields(clazz);
        if (fields != null && fields.length > 0) {
            for (Field field : fields) {
                Resource annotation = field.getAnnotation(Resource.class);
                if (annotation != null) {
                    // Default JNDI name is <fully qualified class name>/<field name>,
                    // used when the annotation does not supply a name
                    String defaultName = clazz.getName() + SEPARATOR + field.getName();
                    Class<?> defaultType = field.getType();
                    addResource(context, annotation, defaultName, defaultType);
                }
            }
        }
    }

    /**
     * Process {@code @Resource} annotations on the declared methods of the
     * given class. Only valid JavaBean setters may carry the annotation.
     *
     * @param context The context which will have its annotations processed
     * @param clazz The class whose methods are examined
     *
     * @throws IllegalArgumentException if an annotated method is not a valid
     *         setter (wrong name, parameter count or return type)
     */
    protected static void loadMethodsAnnotation(Context context, Class<?> clazz) {
        // Initialize the annotations
        Method[] methods = Introspection.getDeclaredMethods(clazz);
        if (methods != null && methods.length > 0) {
            for (Method method : methods) {
                Resource annotation = method.getAnnotation(Resource.class);
                if (annotation != null) {
                    if (!Introspection.isValidSetter(method)) {
                        // Injection target must be a setter; fail fast otherwise
                        throw new IllegalArgumentException(sm.getString(
                                "webAnnotationSet.invalidInjection"));
                    }

                    // Default JNDI name is <fully qualified class name>/<property name>
                    String defaultName = clazz.getName() + SEPARATOR +
                            Introspection.getPropertyName(method);

                    // Setter validity guarantees exactly one parameter
                    Class<?> defaultType = (method.getParameterTypes()[0]);
                    addResource(context, annotation, defaultName, defaultType);
                }
            }
        }
    }

    /**
     * Process a Resource annotation to set up a Resource.
     * Ref JSR 250, equivalent to the resource-ref,
     * message-destination-ref, env-ref, resource-env-ref
     * or service-ref element in the deployment descriptor.
     *
     * @param context The context which will have its annotations processed
     * @param annotation The annotation that was found
     */
    protected static void addResource(Context context, Resource annotation) {
        addResource(context, annotation, null, null);
    }

    /**
     * Register the naming resource described by a {@code @Resource}
     * annotation, dispatching on the resource's declared type:
     * primitives/wrappers/String become env-entry, JAX-RPC services become
     * service-ref, known factory types become resource-ref, JMS destinations
     * become message-destination-ref, everything else resource-env-ref.
     *
     * @param context The context which will have its annotations processed
     * @param annotation The annotation that was found
     * @param defaultName name to use when the annotation supplies none
     *        (may be {@code null} for class-level annotations)
     * @param defaultType type to use when the annotation supplies none
     *        (may be {@code null} for class-level annotations)
     */
    protected static void addResource(Context context, Resource annotation,
            String defaultName, Class<?> defaultType) {
        String name = getName(annotation, defaultName);
        String type = getType(annotation, defaultType);

        if (type.equals("java.lang.String") ||
                type.equals("java.lang.Character") ||
                type.equals("java.lang.Integer") ||
                type.equals("java.lang.Boolean") ||
                type.equals("java.lang.Double") ||
                type.equals("java.lang.Byte") ||
                type.equals("java.lang.Short") ||
                type.equals("java.lang.Long") ||
                type.equals("java.lang.Float")) {

            // env-entry element
            ContextEnvironment resource = new ContextEnvironment();

            resource.setName(name);
            resource.setType(type);

            resource.setDescription(annotation.description());
            resource.setProperty(MAPPED_NAME_PROPERTY, annotation.mappedName());
            resource.setLookupName(annotation.lookup());

            context.getNamingResources().addEnvironment(resource);

        } else if (type.equals("javax.xml.rpc.Service")) {

            // service-ref element
            ContextService service = new ContextService();

            service.setName(name);
            // mappedName is (ab)used to carry the WSDL file location here
            service.setWsdlfile(annotation.mappedName());

            service.setType(type);
            service.setDescription(annotation.description());
            service.setLookupName(annotation.lookup());

            context.getNamingResources().addService(service);

        } else if (type.equals("javax.sql.DataSource") ||
                type.equals("javax.jms.ConnectionFactory") ||
                type.equals("javax.jms.QueueConnectionFactory") ||
                type.equals("javax.jms.TopicConnectionFactory") ||
                // NOTE(review): javax.* and jakarta.* type names are mixed in
                // this dispatch (javax.jms vs jakarta.mail) - confirm this
                // matches the EE namespace targeted by this code base
                type.equals("jakarta.mail.Session") ||
                type.equals("java.net.URL") ||
                type.equals("javax.resource.cci.ConnectionFactory") ||
                type.equals("org.omg.CORBA_2_3.ORB") ||
                // catch-all for vendor connection factories; must stay after
                // the explicit javax.jms checks above
                type.endsWith("ConnectionFactory")) {

            // resource-ref element
            ContextResource resource = new ContextResource();

            resource.setName(name);
            resource.setType(type);

            if (annotation.authenticationType() ==
                    Resource.AuthenticationType.CONTAINER) {
                resource.setAuth("Container");
            } else if (annotation.authenticationType() ==
                    Resource.AuthenticationType.APPLICATION) {
                resource.setAuth("Application");
            }

            resource.setScope(annotation.shareable() ? "Shareable" : "Unshareable");
            resource.setProperty(MAPPED_NAME_PROPERTY, annotation.mappedName());
            resource.setDescription(annotation.description());
            resource.setLookupName(annotation.lookup());

            context.getNamingResources().addResource(resource);

        } else if (type.equals("javax.jms.Queue") ||
                type.equals("javax.jms.Topic")) {

            // message-destination-ref
            MessageDestinationRef resource = new MessageDestinationRef();

            resource.setName(name);
            resource.setType(type);

            // mappedName is (ab)used to carry the usage here
            resource.setUsage(annotation.mappedName());
            resource.setDescription(annotation.description());
            resource.setLookupName(annotation.lookup());

            context.getNamingResources().addMessageDestinationRef(resource);

        } else {
            /*
             * General case. Also used for:
             * - javax.resource.cci.InteractionSpec
             * - jakarta.transaction.UserTransaction
             */

            // resource-env-ref
            ContextResourceEnvRef resource = new ContextResourceEnvRef();

            resource.setName(name);
            resource.setType(type);

            resource.setProperty(MAPPED_NAME_PROPERTY, annotation.mappedName());
            resource.setDescription(annotation.description());
            resource.setLookupName(annotation.lookup());

            context.getNamingResources().addResourceEnvRef(resource);
        }
    }

    /**
     * Resolve the resource type for an annotation: the annotation's own
     * type() wins unless it is absent ({@code Object.class} is the
     * annotation's default), in which case the injection target's type is
     * used. Primitives are converted to their wrapper classes.
     */
    private static String getType(Resource annotation, Class<?> defaultType) {
        Class<?> type = annotation.type();
        if (type == null || type.equals(Object.class)) {
            if (defaultType != null) {
                type = defaultType;
            }
        }
        return Introspection.convertPrimitiveType(type).getCanonicalName();
    }

    /**
     * Resolve the JNDI name for an annotation: the annotation's own name()
     * wins unless it is empty, in which case the supplied default
     * (class name + "/" + member name) is used.
     */
    private static String getName(Resource annotation, String defaultName) {
        String name = annotation.name();
        if (name == null || name.equals("")) {
            if (defaultName != null) {
                name = defaultName;
            }
        }
        return name;
    }
}
package com.hubspot.singularity.mesos;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.mesos.Resources;
import com.hubspot.mesos.SingularityContainerInfo;
import com.hubspot.mesos.SingularityContainerType;
import com.hubspot.mesos.SingularityDockerImage;
import com.hubspot.mesos.SingularityDockerInfo;
import com.hubspot.mesos.SingularityDockerNetworkType;
import com.hubspot.mesos.SingularityDockerPortMapping;
import com.hubspot.mesos.SingularityDockerVolume;
import com.hubspot.mesos.SingularityDockerVolumeMode;
import com.hubspot.mesos.SingularityMesosImage;
import com.hubspot.mesos.SingularityMesosImageType;
import com.hubspot.mesos.SingularityMesosInfo;
import com.hubspot.mesos.SingularityNetworkInfo;
import com.hubspot.mesos.SingularityPortMapping;
import com.hubspot.mesos.SingularityPortMappingType;
import com.hubspot.mesos.SingularityVolume;
import com.hubspot.mesos.SingularityVolumeSource;
import com.hubspot.mesos.SingularityVolumeSourceType;
import com.hubspot.singularity.RequestType;
import com.hubspot.singularity.SingularityDeploy;
import com.hubspot.singularity.SingularityDeployBuilder;
import com.hubspot.singularity.SingularityPendingRequest.PendingType;
import com.hubspot.singularity.SingularityPendingTask;
import com.hubspot.singularity.SingularityPendingTaskBuilder;
import com.hubspot.singularity.SingularityPendingTaskId;
import com.hubspot.singularity.SingularityRequest;
import com.hubspot.singularity.SingularityRequestBuilder;
import com.hubspot.singularity.SingularityTaskRequest;
import com.hubspot.singularity.config.NetworkConfiguration;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.ExecutorIdGenerator;
import com.hubspot.singularity.helpers.MesosProtosUtils;
import com.hubspot.singularity.helpers.MesosUtils;
import com.hubspot.singularity.helpers.SingularityMesosTaskHolder;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import org.apache.mesos.v1.Protos;
import org.apache.mesos.v1.Protos.AgentID;
import org.apache.mesos.v1.Protos.ContainerInfo.DockerInfo.PortMapping;
import org.apache.mesos.v1.Protos.ContainerInfo.Type;
import org.apache.mesos.v1.Protos.Environment.Variable;
import org.apache.mesos.v1.Protos.FrameworkID;
import org.apache.mesos.v1.Protos.Image;
import org.apache.mesos.v1.Protos.NetworkInfo;
import org.apache.mesos.v1.Protos.Offer;
import org.apache.mesos.v1.Protos.OfferID;
import org.apache.mesos.v1.Protos.Parameter;
import org.apache.mesos.v1.Protos.TaskInfo;
import org.apache.mesos.v1.Protos.Volume;
import org.apache.mesos.v1.Protos.Volume.Mode;
import org.apache.mesos.v1.Protos.Volume.Source.DockerVolume;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Unit tests for {@link SingularityMesosTaskBuilder}: verifies that request
 * and deploy settings are translated into the expected Mesos {@link TaskInfo}
 * (shell vs argument commands, environment variables, docker and mesos
 * containerizers, port mappings, volumes and networking).
 */
public class SingularityMesosTaskBuilderTest {
  private final SingularityConfiguration configuration = new SingularityConfiguration();
  private SingularityMesosTaskBuilder builder;
  private Resources taskResources;
  private Resources executorResources;
  private Offer offer;
  private SingularityOfferHolder offerHolder;
  private SingularityPendingTask pendingTask;
  private ObjectMapper objectMapper;
  private final String user = "testUser";

  /**
   * Builds the task builder under test plus a default pending task, offer and
   * offer holder shared by most tests.
   */
  @BeforeEach
  public void createMocks() {
    pendingTask =
      new SingularityPendingTaskBuilder()
        .setPendingTaskId(
          new SingularityPendingTaskId("test", "1", 0, 1, PendingType.IMMEDIATE, 0)
        )
        .setUser(user)
        .build();

    final SingularityAgentAndRackHelper agentAndRackHelper = mock(
      SingularityAgentAndRackHelper.class
    );
    final ExecutorIdGenerator idGenerator = mock(ExecutorIdGenerator.class);
    // Each generated executor id is a fresh increasing integer
    when(idGenerator.getNextExecutorId()).then(new CreateFakeId());

    objectMapper = JavaUtils.newObjectMapper();

    builder =
      new SingularityMesosTaskBuilder(
        objectMapper,
        idGenerator,
        configuration,
        new MesosProtosUtils(objectMapper)
      );

    taskResources = new Resources(1, 1, 0, 0);
    executorResources = new Resources(0.1, 1, 0, 0);

    // BUG FIX: build the offer *before* stubbing methods that take it as an
    // argument. Previously these stubs were registered while `offer` was still
    // null, so they were bound to a null argument and never matched the offer
    // actually constructed afterwards.
    offer =
      Offer
        .newBuilder()
        .setAgentId(AgentID.newBuilder().setValue("1"))
        .setId(OfferID.newBuilder().setValue("1"))
        .setFrameworkId(FrameworkID.newBuilder().setValue("1"))
        .setHostname("test")
        .build();

    when(agentAndRackHelper.getRackId(offer)).thenReturn(Optional.empty());
    when(agentAndRackHelper.getMaybeTruncatedHost(offer)).thenReturn("host");
    when(agentAndRackHelper.getRackIdOrDefault(offer)).thenReturn("DEFAULT");

    offerHolder =
      new SingularityOfferHolder(
        Collections.singletonList(offer),
        1,
        "DEFAULT",
        offer.getAgentId().getValue(),
        offer.getHostname(),
        Collections.emptyMap(),
        Collections.emptyMap()
      );
  }

  /** A plain command with no arguments is run through the shell unchanged. */
  @Test
  public void testShellCommand() {
    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setCommand(Optional.of("/bin/echo hi"))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      null,
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals("/bin/echo hi", task.getMesosTask().getCommand().getValue());
    assertEquals(0, task.getMesosTask().getCommand().getArgumentsCount());
    assertTrue(task.getMesosTask().getCommand().getShell());
  }

  /** The submitting user is propagated via the STARTED_BY_USER env variable. */
  @Test
  public void testJobUserPassedAsEnvironmentVariable() {
    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setCommand(Optional.of("/bin/echo hi"))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      null,
      taskRequest,
      taskResources,
      executorResources
    );

    List<Variable> environmentVariables = task
      .getMesosTask()
      .getCommand()
      .getEnvironment()
      .getVariablesList();

    boolean success = false;
    for (Variable environmentVariable : environmentVariables) {
      success =
        success ||
        (
          environmentVariable.getName().equals("STARTED_BY_USER") &&
          environmentVariable.getValue().equals(user)
        );
    }

    assertTrue(success, "Expected env variable STARTED_BY_USER to be set to " + user);
  }

  /** Per-task env overrides from the pending task must appear in the command env. */
  @Test
  public void testEnvironmentVariableOverrides() {
    Map<String, String> overrideVariables = new HashMap<>();
    overrideVariables.put("MY_NEW_ENV_VAR", "test");

    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setCommand(Optional.of("/bin/echo hi"))
      .build();

    // Local pending task (shadows the field) carrying the env overrides
    final SingularityPendingTask pendingTask = new SingularityPendingTaskBuilder()
      .setPendingTaskId(
        new SingularityPendingTaskId("test", "1", 0, 1, PendingType.IMMEDIATE, 0)
      )
      .setUser(user)
      .setEnvOverrides(overrideVariables)
      .build();

    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final TaskInfo task = builder
      .buildTask(offerHolder, null, taskRequest, taskResources, executorResources)
      .getMesosTask();

    Map<String, String> environmentVariables = task
      .getCommand()
      .getEnvironment()
      .getVariablesList()
      .stream()
      .collect(Collectors.toMap(Variable::getName, Variable::getValue));

    for (String key : overrideVariables.keySet()) {
      assertEquals(
        environmentVariables.get(key),
        overrideVariables.get(key),
        "Environment variable " + key + " not overridden."
      );
    }
  }

  /** A command with an argument list is executed directly (shell disabled). */
  @Test
  public void testArgumentCommand() {
    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setCommand(Optional.of("/bin/echo"))
      .setArguments(Optional.of(Collections.singletonList("wat")))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      null,
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals("/bin/echo", task.getMesosTask().getCommand().getValue());
    assertEquals(1, task.getMesosTask().getCommand().getArgumentsCount());
    assertEquals("wat", task.getMesosTask().getCommand().getArguments(0));
    assertFalse(task.getMesosTask().getCommand().getShell());
  }

  /**
   * Docker containerizer: image, privileged flag, volumes (including
   * ${TASK_*} placeholder substitution), literal and from-offer port
   * mappings, docker parameters and bridge networking.
   */
  @Test
  public void testDockerTask() {
    taskResources = new Resources(1, 1, 1, 0);

    final Protos.Resource portsResource = Protos
      .Resource.newBuilder()
      .setName("ports")
      .setType(Protos.Value.Type.RANGES)
      .setRanges(
        Protos
          .Value.Ranges.newBuilder()
          .addRange(Protos.Value.Range.newBuilder().setBegin(31000).setEnd(31000).build())
          .build()
      )
      .build();

    final SingularityDockerPortMapping literalMapping = new SingularityDockerPortMapping(
      Optional.<SingularityPortMappingType>empty(),
      80,
      Optional.of(SingularityPortMappingType.LITERAL),
      8080,
      Optional.<String>empty()
    );
    final SingularityDockerPortMapping offerMapping = new SingularityDockerPortMapping(
      Optional.<SingularityPortMappingType>empty(),
      81,
      Optional.of(SingularityPortMappingType.FROM_OFFER),
      0,
      Optional.of("udp")
    );

    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityContainerInfo containerInfo = new SingularityContainerInfo(
      SingularityContainerType.DOCKER,
      Optional.of(
        Arrays.asList(
          new SingularityVolume(
            "/container",
            Optional.of("/host"),
            SingularityDockerVolumeMode.RW
          ),
          new SingularityVolume(
            "/container/${TASK_REQUEST_ID}/${TASK_DEPLOY_ID}",
            Optional.of("/host/${TASK_ID}"),
            SingularityDockerVolumeMode.RO
          )
        )
      ),
      Optional.of(
        new SingularityDockerInfo(
          "docker-image",
          true,
          SingularityDockerNetworkType.BRIDGE,
          Optional.of(Arrays.asList(literalMapping, offerMapping)),
          Optional.of(false),
          Optional.<Map<String, String>>of(ImmutableMap.of("env", "var=value")),
          Optional.empty()
        )
      )
    );
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setContainerInfo(Optional.of(containerInfo))
      .setCommand(Optional.of("/bin/echo"))
      .setArguments(Optional.of(Collections.singletonList("wat")))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      Collections.singletonList(portsResource),
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals("/bin/echo", task.getMesosTask().getCommand().getValue());
    assertEquals(1, task.getMesosTask().getCommand().getArgumentsCount());
    assertEquals("wat", task.getMesosTask().getCommand().getArguments(0));
    assertFalse(task.getMesosTask().getCommand().getShell());

    assertEquals(Type.DOCKER, task.getMesosTask().getContainer().getType());
    assertEquals(
      "docker-image",
      task.getMesosTask().getContainer().getDocker().getImage()
    );
    assertTrue(task.getMesosTask().getContainer().getDocker().getPrivileged());

    assertEquals(
      "/container",
      task.getMesosTask().getContainer().getVolumes(0).getContainerPath()
    );
    assertEquals("/host", task.getMesosTask().getContainer().getVolumes(0).getHostPath());
    assertEquals(Mode.RW, task.getMesosTask().getContainer().getVolumes(0).getMode());

    Parameter envParameter = Parameter
      .newBuilder()
      .setKey("env")
      .setValue("var=value")
      .build();
    assertTrue(
      task
        .getMesosTask()
        .getContainer()
        .getDocker()
        .getParametersList()
        .contains(envParameter)
    );

    // ${TASK_REQUEST_ID}/${TASK_DEPLOY_ID}/${TASK_ID} placeholders are expanded
    assertEquals(
      String.format(
        "/container/%s/%s",
        task.getTask().getTaskRequest().getDeploy().getRequestId(),
        task.getTask().getTaskRequest().getDeploy().getId()
      ),
      task.getMesosTask().getContainer().getVolumes(1).getContainerPath()
    );
    assertEquals(
      String.format("/host/%s", task.getMesosTask().getTaskId().getValue()),
      task.getMesosTask().getContainer().getVolumes(1).getHostPath()
    );
    assertEquals(Mode.RO, task.getMesosTask().getContainer().getVolumes(1).getMode());

    // Literal mapping keeps the requested host port
    assertEquals(
      80,
      task.getMesosTask().getContainer().getDocker().getPortMappings(0).getContainerPort()
    );
    assertEquals(
      8080,
      task.getMesosTask().getContainer().getDocker().getPortMappings(0).getHostPort()
    );
    assertEquals(
      "tcp",
      task.getMesosTask().getContainer().getDocker().getPortMappings(0).getProtocol()
    );
    assertTrue(
      MesosUtils.getAllPorts(task.getMesosTask().getResourcesList()).contains(8080L)
    );

    // From-offer mapping takes a port out of the offered range
    assertEquals(
      81,
      task.getMesosTask().getContainer().getDocker().getPortMappings(1).getContainerPort()
    );
    assertEquals(
      31000,
      task.getMesosTask().getContainer().getDocker().getPortMappings(1).getHostPort()
    );
    assertEquals(
      "udp",
      task.getMesosTask().getContainer().getDocker().getPortMappings(1).getProtocol()
    );

    assertEquals(
      Protos.ContainerInfo.DockerInfo.Network.BRIDGE,
      task.getMesosTask().getContainer().getDocker().getNetwork()
    );
  }

  /** Ports taken from the offer range are addressable by index on the task. */
  @Test
  public void testGetPortByIndex() throws Exception {
    taskResources = new Resources(1, 1, 4, 0);

    final Protos.Resource portsResource = Protos
      .Resource.newBuilder()
      .setName("ports")
      .setType(Protos.Value.Type.RANGES)
      .setRanges(
        Protos
          .Value.Ranges.newBuilder()
          .addRange(Protos.Value.Range.newBuilder().setBegin(31003).setEnd(31006).build())
          .build()
      )
      .build();

    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setCommand(Optional.of("/bin/echo"))
      .setArguments(Optional.of(Collections.singletonList("wat")))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      Collections.singletonList(portsResource),
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals(31005L, task.getTask().getPortByIndex(2).get().longValue());
  }

  /** Docker container with NONE networking and no ports still builds. */
  @Test
  public void testDockerMinimalNetworking() {
    taskResources = new Resources(1, 1, 0, 0);

    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityContainerInfo containerInfo = new SingularityContainerInfo(
      SingularityContainerType.DOCKER,
      Optional.empty(),
      Optional.of(
        new SingularityDockerInfo(
          "docker-image",
          true,
          SingularityDockerNetworkType.NONE,
          Optional.empty(),
          Optional.empty(),
          Optional.empty(),
          Optional.empty()
        )
      )
    );
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setContainerInfo(Optional.of(containerInfo))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      Collections.emptyList(),
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals(Type.DOCKER, task.getMesosTask().getContainer().getType());
    assertEquals(
      Protos.ContainerInfo.DockerInfo.Network.NONE,
      task.getMesosTask().getContainer().getDocker().getNetwork()
    );
  }

  /**
   * With defaultPortMapping enabled and no explicit mappings, every offered
   * port is mapped 1:1 (host port == container port).
   */
  @Test
  public void testAutomaticPortMapping() {
    NetworkConfiguration netConf = new NetworkConfiguration();
    netConf.setDefaultPortMapping(true);
    configuration.setNetworkConfiguration(netConf);

    taskResources = new Resources(1, 1, 2);

    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityContainerInfo containerInfo = new SingularityContainerInfo(
      SingularityContainerType.DOCKER,
      Optional.empty(),
      Optional.of(
        new SingularityDockerInfo(
          "docker-image",
          false,
          SingularityDockerNetworkType.BRIDGE,
          Optional.empty(),
          Optional.empty(),
          Optional.empty(),
          Optional.empty()
        )
      )
    );
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setContainerInfo(Optional.of(containerInfo))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      Collections.singletonList(MesosUtils.getPortRangeResource(31010, 31011)),
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals(Type.DOCKER, task.getMesosTask().getContainer().getType());
    assertEquals(
      Protos.ContainerInfo.DockerInfo.Network.BRIDGE,
      task.getMesosTask().getContainer().getDocker().getNetwork()
    );

    List<PortMapping> portMappings = task
      .getMesosTask()
      .getContainer()
      .getDocker()
      .getPortMappingsList();
    assertEquals(2, portMappings.size());

    assertEquals(31010, portMappings.get(0).getHostPort());
    assertEquals(31010, portMappings.get(0).getContainerPort());

    assertEquals(31011, portMappings.get(1).getHostPort());
    assertEquals(31011, portMappings.get(1).getContainerPort());
  }

  /**
   * Mesos containerizer: docker image, docker-volume source (with %i index
   * substitution in the volume name and driver options), CNI network name,
   * groups and port mappings.
   */
  @Test
  public void testMesosContainer() {
    taskResources = new Resources(1, 1, 2);

    final SingularityRequest request = new SingularityRequestBuilder(
      "test",
      RequestType.WORKER
    )
      .build();
    final SingularityContainerInfo containerInfo = new SingularityContainerInfo(
      SingularityContainerType.MESOS,
      Optional.of(
        Collections.singletonList(
          new SingularityVolume(
            "/testing",
            Optional.of("/host"),
            SingularityDockerVolumeMode.RW,
            Optional.of(
              new SingularityVolumeSource(
                SingularityVolumeSourceType.DOCKER_VOLUME,
                Optional.of(
                  new SingularityDockerVolume(
                    Optional.of("rexray"),
                    Optional.of("testvolume-%i"),
                    Collections.singletonMap("iops", "1")
                  )
                )
              )
            )
          )
        )
      ),
      Optional.empty(),
      Optional.of(
        new SingularityMesosInfo(
          Optional.of(
            new SingularityMesosImage(
              SingularityMesosImageType.DOCKER,
              Optional.empty(),
              Optional.of(new SingularityDockerImage("test:image")),
              true
            )
          )
        )
      ),
      Optional.of(
        Arrays.asList(
          new SingularityNetworkInfo(
            Optional.of("network-name"),
            Optional.of(Arrays.asList("blue", "purple")),
            Optional.of(
              Arrays.asList(
                new SingularityPortMapping(0, 8080, Optional.of("tcp")),
                new SingularityPortMapping(8888, 8081, Optional.of("udp"))
              )
            )
          )
        )
      )
    );
    final SingularityDeploy deploy = new SingularityDeployBuilder("test", "1")
      .setContainerInfo(Optional.of(containerInfo))
      .build();
    final SingularityTaskRequest taskRequest = new SingularityTaskRequest(
      request,
      deploy,
      pendingTask
    );

    final SingularityMesosTaskHolder task = builder.buildTask(
      offerHolder,
      Collections.singletonList(MesosUtils.getPortRangeResource(31010, 31011)),
      taskRequest,
      taskResources,
      executorResources
    );

    assertEquals(Type.MESOS, task.getMesosTask().getContainer().getType());

    final Image image = task.getMesosTask().getContainer().getMesos().getImage();
    assertEquals(Protos.Image.Type.DOCKER, image.getType());
    assertEquals("test:image", image.getDocker().getName());

    final Volume volume = task.getMesosTask().getContainer().getVolumesList().get(0);
    assertEquals("/testing", volume.getContainerPath());
    assertEquals("/host", volume.getHostPath());
    assertEquals(Volume.Mode.RW, volume.getMode());
    assertEquals(Volume.Source.Type.DOCKER_VOLUME, volume.getSource().getType());

    final DockerVolume dockerVolume = volume.getSource().getDockerVolume();
    assertEquals("rexray", dockerVolume.getDriver());
    // %i in the volume name is replaced with the task's instance number
    assertEquals("testvolume-1", dockerVolume.getName());
    assertEquals(
      "iops",
      dockerVolume.getDriverOptions().getParameterList().get(0).getKey()
    );

    final NetworkInfo networkInfo = task
      .getMesosTask()
      .getContainer()
      .getNetworkInfosList()
      .get(0);
    assertEquals("network-name", networkInfo.getName());
    assertEquals(Arrays.asList("blue", "purple"), networkInfo.getGroupsList());

    final List<Protos.NetworkInfo.PortMapping> portMappings = networkInfo.getPortMappingsList();
    assertEquals(2, portMappings.size());

    // Host port 0 means "take one from the offer"
    assertEquals(31010, portMappings.get(0).getHostPort());
    assertEquals(8080, portMappings.get(0).getContainerPort());
    assertEquals("tcp", portMappings.get(0).getProtocol());

    // Non-zero host port is used literally
    assertEquals(8888, portMappings.get(1).getHostPort());
    assertEquals(8081, portMappings.get(1).getContainerPort());
    assertEquals("udp", portMappings.get(1).getProtocol());
  }

  /** Mockito answer producing "1", "2", ... for successive executor ids. */
  private static class CreateFakeId implements Answer<String> {
    private final AtomicLong counter = new AtomicLong();

    @Override
    public String answer(InvocationOnMock invocation) throws Throwable {
      return String.valueOf(counter.incrementAndGet());
    }
  }
}
package com.sequenceiq.it.cloudbreak.testcase.mock; import static com.sequenceiq.cloudbreak.api.endpoint.v4.recipes.requests.RecipeV4Type.POST_CLOUDERA_MANAGER_START; import static com.sequenceiq.cloudbreak.api.endpoint.v4.recipes.requests.RecipeV4Type.POST_CLUSTER_INSTALL; import static com.sequenceiq.cloudbreak.api.endpoint.v4.recipes.requests.RecipeV4Type.PRE_CLOUDERA_MANAGER_START; import static com.sequenceiq.cloudbreak.api.endpoint.v4.recipes.requests.RecipeV4Type.PRE_TERMINATION; import static com.sequenceiq.it.cloudbreak.context.RunningParameter.expectedMessage; import static com.sequenceiq.it.cloudbreak.context.RunningParameter.pollingInterval; import java.time.Duration; import java.time.temporal.ChronoUnit; import javax.inject.Inject; import javax.ws.rs.BadRequestException; import org.apache.commons.codec.binary.Base64; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import com.cloudera.api.swagger.model.ApiParcel; import com.sequenceiq.cloudbreak.api.endpoint.v4.recipes.requests.RecipeV4Type; import com.sequenceiq.it.cloudbreak.ResourcePropertyProvider; import com.sequenceiq.it.cloudbreak.action.v4.stack.StackScalePostAction; import com.sequenceiq.it.cloudbreak.client.LdapTestClient; import com.sequenceiq.it.cloudbreak.client.RecipeTestClient; import com.sequenceiq.it.cloudbreak.client.StackTestClient; import com.sequenceiq.it.cloudbreak.cloud.HostGroupType; import com.sequenceiq.it.cloudbreak.context.Description; import com.sequenceiq.it.cloudbreak.context.MockedTestContext; import com.sequenceiq.it.cloudbreak.context.RunningParameter; import com.sequenceiq.it.cloudbreak.context.TestCaseDescription; import com.sequenceiq.it.cloudbreak.context.TestContext; import com.sequenceiq.it.cloudbreak.dto.ClouderaManagerProductTestDto; import com.sequenceiq.it.cloudbreak.dto.ClouderaManagerTestDto; import com.sequenceiq.it.cloudbreak.dto.ClusterTestDto; import 
com.sequenceiq.it.cloudbreak.dto.InstanceGroupTestDto;
import com.sequenceiq.it.cloudbreak.dto.recipe.RecipeTestDto;
import com.sequenceiq.it.cloudbreak.dto.stack.StackTestDto;
import com.sequenceiq.it.util.cleanup.ParcelGeneratorUtil;
import com.sequenceiq.it.util.cleanup.ParcelMockActivatorUtil;

/**
 * Mocked integration tests for cluster recipes: verifies that recipes of the various
 * {@link RecipeV4Type}s trigger the expected number of salt highstate calls during
 * cluster creation, termination and upscale, and that recipe lifecycle constraints
 * (deleted / attached recipes) are enforced with the expected error messages.
 */
public class RecipeClusterTest extends AbstractMockTest {

    // Node count used for every instance group created by these tests.
    private static final int NODE_COUNT = 3;

    // Key under which the worker instance group DTO is registered in the test context.
    private static final String INSTANCE_GROUP_ID = "ig";

    // Salt API call body fragment that marks a highstate run (recipe execution).
    private static final String HIGHSTATE = "state.highstate";

    private static final Logger LOGGER = LoggerFactory.getLogger(RecipeClusterTest.class);

    // Minimal shell script, base64-encoded as the recipe API expects.
    private static final String RECIPE_CONTENT = Base64.encodeBase64String("#!/bin/bash\necho ALMAA".getBytes());

    private static final Duration POLLING_INTERVAL = Duration.of(3000, ChronoUnit.MILLIS);

    @Inject
    private LdapTestClient ldapTestClient;

    @Inject
    private RecipeTestClient recipeTestClient;

    @Inject
    private StackTestClient stackTestClient;

    @Inject
    private ResourcePropertyProvider resourcePropertyProvider;

    @Inject
    private ParcelMockActivatorUtil parcelMockActivatorUtil;

    @Inject
    private ParcelGeneratorUtil parcelGeneratorUtil;

    /**
     * For each non-pre-termination recipe type (see the data provider), creates a cluster
     * with the recipe attached to the worker group and asserts the highstate call count
     * recorded by the salt mock matches the expected execution count for that type.
     */
    @Test(dataProvider = "dataProviderForNonPreTerminationRecipeTypes")
    public void testRecipeNotPreTerminationHasGotHighStateOnCluster(
            TestContext testContext,
            RecipeV4Type type,
            int executionTime,
            @Description TestCaseDescription testCaseDescription) {
        LOGGER.info("testing recipe execution for type: {}", type.name());
        String recipeName = resourcePropertyProvider().getName();
        String stackName = resourcePropertyProvider().getName();
        String instanceGroupName = resourcePropertyProvider().getName();
        testContext
                .given(recipeName, RecipeTestDto.class)
                .withName(recipeName)
                .withContent(RECIPE_CONTENT)
                .withRecipeType(type)
                .when(recipeTestClient.createV4(), RunningParameter.key(recipeName))
                .given(instanceGroupName, InstanceGroupTestDto.class)
                .withHostGroup(HostGroupType.WORKER)
                .withNodeCount(NODE_COUNT)
                .withRecipes(recipeName)
                .given(stackName, StackTestDto.class)
                .replaceInstanceGroups(instanceGroupName)
                .when(stackTestClient.createV4(), RunningParameter.key(stackName))
                .enableVerification()
                .await(STACK_AVAILABLE, RunningParameter.key(stackName))
                // the recipe type determines how many highstate runs are expected
                .mockSalt().run().post().bodyContains(HIGHSTATE, 1).atLeast(executionTime).verify()
                .validate();
    }

    @Test(dataProvider = TEST_CONTEXT_WITH_MOCK)
    @Description(
            given = "a deleted recipe",
            when = "starting cluster with deleted recipe",
            then = "badrequest exception is received")
    public void testDeletedRecipeCannotBeAssignedToCluster(MockedTestContext testContext) {
        LOGGER.info("testing recipe execution for type: {}", PRE_CLOUDERA_MANAGER_START.name());
        String recipeName = resourcePropertyProvider().getName();
        String stackName = resourcePropertyProvider().getName();
        String instanceGroupName = resourcePropertyProvider().getName();
        HostGroupType hostGroupTypeForRecipe = HostGroupType.WORKER;
        testContext
                .given(recipeName, RecipeTestDto.class)
                .withName(recipeName)
                .withContent(RECIPE_CONTENT)
                .withRecipeType(PRE_CLOUDERA_MANAGER_START)
                .when(recipeTestClient.createV4(), RunningParameter.key(recipeName))
                // delete the recipe before it is referenced by the stack request
                .when(recipeTestClient.deleteV4(), RunningParameter.key(recipeName))
                .given(instanceGroupName, InstanceGroupTestDto.class)
                .withHostGroup(hostGroupTypeForRecipe)
                .withNodeCount(NODE_COUNT)
                .withRecipes(recipeName)
                .given(stackName, StackTestDto.class)
                .replaceInstanceGroups(instanceGroupName)
                .whenException(stackTestClient.createV4(), BadRequestException.class,
                        expectedMessage(String.format("The given recipe does not exist"
                                + " for the instance group \"%s\": %s", hostGroupTypeForRecipe.getName(), recipeName)))
                .validate();
    }

    @Test(dataProvider = TEST_CONTEXT_WITH_MOCK)
    @Description(
            given = "a created cluster with pretermination recipe",
            when = "calling termination",
            then = "the pretermination highstate has to called on pretermination recipes")
    public void testRecipePreTerminationRecipeHasGotHighStateOnCluster(MockedTestContext testContext) {
        String recipeName = resourcePropertyProvider().getName();
        testContext
                .given(RecipeTestDto.class)
                .withName(recipeName)
                .withContent(RECIPE_CONTENT)
                .withRecipeType(PRE_TERMINATION)
                .when(recipeTestClient.createV4())
                .given(INSTANCE_GROUP_ID, InstanceGroupTestDto.class)
                .withHostGroup(HostGroupType.WORKER)
                .withNodeCount(NODE_COUNT)
                .given(StackTestDto.class)
                .replaceInstanceGroups(INSTANCE_GROUP_ID)
                .when(stackTestClient.createV4())
                .enableVerification()
                .await(STACK_AVAILABLE)
                // creation itself runs at least one highstate
                .mockSalt().run().post().bodyContains(HIGHSTATE, 1).atLeast(1).verify()
                .given(StackTestDto.class)
                .withAttachedRecipe(HostGroupType.WORKER.getName(), recipeName)
                .when(stackTestClient.attachRecipeV4())
                .await(STACK_AVAILABLE)
                .when(stackTestClient.deleteV4())
                .await(STACK_DELETED)
                // termination must trigger an additional highstate for the pre-termination recipe
                .mockSalt().run().post().bodyContains(HIGHSTATE, 1).atLeast(2).verify()
                .validate();
    }

    @Test(dataProvider = TEST_CONTEXT_WITH_MOCK)
    @Description(
            given = "a created cluster with post ambari install recipe",
            when = "upscaling cluster",
            then = "the post recipe should run on the new nodes as well")
    public void testWhenClusterGetUpScaledThenPostClusterInstallRecipeShouldBeExecuted(MockedTestContext testContext) {
        ApiParcel parcel = parcelGeneratorUtil.getActivatedCDHParcel();
        String clusterName = resourcePropertyProvider.getName();
        parcelMockActivatorUtil.mockActivateWithDefaultParcels(testContext, clusterName, parcel);
        String recipeName = resourcePropertyProvider().getName();
        testContext
                .given(RecipeTestDto.class)
                .withName(recipeName)
                .withContent(RECIPE_CONTENT)
                .withRecipeType(POST_CLUSTER_INSTALL)
                .when(recipeTestClient.createV4())
                .given(INSTANCE_GROUP_ID, InstanceGroupTestDto.class)
                .withHostGroup(HostGroupType.WORKER)
                .withNodeCount(NODE_COUNT)
                .withRecipes(recipeName)
                .given("computeIg", InstanceGroupTestDto.class)
                .withHostGroup(HostGroupType.COMPUTE)
                .withNodeCount(NODE_COUNT)
                .withRecipes(recipeName)
                .given("cmpkey", ClouderaManagerProductTestDto.class)
                .withParcel("someParcel")
                .withName(parcel.getProduct())
                .withVersion(parcel.getVersion())
                .given("cmanager", ClouderaManagerTestDto.class)
                .withClouderaManagerProduct("cmpkey")
                .given("cmpclusterkey", ClusterTestDto.class)
                .withClouderaManager("cmanager")
                .given(StackTestDto.class)
                .withName(clusterName)
                .replaceInstanceGroups(INSTANCE_GROUP_ID)
                .withCluster("cmpclusterkey")
                .when(stackTestClient.createV4())
                .enableVerification()
                .await(STACK_AVAILABLE)
                // upscale the worker group; new nodes must run the post-install recipe too
                .when(StackScalePostAction.valid().withDesiredCount(4))
                .await(STACK_AVAILABLE, pollingInterval(POLLING_INTERVAL))
                .mockSalt().run().post().bodyContains(HIGHSTATE, 1).atLeast(1).verify()
                .validate();
    }

    @Test(dataProvider = TEST_CONTEXT_WITH_MOCK)
    @Description(
            given = "a created cluster with post ambari recipe",
            when = "upscaling cluster on hostgroup which has no post install recipe",
            then = "the post recipe should not run on the new nodes because those recipe not configured on the upscaled hostgroup")
    public void testWhenRecipeProvidedToHostGroupAndAnotherHostGroupGetUpScaledThenThereIsNoFurtherRecipeExecutionOnTheNewNodeBesideTheDefaultOnes(
            MockedTestContext testContext) {
        ApiParcel parcel = parcelGeneratorUtil.getActivatedCDHParcel();
        String recipeName = resourcePropertyProvider().getName();
        String clusterName = resourcePropertyProvider.getName();
        parcelMockActivatorUtil.mockActivateWithDefaultParcels(testContext, clusterName, parcel);
        testContext
                .given(RecipeTestDto.class)
                .withName(recipeName)
                .withContent(RECIPE_CONTENT)
                .withRecipeType(POST_CLOUDERA_MANAGER_START)
                .when(recipeTestClient.createV4())
                // the recipe is attached to the COMPUTE group only; the upscale below
                // targets a different group, so no extra recipe execution is expected
                .given(INSTANCE_GROUP_ID, InstanceGroupTestDto.class)
                .withHostGroup(HostGroupType.COMPUTE)
                .withNodeCount(NODE_COUNT)
                .withRecipes(recipeName)
                .given("cmpkey", ClouderaManagerProductTestDto.class)
                .withParcel("someParcel")
                .withName(parcel.getProduct())
                .withVersion(parcel.getVersion())
                .given("cmanager", ClouderaManagerTestDto.class)
                .withClouderaManagerProduct("cmpkey")
                .given("cmpclusterkey", ClusterTestDto.class)
                .withClouderaManager("cmanager")
                .given(StackTestDto.class)
                .withName(clusterName)
                .replaceInstanceGroups(INSTANCE_GROUP_ID)
                .withCluster("cmpclusterkey")
                .when(stackTestClient.createV4())
                .enableVerification()
                .await(STACK_AVAILABLE)
                .when(StackScalePostAction.valid().withDesiredCount(4))
                .await(STACK_AVAILABLE, pollingInterval(POLLING_INTERVAL))
                .mockSalt().run().post().bodyContains(HIGHSTATE, 1).atLeast(1).verify()
                .validate();
    }

    @Test(dataProvider = TEST_CONTEXT_WITH_MOCK)
    @Description(
            given = "a created cluster with attached recipe",
            when = "delete attached recipe",
            then = "getting BadRequestException")
    public void testTryToDeleteAttachedRecipe(MockedTestContext testContext) {
        String recipeName = resourcePropertyProvider().getName();
        testContext
                .given(RecipeTestDto.class).withName(recipeName).withContent(RECIPE_CONTENT).withRecipeType(POST_CLOUDERA_MANAGER_START)
                .when(recipeTestClient.createV4())
                .given(INSTANCE_GROUP_ID, InstanceGroupTestDto.class).withRecipes(recipeName)
                .given(StackTestDto.class).replaceInstanceGroups(INSTANCE_GROUP_ID)
                .when(stackTestClient.createV4())
                .await(STACK_AVAILABLE)
                .given(RecipeTestDto.class)
                // a recipe in use by a cluster must not be deletable
                .whenException(recipeTestClient.deleteV4(), BadRequestException.class,
                        expectedMessage("There is a cluster \\['.*'\\] which uses recipe"
                                + " '.*'. Please remove this cluster before deleting the recipe"))
                .validate();
    }

    /**
     * Supplies (context, recipe type, expected highstate executions, description) tuples
     * for {@link #testRecipeNotPreTerminationHasGotHighStateOnCluster}.
     */
    @DataProvider(name = "dataProviderForNonPreTerminationRecipeTypes")
    public Object[][] getData() {
        return new Object[][]{
                {
                        getBean(MockedTestContext.class),
                        PRE_CLOUDERA_MANAGER_START,
                        2,
                        new TestCaseDescription.TestCaseDescriptionBuilder()
                                .given("pre ambari start recipes")
                                .when("calling cluster creation with the recipes")
                                .then("should run 2 times")
                },
                {
                        getBean(MockedTestContext.class),
                        POST_CLOUDERA_MANAGER_START,
                        1,
                        new TestCaseDescription.TestCaseDescriptionBuilder()
                                .given("post ambari start recipes")
                                .when("calling cluster creation with the recipes")
                                .then("should run 1 times")
                },
                {
                        getBean(MockedTestContext.class),
                        POST_CLUSTER_INSTALL,
                        1,
                        new TestCaseDescription.TestCaseDescriptionBuilder()
                                .given("post cluster install recipes")
                                .when("calling cluster creation with the recipes")
                                .then("should run 1 times")
                }
        };
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.test.iterative.aggregators; import java.util.ArrayList; import java.util.List; import org.apache.flink.api.common.aggregators.ConvergenceCriterion; import org.apache.flink.api.common.aggregators.LongSumAggregator; import org.apache.flink.api.common.functions.RichFlatMapFunction; import org.apache.flink.api.common.functions.RichGroupReduceFunction; import org.apache.flink.api.common.functions.RichJoinFunction; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.Configuration; import org.apache.flink.test.util.JavaProgramTestBase; import org.apache.flink.types.LongValue; import org.apache.flink.util.Collector; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.operators.IterativeDataSet; /** * * Connected Components test case that uses a parametrizable convergence criterion * */ public class ConnectedComponentsWithParametrizableConvergenceITCase extends JavaProgramTestBase { private static final int MAX_ITERATIONS = 10; private static final int DOP = 1; protected static List<Tuple2<Long, Long>> verticesInput = new ArrayList<Tuple2<Long, Long>>(); protected static 
List<Tuple2<Long, Long>> edgesInput = new ArrayList<Tuple2<Long, Long>>(); private String resultPath; private String expectedResult; @Override protected void preSubmit() throws Exception { // vertices input verticesInput.clear(); verticesInput.add(new Tuple2<Long, Long>(1l,1l)); verticesInput.add(new Tuple2<Long, Long>(2l,2l)); verticesInput.add(new Tuple2<Long, Long>(3l,3l)); verticesInput.add(new Tuple2<Long, Long>(4l,4l)); verticesInput.add(new Tuple2<Long, Long>(5l,5l)); verticesInput.add(new Tuple2<Long, Long>(6l,6l)); verticesInput.add(new Tuple2<Long, Long>(7l,7l)); verticesInput.add(new Tuple2<Long, Long>(8l,8l)); verticesInput.add(new Tuple2<Long, Long>(9l,9l)); // vertices input edgesInput.clear(); edgesInput.add(new Tuple2<Long, Long>(1l,2l)); edgesInput.add(new Tuple2<Long, Long>(1l,3l)); edgesInput.add(new Tuple2<Long, Long>(2l,3l)); edgesInput.add(new Tuple2<Long, Long>(2l,4l)); edgesInput.add(new Tuple2<Long, Long>(2l,1l)); edgesInput.add(new Tuple2<Long, Long>(3l,1l)); edgesInput.add(new Tuple2<Long, Long>(3l,2l)); edgesInput.add(new Tuple2<Long, Long>(4l,2l)); edgesInput.add(new Tuple2<Long, Long>(4l,6l)); edgesInput.add(new Tuple2<Long, Long>(5l,6l)); edgesInput.add(new Tuple2<Long, Long>(6l,4l)); edgesInput.add(new Tuple2<Long, Long>(6l,5l)); edgesInput.add(new Tuple2<Long, Long>(7l,8l)); edgesInput.add(new Tuple2<Long, Long>(7l,9l)); edgesInput.add(new Tuple2<Long, Long>(8l,7l)); edgesInput.add(new Tuple2<Long, Long>(8l,9l)); edgesInput.add(new Tuple2<Long, Long>(9l,7l)); edgesInput.add(new Tuple2<Long, Long>(9l,8l)); resultPath = getTempDirPath("result"); expectedResult = "(1,1)\n" + "(2,1)\n" + "(3,1)\n" + "(4,1)\n" + "(5,2)\n" + "(6,1)\n" + "(7,7)\n" + "(8,7)\n" + "(9,7)\n"; } @Override protected void testProgram() throws Exception { ConnectedComponentsWithConvergenceProgram.runProgram(resultPath); } @Override protected void postSubmit() throws Exception { compareResultsByLinesInMemory(expectedResult, resultPath); } private static class 
ConnectedComponentsWithConvergenceProgram { private static final String UPDATED_ELEMENTS = "updated.elements.aggr"; private static final long convergence_threshold = 3; // the iteration stops if less than this number os elements change value public static String runProgram(String resultPath) throws Exception { final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); env.setDegreeOfParallelism(DOP); DataSet<Tuple2<Long, Long>> initialSolutionSet = env.fromCollection(verticesInput); DataSet<Tuple2<Long, Long>> edges = env.fromCollection(edgesInput); IterativeDataSet<Tuple2<Long, Long>> iteration = initialSolutionSet.iterate(MAX_ITERATIONS); // register the convergence criterion iteration.registerAggregationConvergenceCriterion(UPDATED_ELEMENTS, new LongSumAggregator(), new UpdatedElementsConvergenceCriterion(convergence_threshold)); DataSet<Tuple2<Long, Long>> verticesWithNewComponents = iteration.join(edges).where(0).equalTo(0) .with(new NeighborWithComponentIDJoin()) .groupBy(0).reduceGroup(new MinimumReduce()); DataSet<Tuple2<Long, Long>> updatedComponentId = verticesWithNewComponents.join(iteration).where(0).equalTo(0) .flatMap(new MinimumIdFilter()); iteration.closeWith(updatedComponentId).writeAsText(resultPath); env.execute(); return resultPath; } } public static final class NeighborWithComponentIDJoin extends RichJoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> { private static final long serialVersionUID = 1L; @Override public Tuple2<Long, Long> join(Tuple2<Long, Long> vertexWithCompId, Tuple2<Long, Long> edge) throws Exception { vertexWithCompId.setField(edge.f1, 0); return vertexWithCompId; } } public static final class MinimumReduce extends RichGroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> { private static final long serialVersionUID = 1L; final Tuple2<Long, Long> resultVertex = new Tuple2<Long, Long>(); @Override public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, 
Long>> out) { Long vertexId = 0L; Long minimumCompId = Long.MAX_VALUE; for (Tuple2<Long, Long> value: values) { vertexId = value.f0; Long candidateCompId = value.f1; if (candidateCompId < minimumCompId) { minimumCompId = candidateCompId; } } resultVertex.f0 = vertexId; resultVertex.f1 = minimumCompId; out.collect(resultVertex); } } @SuppressWarnings("serial") public static final class MinimumIdFilter extends RichFlatMapFunction<Tuple2<Tuple2<Long, Long>, Tuple2<Long, Long>>, Tuple2<Long, Long>> { private static LongSumAggregator aggr; @Override public void open(Configuration conf) { aggr = getIterationRuntimeContext().getIterationAggregator( ConnectedComponentsWithConvergenceProgram.UPDATED_ELEMENTS); } @Override public void flatMap( Tuple2<Tuple2<Long, Long>, Tuple2<Long, Long>> vertexWithNewAndOldId, Collector<Tuple2<Long, Long>> out) throws Exception { if (vertexWithNewAndOldId.f0.f1 < vertexWithNewAndOldId.f1.f1) { out.collect(vertexWithNewAndOldId.f0); aggr.aggregate(1l); } else { out.collect(vertexWithNewAndOldId.f1); } } } // A Convergence Criterion with one parameter @SuppressWarnings("serial") public static final class UpdatedElementsConvergenceCriterion implements ConvergenceCriterion<LongValue> { private long threshold; public UpdatedElementsConvergenceCriterion(long u_threshold) { this.threshold = u_threshold; } public long getThreshold() { return this.threshold; } @Override public boolean isConverged(int iteration, LongValue value) { return value.getValue() < this.threshold; } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.logging.log4j; import org.apache.log4j.Layout; import org.apache.log4j.WriterAppender; import org.apache.log4j.helpers.LogLog; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.io.OutputStream; /** * ConsoleAppender appends log events to <code>System.out</code> or * <code>System.err</code> using a layout specified by the user. The * default target is <code>System.out</code>. * <p>Elasticsearch: Adapter from log4j to allow to disable console logging...</p> * * @author Ceki G&uuml;lc&uuml; * @author Curt Arnold * @since 1.1 */ public class ConsoleAppender extends WriterAppender { public static final String SYSTEM_OUT = "System.out"; public static final String SYSTEM_ERR = "System.err"; protected String target = SYSTEM_OUT; /** * Determines if the appender honors reassignments of System.out * or System.err made after configuration. */ private boolean follow = true; /** * Constructs an unconfigured appender. */ public ConsoleAppender() { } /** * Creates a configured appender. * * @param layout layout, may not be null. 
*/ public ConsoleAppender(Layout layout) { this(layout, SYSTEM_OUT); } /** * Creates a configured appender. * * @param layout layout, may not be null. * @param target target, either "System.err" or "System.out". */ public ConsoleAppender(Layout layout, String target) { setLayout(layout); setTarget(target); activateOptions(); } /** * Sets the value of the <b>Target</b> option. Recognized values * are "System.out" and "System.err". Any other value will be * ignored. */ public void setTarget(String value) { String v = value.trim(); if (SYSTEM_OUT.equalsIgnoreCase(v)) { target = SYSTEM_OUT; } else if (SYSTEM_ERR.equalsIgnoreCase(v)) { target = SYSTEM_ERR; } else { targetWarn(value); } } /** * Returns the current value of the <b>Target</b> property. The * default value of the option is "System.out". * <p> * See also {@link #setTarget}. */ public String getTarget() { return target; } /** * Sets whether the appender honors reassignments of System.out * or System.err made after configuration. * * @param newValue if true, appender will use value of System.out or * System.err in force at the time when logging events are appended. * @since 1.2.13 */ public final void setFollow(final boolean newValue) { follow = newValue; } /** * Gets whether the appender honors reassignments of System.out * or System.err made after configuration. * * @return true if appender will use value of System.out or * System.err in force at the time when logging events are appended. * @since 1.2.13 */ public final boolean getFollow() { return follow; } void targetWarn(String val) { LogLog.warn("[" + val + "] should be System.out or System.err."); LogLog.warn("Using previously set target, System.out by default."); } /** * Prepares the appender for use. 
*/ @Override @SuppressForbidden(reason = "System#out") public void activateOptions() { if (follow) { if (target.equals(SYSTEM_ERR)) { setWriter(createWriter(new SystemErrStream())); } else { setWriter(createWriter(new SystemOutStream())); } } else { if (target.equals(SYSTEM_ERR)) { setWriter(createWriter(System.err)); } else { setWriter(createWriter(System.out)); } } super.activateOptions(); } /** * {@inheritDoc} */ @Override protected final void closeWriter() { if (follow) { super.closeWriter(); } } /** * An implementation of OutputStream that redirects to the * current System.err. */ @SuppressForbidden(reason = "System#err") private static class SystemErrStream extends OutputStream { public SystemErrStream() { } @Override public void close() { } @Override public void flush() { System.err.flush(); } @Override public void write(final byte[] b) throws IOException { if (!Loggers.consoleLoggingEnabled()) { return; } System.err.write(b); } @Override public void write(final byte[] b, final int off, final int len) throws IOException { if (!Loggers.consoleLoggingEnabled()) { return; } System.err.write(b, off, len); } @Override public void write(final int b) throws IOException { if (!Loggers.consoleLoggingEnabled()) { return; } System.err.write(b); } } /** * An implementation of OutputStream that redirects to the * current System.out. 
*/ @SuppressForbidden(reason = "System#err") private static class SystemOutStream extends OutputStream { public SystemOutStream() { } @Override public void close() { } @Override public void flush() { System.out.flush(); } @Override public void write(final byte[] b) throws IOException { if (!Loggers.consoleLoggingEnabled()) { return; } System.out.write(b); } @Override public void write(final byte[] b, final int off, final int len) throws IOException { if (!Loggers.consoleLoggingEnabled()) { return; } System.out.write(b, off, len); } @Override public void write(final int b) throws IOException { if (!Loggers.consoleLoggingEnabled()) { return; } System.out.write(b); } } }
/* * Copyright 2008-2009 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package voldemort.common.service; import java.util.Date; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import javax.management.MBeanOperationInfo; import org.apache.log4j.Logger; import voldemort.annotations.jmx.JmxGetter; import voldemort.annotations.jmx.JmxManaged; import voldemort.annotations.jmx.JmxOperation; import voldemort.utils.Time; import com.google.common.collect.Lists; /** * The voldemort scheduler * * */ @SuppressWarnings("unchecked") @JmxManaged(description = "A service that runs scheduled jobs.") public class SchedulerService extends AbstractService { private static final Logger logger = Logger.getLogger(VoldemortService.class); private boolean mayInterrupt; private class ScheduledRunnable { private Runnable runnable; private Date delayDate; private long intervalMs; ScheduledRunnable(Runnable runnable, Date delayDate, long intervalMs) { this.runnable = runnable; this.delayDate = delayDate; this.intervalMs = intervalMs; } ScheduledRunnable(Runnable runnable, Date delayDate) { this(runnable, delayDate, 0); } Runnable getRunnable() { return this.runnable; } Date getDelayDate() { return this.delayDate; } long getIntervalMs() { return this.intervalMs; } } private final 
ScheduledThreadPoolExecutor scheduler; private final Time time; private final ConcurrentHashMap<String, ScheduledFuture> scheduledJobResults; private final ConcurrentHashMap<String, ScheduledRunnable> allJobs; public SchedulerService(int schedulerThreads, Time time) { this(schedulerThreads, time, true); } public SchedulerService(int schedulerThreads, Time time, boolean mayInterrupt) { super(ServiceType.SCHEDULER); this.time = time; this.scheduler = new SchedulerThreadPool(schedulerThreads); this.scheduledJobResults = new ConcurrentHashMap<String, ScheduledFuture>(); this.allJobs = new ConcurrentHashMap<String, ScheduledRunnable>(); this.mayInterrupt = mayInterrupt; } @Override public void startInner() {} @Override public void stopInner() { this.scheduler.shutdownNow(); } @JmxOperation(description = "Disable a particular scheduled job", impact = MBeanOperationInfo.ACTION) public void disable(String id) { if(allJobs.containsKey(id) && scheduledJobResults.containsKey(id)) { ScheduledFuture<?> future = scheduledJobResults.get(id); boolean cancelled = future.cancel(false); if(cancelled == true) { logger.info("Removed '" + id + "' from list of scheduled jobs"); scheduledJobResults.remove(id); } } } @JmxOperation(description = "Terminate a particular scheduled job", impact = MBeanOperationInfo.ACTION) public void terminate(String id) { if(allJobs.containsKey(id) && scheduledJobResults.containsKey(id)) { ScheduledFuture<?> future = scheduledJobResults.get(id); boolean cancelled = future.cancel(this.mayInterrupt); if(cancelled == true) { logger.info("Removed '" + id + "' from list of scheduled jobs"); scheduledJobResults.remove(id); } } } @JmxOperation(description = "Enable a particular scheduled job", impact = MBeanOperationInfo.ACTION) public void enable(String id) { if(allJobs.containsKey(id) && !scheduledJobResults.containsKey(id)) { ScheduledRunnable scheduledRunnable = allJobs.get(id); logger.info("Adding '" + id + "' to list of scheduled jobs"); 
if(scheduledRunnable.getIntervalMs() > 0) { schedule(id, scheduledRunnable.getRunnable(), scheduledRunnable.getDelayDate(), scheduledRunnable.getIntervalMs()); } else { schedule(id, scheduledRunnable.getRunnable(), scheduledRunnable.getDelayDate()); } } } @JmxGetter(name = "getScheduledJobs", description = "Returns names of jobs in the scheduler") public List<String> getScheduledJobs() { return Lists.newArrayList(scheduledJobResults.keySet()); } public void scheduleNow(Runnable runnable) { scheduler.execute(runnable); } public void schedule(String id, Runnable runnable, Date timeToRun) { ScheduledFuture<?> future = scheduler.schedule(runnable, delayMs(timeToRun), TimeUnit.MILLISECONDS); if(!allJobs.containsKey(id)) { allJobs.put(id, new ScheduledRunnable(runnable, timeToRun)); } scheduledJobResults.put(id, future); } public void schedule(String id, Runnable runnable, Date nextRun, long periodMs) { schedule(id, runnable, nextRun, periodMs, false); } public void schedule(String id, Runnable runnable, Date nextRun, long periodMs, boolean scheduleAtFixedRate) { ScheduledFuture<?> future = null; if(scheduleAtFixedRate) future = scheduler.scheduleAtFixedRate(runnable, delayMs(nextRun), periodMs, TimeUnit.MILLISECONDS); else future = scheduler.scheduleWithFixedDelay(runnable, delayMs(nextRun), periodMs, TimeUnit.MILLISECONDS); if(!allJobs.containsKey(id)) { allJobs.put(id, new ScheduledRunnable(runnable, nextRun, periodMs)); } scheduledJobResults.put(id, future); } private long delayMs(Date runDate) { return Math.max(0, runDate.getTime() - time.getMilliseconds()); } /** * A scheduled thread pool that fixes some default behaviors */ private static class SchedulerThreadPool extends ScheduledThreadPoolExecutor { public SchedulerThreadPool(int numThreads) { super(numThreads, new ThreadFactory() { public Thread newThread(Runnable r) { Thread thread = new Thread(r); thread.setDaemon(true); thread.setName(r.getClass().getName()); return thread; } }); } } }
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package dlock;

import com.gemstone.gemfire.cache.*;
import distcache.gemfire.*;
import cacheperf.*;
import hydra.*;
import java.util.*;
import java.util.concurrent.locks.Lock;
import objects.*;
import perffmwk.*;

/**
 * Client used to measure distributed locking service performance.
 */
public class DLSPerfClient extends cacheperf.CachePerfClient {

  //----------------------------------------------------------------------------
  // Trim interval names
  //----------------------------------------------------------------------------

  protected static final int LOCKS = 100;
  protected static final int UNLOCKS = 101;

  protected static final String LOCKS_NAME = "locks";
  protected static final String UNLOCKS_NAME = "unlocks";

  //----------------------------------------------------------------------------
  // Tasks
  //----------------------------------------------------------------------------

  /**
   * lockTask() — hydra TASK entry point that measures lock acquisition.
   */
  public static void lockTask() {
    DLSPerfClient c = new DLSPerfClient();
    c.initialize( LOCKS );
    c.lockObjects();
  }

  /**
   * Acquires a distributed lock per key in a batch loop until the batch
   * terminator fires; counters feed the framework's trim/throughput stats.
   */
  private void lockObjects() {
    if (log().fineEnabled()) log().fine("locking objects");
    do {
      int key = getNextKey();
      executeTaskTerminator();
      executeWarmupTerminator();
      lock( key );
      ++this.batchCount;
      ++this.count;
      ++this.keyCount;
    } while (!executeBatchTerminator());
    if (log().fineEnabled()) log().fine("objects locked");
  }

  /**
   * Acquires the distributed lock on the key derived from {@code i},
   * timing only the {@code lock()} call. Optionally releases the lock
   * immediately when {@link DLSPerfPrms#unlockAfterLock()} is set.
   *
   * @param i index used to derive the key name
   * @throws CachePerfException if the distributed lock cannot be obtained
   */
  private void lock( int i ) {
    Object key = ObjectHelper.createName( this.keyType, i );
    long start;
    if (log().finerEnabled()) log().finer("locking key = " + key);
    Region theRegion = ((GemFireCacheTestImpl)super.cache).getRegion();
    Lock entryLock = null;
    try {
      entryLock = theRegion.getDistributedLock( key );
      start = this.dlsstats.startLock();
      entryLock.lock();
    } catch(Exception e) { // IllegalState or IllegalArgumentExceptions
      throw new CachePerfException( "Could not get distributed lock", e );
    }
    this.dlsstats.endLock( start );
    if (log().finerEnabled()) log().finer("locked key = " + key);
    if (DLSPerfPrms.unlockAfterLock()) {
      if (log().finerEnabled()) log().finer("unlocking key = " + key);
      entryLock.unlock();
      if (log().finerEnabled()) log().finer("unlocked key = " + key);
    }
  }

  /**
   * unlockTask() — hydra TASK entry point that measures lock release.
   */
  public static void unlockTask() {
    DLSPerfClient c = new DLSPerfClient();
    c.initialize( UNLOCKS );
    c.unlockObjects();
  }

  /**
   * For each key: acquires the lock (untimed here) and then releases it,
   * so the unlock statistics isolate the cost of {@code unlock()}.
   */
  private void unlockObjects() {
    if (log().fineEnabled()) log().fine("unlocking objects");
    do {
      int key = getNextKey();
      executeTaskTerminator();
      executeWarmupTerminator();
      lock( key );
      unlock( key );
      ++this.batchCount;
      ++this.count;
      ++this.keyCount;
    } while (!executeBatchTerminator());
    if (log().fineEnabled()) log().fine("objects unlocked");
  }

  /**
   * Releases the distributed lock on the key derived from {@code i},
   * timing only the {@code unlock()} call.
   *
   * @param i index used to derive the key name
   * @throws CachePerfException if the distributed lock cannot be obtained
   */
  private void unlock( int i ) {
    Object key = ObjectHelper.createName( this.keyType, i );
    if (log().finerEnabled()) log().finer("unlocking key = " + key);
    Region theRegion = ((GemFireCacheTestImpl)super.cache).getRegion();
    Lock entryLock;
    try {
      entryLock = theRegion.getDistributedLock( key );
    } catch (Exception e) { // IllegalState or IllegalArgumentExceptions
      // BUGFIX: the previous version released the lock in a finally block, so a
      // failure in getDistributedLock() left entryLock null and the finally threw
      // an NPE that masked the real cause. Wrap the failure instead, matching lock().
      throw new CachePerfException( "Could not get distributed lock", e );
    }
    long start = this.dlsstats.startUnlock();
    entryLock.unlock();
    this.dlsstats.endUnlock( start );
    if (log().finerEnabled()) log().finer("unlocked key = " + key);
  }

  /**
   * TASK to register the DLS performance statistics object.
   * (DistributedLockService)
   */
  public static void openStatisticsTask() {
    DLSPerfClient c = new DLSPerfClient();
    c.initHydraThreadLocals();
    c.openStatistics();
    c.updateHydraThreadLocals();
  }

  // Lazily creates the per-thread DLS statistics instance.
  private void openStatistics() {
    if ( this.dlsstats == null ) {
      log().info( "Opening per-thread DLS performance statistics" );
      this.dlsstats = DLSPerfStats.getInstance();
      log().info( "Opened per-thread DLS performance statistics" );
    }
  }

  /**
   * TASK to unregister the DLS performance statistics object.
   * (DistributedLockService)
   */
  public static void closeStatisticsTask() {
    DLSPerfClient c = new DLSPerfClient();
    c.initHydraThreadLocals();
    c.closeStatistics();
    c.updateHydraThreadLocals();
  }

  // Closes per-thread statistics; sleeps briefly first so in-flight samples settle.
  protected void closeStatistics() {
    MasterController.sleepForMs( 2000 );
    if ( this.dlsstats != null ) {
      log().info( "Closing per-thread DLS performance statistics" );
      this.dlsstats.close();
      log().info( "Closed per-thread DLS performance statistics" );
    }
  }

  //----------------------------------------------------------------------------
  // Hydra thread locals and their instance field counterparts
  //----------------------------------------------------------------------------

  // Per-thread DLS statistics instance, mirrored into a HydraThreadLocal.
  public DLSPerfStats dlsstats;
  // Per-thread object list, mirrored into a HydraThreadLocal.
  // NOTE(review): raw HashMap — contents not visible from this file; left as-is.
  public HashMap objectlist;

  private static HydraThreadLocal localdlsstats = new HydraThreadLocal();
  private static HydraThreadLocal localobjectlist = new HydraThreadLocal();

  protected void initHydraThreadLocals() {
    super.initHydraThreadLocals();
    this.dlsstats = getDLSStats();
    this.objectlist = getObjectList();
  }

  protected void updateHydraThreadLocals() {
    super.updateHydraThreadLocals();
    setDLSStats( this.dlsstats );
    setObjectList( this.objectlist );
  }

  /**
   * Gets the per-thread objectList wrapper instance.
   */
  protected HashMap getObjectList() {
    HashMap objectList = (HashMap) localobjectlist.get();
    return objectList;
  }

  /**
   * Sets the per-thread objectList wrapper instance.
   */
  protected void setObjectList( HashMap objectlist ) {
    localobjectlist.set( objectlist );
  }

  /**
   * Gets the per-thread DLSStats wrapper instance.
   * (DistributedLockService)
   */
  protected DLSPerfStats getDLSStats() {
    DLSPerfStats dlsstats = (DLSPerfStats) localdlsstats.get();
    return dlsstats;
  }

  /**
   * Sets the per-thread DLSStats wrapper instance.
   * (DistributedLockService)
   */
  protected void setDLSStats( DLSPerfStats dlsstats ) {
    localdlsstats.set( dlsstats );
  }

  //----------------------------------------------------------------------------
  // Overridden methods
  //----------------------------------------------------------------------------

  // Maps this client's trim interval constants to their names; defers to the
  // superclass for any value it does not own.
  protected String nameFor( int name ) {
    switch (name) {
      case LOCKS:
        return LOCKS_NAME;
      case UNLOCKS:
        return UNLOCKS_NAME;
    }
    return super.nameFor(name);
  }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.core.security.authz.accesscontrol;

import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.CombinedBitSet;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;

/**
 * A reader that only exposes documents via {@link #getLiveDocs()} that matches with the provided role query.
 */
public final class DocumentSubsetReader extends SequentialStoredFieldsLeafReader {

    // Entry point: wraps a top-level DirectoryReader so that every segment (leaf)
    // is filtered by the given role query.
    public static DocumentSubsetDirectoryReader wrap(DirectoryReader in, DocumentSubsetBitsetCache bitsetCache, Query roleQuery)
            throws IOException {
        return new DocumentSubsetDirectoryReader(in, bitsetCache, roleQuery);
    }

    /**
     * Cache of the number of live docs for a given (segment, role query) pair. This is useful because numDocs() is
     * called eagerly by BaseCompositeReader so computing numDocs() lazily doesn't help. Plus it helps reuse the result
     * of the computation either between refreshes, or across refreshes if no more documents were deleted in the
     * considered segment. The size of the top-level map is bounded by the number of segments on the node.
     */
    static final Map<IndexReader.CacheKey, Cache<Query, Integer>> NUM_DOCS_CACHE = new ConcurrentHashMap<>();

    /**
     * Compute the number of live documents. This method is SLOW.
     */
    private static int computeNumDocs(LeafReader reader, BitSet roleQueryBits) {
        final Bits liveDocs = reader.getLiveDocs();
        if (roleQueryBits == null) {
            // a null role bitset means no document matches the role query
            return 0;
        } else if (roleQueryBits instanceof MatchAllRoleBitSet) {
            // every document matches the role query; the wrapped reader's count is exact
            return reader.numDocs();
        } else if (liveDocs == null) {
            // slow
            return roleQueryBits.cardinality();
        } else {
            // very slow, but necessary in order to be correct: count role-matching
            // docs that are also live under the segment's own deletes
            int numDocs = 0;
            DocIdSetIterator it = new BitSetIterator(roleQueryBits, 0L); // we don't use the cost
            try {
                for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                    if (liveDocs.get(doc)) {
                        numDocs++;
                    }
                }
                return numDocs;
            } catch (IOException e) {
                // the iterator is backed by an in-memory bitset, so an IOException is unexpected
                throw new UncheckedIOException(e);
            }
        }
    }

    /**
     * Like {@link #computeNumDocs} but caches results.
     */
    private static int getNumDocs(LeafReader reader, Query roleQuery, BitSet roleQueryBits) throws IOException, ExecutionException {
        IndexReader.CacheHelper cacheHelper = reader.getReaderCacheHelper(); // this one takes deletes into account
        if (cacheHelper == null) {
            throw new IllegalStateException("Reader " + reader + " does not support caching");
        }
        // single-element array lets the computeIfAbsent lambda report whether it
        // actually created the per-reader cache on this call
        final boolean[] added = new boolean[] { false };
        Cache<Query, Integer> perReaderCache = NUM_DOCS_CACHE.computeIfAbsent(cacheHelper.getKey(), key -> {
            added[0] = true;
            return CacheBuilder.<Query, Integer>builder()
                    // Not configurable, this limit only exists so that if a role query is updated
                    // then we won't risk OOME because of old role queries that are not used anymore
                    .setMaximumWeight(1000)
                    .weigher((k, v) -> 1) // just count
                    .build();
        });
        if (added[0]) {
            // register eviction of the per-reader cache when the reader closes; if the
            // reader closed concurrently, evict immediately and propagate the failure
            IndexReader.ClosedListener closedListener = NUM_DOCS_CACHE::remove;
            try {
                cacheHelper.addClosedListener(closedListener);
            } catch (AlreadyClosedException e) {
                closedListener.onClose(cacheHelper.getKey());
                throw e;
            }
        }
        return perReaderCache.computeIfAbsent(roleQuery, q -> computeNumDocs(reader, roleQueryBits));
    }

    public static final class DocumentSubsetDirectoryReader extends FilterDirectoryReader {

        private final Query roleQuery;
        private final DocumentSubsetBitsetCache bitsetCache;

        DocumentSubsetDirectoryReader(final DirectoryReader in, final DocumentSubsetBitsetCache bitsetCache, final Query roleQuery)
                throws IOException {
            super(in, new SubReaderWrapper() {
                @Override
                public LeafReader wrap(LeafReader reader) {
                    try {
                        return new DocumentSubsetReader(reader, bitsetCache, roleQuery);
                    } catch (Exception e) {
                        throw ExceptionsHelper.convertToElastic(e);
                    }
                }
            });
            this.bitsetCache = bitsetCache;
            this.roleQuery = roleQuery;
            verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(in);
        }

        @Override
        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
            return new DocumentSubsetDirectoryReader(in, bitsetCache, roleQuery);
        }

        // Walks the FilterDirectoryReader delegate chain to guarantee the same
        // document-subset filtering is never applied twice to one reader.
        private static void verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(DirectoryReader reader) {
            if (reader instanceof FilterDirectoryReader) {
                FilterDirectoryReader filterDirectoryReader = (FilterDirectoryReader) reader;
                if (filterDirectoryReader instanceof DocumentSubsetDirectoryReader) {
                    throw new IllegalArgumentException(LoggerMessageFormat.format("Can't wrap [{}] twice",
                            DocumentSubsetDirectoryReader.class));
                } else {
                    verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(filterDirectoryReader.getDelegate());
                }
            }
        }

        @Override
        public CacheHelper getReaderCacheHelper() {
            return in.getReaderCacheHelper();
        }
    }

    private final DocumentSubsetBitsetCache bitsetCache;
    private final Query roleQuery;
    // we don't use a volatile here because the bitset is resolved before numDocs in the synchronized block
    // so any thread that see numDocs != -1 should also see the true value of the roleQueryBits (happens-before).
    private BitSet roleQueryBits;
    // -1 is the "not yet computed" sentinel checked by computeNumDocsIfNeeded()
    private volatile int numDocs = -1;

    private DocumentSubsetReader(final LeafReader in, DocumentSubsetBitsetCache bitsetCache, final Query roleQuery) {
        super(in);
        this.bitsetCache = bitsetCache;
        this.roleQuery = roleQuery;
    }

    /**
     * Resolve the role query and the number of docs lazily
     */
    private void computeNumDocsIfNeeded() {
        // double-checked locking on the volatile numDocs sentinel; roleQueryBits is
        // written before numDocs so readers of numDocs != -1 see the bitset too
        if (numDocs == -1) {
            synchronized (this) {
                if (numDocs == -1) {
                    try {
                        roleQueryBits = bitsetCache.getBitSet(roleQuery, in.getContext());
                        numDocs = getNumDocs(in, roleQuery, roleQueryBits);
                    } catch (Exception e) {
                        throw new ElasticsearchException("Failed to load role query", e);
                    }
                }
            }
        }
    }

    @Override
    public Bits getLiveDocs() {
        computeNumDocsIfNeeded();
        final Bits actualLiveDocs = in.getLiveDocs();
        if (roleQueryBits == null) {
            // If we would return a <code>null</code> liveDocs then that would mean that no docs are marked as deleted,
            // but that isn't the case. No docs match with the role query and therefore all docs are marked as deleted
            return new Bits.MatchNoBits(in.maxDoc());
        } else if (roleQueryBits instanceof MatchAllRoleBitSet) {
            // role query matches everything: only the segment's own deletes apply
            return actualLiveDocs;
        } else if (actualLiveDocs == null) {
            // no deletes in the segment: the role bitset alone is the live set
            return roleQueryBits;
        } else {
            // apply deletes when needed:
            return new CombinedBitSet(roleQueryBits, actualLiveDocs);
        }
    }

    @Override
    public int numDocs() {
        computeNumDocsIfNeeded();
        return numDocs;
    }

    @Override
    public boolean hasDeletions() {
        // we always return liveDocs and hide docs:
        return true;
    }

    @Override
    public CacheHelper getCoreCacheHelper() {
        return in.getCoreCacheHelper();
    }

    @Override
    public CacheHelper getReaderCacheHelper() {
        // Not delegated since we change the live docs
        return null;
    }

    @Override
    protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) {
        // stored fields are not filtered here; doc visibility is enforced via getLiveDocs()
        return reader;
    }
}
/**
 * Copyright Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.feedhenry.sdk;

import android.content.Context;
import com.feedhenry.sdk.utils.FHLog;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Class represents the settings in fh.properties
 */
public class AppProps {

    private static final String APP_HOST_KEY = "host";
    private static final String APP_PROJECT_KEY = "projectid";
    private static final String APP_CONNECTION_TAG_KEY = "connectiontag";
    private static final String APP_ID_KEY = "appid";
    private static final String APP_APIKEY_KEY = "appkey";
    private static final String APP_MODE_KEY = "mode";

    // AeroGear UnifiedPush properties
    private static final String PUSH_SERVER_URL_ENDPOINT = "/api/v2/ag-push";
    private static final String PUSH_SENDER_ID = "PUSH_SENDER_ID";
    private static final String PUSH_VARIANT = "PUSH_VARIANT";
    private static final String PUSH_SECRET = "PUSH_SECRET";

    private static final String OLD_PROPERTY_FILE = "fh.properties";
    private static final String NEW_PROPERTY_FILE = "fhconfig.properties";
    private static final String DEBUG_PROPERTY_FILE = "fhconfig.local.properties";

    private static final String LOG_TAG = "com.feedhenry.sdk.AppProps";

    private final Properties mProps;
    private final boolean isLocalDev;

    private static AppProps mInstance;

    private AppProps(Properties props, boolean isLocalDev) {
        this.mProps = props;
        this.isLocalDev = isLocalDev;
    }

    /**
     * Gets the value of "host" in the fh.properties file.
     *
     * @return the host value
     */
    public String getHost() {
        return this.mProps.getProperty(APP_HOST_KEY);
    }

    /**
     * Gets the value of "projectid" in the fh.properties file.
     *
     * @return the project id
     */
    public String getProjectId() {
        return this.mProps.getProperty(APP_PROJECT_KEY);
    }

    /**
     * Gets the value of "appid" in the fh.properties file.
     *
     * @return the app id
     */
    public String getAppId() {
        return this.mProps.getProperty(APP_ID_KEY);
    }

    /**
     * Gets the value of "appkey" in the fh.properties file.
     *
     * @return the app API key
     */
    public String getAppApiKey() {
        return this.mProps.getProperty(APP_APIKEY_KEY);
    }

    /**
     * Gets the value of "connectiontag" in the fh.properties file.
     *
     * @return the connection tag
     */
    public String getConnectionTag() {
        return this.mProps.getProperty(APP_CONNECTION_TAG_KEY);
    }

    /**
     * Gets the value of "mode" in the fh.properties file.
     * This is a legacy field and should not be used anymore.
     *
     * @return the legacy app mode. Can be null.
     * @deprecated legacy field, kept for backward compatibility only
     */
    // FIX: Javadoc must precede the annotation for the doc comment to attach to the method;
    // the original had @Deprecated before the Javadoc.
    @Deprecated
    public String getAppMode() {
        return this.mProps.getProperty(APP_MODE_KEY);
    }

    /**
     * Return if the app is running in local dev mode
     * (i.e., if fhconfig.local.properties file is found in the assets directory).
     * If true, the cloud host value returned in CloudProps will be the host value set in the property file.
     *
     * @return if the app is running in local dev mode
     */
    public boolean isLocalDevelopment() {
        return this.isLocalDev;
    }

    /**
     * Gets the value of the UnifiedPush server URL in the fhconfig.properties file.
     *
     * @return UnifiedPush server URL
     */
    public String getPushServerUrl() {
        return this.mProps.getProperty(APP_HOST_KEY) + PUSH_SERVER_URL_ENDPOINT;
    }

    /**
     * Gets the value of the Sender ID in the fhconfig.properties file.
     *
     * @return Sender ID
     */
    public String getPushSenderId() {
        return this.mProps.getProperty(PUSH_SENDER_ID);
    }

    /**
     * Gets the value of the UnifiedPush variant in the fhconfig.properties file.
     *
     * @return UnifiedPush variant
     */
    public String getPushVariant() {
        return this.mProps.getProperty(PUSH_VARIANT);
    }

    /**
     * Gets the value of the variant secret in the fhconfig.properties file.
     *
     * @return Variant secret
     */
    public String getPushSecret() {
        return this.mProps.getProperty(PUSH_SECRET);
    }

    /**
     * A method to retrieve the singleton instance of AppProps.
     *
     * @return The singleton instance of AppProps
     * @throws IllegalStateException if the application is not initialized.
     */
    public static AppProps getInstance() {
        if (mInstance == null) {
            throw new IllegalStateException("AppProps is not initialised");
        }
        return mInstance;
    }

    /**
     * Loads the fh.properties file.
     * Looks for the debug config first, then the current config file name,
     * then the legacy name; the first one found in the app's assets wins.
     *
     * NOTE(review): lazy init of the static singleton is not synchronized —
     * concurrent first calls could race; confirm callers invoke this from a
     * single (main) thread before adding locking.
     *
     * @param context Application context
     * @return the AppProps after read the properties file
     *
     * @throws IOException if property file could not be created
     */
    public static AppProps load(Context context) throws IOException {
        if (mInstance == null) {
            List<String> assetFiles = Arrays.asList(context.getAssets().list(""));
            if (assetFiles.contains(DEBUG_PROPERTY_FILE)) {
                createNewInstanceFromPropertyFile(context, DEBUG_PROPERTY_FILE, true);
            } else if (assetFiles.contains(NEW_PROPERTY_FILE)) {
                createNewInstanceFromPropertyFile(context, NEW_PROPERTY_FILE, false);
            } else if (assetFiles.contains(OLD_PROPERTY_FILE)) {
                createNewInstanceFromPropertyFile(context, OLD_PROPERTY_FILE, false);
            } else {
                throw new IOException("No config file was found");
            }
        }
        return mInstance;
    }

    /**
     * Attempts to open and load property file in a new AppProps instance.
     *
     * @param context    Application context
     * @param fileName   Property file name to be loaded
     * @param isLocalDev Flag if it's a debug (local developer) property file
     * @return the AppProps after read the properties file
     * @throws IOException if the asset cannot be opened or read
     */
    private static AppProps createNewInstanceFromPropertyFile(Context context, String fileName, boolean isLocalDev)
            throws IOException {
        // FIX: use try-with-resources instead of a manual finally-close. The original
        // rethrew close() failures from finally, which masked the primary load exception;
        // try-with-resources closes the stream and keeps the primary exception (close
        // failures are attached as suppressed).
        try (InputStream in = context.getAssets().open(fileName)) {
            Properties props = new Properties();
            props.load(in);
            mInstance = new AppProps(props, isLocalDev);
        } catch (IOException e) {
            FHLog.e(LOG_TAG, "Could not find asset " + fileName, e);
            throw e;
        }
        return mInstance;
    }
}
/* * Copyright 2015-2016 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.segmentrouting; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.packet.Ethernet; import org.onlab.packet.MacAddress; import org.onlab.packet.VlanId; import org.onlab.packet.IPv4; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip4Prefix; import org.onlab.packet.IpAddress; import org.onlab.packet.IpPrefix; import org.onlab.util.KryoNamespace; import org.onosproject.core.ApplicationId; import org.onosproject.core.CoreService; import org.onosproject.event.Event; import org.onosproject.net.ConnectPoint; import org.onosproject.net.PortNumber; import org.onosproject.net.config.ConfigFactory; import org.onosproject.net.config.NetworkConfigEvent; import org.onosproject.net.config.NetworkConfigRegistry; import org.onosproject.net.config.NetworkConfigListener; import org.onosproject.net.config.basics.SubjectFactories; import org.onosproject.net.flow.DefaultTrafficSelector; import org.onosproject.net.flow.DefaultTrafficTreatment; import org.onosproject.net.flow.TrafficSelector; import org.onosproject.net.flow.TrafficTreatment; import 
org.onosproject.net.flowobjective.DefaultForwardingObjective; import org.onosproject.net.flowobjective.ForwardingObjective; import org.onosproject.net.flowobjective.Objective; import org.onosproject.net.flowobjective.ObjectiveContext; import org.onosproject.net.flowobjective.ObjectiveError; import org.onosproject.net.host.HostEvent; import org.onosproject.net.host.HostListener; import org.onosproject.net.packet.PacketPriority; import org.onosproject.segmentrouting.config.DeviceConfigNotFoundException; import org.onosproject.segmentrouting.config.DeviceConfiguration; import org.onosproject.segmentrouting.config.SegmentRoutingDeviceConfig; import org.onosproject.segmentrouting.config.SegmentRoutingAppConfig; import org.onosproject.segmentrouting.grouphandler.DefaultGroupHandler; import org.onosproject.segmentrouting.grouphandler.NeighborSet; import org.onosproject.segmentrouting.grouphandler.NeighborSetNextObjectiveStoreKey; import org.onosproject.segmentrouting.grouphandler.PortNextObjectiveStoreKey; import org.onosproject.mastership.MastershipService; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.Link; import org.onosproject.net.Port; import org.onosproject.net.device.DeviceEvent; import org.onosproject.net.device.DeviceListener; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flowobjective.FlowObjectiveService; import org.onosproject.net.host.HostService; import org.onosproject.net.link.LinkEvent; import org.onosproject.net.link.LinkListener; import org.onosproject.net.link.LinkService; import org.onosproject.net.packet.InboundPacket; import org.onosproject.net.packet.PacketContext; import org.onosproject.net.packet.PacketProcessor; import org.onosproject.net.packet.PacketService; import org.onosproject.segmentrouting.grouphandler.SubnetNextObjectiveStoreKey; import org.onosproject.segmentrouting.grouphandler.XConnectNextObjectiveStoreKey; import 
org.onosproject.store.service.EventuallyConsistentMap; import org.onosproject.store.service.EventuallyConsistentMapBuilder; import org.onosproject.store.service.StorageService; import org.onosproject.store.service.WallClockTimestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static com.google.common.base.Preconditions.checkState; @Service @Component(immediate = true) /** * Segment routing manager. */ public class SegmentRoutingManager implements SegmentRoutingService { private static Logger log = LoggerFactory .getLogger(SegmentRoutingManager.class); @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected CoreService coreService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected PacketService packetService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected HostService hostService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected DeviceService deviceService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected FlowObjectiveService flowObjectiveService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected LinkService linkService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected MastershipService mastershipService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected StorageService storageService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected NetworkConfigRegistry cfgService; protected ArpHandler arpHandler = null; 
protected IcmpHandler icmpHandler = null; protected IpHandler ipHandler = null; protected RoutingRulePopulator routingRulePopulator = null; protected ApplicationId appId; protected DeviceConfiguration deviceConfiguration = null; private DefaultRoutingHandler defaultRoutingHandler = null; private TunnelHandler tunnelHandler = null; private PolicyHandler policyHandler = null; private InternalPacketProcessor processor = null; private InternalLinkListener linkListener = null; private InternalDeviceListener deviceListener = null; private NetworkConfigEventHandler netcfgHandler = null; private InternalEventHandler eventHandler = new InternalEventHandler(); private final InternalHostListener hostListener = new InternalHostListener(); private ScheduledExecutorService executorService = Executors .newScheduledThreadPool(1); @SuppressWarnings("unused") private static ScheduledFuture<?> eventHandlerFuture = null; @SuppressWarnings("rawtypes") private ConcurrentLinkedQueue<Event> eventQueue = new ConcurrentLinkedQueue<Event>(); private Map<DeviceId, DefaultGroupHandler> groupHandlerMap = new ConcurrentHashMap<>(); /** * Per device next objective ID store with (device id + neighbor set) as key. */ public EventuallyConsistentMap<NeighborSetNextObjectiveStoreKey, Integer> nsNextObjStore = null; /** * Per device next objective ID store with (device id + subnet) as key. */ public EventuallyConsistentMap<SubnetNextObjectiveStoreKey, Integer> subnetNextObjStore = null; /** * Per device next objective ID store with (device id + port) as key. */ public EventuallyConsistentMap<PortNextObjectiveStoreKey, Integer> portNextObjStore = null; /** * Per cross-connect objective ID store with VLAN ID as key. 
*/ public EventuallyConsistentMap<XConnectNextObjectiveStoreKey, Integer> xConnectNextObjStore = null; // Per device, per-subnet assigned-vlans store, with (device id + subnet // IPv4 prefix) as key private EventuallyConsistentMap<SubnetAssignedVidStoreKey, VlanId> subnetVidStore = null; private EventuallyConsistentMap<String, Tunnel> tunnelStore = null; private EventuallyConsistentMap<String, Policy> policyStore = null; private final InternalConfigListener cfgListener = new InternalConfigListener(this); private final ConfigFactory<DeviceId, SegmentRoutingDeviceConfig> cfgDeviceFactory = new ConfigFactory<DeviceId, SegmentRoutingDeviceConfig>(SubjectFactories.DEVICE_SUBJECT_FACTORY, SegmentRoutingDeviceConfig.class, "segmentrouting") { @Override public SegmentRoutingDeviceConfig createConfig() { return new SegmentRoutingDeviceConfig(); } }; private final ConfigFactory<ApplicationId, SegmentRoutingAppConfig> cfgAppFactory = new ConfigFactory<ApplicationId, SegmentRoutingAppConfig>(SubjectFactories.APP_SUBJECT_FACTORY, SegmentRoutingAppConfig.class, "segmentrouting") { @Override public SegmentRoutingAppConfig createConfig() { return new SegmentRoutingAppConfig(); } }; private Object threadSchedulerLock = new Object(); private static int numOfEventsQueued = 0; private static int numOfEventsExecuted = 0; private static int numOfHandlerExecution = 0; private static int numOfHandlerScheduled = 0; private KryoNamespace.Builder kryoBuilder = null; /** * Segment Routing App ID. */ public static final String SR_APP_ID = "org.onosproject.segmentrouting"; /** * The starting value of per-subnet VLAN ID assignment. */ private static final short ASSIGNED_VLAN_START = 4093; /** * The default VLAN ID assigned to the interfaces without subnet config. 
*/ public static final short ASSIGNED_VLAN_NO_SUBNET = 4094; @Activate protected void activate() { appId = coreService .registerApplication(SR_APP_ID); kryoBuilder = new KryoNamespace.Builder() .register(NeighborSetNextObjectiveStoreKey.class, SubnetNextObjectiveStoreKey.class, SubnetAssignedVidStoreKey.class, NeighborSet.class, DeviceId.class, URI.class, WallClockTimestamp.class, org.onosproject.cluster.NodeId.class, HashSet.class, Tunnel.class, DefaultTunnel.class, Policy.class, TunnelPolicy.class, Policy.Type.class, VlanId.class, Ip4Address.class, Ip4Prefix.class, IpAddress.Version.class, ConnectPoint.class ); log.debug("Creating EC map nsnextobjectivestore"); EventuallyConsistentMapBuilder<NeighborSetNextObjectiveStoreKey, Integer> nsNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); nsNextObjStore = nsNextObjMapBuilder .withName("nsnextobjectivestore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.trace("Current size {}", nsNextObjStore.size()); log.debug("Creating EC map subnetnextobjectivestore"); EventuallyConsistentMapBuilder<SubnetNextObjectiveStoreKey, Integer> subnetNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); subnetNextObjStore = subnetNextObjMapBuilder .withName("subnetnextobjectivestore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.debug("Creating EC map subnetnextobjectivestore"); EventuallyConsistentMapBuilder<PortNextObjectiveStoreKey, Integer> portNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); portNextObjStore = portNextObjMapBuilder .withName("portnextobjectivestore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.debug("Creating EC map xconnectnextobjectivestore"); EventuallyConsistentMapBuilder<XConnectNextObjectiveStoreKey, Integer> xConnectNextObjStoreBuilder = storageService.eventuallyConsistentMapBuilder(); 
xConnectNextObjStore = xConnectNextObjStoreBuilder .withName("xconnectnextobjectivestore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<String, Tunnel> tunnelMapBuilder = storageService.eventuallyConsistentMapBuilder(); tunnelStore = tunnelMapBuilder .withName("tunnelstore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<String, Policy> policyMapBuilder = storageService.eventuallyConsistentMapBuilder(); policyStore = policyMapBuilder .withName("policystore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<SubnetAssignedVidStoreKey, VlanId> subnetVidStoreMapBuilder = storageService.eventuallyConsistentMapBuilder(); subnetVidStore = subnetVidStoreMapBuilder .withName("subnetvidstore") .withSerializer(kryoBuilder) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); processor = new InternalPacketProcessor(); linkListener = new InternalLinkListener(); deviceListener = new InternalDeviceListener(); netcfgHandler = new NetworkConfigEventHandler(this); cfgService.addListener(cfgListener); cfgService.registerConfigFactory(cfgDeviceFactory); cfgService.registerConfigFactory(cfgAppFactory); hostService.addListener(hostListener); packetService.addProcessor(processor, PacketProcessor.director(2)); linkService.addListener(linkListener); deviceService.addListener(deviceListener); // Request ARP packet-in TrafficSelector.Builder selector = DefaultTrafficSelector.builder(); selector.matchEthType(Ethernet.TYPE_ARP); packetService.requestPackets(selector.build(), PacketPriority.CONTROL, appId, Optional.empty()); cfgListener.configureNetwork(); log.info("Started"); } @Deactivate protected void deactivate() { cfgService.removeListener(cfgListener); cfgService.unregisterConfigFactory(cfgDeviceFactory); 
cfgService.unregisterConfigFactory(cfgAppFactory); // Withdraw ARP packet-in TrafficSelector.Builder selector = DefaultTrafficSelector.builder(); selector.matchEthType(Ethernet.TYPE_ARP); packetService.cancelPackets(selector.build(), PacketPriority.CONTROL, appId, Optional.empty()); packetService.removeProcessor(processor); linkService.removeListener(linkListener); deviceService.removeListener(deviceListener); processor = null; linkListener = null; deviceService = null; groupHandlerMap.clear(); log.info("Stopped"); } @Override public List<Tunnel> getTunnels() { return tunnelHandler.getTunnels(); } @Override public TunnelHandler.Result createTunnel(Tunnel tunnel) { return tunnelHandler.createTunnel(tunnel); } @Override public TunnelHandler.Result removeTunnel(Tunnel tunnel) { for (Policy policy: policyHandler.getPolicies()) { if (policy.type() == Policy.Type.TUNNEL_FLOW) { TunnelPolicy tunnelPolicy = (TunnelPolicy) policy; if (tunnelPolicy.tunnelId().equals(tunnel.id())) { log.warn("Cannot remove the tunnel used by a policy"); return TunnelHandler.Result.TUNNEL_IN_USE; } } } return tunnelHandler.removeTunnel(tunnel); } @Override public PolicyHandler.Result removePolicy(Policy policy) { return policyHandler.removePolicy(policy); } @Override public PolicyHandler.Result createPolicy(Policy policy) { return policyHandler.createPolicy(policy); } @Override public List<Policy> getPolicies() { return policyHandler.getPolicies(); } /** * Returns the tunnel object with the tunnel ID. * * @param tunnelId Tunnel ID * @return Tunnel reference */ public Tunnel getTunnel(String tunnelId) { return tunnelHandler.getTunnel(tunnelId); } /** * Returns the vlan-id assigned to the subnet configured for a device. * If no vlan-id has been assigned, a new one is assigned out of a pool of ids, * if and only if this controller instance is the master for the device. 
* <p> * USAGE: The assigned vlans are meant to be applied to untagged packets on those * switches/pipelines that need this functionality. These vids are meant * to be used internally within a switch, and thus need to be unique only * on a switch level. Note that packets never go out on the wire with these * vlans. Currently, vlan ids are assigned from value 4093 down. * Vlan id 4094 expected to be used for all ports that are not assigned subnets. * Vlan id 4095 is reserved and unused. Only a single vlan id is assigned * per subnet. * * @param deviceId switch dpid * @param subnet IPv4 prefix for which assigned vlan is desired * @return VlanId assigned for the subnet on the device, or * null if no vlan assignment was found and this instance is not * the master for the device. */ // TODO: We should avoid assigning VLAN IDs that are used by VLAN cross-connection. public VlanId getSubnetAssignedVlanId(DeviceId deviceId, Ip4Prefix subnet) { VlanId assignedVid = subnetVidStore.get(new SubnetAssignedVidStoreKey( deviceId, subnet)); if (assignedVid != null) { log.debug("Query for subnet:{} on device:{} returned assigned-vlan " + "{}", subnet, deviceId, assignedVid); return assignedVid; } //check mastership for the right to assign a vlan if (!mastershipService.isLocalMaster(deviceId)) { log.warn("This controller instance is not the master for device {}. 
" + "Cannot assign vlan-id for subnet {}", deviceId, subnet); return null; } // vlan assignment is expensive but done only once Set<Ip4Prefix> configuredSubnets = deviceConfiguration.getSubnets(deviceId); Set<Short> assignedVlans = new HashSet<>(); Set<Ip4Prefix> unassignedSubnets = new HashSet<>(); for (Ip4Prefix sub : configuredSubnets) { VlanId v = subnetVidStore.get(new SubnetAssignedVidStoreKey(deviceId, sub)); if (v != null) { assignedVlans.add(v.toShort()); } else { unassignedSubnets.add(sub); } } short nextAssignedVlan = ASSIGNED_VLAN_START; if (!assignedVlans.isEmpty()) { nextAssignedVlan = (short) (Collections.min(assignedVlans) - 1); } for (Ip4Prefix unsub : unassignedSubnets) { // Special case for default route. Assign default VLAN ID to /32 and /0 subnets if (unsub.prefixLength() == IpPrefix.MAX_INET_MASK_LENGTH || unsub.prefixLength() == 0) { subnetVidStore.put(new SubnetAssignedVidStoreKey(deviceId, unsub), VlanId.vlanId(ASSIGNED_VLAN_NO_SUBNET)); } else { subnetVidStore.put(new SubnetAssignedVidStoreKey(deviceId, unsub), VlanId.vlanId(nextAssignedVlan--)); log.info("Assigned vlan: {} to subnet: {} on device: {}", nextAssignedVlan + 1, unsub, deviceId); } } return subnetVidStore.get(new SubnetAssignedVidStoreKey(deviceId, subnet)); } /** * Returns the next objective ID for the given NeighborSet. * If the nextObjective does not exist, a new one is created and * its id is returned. 
* * @param deviceId Device ID * @param ns NegighborSet * @param meta metadata passed into the creation of a Next Objective * @return next objective ID or -1 if an error was encountered during the * creation of the nextObjective */ public int getNextObjectiveId(DeviceId deviceId, NeighborSet ns, TrafficSelector meta) { if (groupHandlerMap.get(deviceId) != null) { log.trace("getNextObjectiveId query in device {}", deviceId); return groupHandlerMap .get(deviceId).getNextObjectiveId(ns, meta); } else { log.warn("getNextObjectiveId query - groupHandler for device {} " + "not found", deviceId); return -1; } } /** * Returns the next objective ID for the given subnet prefix. It is expected * that the next-objective has been pre-created from configuration. * * @param deviceId Device ID * @param prefix Subnet * @return next objective ID or -1 if it was not found */ public int getSubnetNextObjectiveId(DeviceId deviceId, IpPrefix prefix) { if (groupHandlerMap.get(deviceId) != null) { log.trace("getSubnetNextObjectiveId query in device {}", deviceId); return groupHandlerMap .get(deviceId).getSubnetNextObjectiveId(prefix); } else { log.warn("getSubnetNextObjectiveId query - groupHandler for " + "device {} not found", deviceId); return -1; } } /** * Returns the next objective ID for the given portNumber, given the treatment. * There could be multiple different treatments to the same outport, which * would result in different objectives. If the next object * does not exist, a new one is created and its id is returned. 
* * @param deviceId Device ID * @param portNum port number on device for which NextObjective is queried * @param treatment the actions to apply on the packets (should include outport) * @param meta metadata passed into the creation of a Next Objective if necessary * @return next objective ID or -1 if it was not found */ public int getPortNextObjectiveId(DeviceId deviceId, PortNumber portNum, TrafficTreatment treatment, TrafficSelector meta) { DefaultGroupHandler ghdlr = groupHandlerMap.get(deviceId); if (ghdlr != null) { return ghdlr.getPortNextObjectiveId(portNum, treatment, meta); } else { log.warn("getPortNextObjectiveId query - groupHandler for device {}" + " not found", deviceId); return -1; } } /** * Returns the next objective ID of type broadcast associated with the VLAN * cross-connection. * * @param deviceId Device ID for the cross-connection * @param vlanId VLAN ID for the cross-connection * @return next objective ID or -1 if it was not found */ public int getXConnectNextObjectiveId(DeviceId deviceId, VlanId vlanId) { DefaultGroupHandler ghdlr = groupHandlerMap.get(deviceId); if (ghdlr != null) { return ghdlr.getXConnectNextObjectiveId(vlanId); } else { log.warn("getPortNextObjectiveId query - groupHandler for device {}" + " not found", deviceId); return -1; } } private class InternalPacketProcessor implements PacketProcessor { @Override public void process(PacketContext context) { if (context.isHandled()) { return; } InboundPacket pkt = context.inPacket(); Ethernet ethernet = pkt.parsed(); log.trace("Rcvd pktin: {}", ethernet); if (ethernet.getEtherType() == Ethernet.TYPE_ARP) { arpHandler.processPacketIn(pkt); } else if (ethernet.getEtherType() == Ethernet.TYPE_IPV4) { IPv4 ipPacket = (IPv4) ethernet.getPayload(); ipHandler.addToPacketBuffer(ipPacket); if (ipPacket.getProtocol() == IPv4.PROTOCOL_ICMP) { icmpHandler.processPacketIn(pkt); } else { ipHandler.processPacketIn(pkt); } } } } private class InternalLinkListener implements LinkListener { 
@Override public void event(LinkEvent event) { if (event.type() == LinkEvent.Type.LINK_ADDED || event.type() == LinkEvent.Type.LINK_REMOVED) { log.debug("Event {} received from Link Service", event.type()); scheduleEventHandlerIfNotScheduled(event); } } } private class InternalDeviceListener implements DeviceListener { @Override public void event(DeviceEvent event) { switch (event.type()) { case DEVICE_ADDED: case PORT_REMOVED: case DEVICE_UPDATED: case DEVICE_AVAILABILITY_CHANGED: log.debug("Event {} received from Device Service", event.type()); scheduleEventHandlerIfNotScheduled(event); break; default: } } } @SuppressWarnings("rawtypes") private void scheduleEventHandlerIfNotScheduled(Event event) { synchronized (threadSchedulerLock) { eventQueue.add(event); numOfEventsQueued++; if ((numOfHandlerScheduled - numOfHandlerExecution) == 0) { //No pending scheduled event handling threads. So start a new one. eventHandlerFuture = executorService .schedule(eventHandler, 100, TimeUnit.MILLISECONDS); numOfHandlerScheduled++; } log.trace("numOfEventsQueued {}, numOfEventHanlderScheduled {}", numOfEventsQueued, numOfHandlerScheduled); } } private class InternalEventHandler implements Runnable { @Override public void run() { try { while (true) { @SuppressWarnings("rawtypes") Event event = null; synchronized (threadSchedulerLock) { if (!eventQueue.isEmpty()) { event = eventQueue.poll(); numOfEventsExecuted++; } else { numOfHandlerExecution++; log.debug("numOfHandlerExecution {} numOfEventsExecuted {}", numOfHandlerExecution, numOfEventsExecuted); break; } } if (event.type() == LinkEvent.Type.LINK_ADDED) { processLinkAdded((Link) event.subject()); } else if (event.type() == LinkEvent.Type.LINK_REMOVED) { processLinkRemoved((Link) event.subject()); } else if (event.type() == DeviceEvent.Type.DEVICE_ADDED || event.type() == DeviceEvent.Type.DEVICE_AVAILABILITY_CHANGED || event.type() == DeviceEvent.Type.DEVICE_UPDATED) { DeviceId deviceId = ((Device) event.subject()).id(); if 
(deviceService.isAvailable(deviceId)) { log.info("Processing device event {} for available device {}", event.type(), ((Device) event.subject()).id()); processDeviceAdded((Device) event.subject()); } /* else { if (event.type() == DeviceEvent.Type.DEVICE_AVAILABILITY_CHANGED) { // availability changed and not available - dev gone DefaultGroupHandler groupHandler = groupHandlerMap.get(deviceId); if (groupHandler != null) { groupHandler.removeAllGroups(); } } }*/ } else if (event.type() == DeviceEvent.Type.PORT_REMOVED) { processPortRemoved((Device) event.subject(), ((DeviceEvent) event).port()); } else { log.warn("Unhandled event type: {}", event.type()); } } } catch (Exception e) { log.error("SegmentRouting event handler " + "thread thrown an exception: {}", e); } } } private void processLinkAdded(Link link) { log.debug("A new link {} was added", link.toString()); if (!deviceConfiguration.isConfigured(link.src().deviceId())) { log.warn("Source device of this link is not configured."); return; } //Irrespective whether the local is a MASTER or not for this device, //create group handler instance and push default TTP flow rules. //Because in a multi-instance setup, instances can initiate //groups for any devices. 
Also the default TTP rules are needed //to be pushed before inserting any IP table entries for any device DefaultGroupHandler groupHandler = groupHandlerMap.get(link.src() .deviceId()); if (groupHandler != null) { groupHandler.linkUp(link, mastershipService.isLocalMaster( link.src().deviceId())); } else { Device device = deviceService.getDevice(link.src().deviceId()); if (device != null) { log.warn("processLinkAdded: Link Added " + "Notification without Device Added " + "event, still handling it"); processDeviceAdded(device); groupHandler = groupHandlerMap.get(link.src() .deviceId()); groupHandler.linkUp(link, mastershipService.isLocalMaster(device.id())); } } log.trace("Starting optimized route population process"); defaultRoutingHandler.populateRoutingRulesForLinkStatusChange(null); //log.trace("processLinkAdded: re-starting route population process"); //defaultRoutingHandler.startPopulationProcess(); } private void processLinkRemoved(Link link) { log.debug("A link {} was removed", link.toString()); DefaultGroupHandler groupHandler = groupHandlerMap.get(link.src().deviceId()); if (groupHandler != null) { groupHandler.portDown(link.src().port(), mastershipService.isLocalMaster(link.src().deviceId())); } log.trace("Starting optimized route population process"); defaultRoutingHandler.populateRoutingRulesForLinkStatusChange(link); //log.trace("processLinkRemoved: re-starting route population process"); //defaultRoutingHandler.startPopulationProcess(); } private void processDeviceAdded(Device device) { log.debug("A new device with ID {} was added", device.id()); if (deviceConfiguration == null || !deviceConfiguration.isConfigured(device.id())) { log.warn("Device configuration uploading. Device {} will be " + "processed after config completes.", device.id()); return; } // Irrespective of whether the local is a MASTER or not for this device, // we need to create a SR-group-handler instance. 
This is because in a // multi-instance setup, any instance can initiate forwarding/next-objectives // for any switch (even if this instance is a SLAVE or not even connected // to the switch). To handle this, a default-group-handler instance is necessary // per switch. if (groupHandlerMap.get(device.id()) == null) { DefaultGroupHandler groupHandler; try { groupHandler = DefaultGroupHandler. createGroupHandler(device.id(), appId, deviceConfiguration, linkService, flowObjectiveService, this); } catch (DeviceConfigNotFoundException e) { log.warn(e.getMessage() + " Aborting processDeviceAdded."); return; } groupHandlerMap.put(device.id(), groupHandler); // Also, in some cases, drivers may need extra // information to process rules (eg. Router IP/MAC); and so, we send // port addressing rules to the driver as well irrespective of whether // this instance is the master or not. defaultRoutingHandler.populatePortAddressingRules(device.id()); hostListener.readInitialHosts(); } if (mastershipService.isLocalMaster(device.id())) { DefaultGroupHandler groupHandler = groupHandlerMap.get(device.id()); groupHandler.createGroupsFromSubnetConfig(); routingRulePopulator.populateSubnetBroadcastRule(device.id()); groupHandler.createGroupsForXConnect(device.id()); routingRulePopulator.populateXConnectBroadcastRule(device.id()); } netcfgHandler.initVRouters(device.id()); } private void processPortRemoved(Device device, Port port) { log.debug("Port {} was removed", port.toString()); DefaultGroupHandler groupHandler = groupHandlerMap.get(device.id()); if (groupHandler != null) { groupHandler.portDown(port.number(), mastershipService.isLocalMaster(device.id())); } } private class InternalConfigListener implements NetworkConfigListener { SegmentRoutingManager segmentRoutingManager; /** * Constructs the internal network config listener. 
* * @param srMgr segment routing manager */ public InternalConfigListener(SegmentRoutingManager srMgr) { this.segmentRoutingManager = srMgr; } /** * Reads network config and initializes related data structure accordingly. */ public void configureNetwork() { deviceConfiguration = new DeviceConfiguration(appId, segmentRoutingManager.cfgService); arpHandler = new ArpHandler(segmentRoutingManager); icmpHandler = new IcmpHandler(segmentRoutingManager); ipHandler = new IpHandler(segmentRoutingManager); routingRulePopulator = new RoutingRulePopulator(segmentRoutingManager); defaultRoutingHandler = new DefaultRoutingHandler(segmentRoutingManager); tunnelHandler = new TunnelHandler(linkService, deviceConfiguration, groupHandlerMap, tunnelStore); policyHandler = new PolicyHandler(appId, deviceConfiguration, flowObjectiveService, tunnelHandler, policyStore); for (Device device : deviceService.getDevices()) { // Irrespective of whether the local is a MASTER or not for this device, // we need to create a SR-group-handler instance. This is because in a // multi-instance setup, any instance can initiate forwarding/next-objectives // for any switch (even if this instance is a SLAVE or not even connected // to the switch). To handle this, a default-group-handler instance is necessary // per switch. if (groupHandlerMap.get(device.id()) == null) { DefaultGroupHandler groupHandler; try { groupHandler = DefaultGroupHandler. createGroupHandler(device.id(), appId, deviceConfiguration, linkService, flowObjectiveService, segmentRoutingManager); } catch (DeviceConfigNotFoundException e) { log.warn(e.getMessage() + " Aborting configureNetwork."); return; } groupHandlerMap.put(device.id(), groupHandler); // Also, in some cases, drivers may need extra // information to process rules (eg. Router IP/MAC); and so, we send // port addressing rules to the driver as well, irrespective of whether // this instance is the master or not. 
defaultRoutingHandler.populatePortAddressingRules(device.id()); hostListener.readInitialHosts(); } if (mastershipService.isLocalMaster(device.id())) { DefaultGroupHandler groupHandler = groupHandlerMap.get(device.id()); groupHandler.createGroupsFromSubnetConfig(); routingRulePopulator.populateSubnetBroadcastRule(device.id()); groupHandler.createGroupsForXConnect(device.id()); routingRulePopulator.populateXConnectBroadcastRule(device.id()); } } defaultRoutingHandler.startPopulationProcess(); } @Override public void event(NetworkConfigEvent event) { // TODO move this part to NetworkConfigEventHandler if (event.configClass().equals(SegmentRoutingDeviceConfig.class)) { switch (event.type()) { case CONFIG_ADDED: log.info("Segment Routing Config added."); configureNetwork(); break; case CONFIG_UPDATED: log.info("Segment Routing Config updated."); // TODO support dynamic configuration break; default: break; } } else if (event.configClass().equals(SegmentRoutingAppConfig.class)) { checkState(netcfgHandler != null, "NetworkConfigEventHandler is not initialized"); switch (event.type()) { case CONFIG_ADDED: netcfgHandler.processVRouterConfigAdded(event); break; case CONFIG_UPDATED: netcfgHandler.processVRouterConfigUpdated(event); break; case CONFIG_REMOVED: netcfgHandler.processVRouterConfigRemoved(event); break; default: break; } } } } // TODO Move bridging table population to a separate class private class InternalHostListener implements HostListener { private void readInitialHosts() { hostService.getHosts().forEach(host -> { MacAddress mac = host.mac(); VlanId vlanId = host.vlan(); DeviceId deviceId = host.location().deviceId(); PortNumber port = host.location().port(); Set<IpAddress> ips = host.ipAddresses(); log.debug("Host {}/{} is added at {}:{}", mac, vlanId, deviceId, port); // Populate bridging table entry ForwardingObjective.Builder fob = getForwardingObjectiveBuilder(deviceId, mac, vlanId, port); flowObjectiveService.forward(deviceId, fob.add( new 
BridgingTableObjectiveContext(mac, vlanId) )); // Populate IP table entry ips.forEach(ip -> { if (ip.isIp4()) { routingRulePopulator.populateIpRuleForHost( deviceId, ip.getIp4Address(), mac, port); } }); }); } private ForwardingObjective.Builder getForwardingObjectiveBuilder( DeviceId deviceId, MacAddress mac, VlanId vlanId, PortNumber outport) { // Get assigned VLAN for the subnet VlanId outvlan = null; Ip4Prefix subnet = deviceConfiguration.getPortSubnet(deviceId, outport); if (subnet == null) { outvlan = VlanId.vlanId(ASSIGNED_VLAN_NO_SUBNET); } else { outvlan = getSubnetAssignedVlanId(deviceId, subnet); } // match rule TrafficSelector.Builder sbuilder = DefaultTrafficSelector.builder(); sbuilder.matchEthDst(mac); /* * Note: for untagged packets, match on the assigned VLAN. * for tagged packets, match on its incoming VLAN. */ if (vlanId.equals(VlanId.NONE)) { sbuilder.matchVlanId(outvlan); } else { sbuilder.matchVlanId(vlanId); } TrafficTreatment.Builder tbuilder = DefaultTrafficTreatment.builder(); tbuilder.immediate().popVlan(); tbuilder.immediate().setOutput(outport); // for switch pipelines that need it, provide outgoing vlan as metadata TrafficSelector meta = DefaultTrafficSelector.builder() .matchVlanId(outvlan).build(); // All forwarding is via Groups. Drivers can re-purpose to flow-actions if needed. 
int portNextObjId = getPortNextObjectiveId(deviceId, outport, tbuilder.build(), meta); return DefaultForwardingObjective.builder() .withFlag(ForwardingObjective.Flag.SPECIFIC) .withSelector(sbuilder.build()) .nextStep(portNextObjId) .withPriority(100) .fromApp(appId) .makePermanent(); } private void processHostAddedEvent(HostEvent event) { MacAddress mac = event.subject().mac(); VlanId vlanId = event.subject().vlan(); DeviceId deviceId = event.subject().location().deviceId(); PortNumber port = event.subject().location().port(); Set<IpAddress> ips = event.subject().ipAddresses(); log.info("Host {}/{} is added at {}:{}", mac, vlanId, deviceId, port); if (!deviceConfiguration.suppressHost() .contains(new ConnectPoint(deviceId, port))) { // Populate bridging table entry log.debug("Populate L2 table entry for host {} at {}:{}", mac, deviceId, port); ForwardingObjective.Builder fob = getForwardingObjectiveBuilder(deviceId, mac, vlanId, port); flowObjectiveService.forward(deviceId, fob.add( new BridgingTableObjectiveContext(mac, vlanId) )); // Populate IP table entry ips.forEach(ip -> { if (ip.isIp4()) { routingRulePopulator.populateIpRuleForHost( deviceId, ip.getIp4Address(), mac, port); } }); } } private void processHostRemoveEvent(HostEvent event) { MacAddress mac = event.subject().mac(); VlanId vlanId = event.subject().vlan(); DeviceId deviceId = event.subject().location().deviceId(); PortNumber port = event.subject().location().port(); Set<IpAddress> ips = event.subject().ipAddresses(); log.debug("Host {}/{} is removed from {}:{}", mac, vlanId, deviceId, port); if (!deviceConfiguration.suppressHost() .contains(new ConnectPoint(deviceId, port))) { // Revoke bridging table entry ForwardingObjective.Builder fob = getForwardingObjectiveBuilder(deviceId, mac, vlanId, port); flowObjectiveService.forward(deviceId, fob.remove( new BridgingTableObjectiveContext(mac, vlanId) )); // Revoke IP table entry ips.forEach(ip -> { if (ip.isIp4()) { 
routingRulePopulator.revokeIpRuleForHost( deviceId, ip.getIp4Address(), mac, port); } }); } } private void processHostMovedEvent(HostEvent event) { MacAddress mac = event.subject().mac(); VlanId vlanId = event.subject().vlan(); DeviceId prevDeviceId = event.prevSubject().location().deviceId(); PortNumber prevPort = event.prevSubject().location().port(); Set<IpAddress> prevIps = event.prevSubject().ipAddresses(); DeviceId newDeviceId = event.subject().location().deviceId(); PortNumber newPort = event.subject().location().port(); Set<IpAddress> newIps = event.subject().ipAddresses(); log.debug("Host {}/{} is moved from {}:{} to {}:{}", mac, vlanId, prevDeviceId, prevPort, newDeviceId, newPort); if (!deviceConfiguration.suppressHost() .contains(new ConnectPoint(prevDeviceId, prevPort))) { // Revoke previous bridging table entry ForwardingObjective.Builder prevFob = getForwardingObjectiveBuilder(prevDeviceId, mac, vlanId, prevPort); flowObjectiveService.forward(prevDeviceId, prevFob.remove( new BridgingTableObjectiveContext(mac, vlanId) )); // Revoke previous IP table entry prevIps.forEach(ip -> { if (ip.isIp4()) { routingRulePopulator.revokeIpRuleForHost( prevDeviceId, ip.getIp4Address(), mac, prevPort); } }); } if (!deviceConfiguration.suppressHost() .contains(new ConnectPoint(newDeviceId, newPort))) { // Populate new bridging table entry ForwardingObjective.Builder newFob = getForwardingObjectiveBuilder(newDeviceId, mac, vlanId, newPort); flowObjectiveService.forward(newDeviceId, newFob.add( new BridgingTableObjectiveContext(mac, vlanId) )); // Populate new IP table entry newIps.forEach(ip -> { if (ip.isIp4()) { routingRulePopulator.populateIpRuleForHost( newDeviceId, ip.getIp4Address(), mac, newPort); } }); } } private void processHostUpdatedEvent(HostEvent event) { MacAddress mac = event.subject().mac(); VlanId vlanId = event.subject().vlan(); DeviceId prevDeviceId = event.prevSubject().location().deviceId(); PortNumber prevPort = 
event.prevSubject().location().port(); Set<IpAddress> prevIps = event.prevSubject().ipAddresses(); DeviceId newDeviceId = event.subject().location().deviceId(); PortNumber newPort = event.subject().location().port(); Set<IpAddress> newIps = event.subject().ipAddresses(); log.debug("Host {}/{} is updated", mac, vlanId); if (!deviceConfiguration.suppressHost() .contains(new ConnectPoint(prevDeviceId, prevPort))) { // Revoke previous IP table entry prevIps.forEach(ip -> { if (ip.isIp4()) { routingRulePopulator.revokeIpRuleForHost( prevDeviceId, ip.getIp4Address(), mac, prevPort); } }); } if (!deviceConfiguration.suppressHost() .contains(new ConnectPoint(newDeviceId, newPort))) { // Populate new IP table entry newIps.forEach(ip -> { if (ip.isIp4()) { routingRulePopulator.populateIpRuleForHost( newDeviceId, ip.getIp4Address(), mac, newPort); } }); } } @Override public void event(HostEvent event) { // Do not proceed without mastership DeviceId deviceId = event.subject().location().deviceId(); if (!mastershipService.isLocalMaster(deviceId)) { return; } switch (event.type()) { case HOST_ADDED: processHostAddedEvent(event); break; case HOST_MOVED: processHostMovedEvent(event); break; case HOST_REMOVED: processHostRemoveEvent(event); break; case HOST_UPDATED: processHostUpdatedEvent(event); break; default: log.warn("Unsupported host event type: {}", event.type()); break; } } } private static class BridgingTableObjectiveContext implements ObjectiveContext { final MacAddress mac; final VlanId vlanId; BridgingTableObjectiveContext(MacAddress mac, VlanId vlanId) { this.mac = mac; this.vlanId = vlanId; } @Override public void onSuccess(Objective objective) { if (objective.op() == Objective.Operation.ADD) { log.debug("Successfully populate bridging table entry for {}/{}", mac, vlanId); } else { log.debug("Successfully revoke bridging table entry for {}/{}", mac, vlanId); } } @Override public void onError(Objective objective, ObjectiveError error) { if (objective.op() == 
Objective.Operation.ADD) { log.debug("Fail to populate bridging table entry for {}/{}. {}", mac, vlanId, error); } else { log.debug("Fail to revoke bridging table entry for {}/{}. {}", mac, vlanId, error); } } } }
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs.impl.http;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileSystem;
import com.intellij.util.ArrayUtil;
import com.intellij.util.FileContentUtilCore;
import com.intellij.util.SmartList;
import com.intellij.util.UriUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;

/**
 * A {@link VirtualFile} implementation backed by a remote HTTP URL.
 * <p>
 * An instance either represents a downloadable remote file (when {@link #myFileInfo}
 * is non-null) or a "directory" node grouping such files (when {@link #myFileInfo}
 * is null — see {@link #isDirectory()}). Content operations (input/output streams,
 * {@link #contentsToByteArray()}) delegate to the locally downloaded copy obtained
 * from the file info, and throw {@link UnsupportedOperationException} while no local
 * copy exists.
 * <p>
 * NOTE(review): instances appear to be long-lived and never invalidated —
 * {@link #isValid()} unconditionally returns {@code true}.
 */
class HttpVirtualFileImpl extends HttpVirtualFile {
  private final HttpFileSystemBase myFileSystem;
  // Null for directory nodes; non-null for remote files that can be downloaded.
  @Nullable private final RemoteFileInfoImpl myFileInfo;
  // File type observed before download completes; used to detect whether the
  // downloaded content implies a different type and files must be reparsed.
  private FileType myInitialFileType;
  private final String myPath;
  // Null when this node has no parent path component (root-like nodes).
  private final String myParentPath;
  private final String myName;
  // Lazily created in the constructor of the first child; null means "no children".
  private List<VirtualFile> myChildren;

  /**
   * Creates a node and, if {@code parent} is given, registers itself into the
   * parent's child list as a side effect.
   * <p>
   * When {@code fileInfo} is non-null, a downloading listener is installed that
   * reloads the document for this file once the download finishes, and reparses
   * it if the downloaded content's file type differs from the type seen before
   * the download ({@code myInitialFileType}).
   * <p>
   * Name/parent-path splitting differs by node kind:
   * for files, query parameters and trailing slashes are stripped first and the
   * path is split at the last '/'; for directories, the (possibly trailing-slash)
   * path is split so that the trailing slash stays with the parent when there is
   * only a single leading component.
   *
   * @param fileSystem owning file system; used to resolve the parent lazily
   * @param parent     optional parent node; this instance is appended to its children
   * @param path       full URL-like path of this node
   * @param fileInfo   download state holder, or null for a directory node
   */
  HttpVirtualFileImpl(@NotNull HttpFileSystemBase fileSystem,
                      @Nullable HttpVirtualFileImpl parent,
                      String path,
                      @Nullable RemoteFileInfoImpl fileInfo) {
    if (parent != null) {
      if (parent.myChildren == null) {
        parent.myChildren = new SmartList<>();
      }
      // NOTE(review): 'this' escapes to the parent before construction completes —
      // presumably safe because all creation happens on one thread; confirm.
      parent.myChildren.add(this);
    }
    myFileSystem = fileSystem;
    // myPath keeps the original path verbatim; only the local 'path' variable is
    // normalized below for name/parent computation.
    myPath = path;
    myFileInfo = fileInfo;
    if (myFileInfo != null) {
      myFileInfo.addDownloadingListener(new FileDownloadingAdapter() {
        @Override
        public void fileDownloaded(final VirtualFile localFile) {
          // Reload on the EDT: document content now comes from the local copy.
          ApplicationManager.getApplication().invokeLater(() -> {
            HttpVirtualFileImpl file = HttpVirtualFileImpl.this;
            FileDocumentManager.getInstance().reloadFiles(file);
            // Reparse only if the download revealed a different file type than
            // what was assumed before the content was available.
            if (!localFile.getFileType().equals(myInitialFileType)) {
              FileContentUtilCore.reparseFiles(file);
            }
          });
        }
      });

      // Remote file: drop query parameters and trailing slashes before splitting.
      path = UriUtil.trimTrailingSlashes(UriUtil.trimParameters(path));
      int lastSlash = path.lastIndexOf('/');
      if (lastSlash == -1) {
        myParentPath = null;
        myName = path;
      }
      else {
        myParentPath = path.substring(0, lastSlash);
        myName = path.substring(lastSlash + 1);
      }
    }
    else {
      // Directory node: the path may legitimately end with '/'.
      int lastSlash = path.lastIndexOf('/');
      if (lastSlash == path.length() - 1) {
        // Only a trailing slash (or no slash in an empty path): treat the whole
        // path as the name of a root-like node.
        myParentPath = null;
        myName = path;
      }
      else {
        int prevSlash = path.lastIndexOf('/', lastSlash - 1);
        if (prevSlash < 0) {
          // Single leading component (e.g. "scheme://name"): keep the slash on
          // the parent side so the parent path stays well-formed.
          myParentPath = path.substring(0, lastSlash + 1);
          myName = path.substring(lastSlash + 1);
        }
        else {
          myParentPath = path.substring(0, lastSlash);
          myName = path.substring(lastSlash + 1);
        }
      }
    }
  }

  /** @return the download state holder, or null for directory nodes */
  @Override
  @Nullable
  public RemoteFileInfoImpl getFileInfo() {
    return myFileInfo;
  }

  @Override
  @NotNull
  public VirtualFileSystem getFileSystem() {
    return myFileSystem;
  }

  /** @return the original, un-normalized path passed at construction time */
  @NotNull
  @Override
  public String getPath() {
    return myPath;
  }

  @Override
  @NotNull
  public String getName() {
    return myName;
  }

  @Override
  public String toString() {
    return "HttpVirtualFile:" + myPath + ", info=" + myFileInfo;
  }

  /** Resolves the parent lazily through the file system; null for root-like nodes. */
  @Override
  public VirtualFile getParent() {
    return myParentPath == null ? null : myFileSystem.findFileByPath(myParentPath, true);
  }

  /** Remote files are never writable directly; writes go through the local copy. */
  @Override
  public boolean isWritable() {
    return false;
  }

  @Override
  public boolean isValid() {
    return true;
  }

  /** A node without file info is a directory grouping other HTTP files. */
  @Override
  public boolean isDirectory() {
    return myFileInfo == null;
  }

  @Override
  public VirtualFile[] getChildren() {
    // EMPTY_ARRAY is inherited from VirtualFile.
    return ContainerUtil.isEmpty(myChildren) ? EMPTY_ARRAY : myChildren.toArray(VirtualFile.EMPTY_ARRAY);
  }

  /** Linear scan over the (typically tiny) child list; null when not found. */
  @Nullable
  @Override
  public VirtualFile findChild(@NotNull @NonNls String name) {
    if (!ContainerUtil.isEmpty(myChildren)) {
      for (VirtualFile child : myChildren) {
        if (StringUtil.equals(child.getNameSequence(), name)) {
          return child;
        }
      }
    }
    return null;
  }

  /**
   * Returns the file type of the downloaded local copy when available; otherwise
   * falls back to the type derived from the name. The first fallback value is
   * remembered in {@code myInitialFileType} so the download listener can detect
   * a type change after the content arrives.
   */
  @Override
  @NotNull
  public FileType getFileType() {
    if (myFileInfo == null) {
      return super.getFileType();
    }
    VirtualFile localFile = myFileInfo.getLocalFile();
    if (localFile != null) {
      return localFile.getFileType();
    }
    FileType fileType = super.getFileType();
    if (myInitialFileType == null) {
      myInitialFileType = fileType;
    }
    return fileType;
  }

  /**
   * Delegates to the downloaded local copy.
   *
   * @throws UnsupportedOperationException if this is a directory or the file has
   *                                       not been downloaded yet
   */
  @Override
  public InputStream getInputStream() throws IOException {
    if (myFileInfo != null) {
      VirtualFile localFile = myFileInfo.getLocalFile();
      if (localFile != null) {
        return localFile.getInputStream();
      }
    }
    throw new UnsupportedOperationException();
  }

  /**
   * Delegates to the downloaded local copy.
   *
   * @throws UnsupportedOperationException if this is a directory or the file has
   *                                       not been downloaded yet
   */
  @Override
  @NotNull
  public OutputStream getOutputStream(Object requestor, long newModificationStamp, long newTimeStamp) throws IOException {
    if (myFileInfo != null) {
      VirtualFile localFile = myFileInfo.getLocalFile();
      if (localFile != null) {
        return localFile.getOutputStream(requestor, newModificationStamp, newTimeStamp);
      }
    }
    throw new UnsupportedOperationException();
  }

  /**
   * Returns the downloaded content, or an empty array while the download is
   * still pending.
   *
   * @throws UnsupportedOperationException if this is a directory node
   */
  @Override
  @NotNull
  public byte[] contentsToByteArray() throws IOException {
    if (myFileInfo == null) {
      throw new UnsupportedOperationException();
    }
    VirtualFile localFile = myFileInfo.getLocalFile();
    if (localFile != null) {
      return localFile.contentsToByteArray();
    }
    return ArrayUtil.EMPTY_BYTE_ARRAY;
  }

  // Remote files carry no meaningful timestamps; constants are returned.
  @Override
  public long getTimeStamp() {
    return 0;
  }

  @Override
  public long getModificationStamp() {
    return 0;
  }

  /** Length is unknown until downloaded; -1 signals "unknown" to callers. */
  @Override
  public long getLength() {
    return -1;
  }

  /**
   * For remote files, re-triggers the download via the file info; for directory
   * nodes there is nothing to refresh, so only the callback is invoked.
   * The {@code asynchronous}/{@code recursive} flags are intentionally ignored.
   */
  @Override
  public void refresh(final boolean asynchronous, final boolean recursive, final Runnable postRunnable) {
    if (myFileInfo != null) {
      myFileInfo.refresh(postRunnable);
    }
    else if (postRunnable != null) {
      postRunnable.run();
    }
  }
}
/*
 * Copyright 2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.codenarc.rule;

import org.codehaus.groovy.ast.ASTNode;
import org.codehaus.groovy.ast.ImportNode;
import org.codenarc.source.SourceCode;
import org.codenarc.source.SourceCodeCriteria;
import org.codenarc.util.ImportUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * Abstract superclass for Rules.
 * <p/>
 * Each subclass must define an <code>name</code> property (String) and a <code>priority</code> property
 * (integer 1..3).
 *
 * @author Chris Mair
 * @author Hamlet D'Arcy
 */
public abstract class AbstractRule implements Rule {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractRule.class);

    /**
     * Flag indicating whether this rule should be enabled (applied). Defaults to true.
     * If set to false, this rule will not produce any violations.
     */
    private boolean enabled = true;

    /**
     * This rule is only applied to source code (file) pathnames matching this regular expression.
     */
    private String applyToFilesMatching;

    /**
     * This rule is NOT applied to source code (file) pathnames matching this regular expression.
     */
    private String doNotApplyToFilesMatching;

    /**
     * This rule is only applied to source code (file) names matching this value. The name may optionally
     * contain a path. If a path is specified, then the source code path must match it. If no path is
     * specified, then only the source code (file) name is compared (i.e., its path is ignored).
     * The value may optionally be a comma-separated list of names, in which case one of the names must match.
     * The name(s) may optionally include wildcard characters ('*' or '?').
     */
    private String applyToFileNames;

    /**
     * This rule is NOT applied to source code (file) names matching this value. The name may optionally
     * contain a path. If a path is specified, then the source code path must match it. If no path is
     * specified, then only the source code (file) name is compared (i.e., its path is ignored).
     * The value may optionally be a comma-separated list of names, in which case any one of the names can match.
     * The name(s) may optionally include wildcard characters ('*' or '?').
     */
    private String doNotApplyToFileNames;

    /**
     * If not null, this is used as the message for all violations of this rule, overriding any
     * message generated by the concrete rule subclass. Defaults to null. Note that setting this
     * to an empty string "hides" the message, if any, generated by the actual rule.
     */
    private String violationMessage;

    /**
     * If not null, this is used as the description text for this rule, overriding any
     * description text found in the i18n resource bundles. Defaults to null.
     */
    private String description;

    /**
     * @return the unique name for this rule
     */
    public abstract String getName();

    /**
     * Set the unique name for this rule
     * @param name - the name for this rule; this should be unique
     */
    public abstract void setName(String name);

    /**
     * @return the priority of this rule, between 1 (highest priority) and 3 (lowest priority), inclusive.
     */
    public abstract int getPriority();

    /**
     * Set the priority for this rule
     * @param priority - the priority of this rule, between 1 (highest priority) and 3 (lowest priority), inclusive.
     */
    public abstract void setPriority(int priority);

    /**
     * @return the required compiler phase (as in {@link org.codehaus.groovy.control.Phases})
     *         of the AST of the {@link SourceCode}
     *         handed to the rule via {@link #applyTo(SourceCode sourceCode)}
     */
    public int getCompilerPhase() {
        return SourceCode.DEFAULT_COMPILER_PHASE;
    }

    /**
     * Apply this rule to the specified source and return a list of violations (or an empty List)
     * @param sourceCode - the source to apply this rule to
     * @param violations - the List of violations to which new violations from this rule are to be added
     */
    public abstract void applyTo(SourceCode sourceCode, List<Violation> violations);

    /**
     * Apply this rule to the specified source and return a list of violations (or an empty List).
     * This implementation delegates to the abstract applyCode(SourceCode,List), provided by
     * concrete subclasses. This template method simplifies subclass implementations and also
     * enables common handling of enablement logic.
     * @param sourceCode - the source to apply this rule to
     * @return the List of violations; may be empty
     * @throws Throwable any error raised by the concrete rule; it is logged with the source path and rethrown
     */
    public List<Violation> applyTo(SourceCode sourceCode) throws Throwable {
        try {
            validateAstCompilerPhase(sourceCode);
            validate();
            List<Violation> violations = new ArrayList<Violation>();
            if (shouldApplyThisRuleTo(sourceCode)) {
                applyTo(sourceCode, violations);
            }
            overrideViolationMessageIfNecessary(violations);
            return violations;
        }
        catch (Throwable t) {
            // Log with enough context (rule class + file) to diagnose, then propagate to the caller.
            LOG.error("Error from [" + getClass().getName() + "] processing source file [" + sourceCode.getPath() + "]", t);
            throw t;
        }
    }

    // Fail fast if the SourceCode was compiled to a different AST phase than this rule requires.
    private void validateAstCompilerPhase(SourceCode sourceCode) {
        if (sourceCode.getAstCompilerPhase() != getCompilerPhase()) {
            throw new IllegalArgumentException("This rule requires SourceCode with AST compiler phase '" + getCompilerPhase() +
                    "', but was handed one with AST compiler phase '" + sourceCode.getAstCompilerPhase() + "'");
        }
    }

    /**
     * Allows rules to check whether preconditions are satisfied and short-circuit execution
     * (i.e., do nothing) if those preconditions are not satisfied. Return true by default.
     * This method is provided as a placeholder so subclasses can optionally override.
     * @return true if all preconditions for this rule are satisfied
     */
    public boolean isReady() {
        return true;
    }

    /**
     * Allows rules to perform validation. Do nothing by default.
     * This method is provided as a placeholder so subclasses can optionally override.
     * Subclasses will typically use <code>assert</code> calls to verify required preconditions.
     */
    public void validate() {
    }

    @Override
    public String toString() {
        return String.format("%s[name=%s, priority=%s]", getClassNameNoPackage(), getName(), getPriority());
    }

    /**
     * Create and return a new Violation for this rule and the specified values
     * @param lineNumber - the line number for the violation; may be null
     * @param sourceLine - the source line for the violation; may be null
     * @param message - the message for the violation; may be null
     * @return a new Violation object
     */
    protected Violation createViolation(Integer lineNumber, String sourceLine, String message) {
        Violation violation = new Violation();
        violation.setRule(this);
        violation.setSourceLine(sourceLine);
        violation.setLineNumber(lineNumber);
        violation.setMessage(message);
        return violation;
    }

    /**
     * Create a new Violation for the AST node.
     * @param sourceCode - the SourceCode
     * @param node - the Groovy AST Node
     * @param message - the message for the violation; defaults to null
     * @return a new Violation object
     */
    protected Violation createViolation(SourceCode sourceCode, ASTNode node, String message) {
        // AST line numbers are 1-based; SourceCode.line() is 0-based.
        String sourceLine = sourceCode.line(node.getLineNumber() - 1);
        return createViolation(node.getLineNumber(), sourceLine, message);
    }

    /**
     * Create and return a new Violation for this rule and the specified import
     * @param sourceCode - the SourceCode
     * @param importNode - the ImportNode for the import triggering the violation
     * @param message - the message for the violation; may be null
     * @return a new Violation object
     */
    protected Violation createViolationForImport(SourceCode sourceCode, ImportNode importNode, String message) {
        Map importInfo = ImportUtil.sourceLineAndNumberForImport(sourceCode, importNode);
        Violation violation = new Violation();
        violation.setRule(this);
        violation.setSourceLine((String) importInfo.get("sourceLine"));
        violation.setLineNumber((Integer) importInfo.get("lineNumber"));
        violation.setMessage(message);
        return violation;
    }

    /**
     * Create and return a new Violation for this rule and the specified import className and alias
     * @param sourceCode - the SourceCode
     * @param className - the class name (as specified within the import statement)
     * @param alias - the alias for the import statement
     * @param violationMessage - the violation message; may be null
     * @return a new Violation object
     */
    protected Violation createViolationForImport(SourceCode sourceCode, String className, String alias, String violationMessage) {
        Map importInfo = ImportUtil.sourceLineAndNumberForImport(sourceCode, className, alias);
        Violation violation = new Violation();
        violation.setRule(this);
        violation.setSourceLine((String) importInfo.get("sourceLine"));
        violation.setLineNumber((Integer) importInfo.get("lineNumber"));
        violation.setMessage(violationMessage);
        return violation;
    }

    // A rule is applied only when it is enabled, ready, and the source file matches the
    // configured file-name/path criteria.
    private boolean shouldApplyThisRuleTo(SourceCode sourceCode) {
        if (!enabled) {
            return false;
        }
        if (!isReady()) {
            return false;
        }
        SourceCodeCriteria criteria = new SourceCodeCriteria();
        criteria.setApplyToFilesMatching(getApplyToFilesMatching());
        criteria.setDoNotApplyToFilesMatching(getDoNotApplyToFilesMatching());
        criteria.setApplyToFileNames(getApplyToFileNames());
        criteria.setDoNotApplyToFileNames(getDoNotApplyToFileNames());
        return criteria.matches(sourceCode);
    }

    // e.g. "org.codenarc.rule.MyRule" -> "MyRule"
    private String getClassNameNoPackage() {
        String className = getClass().getName();
        int indexOfLastPeriod = className.lastIndexOf('.');
        return (indexOfLastPeriod == -1) ? className : className.substring(indexOfLastPeriod + 1);
    }

    /**
     * If the violationMessage property of this rule has been set, then use it to set the
     * message within each violation, overriding the original message(s), if any.
     */
    private void overrideViolationMessageIfNecessary(List<Violation> violations) {
        if (violationMessage != null && violations != null) {
            for (Violation violation : violations) {
                violation.setMessage(violationMessage);
            }
        }
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public String getApplyToFilesMatching() {
        return applyToFilesMatching;
    }

    public void setApplyToFilesMatching(String applyToFilesMatching) {
        validateRegularExpression(applyToFilesMatching);
        this.applyToFilesMatching = applyToFilesMatching;
    }

    public String getDoNotApplyToFilesMatching() {
        return doNotApplyToFilesMatching;
    }

    public void setDoNotApplyToFilesMatching(String doNotApplyToFilesMatching) {
        validateRegularExpression(doNotApplyToFilesMatching);
        this.doNotApplyToFilesMatching = doNotApplyToFilesMatching;
    }

    public String getApplyToFileNames() {
        return applyToFileNames;
    }

    public void setApplyToFileNames(String applyToFileNames) {
        this.applyToFileNames = applyToFileNames;
    }

    public String getDoNotApplyToFileNames() {
        return doNotApplyToFileNames;
    }

    public void setDoNotApplyToFileNames(String doNotApplyToFileNames) {
        this.doNotApplyToFileNames = doNotApplyToFileNames;
    }

    public String getViolationMessage() {
        return violationMessage;
    }

    public void setViolationMessage(String violationMessage) {
        this.violationMessage = violationMessage;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    // Eagerly validate a configured pattern so a bad regex fails at configuration time
    // rather than on first use. BUG FIX: a null value means "no restriction" and is a
    // legal setter argument (e.g. clearing a previously-configured pattern); the previous
    // unconditional Pattern.compile(regex) threw NullPointerException for null.
    private void validateRegularExpression(String regex) {
        if (regex != null) {
            Pattern.compile(regex);
        }
    }
}
/*
 * $Id$
 *
 * SARL is an general-purpose agent programming language.
 * More details on http://www.sarl.io
 *
 * Copyright (C) 2014-2021 the original authors or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.sarl.lang.tests.general.compilation.general;

import static io.sarl.tests.api.tools.TestUtils.multilineString;
import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import io.sarl.lang.SARLVersion;
import io.sarl.lang.sarl.SarlPackage;
import io.sarl.tests.api.AbstractSarlTest;

/**
 * Compilation tests verifying that the Xtend "active annotations"
 * ({@code @Accessors}, {@code @Data}, {@code @Delegate}, {@code @ToString})
 * are correctly expanded when SARL code is compiled to Java.
 * Each test compiles a small SARL snippet and compares the generated Java
 * source against the exact expected text.
 *
 * @author $Author: sgalland$
 * @version $Name$ $Revision$ $Date$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
@SuppressWarnings("all")
@DisplayName("Compilation: active annotations")
@Tag("core")
@Tag("compileToJava")
public class ActiveAnnotationTest {

    /** Tests for the {@code @Accessors} annotation (generated getters/setters). */
    @Nested
    @DisplayName("Compilation: @Accessors")
    public class AccessorsTest extends AbstractSarlTest {

        /** {@code @Accessors} on a field: both getter and setter are generated. */
        @Test
        public void inClassField_01() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Accessors",
                "class C1 {",
                " @Accessors var field : double = 0",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.Accessors;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 {",
                " @Accessors",
                " private double field = 0;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " if (this == obj)",
                " return true;",
                " if (obj == null)",
                " return false;",
                " if (getClass() != obj.getClass())",
                " return false;",
                " C1 other = (C1) obj;",
                " if (Double.doubleToLongBits(other.field) != Double.doubleToLongBits(this.field))",
                " return false;",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " final int prime = 31;",
                " result = prime * result + Double.hashCode(this.field);",
                " return result;",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " @Pure",
                " public double getField() {",
                " return this.field;",
                " }",
                " ",
                " public void setField(final double field) {",
                " this.field = field;",
                " }",
                "}",
                ""
            );
            getCompileHelper().assertCompilesTo(source, expected);
        }

        /** {@code @Accessors(PROTECTED_SETTER)} on a field: only a protected setter is generated (no getter). */
        @Test
        public void inClassField_02() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Accessors",
                "import org.eclipse.xtend.lib.annotations.AccessorType",
                "class C1 {",
                " @Accessors(PROTECTED_SETTER) var field : double = 0",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.AccessorType;",
                "import org.eclipse.xtend.lib.annotations.Accessors;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 {",
                " @Accessors(AccessorType.PROTECTED_SETTER)",
                " private double field = 0;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " if (this == obj)",
                " return true;",
                " if (obj == null)",
                " return false;",
                " if (getClass() != obj.getClass())",
                " return false;",
                " C1 other = (C1) obj;",
                " if (Double.doubleToLongBits(other.field) != Double.doubleToLongBits(this.field))",
                " return false;",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " final int prime = 31;",
                " result = prime * result + Double.hashCode(this.field);",
                " return result;",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " protected void setField(final double field) {",
                " this.field = field;",
                " }",
                "}",
                ""
            );
            getCompileHelper().assertCompilesTo(source, expected);
        }

        /** {@code @Accessors} on the whole class: accessors generated for every field. */
        @Test
        public void inClass_01() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Accessors",
                "@Accessors class C1 {",
                " var field : double = 0",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.Accessors;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "",
                "@Accessors",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 {",
                " private double field = 0;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " if (this == obj)",
                " return true;",
                " if (obj == null)",
                " return false;",
                " if (getClass() != obj.getClass())",
                " return false;",
                " C1 other = (C1) obj;",
                " if (Double.doubleToLongBits(other.field) != Double.doubleToLongBits(this.field))",
                " return false;",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " final int prime = 31;",
                " result = prime * result + Double.hashCode(this.field);",
                " return result;",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " @Pure",
                " public double getField() {",
                " return this.field;",
                " }",
                " ",
                " public void setField(final double field) {",
                " this.field = field;",
                " }",
                "}",
                ""
            );
            getCompileHelper().assertCompilesTo(source, expected);
        }

        /** {@code @Accessors(PROTECTED_SETTER)} on the whole class: only a protected setter is generated. */
        @Test
        public void inClass_02() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Accessors",
                "import org.eclipse.xtend.lib.annotations.AccessorType",
                "@Accessors(PROTECTED_SETTER) class C1 {",
                " var field : double = 0",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.AccessorType;",
                "import org.eclipse.xtend.lib.annotations.Accessors;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "",
                "@Accessors(AccessorType.PROTECTED_SETTER)",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 {",
                " private double field = 0;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " if (this == obj)",
                " return true;",
                " if (obj == null)",
                " return false;",
                " if (getClass() != obj.getClass())",
                " return false;",
                " C1 other = (C1) obj;",
                " if (Double.doubleToLongBits(other.field) != Double.doubleToLongBits(this.field))",
                " return false;",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " final int prime = 31;",
                " result = prime * result + Double.hashCode(this.field);",
                " return result;",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " protected void setField(final double field) {",
                " this.field = field;",
                " }",
                "}",
                ""
            );
            getCompileHelper().assertCompilesTo(source, expected);
        }
    }

    /** Tests for the {@code @Data} annotation (immutable value class: final field, ctor, getter, toString). */
    @Nested
    @DisplayName("Compilation: @Data")
    public class DataTest extends AbstractSarlTest {

        /** {@code @Data} on a class with one {@code val} field. */
        @Test
        public void inClass_01() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Data",
                "@Data class C1 {",
                " val field : double",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.Data;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "import org.eclipse.xtext.xbase.lib.util.ToStringBuilder;",
                "",
                "@Data",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 {",
                " private final double field;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " if (this == obj)",
                " return true;",
                " if (obj == null)",
                " return false;",
                " if (getClass() != obj.getClass())",
                " return false;",
                " C1 other = (C1) obj;",
                " if (Double.doubleToLongBits(other.field) != Double.doubleToLongBits(this.field))",
                " return false;",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " final int prime = 31;",
                " result = prime * result + Double.hashCode(this.field);",
                " return result;",
                " }",
                " ",
                " public C1(final double field) {",
                " super();",
                " this.field = field;",
                " }",
                " ",
                " @Override",
                " @Pure",
                " public String toString() {",
                " ToStringBuilder b = new ToStringBuilder(this);",
                " b.add(\"field\", this.field);",
                " return b.toString();",
                " }",
                " ",
                " @Pure",
                " public double getField() {",
                " return this.field;",
                " }",
                "}",
                ""
            );
            getCompileHelper().assertCompilesTo(source, expected);
        }
    }

    /** Tests for the {@code @Delegate} annotation (interface methods forwarded to a delegate). */
    @Nested
    @DisplayName("Compilation: @Delegate")
    public class DelegateTest extends AbstractSarlTest {

        /** {@code @Delegate} on a field: interface methods forward to the field. */
        @Test
        public void inClass_01() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Delegate",
                "interface I1 {",
                " def myFct",
                "}",
                "class C1 implements I1 {",
                " @Delegate var field : I1",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.Delegate;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 implements I1 {",
                " @Delegate",
                " private I1 field;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " return result;",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " public void myFct() {",
                " this.field.myFct();",
                " }",
                "}",
                ""
            );
            // Only the generated code of C1 is compared (I1 is also generated by this compilation).
            getCompileHelper().compile(source, (it) -> {
                assertEquals(expected, it.getGeneratedCode("C1"));
            });
        }

        /** {@code @Delegate} on a provider method: calls are routed through provideDelegate(). */
        @Test
        public void inClass_02() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.Delegate",
                "interface I1 {",
                " def compareTo(a : String) : int",
                "}",
                "class MyDelegate implements I1 {",
                " def compareTo(a : String) : int { 0 }",
                "}",
                "class C1 implements I1 {",
                " @Delegate def provideDelegate(methodName : String, paramTypes : Class<?>[], actualArguments : Object[]) : I1 {",
                " return new MyDelegate",
                " }",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.Delegate;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 implements I1 {",
                " @Delegate",
                " @Pure",
                " public I1 provideDelegate(final String methodName, final Class<?>[] paramTypes, final Object[] actualArguments) {",
                " return new MyDelegate();",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " public int compareTo(final String a) {",
                " return this.provideDelegate(\"compareTo\", new Class[]{String.class}, new Object[]{a}).compareTo(a);",
                " }",
                "}",
                ""
            );
            getCompileHelper().compile(source, (it) -> {
                assertEquals(expected, it.getGeneratedCode("C1"));
            });
        }
    }

    /** Tests for the {@code @ToString} annotation (generated toString() using ToStringBuilder). */
    @Nested
    @DisplayName("Compilation: @ToString")
    public class ToStringTest extends AbstractSarlTest {

        /** {@code @ToString} on a class with one field. */
        @Test
        public void inClass_01() throws Exception {
            String source = multilineString(
                "import org.eclipse.xtend.lib.annotations.ToString",
                "@ToString class C1 {",
                " var field : double = 0",
                "}"
            );
            String expected = multilineString(
                "import io.sarl.lang.annotation.SarlElementType;",
                "import io.sarl.lang.annotation.SarlSpecification;",
                "import io.sarl.lang.annotation.SyntheticMember;",
                "import org.eclipse.xtend.lib.annotations.ToString;",
                "import org.eclipse.xtext.xbase.lib.Pure;",
                "import org.eclipse.xtext.xbase.lib.util.ToStringBuilder;",
                "",
                "@ToString",
                "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
                "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
                "@SuppressWarnings(\"all\")",
                "public class C1 {",
                " private double field = 0;",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public boolean equals(final Object obj) {",
                " if (this == obj)",
                " return true;",
                " if (obj == null)",
                " return false;",
                " if (getClass() != obj.getClass())",
                " return false;",
                " C1 other = (C1) obj;",
                " if (Double.doubleToLongBits(other.field) != Double.doubleToLongBits(this.field))",
                " return false;",
                " return super.equals(obj);",
                " }",
                " ",
                " @Override",
                " @Pure",
                " @SyntheticMember",
                " public int hashCode() {",
                " int result = super.hashCode();",
                " final int prime = 31;",
                " result = prime * result + Double.hashCode(this.field);",
                " return result;",
                " }",
                " ",
                " @SyntheticMember",
                " public C1() {",
                " super();",
                " }",
                " ",
                " @Override",
                " @Pure",
                " public String toString() {",
                " ToStringBuilder b = new ToStringBuilder(this);",
                " b.add(\"field\", this.field);",
                " return b.toString();",
                " }",
                "}",
                ""
            );
            getCompileHelper().assertCompilesTo(source, expected);
        }
    }
}
/* I2PTunnel is GPL'ed (with the exception mentioned in I2PTunnel.java) * (c) 2003 - 2004 mihi */ package net.i2p.i2ptunnel; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.Socket; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.BitSet; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.StringTokenizer; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import net.i2p.I2PAppContext; import net.i2p.client.streaming.I2PSocketException; import net.i2p.client.streaming.I2PSocketManager; import net.i2p.data.Base64; import net.i2p.data.DataHelper; import net.i2p.data.Destination; import net.i2p.data.i2cp.MessageStatusMessage; import net.i2p.util.EepGet; import net.i2p.util.EventDispatcher; import net.i2p.util.InternalSocket; import net.i2p.util.Log; import net.i2p.util.PasswordManager; import net.i2p.util.Translate; import net.i2p.util.TranslateReader; /** * Common things for HTTPClient and ConnectClient * Retrofit over them in 0.8.2 * * @since 0.8.2 */ public abstract class I2PTunnelHTTPClientBase extends I2PTunnelClientBase implements Runnable { private static final int PROXYNONCE_BYTES = 8; private static final int MD5_BYTES = 16; /** 24 */ private static final int NONCE_BYTES = DataHelper.DATE_LENGTH + MD5_BYTES; private static final long MAX_NONCE_AGE = 60*60*1000L; private static final int MAX_NONCE_COUNT = 1024; private static final String ERR_AUTH1 = "HTTP/1.1 407 Proxy Authentication Required\r\n" + "Content-Type: text/html; charset=UTF-8\r\n" + 
"Cache-control: no-cache\r\n" + "Connection: close\r\n"+ "Proxy-Connection: close\r\n"+ "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.5\r\n" + // try to get a UTF-8-encoded response back for the password "Proxy-Authenticate: "; // put the auth type and realm in between private static final String ERR_AUTH2 = "\r\n" + "\r\n" + "<html><body><H1>I2P ERROR: PROXY AUTHENTICATION REQUIRED</H1>" + "This proxy is configured to require authentication."; protected final List<String> _proxyList; protected final static String ERR_NO_OUTPROXY = "HTTP/1.1 503 Service Unavailable\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n"+ "Connection: close\r\n"+ "Proxy-Connection: close\r\n"+ "\r\n"+ "<html><body><H1>I2P ERROR: No outproxy found</H1>"+ "Your request was for a site outside of I2P, but you have no "+ "HTTP outproxy configured. Please configure an outproxy in I2PTunnel"; protected final static String ERR_DESTINATION_UNKNOWN = "HTTP/1.1 503 Service Unavailable\r\n" + "Content-Type: text/html; charset=iso-8859-1\r\n" + "Cache-control: no-cache\r\n" + "Connection: close\r\n"+ "Proxy-Connection: close\r\n"+ "\r\n" + "<html><body><H1>I2P ERROR: DESTINATION NOT FOUND</H1>" + "That I2P Destination was not found. Perhaps you pasted in the " + "wrong BASE64 I2P Destination or the link you are following is " + "bad. The host (or the WWW proxy, if you're using one) could also " + "be temporarily offline. You may want to <b>retry</b>. 
" + "Could not find the following Destination:<BR><BR><div>"; protected final static String SUCCESS_RESPONSE = "HTTP/1.1 200 Connection Established\r\n"+ "Proxy-agent: I2P\r\n"+ "\r\n"; private final byte[] _proxyNonce; private final ConcurrentHashMap<String, NonceInfo> _nonces; private final AtomicInteger _nonceCleanCounter = new AtomicInteger(); protected String getPrefix(long requestId) { return "HTTPClient[" + _clientId + '/' + requestId + "]: "; } protected String selectProxy() { synchronized (_proxyList) { int size = _proxyList.size(); if (size <= 0) return null; int index = _context.random().nextInt(size); return _proxyList.get(index); } } protected static final int DEFAULT_READ_TIMEOUT = 5*60*1000; protected static final AtomicLong __requestId = new AtomicLong(); public I2PTunnelHTTPClientBase(int localPort, boolean ownDest, Logging l, EventDispatcher notifyThis, String handlerName, I2PTunnel tunnel) throws IllegalArgumentException { super(localPort, ownDest, l, notifyThis, handlerName, tunnel); _proxyList = new ArrayList<String>(4); _proxyNonce = new byte[PROXYNONCE_BYTES]; _context.random().nextBytes(_proxyNonce); _nonces = new ConcurrentHashMap<String, NonceInfo>(); } /** * This constructor always starts the tunnel (ignoring the i2cp.delayOpen option). * It is used to add a client to an existing socket manager. 
     *
     * @param sktMgr the existing socket manager
     */
    public I2PTunnelHTTPClientBase(int localPort, Logging l, I2PSocketManager sktMgr,
                                   I2PTunnel tunnel, EventDispatcher notifyThis, long clientId)
                                  throws IllegalArgumentException {
        super(localPort, l, sktMgr, tunnel, notifyThis, clientId);
        _proxyList = new ArrayList<String>(4);
        // Per-instance random secret mixed into every digest-auth nonce we issue,
        // so nonces from other instances / restarts are rejected as stale.
        _proxyNonce = new byte[PROXYNONCE_BYTES];
        _context.random().nextBytes(_proxyNonce);
        _nonces = new ConcurrentHashMap<String, NonceInfo>();
    }

    //////// Authorization stuff

    /** all auth @since 0.8.2 */
    public static final String PROP_AUTH = "proxyAuth";
    public static final String PROP_USER = "proxyUsername";
    public static final String PROP_PW = "proxyPassword";
    /** additional users may be added with proxyPassword.user=pw */
    public static final String PROP_PW_PREFIX = PROP_PW + '.';
    public static final String PROP_OUTPROXY_AUTH = "outproxyAuth";
    public static final String PROP_OUTPROXY_USER = "outproxyUsername";
    public static final String PROP_OUTPROXY_PW = "outproxyPassword";
    /** passwords for specific outproxies may be added with outproxyUsername.fooproxy.i2p=user and outproxyPassword.fooproxy.i2p=pw */
    public static final String PROP_OUTPROXY_USER_PREFIX = PROP_OUTPROXY_USER + '.';
    public static final String PROP_OUTPROXY_PW_PREFIX = PROP_OUTPROXY_PW + '.';
    /** new style MD5 auth */
    public static final String PROP_PROXY_DIGEST_PREFIX = "proxy.auth.";
    public static final String PROP_PROXY_DIGEST_SUFFIX = ".md5";
    public static final String BASIC_AUTH = "basic";
    public static final String DIGEST_AUTH = "digest";

    /** The authentication realm advertised in the 407 challenge. */
    protected abstract String getRealm();

    /** Outcome of a proxy-authorization check; AUTH_STALE asks the client to retry with a fresh nonce. */
    protected enum AuthResult {AUTH_BAD_REQ, AUTH_BAD, AUTH_STALE, AUTH_GOOD}

    /**
     * Expiration time plus the set of nonce-count (nc) values already seen for
     * one issued digest-auth nonce, used to detect replayed requests.
     * @since 0.9.6
     */
    private static class NonceInfo {
        private final long expires;
        // bit i set == nonce count i has already been used
        private final BitSet counts;

        public NonceInfo(long exp) {
            expires = exp;
            counts = new BitSet(MAX_NONCE_COUNT);
        }

        public long getExpires() {
            return expires;
        }

        /**
         * Check and record a nonce count.
         * @param nc the client's nonce count, must be positive and below MAX_NONCE_COUNT
         * @return AUTH_GOOD if unseen, AUTH_BAD if replayed or non-positive,
         *         AUTH_STALE if the count space is exhausted
         */
        public AuthResult isValid(int nc) {
            if (nc <= 0)
                return AuthResult.AUTH_BAD;
            if (nc >= MAX_NONCE_COUNT)
                return AuthResult.AUTH_STALE;
            synchronized(counts) {
                if (counts.get(nc))
                    return AuthResult.AUTH_BAD;  // replayed nonce count
                counts.set(nc);
            }
            return AuthResult.AUTH_GOOD;
        }
    }

    /**
     * Update the outproxy list then call super.
     *
     * @since 0.9.12
     */
    @Override
    public void optionsUpdated(I2PTunnel tunnel) {
        if (getTunnel() != tunnel)
            return;
        Properties props = tunnel.getClientOptions();
        // see TunnelController.setSessionOptions()
        String proxies = props.getProperty("proxyList");
        if (proxies != null) {
            StringTokenizer tok = new StringTokenizer(proxies, ",; \r\n\t");
            synchronized(_proxyList) {
                _proxyList.clear();
                while (tok.hasMoreTokens()) {
                    String p = tok.nextToken().trim();
                    if (p.length() > 0)
                        _proxyList.add(p);
                }
            }
        } else {
            // no proxyList configured: drop any previously-configured outproxies
            synchronized(_proxyList) {
                _proxyList.clear();
            }
        }
        super.optionsUpdated(tunnel);
    }

    /**
     * Is the "digest" (MD5) flavor of proxy authentication configured?
     * @since 0.9.4
     */
    protected boolean isDigestAuthRequired() {
        String authRequired = getTunnel().getClientOptions().getProperty(PROP_AUTH);
        if (authRequired == null)
            return false;
        return authRequired.toLowerCase(Locale.US).equals("digest");
    }

    /**
     * Authorization
     * Ref: RFC 2617
     * If the socket is an InternalSocket, no auth required.
     *
     * @param method GET, POST, etc.
     * @param authorization may be null, the full auth line e.g. "Basic lskjlksjf"
     * @return success
     */
    protected AuthResult authorize(Socket s, long requestId, String method, String authorization) {
        String authRequired = getTunnel().getClientOptions().getProperty(PROP_AUTH);
        if (authRequired == null)
            return AuthResult.AUTH_GOOD;
        authRequired = authRequired.toLowerCase(Locale.US);
        if (authRequired.equals("false"))
            return AuthResult.AUTH_GOOD;
        if (s instanceof InternalSocket) {
            // in-JVM callers bypass proxy auth entirely
            if (_log.shouldLog(Log.INFO))
                _log.info(getPrefix(requestId) + "Internal access, no auth required");
            return AuthResult.AUTH_GOOD;
        }
        if (authorization == null)
            return AuthResult.AUTH_BAD;
        if (_log.shouldLog(Log.INFO))
            _log.info(getPrefix(requestId) + "Auth: " + authorization);
        String authLC = authorization.toLowerCase(Locale.US);
        if (authRequired.equals("true") || authRequired.equals(BASIC_AUTH)) {
            if (!authLC.startsWith("basic "))
                return AuthResult.AUTH_BAD;
            authorization = authorization.substring(6);
            // hmm safeDecode(foo, true) to use standard alphabet is private in Base64
            // (I2P's Base64 uses ~ and = in place of / and +, so map before decoding)
            byte[] decoded = Base64.decode(authorization.replace("/", "~").replace("+", "="));
            if (decoded != null) {
                // We send Accept-Charset: UTF-8 in the 407 so hopefully it comes back that way inside the B64 ?
                try {
                    String dec = new String(decoded, "UTF-8");
                    String[] parts = dec.split(":");
                    String user = parts[0];
                    String pw = parts[1];
                    // first try pw for that user
                    String configPW = getTunnel().getClientOptions().getProperty(PROP_PW_PREFIX + user);
                    if (configPW == null) {
                        // if not, look at default user and pw
                        String configUser = getTunnel().getClientOptions().getProperty(PROP_USER);
                        if (user.equals(configUser))
                            configPW = getTunnel().getClientOptions().getProperty(PROP_PW);
                    }
                    if (configPW != null) {
                        if (pw.equals(configPW)) {
                            if (_log.shouldLog(Log.INFO))
                                _log.info(getPrefix(requestId) + "Good auth - user: " + user + " pw: " + pw);
                            return AuthResult.AUTH_GOOD;
                        }
                    }
                    _log.logAlways(Log.WARN, "PROXY AUTH FAILURE: user " + user);
                } catch (UnsupportedEncodingException uee) {
                    _log.error(getPrefix(requestId) + "No UTF-8 support? B64: " + authorization, uee);
                } catch (ArrayIndexOutOfBoundsException aioobe) {
                    // no ':' in response
                    if (_log.shouldLog(Log.WARN))
                        _log.warn(getPrefix(requestId) + "Bad auth B64: " + authorization, aioobe);
                    return AuthResult.AUTH_BAD_REQ;
                }
                return AuthResult.AUTH_BAD;
            } else {
                if (_log.shouldLog(Log.WARN))
                    _log.warn(getPrefix(requestId) + "Bad auth B64: " + authorization);
                return AuthResult.AUTH_BAD_REQ;
            }
        } else if (authRequired.equals(DIGEST_AUTH)) {
            if (!authLC.startsWith("digest "))
                return AuthResult.AUTH_BAD;
            authorization = authorization.substring(7);
            Map<String, String> args = parseArgs(authorization);
            AuthResult rv = validateDigest(method, args);
            return rv;
        } else {
            _log.error("Unknown proxy authorization type configured: " + authRequired);
            return AuthResult.AUTH_BAD_REQ;
        }
    }

    /**
     * Verify all of it.
     * Ref: RFC 2617
     * @since 0.9.4
     */
    private AuthResult validateDigest(String method, Map<String, String> args) {
        String user = args.get("username");
        String realm = args.get("realm");
        String nonce = args.get("nonce");
        String qop = args.get("qop");
        String uri = args.get("uri");
        String cnonce = args.get("cnonce");
        String nc = args.get("nc");
        String response = args.get("response");
        if (user == null || realm == null || nonce == null || qop == null ||
            uri == null || cnonce == null || nc == null || response == null) {
            if (_log.shouldLog(Log.INFO))
                _log.info("Bad digest request: " + DataHelper.toString(args));
            return AuthResult.AUTH_BAD_REQ;
        }
        // nonce check (expiry + replayed nonce count)
        AuthResult check = verifyNonce(nonce, nc);
        if (check != AuthResult.AUTH_GOOD) {
            if (_log.shouldLog(Log.INFO))
                _log.info("Bad digest nonce: " + check + ' ' + DataHelper.toString(args));
            return check;
        }
        // get H(A1) == stored password
        String ha1 = getTunnel().getClientOptions().getProperty(PROP_PROXY_DIGEST_PREFIX + user +
                                                                PROP_PROXY_DIGEST_SUFFIX);
        if (ha1 == null) {
            _log.logAlways(Log.WARN, "PROXY AUTH FAILURE: user " + user);
            return AuthResult.AUTH_BAD;
        }
        // get H(A2)
        String a2 = method + ':' + uri;
        String ha2 = PasswordManager.md5Hex(a2);
        // response check: response must equal MD5(HA1:nonce:nc:cnonce:qop:HA2)
        String kd = ha1 + ':' + nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2;
        String hkd = PasswordManager.md5Hex(kd);
        if (!response.equals(hkd)) {
            _log.logAlways(Log.WARN, "PROXY AUTH FAILURE: user " + user);
            if (_log.shouldLog(Log.INFO))
                _log.info("Bad digest auth: " + DataHelper.toString(args));
            return AuthResult.AUTH_BAD;
        }
        if (_log.shouldLog(Log.INFO))
            _log.info("Good digest auth - user: " + user);
        return AuthResult.AUTH_GOOD;
    }

    /**
     * The Base 64 of 24 bytes: (now, md5 of (now, proxy nonce))
     * Also registers the new nonce in the _nonces map with its expiration.
     * @since 0.9.4
     */
    private String getNonce() {
        byte[] b = new byte[DataHelper.DATE_LENGTH + PROXYNONCE_BYTES];
        byte[] n = new byte[NONCE_BYTES];
        long now = _context.clock().now();
        DataHelper.toLong(b, 0, DataHelper.DATE_LENGTH, now);
        System.arraycopy(_proxyNonce, 0, b, DataHelper.DATE_LENGTH, PROXYNONCE_BYTES);
        // nonce = timestamp || MD5(timestamp || proxy secret)
        System.arraycopy(b, 0, n, 0, DataHelper.DATE_LENGTH);
        byte[] md5 = PasswordManager.md5Sum(b);
        System.arraycopy(md5, 0, n, DataHelper.DATE_LENGTH, MD5_BYTES);
        String rv = Base64.encode(n);
        _nonces.putIfAbsent(rv, new NonceInfo(now + MAX_NONCE_AGE));
        return rv;
    }

    /**
     * Verify the Base 64 of 24 bytes: (now, md5 of (now, proxy nonce))
     * and the nonce count.
     * @param b64 nonce non-null
     * @param ncs nonce count string non-null
     * @since 0.9.4
     */
    private AuthResult verifyNonce(String b64, String ncs) {
        // opportunistic cleanup of expired nonces, roughly every 16th call
        if (_nonceCleanCounter.incrementAndGet() % 16 == 0)
            cleanNonces();
        byte[] n = Base64.decode(b64);
        if (n == null || n.length != NONCE_BYTES)
            return AuthResult.AUTH_BAD;
        long now = _context.clock().now();
        long stamp = DataHelper.fromLong(n, 0, DataHelper.DATE_LENGTH);
        if (now - stamp > MAX_NONCE_AGE) {
            _nonces.remove(b64);
            return AuthResult.AUTH_STALE;
        }
        NonceInfo info = _nonces.get(b64);
        if (info == null)
            return AuthResult.AUTH_STALE;
        // recompute MD5(timestamp || proxy secret) and compare to the nonce's hash part
        byte[] b = new byte[DataHelper.DATE_LENGTH + PROXYNONCE_BYTES];
        System.arraycopy(n, 0, b, 0, DataHelper.DATE_LENGTH);
        System.arraycopy(_proxyNonce, 0, b, DataHelper.DATE_LENGTH, PROXYNONCE_BYTES);
        byte[] md5 = PasswordManager.md5Sum(b);
        if (!DataHelper.eq(md5, 0, n, DataHelper.DATE_LENGTH, MD5_BYTES))
            return AuthResult.AUTH_BAD;
        try {
            // nc is transmitted in hex per RFC 2617
            int nc = Integer.parseInt(ncs, 16);
            return info.isValid(nc);
        } catch (NumberFormatException nfe) {
            return AuthResult.AUTH_BAD;
        }
    }

    /**
     * Remove expired nonces from map
     * @since 0.9.6
     */
    private void cleanNonces() {
        long now = _context.clock().now();
        for (Iterator<NonceInfo> iter = _nonces.values().iterator(); iter.hasNext(); ) {
            NonceInfo info = iter.next();
            if (info.getExpires() <= now)
                iter.remove();
        }
    }

    /**
     * What to send if digest auth fails
     * (a 407 challenge line; Digest challenges carry nonce/algorithm/qop params)
     * @since 0.9.4
     */
    protected String getAuthError(boolean isStale) {
        boolean isDigest = isDigestAuthRequired();
        return ERR_AUTH1 +
               (isDigest ? "Digest" : "Basic") +
               " realm=\"" + getRealm() + '"' +
               (isDigest ? ", nonce=\"" + getNonce() + "\"," +
                           " algorithm=MD5," +
                           " qop=\"auth\"" +
                           (isStale ? ", stale=true" : "")
                         : "") +
               ERR_AUTH2;
    }

    /**
     * Modified from LoadClientAppsJob.
     * All keys are mapped to lower case.
     * Ref: RFC 2617
     *
     * @param args non-null
     * @since 0.9.4
     */
    private static Map<String, String> parseArgs(String args) {
        // moved to EepGet, since it needs this too
        return EepGet.parseAuthArgs(args);
    }

    //////// Error page stuff

    /**
     * foo =&gt; errordir/foo-header_xx.ht for lang xx, or errordir/foo-header.ht,
     * or the backup byte array on fail.
     *
     * .ht files must be UTF-8 encoded and use \r\n terminators so the
     * HTTP headers are conformant.
     * We can't use FileUtil.readFile() because it strips \r
     *
     * @return non-null
     * @since 0.9.4 moved from I2PTunnelHTTPClient
     */
    protected String getErrorPage(String base, String backup) {
        return getErrorPage(_context, base, backup);
    }

    /**
     * foo =&gt; errordir/foo-header_xx.ht for lang xx, or errordir/foo-header.ht,
     * or the backup byte array on fail.
     *
     * .ht files must be UTF-8 encoded and use \r\n terminators so the
     * HTTP headers are conformant.
     * We can't use FileUtil.readFile() because it strips \r
     *
     * @return non-null
     * @since 0.9.4 moved from I2PTunnelHTTPClient
     */
    protected static String getErrorPage(I2PAppContext ctx, String base, String backup) {
        File errorDir = new File(ctx.getBaseDir(), "docs");
        File file = new File(errorDir, base + "-header.ht");
        try {
            return readFile(ctx, file);
        } catch(IOException ioe) {
            // fall back to the built-in page text
            return backup;
        }
    }

    /** these strings go in the jar, not the war */
    private static final String BUNDLE_NAME = "net.i2p.i2ptunnel.proxy.messages";

    /**
     * Read an error-page file through a TranslateReader so embedded
     * translation tags are resolved with BUNDLE_NAME.
     * @since 0.9.4 moved from I2PTunnelHTTPClient
     */
    private static String readFile(I2PAppContext ctx, File file) throws IOException {
        Reader reader = null;
        char[] buf = new char[512];
        StringBuilder out = new StringBuilder(2048);
        try {
            int len;
            reader = new TranslateReader(ctx, BUNDLE_NAME, new FileInputStream(file));
            while((len = reader.read(buf)) > 0) {
                out.append(buf, 0, len);
            }
            return out.toString();
        } finally {
            try {
                if(reader != null)
                    reader.close();
            } catch(IOException foo) {}
        }
        // we won't ever get here
    }

    /**
     * Failure callback: writes an appropriate error page to the browser
     * and closes the client socket.
     * @since 0.9.14 moved from subclasses
     */
    protected class OnTimeout implements I2PTunnelRunner.FailCallback {
        private final Socket _socket;
        private final OutputStream _out;
        private final String _target;
        private final boolean _usingProxy;
        private final String _wwwProxy;
        private final long _requestId;

        public OnTimeout(Socket s, OutputStream out, String target, boolean usingProxy,
                         String wwwProxy, long id) {
            _socket = s;
            _out = out;
            _target = target;
            _usingProxy = usingProxy;
            _wwwProxy = wwwProxy;
            _requestId = id;
        }

        /**
         * @param ex may be null
         */
        public void onFail(Exception ex) {
            Throwable cause = ex != null ? ex.getCause() : null;
            if (cause != null && cause instanceof I2PSocketException) {
                // streaming-layer failure with a status code: pick a specific error page
                I2PSocketException ise = (I2PSocketException) cause;
                handleI2PSocketException(ise, _out, _target, _usingProxy, _wwwProxy);
            } else {
                handleClientException(ex, _out, _target, _usingProxy, _wwwProxy, _requestId);
            }
            closeSocket(_socket);
        }
    }

    /**
     * Write a generic "destination not found" error page to the browser.
     *
     * @param ex may be null
     * @since 0.9.14 moved from subclasses
     */
    protected void handleClientException(Exception ex, OutputStream out, String targetRequest,
                                         boolean usingWWWProxy, String wwwProxy, long requestId) {
        if (out == null)
            return;
        String header;
        if (usingWWWProxy)
            header = getErrorPage(I2PAppContext.getGlobalContext(), "dnfp", ERR_DESTINATION_UNKNOWN);
        else
            header = getErrorPage(I2PAppContext.getGlobalContext(), "dnf", ERR_DESTINATION_UNKNOWN);
        try {
            writeErrorMessage(header, out, targetRequest, usingWWWProxy, wwwProxy);
        } catch (IOException ioe) {}
    }

    /**
     * Generate an error page based on the status code
     * in our custom exception.
     *
     * @param ise may be null
     * @since 0.9.14
     */
    protected void handleI2PSocketException(I2PSocketException ise, OutputStream out,
                                            String targetRequest, boolean usingWWWProxy,
                                            String wwwProxy) {
        if (out == null)
            return;
        int status = ise != null ? ise.getStatus() : -1;
        String error;
        if (status == MessageStatusMessage.STATUS_SEND_FAILURE_NO_LEASESET) {
            // We won't get this one unless it is treated as a hard failure
            // in streaming. See PacketQueue.java
            error = usingWWWProxy ? "nolsp" : "nols";
        } else if (status == MessageStatusMessage.STATUS_SEND_FAILURE_UNSUPPORTED_ENCRYPTION) {
            error = usingWWWProxy ? "encp" : "enc";
        } else if (status == I2PSocketException.STATUS_CONNECTION_RESET) {
            error = usingWWWProxy ? "resetp" : "reset";
        } else {
            error = usingWWWProxy ? "dnfp" : "dnf";
        }
        String header = getErrorPage(error, ERR_DESTINATION_UNKNOWN);
        String message = ise != null ? ise.getLocalizedMessage() : "unknown error";
        try {
            writeErrorMessage(header, message, out, targetRequest, usingWWWProxy, wwwProxy);
        } catch(IOException ioe) {}
    }

    /**
     * No jump servers or extra message
     * @since 0.9.14
     */
    protected void writeErrorMessage(String errMessage, OutputStream out, String targetRequest,
                                     boolean usingWWWProxy, String wwwProxy) throws IOException {
        writeErrorMessage(errMessage, null, out, targetRequest, usingWWWProxy, wwwProxy, null);
    }

    /**
     * No extra message
     * @param jumpServers comma- or space-separated list, or null
     * @since 0.9.14 moved from subclasses
     */
    protected void writeErrorMessage(String errMessage, OutputStream out, String targetRequest,
                                     boolean usingWWWProxy, String wwwProxy,
                                     String jumpServers) throws IOException {
        writeErrorMessage(errMessage, null, out, targetRequest, usingWWWProxy, wwwProxy, jumpServers);
    }

    /**
     * No jump servers
     * @param extraMessage extra message or null, will be HTML-escaped
     * @since 0.9.14
     */
    protected void writeErrorMessage(String errMessage, String extraMessage, OutputStream out,
                                     String targetRequest, boolean usingWWWProxy,
                                     String wwwProxy) throws IOException {
        writeErrorMessage(errMessage, extraMessage, out, targetRequest, usingWWWProxy, wwwProxy, null);
    }

    /**
     * @param jumpServers comma- or space-separated list, or null
     * @param extraMessage extra message or null, will be HTML-escaped
     * @since 0.9.14
     */
    protected void writeErrorMessage(String errMessage, String extraMessage, OutputStream outs,
                                     String targetRequest, boolean usingWWWProxy,
                                     String wwwProxy, String jumpServers) throws IOException {
        if (outs == null)
            return;
        Writer out = new BufferedWriter(new OutputStreamWriter(outs, "UTF-8"));
        out.write(errMessage);
        if (targetRequest != null) {
            String uri = DataHelper.escapeHTML(targetRequest);
            out.write("<a href=\"");
            out.write(uri);
            out.write("\">");
            // Long URLs are truncated in the visible link text only
            if (targetRequest.length() > 80)
                out.write(DataHelper.escapeHTML(targetRequest.substring(0, 75)) + "&hellip;");
            else
                out.write(uri);
            out.write("</a>");
            if (usingWWWProxy) {
                out.write("<br><br><b>");
                out.write(_("HTTP Outproxy"));
                out.write(":</b> " + wwwProxy);
            }
            if (extraMessage != null) {
                out.write("<br><br><b>" + DataHelper.escapeHTML(extraMessage) + "</b>");
            }
            if (jumpServers != null && jumpServers.length() > 0) {
                boolean first = true;
                if(uri.startsWith("http://")) {
                    uri = uri.substring(7);
                }
                StringTokenizer tok = new StringTokenizer(jumpServers, ", ");
                while(tok.hasMoreTokens()) {
                    String jurl = tok.nextToken();
                    String jumphost;
                    try {
                        // only http:// URLs on .i2p hosts qualify as jump services
                        URI jURI = new URI(jurl);
                        String proto = jURI.getScheme();
                        jumphost = jURI.getHost();
                        if (proto == null || jumphost == null ||
                            !proto.toLowerCase(Locale.US).equals("http"))
                            continue;
                        jumphost = jumphost.toLowerCase(Locale.US);
                        if (!jumphost.endsWith(".i2p"))
                            continue;
                    } catch(URISyntaxException use) {
                        continue;
                    }
                    // Skip jump servers we don't know
                    if (!jumphost.endsWith(".b32.i2p")) {
                        Destination dest = _context.namingService().lookup(jumphost);
                        if(dest == null) {
                            continue;
                        }
                    }
                    if (first) {
                        first = false;
                        out.write("<br><br><h3>");
                        out.write(_("Click a link below for an address helper from a jump service"));
                        out.write("</h3>\n");
                    } else {
                        out.write("<br>");
                    }
                    out.write("<a href=\"");
                    out.write(jurl);
                    out.write(uri);
                    out.write("\">");
                    // Translators: parameter is a host name
                    out.write(_("{0} jump service", jumphost));
                    out.write("</a>\n");
                }
            }
        }
        out.write("</div>");
        writeFooter(out);
    }

    /**
     * Flushes.
     *
     * Public only for LocalHTTPServer, not for general use
     * @since 0.9.14 moved from I2PTunnelHTTPClient
     */
    public static void writeFooter(OutputStream out) throws IOException {
        out.write(getFooter().getBytes("UTF-8"));
        out.flush();
    }

    /**
     * Flushes.
     *
     * Public only for LocalHTTPServer, not for general use
     * @since 0.9.19
     */
    public static void writeFooter(Writer out) throws IOException {
        out.write(getFooter());
        out.flush();
    }

    private static String getFooter() {
        // The css is hiding this div for now, but we'll keep it here anyway
        // Tag the strings below for translation if we unhide it.
        StringBuilder buf = new StringBuilder(128);
        buf.append("<div class=\"proxyfooter\"><p><i>I2P HTTP Proxy Server<br>Generated on: ")
           .append(new Date().toString())
           .append("</i></div></body></html>\n");
        return buf.toString();
    }

    /**
     * Translate
     * @since 0.9.14 moved from I2PTunnelHTTPClient
     */
    protected String _(String key) {
        return Translate.getString(key, _context, BUNDLE_NAME);
    }

    /**
     * Translate
     * {0}
     * @since 0.9.14 moved from I2PTunnelHTTPClient
     */
    protected String _(String key, Object o) {
        return Translate.getString(key, o, _context, BUNDLE_NAME);
    }

    /**
     * Translate
     * {0} and {1}
     * @since 0.9.14 moved from I2PTunnelHTTPClient
     */
    protected String _(String key, Object o, Object o2) {
        return Translate.getString(key, o, o2, _context, BUNDLE_NAME);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.identifier;

import java.text.ParseException;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.jcr.PropertyType;
import javax.jcr.query.Query;

import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.collect.Iterators;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.Result;
import org.apache.jackrabbit.oak.api.ResultRow;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.memory.StringPropertyState;
import org.apache.jackrabbit.oak.plugins.nodetype.ReadOnlyNodeTypeManager;
import org.apache.jackrabbit.oak.plugins.version.VersionConstants;
import org.apache.jackrabbit.oak.spi.query.PropertyValues;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Predicates.notNull;
import static com.google.common.collect.Iterators.emptyIterator;
import static com.google.common.collect.Iterators.filter;
import static com.google.common.collect.Iterators.singletonIterator;
import static com.google.common.collect.Iterators.transform;
import static org.apache.jackrabbit.oak.api.QueryEngine.NO_MAPPINGS;

/**
 * Generates, validates and resolves identifiers for trees.
 * <p>
 * An identifier is either an absolute path, or the {@code jcr:uuid} of the
 * nearest referenceable ancestor plus the relative path from that ancestor
 * (see {@link #getIdentifier(Tree)}). UUID lookups are performed via
 * JCR-SQL2 queries on the {@link Root} passed to the constructor.
 */
public class IdentifierManager {

    private static final Logger log = LoggerFactory.getLogger(IdentifierManager.class);

    private final Root root;
    private final ReadOnlyNodeTypeManager nodeTypeManager;

    public IdentifierManager(Root root) {
        this.root = root;
        this.nodeTypeManager = ReadOnlyNodeTypeManager.getInstance(root, NamePathMapper.DEFAULT);
    }

    /** A fresh random (version 4) UUID string. */
    @Nonnull
    public static String generateUUID() {
        return UUID.randomUUID().toString();
    }

    /** A deterministic name-based UUID derived from the UTF-8 bytes of {@code hint}. */
    @Nonnull
    public static String generateUUID(String hint) {
        UUID uuid = UUID.nameUUIDFromBytes(hint.getBytes(Charsets.UTF_8));
        return uuid.toString();
    }

    /** {@code true} iff {@code uuid} parses as a UUID via {@link UUID#fromString}. */
    public static boolean isValidUUID(String uuid) {
        try {
            UUID.fromString(uuid);
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }

    /**
     * Return the identifier of a tree.
     * <p>
     * The identifier is the tree's {@code jcr:uuid} if present; otherwise the
     * parent's identifier with this tree's name appended (recursing up until
     * a uuid or the root, which yields "/", is found).
     *
     * @param tree  a tree
     * @return  identifier of {@code tree}
     */
    @Nonnull
    public static String getIdentifier(Tree tree) {
        PropertyState property = tree.getProperty(JcrConstants.JCR_UUID);
        if (property != null) {
            return property.getValue(Type.STRING);
        } else if (tree.isRoot()) {
            return "/";
        } else {
            String parentId = getIdentifier(tree.getParent());
            return PathUtils.concat(parentId, tree.getName());
        }
    }

    /**
     * The possibly non existing tree identified by the specified {@code identifier} or {@code null}.
     *
     * @param identifier The identifier of the tree such as exposed by {@link #getIdentifier(Tree)}
     * @return The tree with the given {@code identifier} or {@code null} if no
     *         such tree exists.
     * @throws IllegalArgumentException if the uuid portion of the identifier is not a valid UUID
     */
    @CheckForNull
    public Tree getTree(String identifier) {
        if (identifier.startsWith("/")) {
            // plain absolute path
            return root.getTree(identifier);
        } else {
            // uuid, optionally followed by "/relative/path"
            int k = identifier.indexOf('/');
            String uuid = k == -1
                    ? identifier
                    : identifier.substring(0, k);

            checkArgument(isValidUUID(uuid), "Not a valid identifier '" + identifier + '\'');

            String basePath = resolveUUID(uuid);
            if (basePath == null) {
                return null;
            } else if (k == -1) {
                return root.getTree(basePath);
            } else {
                return root.getTree(PathUtils.concat(basePath, identifier.substring(k + 1)));
            }
        }
    }

    /**
     * The path of the tree identified by the specified {@code identifier} or {@code null}.
     *
     * @param identifier The identifier of the tree such as exposed by {@link #getIdentifier(Tree)}
     * @return The path of the tree with the given {@code identifier} or {@code null} if no
     *         such tree exists or if the tree is not accessible.
     */
    @CheckForNull
    public String getPath(String identifier) {
        Tree tree = getTree(identifier);
        return tree != null && tree.exists()
                ? tree.getPath()
                : null;
    }

    /**
     * Returns the path of the tree references by the specified (weak)
     * reference {@code PropertyState}.
     *
     * @param referenceValue  A (weak) reference value.
     * @return The tree with the given {@code identifier} or {@code null} if no
     *         such tree exists or isn't accessible to the content session.
     * @throws IllegalArgumentException if the property is not of type
     *         REFERENCE or WEAKREFERENCE
     */
    @CheckForNull
    public String getPath(PropertyState referenceValue) {
        int type = referenceValue.getType().tag();
        if (type == PropertyType.REFERENCE || type == PropertyType.WEAKREFERENCE) {
            return resolveUUID(referenceValue);
        } else {
            throw new IllegalArgumentException("Invalid value type");
        }
    }

    /**
     * Returns the path of the tree references by the specified (weak)
     * reference {@code PropertyState}.
     *
     * @param referenceValue  A (weak) reference value.
     * @return The tree with the given {@code identifier} or {@code null} if no
     *         such tree exists or isn't accessible to the content session.
     * @throws IllegalArgumentException if the property is not of type
     *         REFERENCE or WEAKREFERENCE
     */
    @CheckForNull
    public String getPath(PropertyValue referenceValue) {
        int type = referenceValue.getType().tag();
        if (type == PropertyType.REFERENCE || type == PropertyType.WEAKREFERENCE) {
            return resolveUUID(referenceValue);
        } else {
            throw new IllegalArgumentException("Invalid value type");
        }
    }

    /**
     * Searches all reference properties to the specified {@code tree} that match
     * the given name and node type constraints.
     *
     * @param weak          if {@code true} only weak references are returned. Otherwise only
     *                      hard references are returned.
     * @param tree          The tree for which references should be searched.
     * @param propertyName  A name constraint for the reference properties;
     *                      {@code null} if no constraint should be enforced.
     * @param nodeTypeNames Node type constraints to be enforced when using
     *                      for reference properties; the specified names are expected to be internal
     *                      oak names.
     * @return A set of oak paths of those reference properties referring to the
     *         specified {@code tree} and matching the constraints.
     */
    @Nonnull
    public Iterable<String> getReferences(boolean weak, Tree tree, final String propertyName,
                                          final String... nodeTypeNames) {
        if (!nodeTypeManager.isNodeType(tree, JcrConstants.MIX_REFERENCEABLE)) {
            return Collections.emptySet(); // shortcut
        }

        final String uuid = getIdentifier(tree);
        String reference = weak ? PropertyType.TYPENAME_WEAKREFERENCE : PropertyType.TYPENAME_REFERENCE;
        String pName = propertyName == null ? "*" : propertyName;   // TODO: sanitize against injection attacks!?
        Map<String, ? extends PropertyValue> bindings = Collections.singletonMap("uuid",
                PropertyValues.newString(uuid));
        try {
            // NOTE(review): pName is concatenated into the statement; only the
            // uuid is passed as a bind variable (see TODO above)
            Result result = root.getQueryEngine().executeQuery(
                    "SELECT * FROM [nt:base] WHERE PROPERTY([" + pName + "], '" + reference + "') = $uuid",
                    Query.JCR_SQL2, Long.MAX_VALUE, 0, bindings, NO_MAPPINGS);
            return findPaths(result, uuid, propertyName, nodeTypeNames,
                    weak ? Type.WEAKREFERENCE : Type.REFERENCE,
                    weak ? Type.WEAKREFERENCES : Type.REFERENCES);
        } catch (ParseException e) {
            log.error("query failed", e);
            return Collections.emptySet();
        }
    }

    /**
     * Lazily maps query result rows to the oak paths of the matching
     * reference properties, applying the name/node-type constraints.
     */
    private Iterable<String> findPaths(final Result result, final String uuid,
                                       final String propertyName, final String[] nodeTypeNames,
                                       final Type<?> type, final Type<?> types) {
        return new Iterable<String>() {
            @Override
            public Iterator<String> iterator() {
                return Iterators.concat(transform(result.getRows().iterator(), new RowToPaths()));
            }

            class RowToPaths implements Function<ResultRow, Iterator<String>> {
                @Override
                public Iterator<String> apply(ResultRow row) {
                    final String rowPath = row.getPath();

                    // maps a property to "rowPath/propName" if it holds a
                    // (single- or multi-valued) reference equal to uuid, else null
                    class PropertyToPath implements Function<PropertyState, String> {
                        @Override
                        public String apply(PropertyState pState) {
                            if (pState.isArray()) {
                                if (pState.getType() == types) {
                                    for (String value : pState.getValue(Type.STRINGS)) {
                                        if (uuid.equals(value)) {
                                            return PathUtils.concat(rowPath, pState.getName());
                                        }
                                    }
                                }
                            } else {
                                if (pState.getType() == type) {
                                    if (uuid.equals(pState.getValue(Type.STRING))) {
                                        return PathUtils.concat(rowPath, pState.getName());
                                    }
                                }
                            }
                            return null;
                        }
                    }

                    // skip references from the version storage (OAK-1196)
                    if (!rowPath.startsWith(VersionConstants.VERSION_STORE_PATH)) {
                        Tree tree = root.getTree(rowPath);
                        if (nodeTypeNames.length == 0 || containsNodeType(tree, nodeTypeNames)) {
                            if (propertyName == null) {
                                return filter(
                                        transform(tree.getProperties().iterator(), new PropertyToPath()),
                                        notNull());
                            } else {
                                // for a fixed property name, we don't need to look for it, but just assume that
                                // the search found the correct one
                                return singletonIterator(PathUtils.concat(rowPath, propertyName));
                            }
                        }
                    }
                    return emptyIterator();
                }

                /** {@code true} iff {@code tree} is of at least one of the given node types. */
                private boolean containsNodeType(Tree tree, String[] nodeTypeNames) {
                    for (String ntName : nodeTypeNames) {
                        if (nodeTypeManager.isNodeType(tree, ntName)) {
                            return true;
                        }
                    }
                    return false;
                }
            }
        };
    }

    /**
     * Resolve a {@code jcr:uuid} to the path of the node carrying it, or
     * {@code null} if there is no such node (or more than one, which is an error).
     */
    @CheckForNull
    public String resolveUUID(String uuid) {
        return resolveUUID(StringPropertyState.stringProperty("", uuid));
    }

    private String resolveUUID(PropertyState uuid) {
        return resolveUUID(PropertyValues.create(uuid));
    }

    private String resolveUUID(PropertyValue uuid) {
        try {
            Map<String, PropertyValue> bindings = Collections.singletonMap("id", uuid);
            Result result = root.getQueryEngine().executeQuery(
                    "SELECT * FROM [nt:base] WHERE [jcr:uuid] = $id", Query.JCR_SQL2,
                    Long.MAX_VALUE, 0, bindings, NO_MAPPINGS);

            String path = null;
            for (ResultRow rr : result.getRows()) {
                if (path != null) {
                    // jcr:uuid must be unique; bail out rather than pick arbitrarily
                    log.error("multiple results for identifier lookup: " + path + " vs. " + rr.getPath());
                    return null;
                } else {
                    path = rr.getPath();
                }
            }
            return path;
        } catch (ParseException ex) {
            log.error("query failed", ex);
            return null;
        }
    }
}