repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
PierreR/sonarqube | server/sonar-server/src/test/java/org/sonar/server/ws/WebServiceEngineTest.java | 15044 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.ws;
import com.google.common.collect.Maps;
import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.sonar.api.i18n.I18n;
import org.sonar.api.server.ws.Request;
import org.sonar.api.server.ws.RequestHandler;
import org.sonar.api.server.ws.Response;
import org.sonar.api.server.ws.WebService;
import org.sonar.api.server.ws.internal.ValidatingRequest;
import org.sonar.server.exceptions.BadRequestException;
import org.sonar.server.exceptions.Errors;
import org.sonar.server.exceptions.Message;
import org.sonar.server.plugins.MimeTypes;
import org.sonar.server.tester.UserSessionRule;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Medium test of {@link WebServiceEngine}: registers a sample web service
 * ({@link SystemWs}) and verifies request routing, parameter validation and
 * the JSON serialization of errors.
 */
public class WebServiceEngineTest {

  /**
   * Minimal in-memory {@link ValidatingRequest} used to drive the engine
   * without a servlet container. Parameters are backed by a plain map.
   */
  private static class SimpleRequest extends ValidatingRequest {
    private final String method;
    private Map<String, String> params = Maps.newHashMap();

    private SimpleRequest(String method) {
      this.method = method;
    }

    @Override
    public String method() {
      return method;
    }

    @Override
    public String getMediaType() {
      return MimeTypes.JSON;
    }

    @Override
    public boolean hasParam(String key) {
      // direct map lookup instead of scanning the key set
      return params.containsKey(key);
    }

    @Override
    protected String readParam(String key) {
      return params.get(key);
    }

    @Override
    protected InputStream readInputStreamParam(String key) {
      String param = readParam(key);
      return param == null ? null : IOUtils.toInputStream(param);
    }

    public SimpleRequest setParams(Map<String, String> m) {
      this.params = m;
      return this;
    }

    // null values are silently dropped so tests can pass optional params
    public SimpleRequest setParam(String key, @Nullable String value) {
      if (value != null) {
        params.put(key, value);
      }
      return this;
    }
  }

  @Rule
  public UserSessionRule userSessionRule = UserSessionRule.standalone();

  I18n i18n = mock(I18n.class);
  WebServiceEngine engine = new WebServiceEngine(new WebService[] {new SystemWs()}, i18n, userSessionRule);

  @Before
  public void start() {
    engine.start();
  }

  @After
  public void stop() {
    engine.stop();
  }

  @Test
  public void load_ws_definitions_at_startup() {
    assertThat(engine.controllers()).hasSize(1);
    assertThat(engine.controllers().get(0).path()).isEqualTo("api/system");
  }

  @Test
  public void execute_request() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "health");
    assertThat(response.stream().outputAsString()).isEqualTo("good");
  }

  @Test
  public void no_content() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "alive");
    assertThat(response.stream().outputAsString()).isEmpty();
  }

  @Test
  public void bad_controller() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/xxx", "health");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"Unknown web service: api/xxx\"}]}");
  }

  @Test
  public void bad_action() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "xxx");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"Unknown action: api/system/xxx\"}]}");
  }

  @Test
  public void method_get_not_allowed() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "ping");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"HTTP method POST is required\"}]}");
  }

  @Test
  public void method_post_required() {
    ValidatingRequest request = new SimpleRequest("POST");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "ping");
    assertThat(response.stream().outputAsString()).isEqualTo("pong");
  }

  @Test
  public void unknown_parameter_is_set() {
    ValidatingRequest request = new SimpleRequest("GET").setParam("unknown", "Unknown");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "fail_with_undeclared_parameter");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"BUG - parameter 'unknown' is undefined for action 'fail_with_undeclared_parameter'\"}]}");
  }

  @Test
  public void required_parameter_is_not_set() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "print");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"Parameter 'message' is missing\"}]}");
  }

  @Test
  public void optional_parameter_is_not_set() {
    ValidatingRequest request = new SimpleRequest("GET").setParam("message", "Hello World");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "print");
    // "author" falls back to its declared default value "-"
    assertThat(response.stream().outputAsString()).isEqualTo("Hello World by -");
  }

  @Test
  public void optional_parameter_is_set() {
    ValidatingRequest request = new SimpleRequest("GET")
      .setParam("message", "Hello World")
      .setParam("author", "Marcel");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "print");
    assertThat(response.stream().outputAsString()).isEqualTo("Hello World by Marcel");
  }

  @Test
  public void param_value_is_in_possible_values() {
    ValidatingRequest request = new SimpleRequest("GET")
      .setParam("message", "Hello World")
      .setParam("format", "json");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "print");
    assertThat(response.stream().outputAsString()).isEqualTo("Hello World by -");
  }

  @Test
  public void param_value_is_not_in_possible_values() {
    ValidatingRequest request = new SimpleRequest("GET")
      .setParam("message", "Hello World")
      .setParam("format", "html");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "print");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"Value of parameter 'format' (html) must be one of: [json, xml]\"}]}");
  }

  @Test
  public void internal_error() {
    ValidatingRequest request = new SimpleRequest("GET");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "fail");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[{\"msg\":\"Unexpected\"}]}");
    assertThat(response.stream().httpStatus()).isEqualTo(500);
    assertThat(response.stream().mediaType()).isEqualTo(MimeTypes.JSON);
  }

  @Test
  public void bad_request_with_i18n_message() {
    userSessionRule.setLocale(Locale.ENGLISH);
    ValidatingRequest request = new SimpleRequest("GET").setParam("count", "3");
    ServletResponse response = new ServletResponse();
    when(i18n.message(Locale.ENGLISH, "bad.request.reason", "bad.request.reason", 0)).thenReturn("reason #0");
    engine.execute(request, response, "api/system", "fail_with_i18n_message");
    assertThat(response.stream().outputAsString()).isEqualTo(
      "{\"errors\":[{\"msg\":\"reason #0\"}]}"
      );
    assertThat(response.stream().httpStatus()).isEqualTo(400);
    assertThat(response.stream().mediaType()).isEqualTo(MimeTypes.JSON);
  }

  @Test
  public void bad_request_with_multiple_messages() {
    ValidatingRequest request = new SimpleRequest("GET").setParam("count", "3");
    ServletResponse response = new ServletResponse();
    engine.execute(request, response, "api/system", "fail_with_multiple_messages");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":["
      + "{\"msg\":\"Bad request reason #0\"},"
      + "{\"msg\":\"Bad request reason #1\"},"
      + "{\"msg\":\"Bad request reason #2\"}"
      + "]}");
    assertThat(response.stream().httpStatus()).isEqualTo(400);
    assertThat(response.stream().mediaType()).isEqualTo(MimeTypes.JSON);
  }

  @Test
  public void bad_request_with_multiple_i18n_messages() {
    userSessionRule.setLocale(Locale.ENGLISH);
    ValidatingRequest request = new SimpleRequest("GET").setParam("count", "3");
    ServletResponse response = new ServletResponse();
    when(i18n.message(Locale.ENGLISH, "bad.request.reason", "bad.request.reason", 0)).thenReturn("reason #0");
    when(i18n.message(Locale.ENGLISH, "bad.request.reason", "bad.request.reason", 1)).thenReturn("reason #1");
    when(i18n.message(Locale.ENGLISH, "bad.request.reason", "bad.request.reason", 2)).thenReturn("reason #2");
    engine.execute(request, response, "api/system", "fail_with_multiple_i18n_messages");
    assertThat(response.stream().outputAsString()).isEqualTo("{\"errors\":[" +
      "{\"msg\":\"reason #0\"}," +
      "{\"msg\":\"reason #1\"}," +
      "{\"msg\":\"reason #2\"}]}");
    assertThat(response.stream().httpStatus()).isEqualTo(400);
    assertThat(response.stream().mediaType()).isEqualTo(MimeTypes.JSON);
  }

  @Test
  public void should_handle_headers() {
    ServletResponse response = new ServletResponse();
    String name = "Content-Disposition";
    String value = "attachment; filename=sonarqube.zip";
    response.setHeader(name, value);
    assertThat(response.getHeaderNames()).containsExactly(name);
    assertThat(response.getHeader(name)).isEqualTo(value);
  }

  /**
   * Sample web service exposing one action per scenario exercised above:
   * plain output, POST-only, failures, i18n failures, no-content and
   * parameter validation.
   */
  static class SystemWs implements WebService {
    @Override
    public void define(Context context) {
      NewController newController = context.createController("api/system");
      newController.createAction("health")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            try {
              response.stream().output().write("good".getBytes());
            } catch (IOException e) {
              throw new IllegalStateException(e);
            }
          }
        });
      newController.createAction("ping")
        .setPost(true)
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            try {
              response.stream().output().write("pong".getBytes());
            } catch (IOException e) {
              throw new IllegalStateException(e);
            }
          }
        });
      newController.createAction("fail")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            throw new IllegalStateException("Unexpected");
          }
        });
      newController.createAction("fail_with_i18n_message")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            throw new BadRequestException("bad.request.reason", 0);
          }
        });
      newController.createAction("fail_with_multiple_messages")
        .createParam("count", "Number of error messages to generate")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            Errors errors = new Errors();
            for (int count = 0; count < Integer.valueOf(request.param("count")); count++) {
              errors.add(Message.of("Bad request reason #" + count));
            }
            throw new BadRequestException(errors);
          }
        });
      newController.createAction("fail_with_multiple_i18n_messages")
        .createParam("count", "Number of error messages to generate")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            Errors errors = new Errors();
            for (int count = 0; count < Integer.valueOf(request.param("count")); count++) {
              errors.add(Message.of("bad.request.reason", count));
            }
            throw new BadRequestException(errors);
          }
        });
      newController.createAction("alive")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            response.noContent();
          }
        });
      newController.createAction("fail_with_undeclared_parameter")
        .setHandler(new RequestHandler() {
          @Override
          public void handle(Request request, Response response) {
            response.newJsonWriter().prop("unknown", request.param("unknown"));
          }
        });
      // parameter "message" is required but not "author"
      NewAction print = newController.createAction("print");
      print.createParam("message").setDescription("required message").setRequired(true);
      print.createParam("author").setDescription("optional author").setDefaultValue("-");
      print.createParam("format").setDescription("optional format").setPossibleValues("json", "xml");
      print.setHandler(new RequestHandler() {
        @Override
        public void handle(Request request, Response response) {
          try {
            request.param("format");
            IOUtils.write(
              request.mandatoryParam("message") + " by " + request.param("author", "nobody"), response.stream().output());
          } catch (IOException e) {
            throw new IllegalStateException(e);
          }
        }
      });
      newController.done();
    }
  }
}
| lgpl-3.0 |
abbeyj/sonarqube | server/sonar-server/src/test/java/org/sonar/server/computation/queue/report/CleanReportQueueListenerTest.java | 1990 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.computation.queue.report;
import org.junit.Test;
import org.sonar.db.ce.CeActivityDto;
import org.sonar.db.ce.CeTaskTypes;
import org.sonar.server.computation.queue.CeTask;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
public class CleanReportQueueListenerTest {
  ReportFiles reportFiles = mock(ReportFiles.class);
  CleanReportQueueListener underTest = new CleanReportQueueListener(reportFiles);

  @Test
  public void remove_report_file_if_success() {
    underTest.onRemoved(newReportTask("TASK_1"), CeActivityDto.Status.SUCCESS);
    verify(reportFiles).deleteIfExists("TASK_1");
  }

  @Test
  public void remove_report_file_if_failure() {
    underTest.onRemoved(newReportTask("TASK_1"), CeActivityDto.Status.FAILED);
    verify(reportFiles).deleteIfExists("TASK_1");
  }

  /** Builds the report task both tests use: same uuid, type, component and null submitter. */
  private static CeTask newReportTask(String uuid) {
    return new CeTask.Builder()
      .setUuid(uuid)
      .setType(CeTaskTypes.REPORT)
      .setComponentUuid("PROJECT_1")
      .setSubmitterLogin(null)
      .build();
  }
}
| lgpl-3.0 |
yogthos/itext | src/com/lowagie/text/FontFactoryImp.java | 28617 | /*
* $Id: FontFactoryImp.java 4063 2009-09-13 19:02:46Z psoares33 $
*
* Copyright 2002 by Bruno Lowagie.
*
* The contents of this file are subject to the Mozilla Public License Version 1.1
* (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the License.
*
* The Original Code is 'iText, a free JAVA-PDF library'.
*
* The Initial Developer of the Original Code is Bruno Lowagie. Portions created by
* the Initial Developer are Copyright (C) 1999, 2000, 2001, 2002 by Bruno Lowagie.
* All Rights Reserved.
* Co-Developer of the code is Paulo Soares. Portions created by the Co-Developer
* are Copyright (C) 2000, 2001, 2002 by Paulo Soares. All Rights Reserved.
*
* Contributor(s): all the names of the contributors are added in the source code
* where applicable.
*
* Alternatively, the contents of this file may be used under the terms of the
* LGPL license (the "GNU LIBRARY GENERAL PUBLIC LICENSE"), in which case the
* provisions of LGPL are applicable instead of those above. If you wish to
* allow use of your version of this file only under the terms of the LGPL
* License and not to allow others to use your version of this file under
* the MPL, indicate your decision by deleting the provisions above and
* replace them with the notice and other provisions required by the LGPL.
* If you do not delete the provisions above, a recipient may use your version
* of this file under either the MPL or the GNU LIBRARY GENERAL PUBLIC LICENSE.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the MPL as stated above or under the terms of the GNU
* Library General Public License as published by the Free Software Foundation;
* either version 2 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Library general Public License for more
* details.
*
* If you didn't download this code from the following link, you should check if
* you aren't using an obsolete version:
* http://www.lowagie.com/iText/
*/
package com.lowagie.text;
import java.awt.Color;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import com.lowagie.text.html.Markup;
import com.lowagie.text.pdf.BaseFont;
/**
* If you are using True Type fonts, you can declare the paths of the different ttf- and ttc-files
* to this class first and then create fonts in your code using one of the getFont method
* without having to enter a path as parameter.
*
* @author Bruno Lowagie
*/
public class FontFactoryImp implements FontProvider {
/** This is a map of postscriptfontnames of True Type fonts and the path of their ttf- or ttc-file. */
private Properties trueTypeFonts = new Properties();
// NOTE(review): presumably triples of TrueType 'name'-table ids
// (platform id, encoding id, language id) tried in order when picking a
// font name — used by register(), which is outside this view; confirm there.
private static String[] TTFamilyOrder = {
"3", "1", "1033",
"3", "0", "1033",
"1", "0", "0",
"0", "3", "0"
};
/** This is a map of fontfamilies: lower-cased family name -> list of full font names. */
private Hashtable fontFamilies = new Hashtable();
/** This is the default encoding to use. Public and mutable by design of this legacy API. */
public String defaultEncoding = BaseFont.WINANSI;
/** This is the default value of the <VAR>embedded</VAR> variable. Public and mutable by design. */
public boolean defaultEmbedding = BaseFont.NOT_EMBEDDED;
/** Creates new FontFactory, pre-registering the 14 standard Type 1 fonts and their families. */
public FontFactoryImp() {
    // Map the lower-cased name of each standard font to itself so getFont()
    // resolves them without any file registration.
    String[] builtin = {
        FontFactory.COURIER, FontFactory.COURIER_BOLD,
        FontFactory.COURIER_OBLIQUE, FontFactory.COURIER_BOLDOBLIQUE,
        FontFactory.HELVETICA, FontFactory.HELVETICA_BOLD,
        FontFactory.HELVETICA_OBLIQUE, FontFactory.HELVETICA_BOLDOBLIQUE,
        FontFactory.SYMBOL,
        FontFactory.TIMES_ROMAN, FontFactory.TIMES_BOLD,
        FontFactory.TIMES_ITALIC, FontFactory.TIMES_BOLDITALIC,
        FontFactory.ZAPFDINGBATS
    };
    for (int i = 0; i < builtin.length; i++) {
        trueTypeFonts.setProperty(builtin[i].toLowerCase(), builtin[i]);
    }
    // Group the standard fonts into families, keyed by lower-cased family name.
    ArrayList courier = new ArrayList();
    courier.add(FontFactory.COURIER);
    courier.add(FontFactory.COURIER_BOLD);
    courier.add(FontFactory.COURIER_OBLIQUE);
    courier.add(FontFactory.COURIER_BOLDOBLIQUE);
    fontFamilies.put(FontFactory.COURIER.toLowerCase(), courier);
    ArrayList helvetica = new ArrayList();
    helvetica.add(FontFactory.HELVETICA);
    helvetica.add(FontFactory.HELVETICA_BOLD);
    helvetica.add(FontFactory.HELVETICA_OBLIQUE);
    helvetica.add(FontFactory.HELVETICA_BOLDOBLIQUE);
    fontFamilies.put(FontFactory.HELVETICA.toLowerCase(), helvetica);
    ArrayList symbol = new ArrayList();
    symbol.add(FontFactory.SYMBOL);
    fontFamilies.put(FontFactory.SYMBOL.toLowerCase(), symbol);
    ArrayList times = new ArrayList();
    times.add(FontFactory.TIMES_ROMAN);
    times.add(FontFactory.TIMES_BOLD);
    times.add(FontFactory.TIMES_ITALIC);
    times.add(FontFactory.TIMES_BOLDITALIC);
    // the SAME list instance is registered under both keys, as in the
    // original code, so later additions stay visible through both names
    fontFamilies.put(FontFactory.TIMES.toLowerCase(), times);
    fontFamilies.put(FontFactory.TIMES_ROMAN.toLowerCase(), times);
    ArrayList zapfdingbats = new ArrayList();
    zapfdingbats.add(FontFactory.ZAPFDINGBATS);
    fontFamilies.put(FontFactory.ZAPFDINGBATS.toLowerCase(), zapfdingbats);
}
/**
 * Constructs a <CODE>Font</CODE>-object.
 * Delegates to the 7-argument overload with <CODE>cached = true</CODE>,
 * i.e. the underlying BaseFont may be served from / added to the cache.
 *
 * @param fontname the name of the font
 * @param encoding the encoding of the font
 * @param embedded true if the font is to be embedded in the PDF
 * @param size the size of this font
 * @param style the style of this font
 * @param color the <CODE>Color</CODE> of this font.
 * @return the Font constructed based on the parameters
 */
public Font getFont(String fontname, String encoding, boolean embedded, float size, int style, Color color) {
    return getFont(fontname, encoding, embedded, size, style, color, true);
}
/**
 * Constructs a <CODE>Font</CODE>-object. This is the overload all other
 * getFont variants delegate to: it resolves family names to a concrete
 * style variant, then tries built-in/CJK fonts before registered TrueType
 * fonts.
 *
 * @param fontname the name of the font
 * @param encoding the encoding of the font
 * @param embedded true if the font is to be embedded in the PDF
 * @param size the size of this font
 * @param style the style of this font
 * @param color the <CODE>Color</CODE> of this font.
 * @param cached true if the font comes from the cache or is added to
 * the cache if new, false if the font is always created new
 * @return the Font constructed based on the parameters
 */
public Font getFont(String fontname, String encoding, boolean embedded, float size, int style, Color color, boolean cached) {
    if (fontname == null) return new Font(Font.UNDEFINED, size, style, color);
    String lowercasefontname = fontname.toLowerCase();
    ArrayList tmp = (ArrayList) fontFamilies.get(lowercasefontname);
    if (tmp != null) {
        // some bugs were fixed here by Daniel Marczisovszky
        int s = style == Font.UNDEFINED ? Font.NORMAL : style;
        int fs = Font.NORMAL;
        boolean found = false;
        for (Iterator i = tmp.iterator(); i.hasNext(); ) {
            String f = (String) i.next();
            String lcf = f.toLowerCase();
            fs = Font.NORMAL;
            // lcf is already lower case; the former code re-lowercased it
            // on every indexOf call for no effect
            if (lcf.indexOf("bold") != -1) fs |= Font.BOLD;
            if (lcf.indexOf("italic") != -1 || lcf.indexOf("oblique") != -1) fs |= Font.ITALIC;
            if ((s & Font.BOLDITALIC) == fs) {
                fontname = f;
                found = true;
                break;
            }
        }
        if (style != Font.UNDEFINED && found) {
            // the family member already carries this style; strip it so it
            // is not applied a second time by the Font object
            style &= ~fs;
        }
    }
    BaseFont basefont = null;
    try {
        try {
            // the font is a type 1 font or CJK font
            basefont = BaseFont.createFont(fontname, encoding, embedded, cached, null, null, true);
        }
        catch (DocumentException de) {
            // intentionally ignored: not a built-in/CJK name, fall through
            // to the TrueType lookup below
        }
        if (basefont == null) {
            // the font is a true type font or an unknown font
            fontname = trueTypeFonts.getProperty(fontname.toLowerCase());
            // the font is not registered as truetype font
            if (fontname == null) return new Font(Font.UNDEFINED, size, style, color);
            // the font is registered as truetype font
            basefont = BaseFont.createFont(fontname, encoding, embedded, cached, null, null);
        }
    }
    catch (DocumentException de) {
        // this shouldn't happen
        throw new ExceptionConverter(de);
    }
    catch (IOException ioe) {
        // the font is registered as a true type font, but the path was wrong
        return new Font(Font.UNDEFINED, size, style, color);
    }
    catch (NullPointerException npe) {
        // null was entered as fontname and/or encoding
        return new Font(Font.UNDEFINED, size, style, color);
    }
    return new Font(basefont, size, style, color);
}
/**
 * Constructs a <CODE>Font</CODE>-object from markup attributes (typically
 * parsed (X)HTML). CSS declarations inside the "style" attribute are parsed
 * first and merged back into <CODE>attributes</CODE>; explicit attributes
 * (font, size, color, ...) are applied on top.
 *
 * @param attributes the attributes of a <CODE>Font</CODE> object.
 * @return the Font constructed based on the attributes
 */
public Font getFont(Properties attributes) {
    String fontname = null;
    String encoding = defaultEncoding;
    boolean embedded = defaultEmbedding;
    float size = Font.UNDEFINED;
    int style = Font.NORMAL;
    Color color = null;
    String value = attributes.getProperty(Markup.HTML_ATTR_STYLE);
    if (value != null && value.length() > 0) {
        Properties styleAttributes = Markup.parseAttributes(value);
        if (styleAttributes.isEmpty()) {
            attributes.put(Markup.HTML_ATTR_STYLE, value);
        }
        else {
            fontname = styleAttributes.getProperty(Markup.CSS_KEY_FONTFAMILY);
            if (fontname != null) {
                String tmp;
                // walk the comma-separated font-family list, keeping the first
                // registered entry (or the last entry if none is registered)
                while (fontname.indexOf(',') != -1) {
                    tmp = fontname.substring(0, fontname.indexOf(','));
                    if (isRegistered(tmp)) {
                        fontname = tmp;
                    }
                    else {
                        fontname = fontname.substring(fontname.indexOf(',') + 1);
                    }
                }
            }
            if ((value = styleAttributes.getProperty(Markup.CSS_KEY_FONTSIZE)) != null) {
                size = Markup.parseLength(value);
            }
            if ((value = styleAttributes.getProperty(Markup.CSS_KEY_FONTWEIGHT)) != null) {
                style |= Font.getStyleValue(value);
            }
            if ((value = styleAttributes.getProperty(Markup.CSS_KEY_FONTSTYLE)) != null) {
                style |= Font.getStyleValue(value);
            }
            if ((value = styleAttributes.getProperty(Markup.CSS_KEY_COLOR)) != null) {
                color = Markup.decodeColor(value);
            }
            // putAll copies every style attribute; the former manual
            // Enumeration loop performed the exact same copy a second time
            // and has been removed
            attributes.putAll(styleAttributes);
        }
    }
    if ((value = attributes.getProperty(ElementTags.ENCODING)) != null) {
        encoding = value;
    }
    if ("true".equals(attributes.getProperty(ElementTags.EMBEDDED))) {
        embedded = true;
    }
    if ((value = attributes.getProperty(ElementTags.FONT)) != null) {
        fontname = value;
    }
    if ((value = attributes.getProperty(ElementTags.SIZE)) != null) {
        size = Markup.parseLength(value);
    }
    if ((value = attributes.getProperty(Markup.HTML_ATTR_STYLE)) != null) {
        style |= Font.getStyleValue(value);
    }
    if ((value = attributes.getProperty(ElementTags.STYLE)) != null) {
        style |= Font.getStyleValue(value);
    }
    // explicit red/green/blue components win over a combined "color" value
    String r = attributes.getProperty(ElementTags.RED);
    String g = attributes.getProperty(ElementTags.GREEN);
    String b = attributes.getProperty(ElementTags.BLUE);
    if (r != null || g != null || b != null) {
        int red = 0;
        int green = 0;
        int blue = 0;
        if (r != null) red = Integer.parseInt(r);
        if (g != null) green = Integer.parseInt(g);
        if (b != null) blue = Integer.parseInt(b);
        color = new Color(red, green, blue);
    }
    else if ((value = attributes.getProperty(ElementTags.COLOR)) != null) {
        color = Markup.decodeColor(value);
    }
    if (fontname == null) {
        return getFont(null, encoding, embedded, size, style, color);
    }
    return getFont(fontname, encoding, embedded, size, style, color);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @param embedded true if the font is to be embedded in the PDF
* @param size the size of this font
* @param style the style of this font
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding, boolean embedded, float size, int style) {
return getFont(fontname, encoding, embedded, size, style, null);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @param embedded true if the font is to be embedded in the PDF
* @param size the size of this font
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding, boolean embedded, float size) {
return getFont(fontname, encoding, embedded, size, Font.UNDEFINED, null);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @param embedded true if the font is to be embedded in the PDF
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding, boolean embedded) {
return getFont(fontname, encoding, embedded, Font.UNDEFINED, Font.UNDEFINED, null);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @param size the size of this font
* @param style the style of this font
* @param color the <CODE>Color</CODE> of this font.
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding, float size, int style, Color color) {
return getFont(fontname, encoding, defaultEmbedding, size, style, color);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @param size the size of this font
* @param style the style of this font
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding, float size, int style) {
return getFont(fontname, encoding, defaultEmbedding, size, style, null);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @param size the size of this font
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding, float size) {
return getFont(fontname, encoding, defaultEmbedding, size, Font.UNDEFINED, null);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param size the size of this font
* @param color the <CODE>Color</CODE> of this font.
* @return the Font constructed based on the parameters
* @since 2.1.0
*/
public Font getFont(String fontname, float size, Color color) {
return getFont(fontname, defaultEncoding, defaultEmbedding, size, Font.UNDEFINED, color);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param encoding the encoding of the font
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, String encoding) {
return getFont(fontname, encoding, defaultEmbedding, Font.UNDEFINED, Font.UNDEFINED, null);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param size the size of this font
* @param style the style of this font
* @param color the <CODE>Color</CODE> of this font.
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, float size, int style, Color color) {
return getFont(fontname, defaultEncoding, defaultEmbedding, size, style, color);
}
/**
* Constructs a <CODE>Font</CODE>-object.
*
* @param fontname the name of the font
* @param size the size of this font
* @param style the style of this font
* @return the Font constructed based on the parameters
*/
public Font getFont(String fontname, float size, int style) {
return getFont(fontname, defaultEncoding, defaultEmbedding, size, style, null);
}
/**
 * Constructs a <CODE>Font</CODE>-object with an explicit size only;
 * encoding and embedding fall back to the factory defaults.
 *
 * @param fontname the name of the font
 * @param size the size of this font
 * @return the Font constructed based on the parameters
 */
public Font getFont(String fontname, float size) {
    final int noStyle = Font.UNDEFINED;
    return getFont(fontname, defaultEncoding, defaultEmbedding, size, noStyle, null);
}
/**
 * Constructs a <CODE>Font</CODE>-object identified by name only; every other
 * property falls back to the factory defaults or stays undefined.
 *
 * @param fontname the name of the font
 * @return the Font constructed based on the parameters
 */
public Font getFont(String fontname) {
    final float noSize = Font.UNDEFINED;
    return getFont(fontname, defaultEncoding, defaultEmbedding, noSize, Font.UNDEFINED, null);
}
/**
 * Register a font by giving explicitly the font family and name.
 * The full name is inserted into the family's list keeping the list ordered
 * by ascending name length, so the shortest (most generic) name comes first.
 *
 * @param familyName the font family
 * @param fullName the font name
 * @param path the font path; may be null, in which case no path mapping is recorded
 */
public void registerFamily(String familyName, String fullName, String path) {
    if (path != null)
        trueTypeFonts.setProperty(fullName, path);
    ArrayList tmp = (ArrayList) fontFamilies.get(familyName);
    if (tmp == null) {
        // First member of this family: create the list.
        tmp = new ArrayList();
        tmp.add(fullName);
        fontFamilies.put(familyName, tmp);
    }
    else {
        // Insert before the first entry that is at least as long,
        // preserving the length-ordered invariant of the list.
        int fullNameLength = fullName.length();
        boolean inserted = false;
        for (int j = 0; j < tmp.size(); ++j) {
            if (((String)tmp.get(j)).length() >= fullNameLength) {
                tmp.add(j, fullName);
                inserted = true;
                break;
            }
        }
        if (!inserted)
            tmp.add(fullName);
    }
}
/**
 * Register a ttf- or a ttc-file.
 *
 * @param path the path to a ttf- or ttc-file
 */
public void register(String path) {
    // No alias requested: delegate to the two-argument overload.
    final String noAlias = null;
    register(path, noAlias);
}
/**
 * Register a font file and use an alias for the font contained in it.
 * Handles .ttf/.otf files (and single members of a collection given as
 * "file.ttc,index"), whole .ttc collections, and Type 1 metrics (.afm/.pfm).
 *
 * @param path the path to a font file
 * @param alias the alias you want to use for the font; ignored (with a warning)
 *              for whole TrueType collections
 */
public void register(String path, String alias) {
    try {
        if (path.toLowerCase().endsWith(".ttf") || path.toLowerCase().endsWith(".otf") || path.toLowerCase().indexOf(".ttc,") > 0) {
            // Single TrueType/OpenType font, or one member of a collection ("file.ttc,index").
            Object allNames[] = BaseFont.getAllFontNames(path, BaseFont.WINANSI, null);
            trueTypeFonts.setProperty(((String)allNames[0]).toLowerCase(), path);
            if (alias != null) {
                trueTypeFonts.setProperty(alias.toLowerCase(), path);
            }
            // register all the font names with all the locales
            String[][] names = (String[][])allNames[2]; //full name
            for (int i = 0; i < names.length; i++) {
                trueTypeFonts.setProperty(names[i][3].toLowerCase(), path);
            }
            String fullName = null;
            String familyName = null;
            names = (String[][])allNames[1]; //family name
            // Pick the family name whose (platform, encoding, language) triplet appears
            // first in TTFamilyOrder; setting k = TTFamilyOrder.length exits the outer loop.
            for (int k = 0; k < TTFamilyOrder.length; k += 3) {
                for (int i = 0; i < names.length; i++) {
                    if (TTFamilyOrder[k].equals(names[i][0]) && TTFamilyOrder[k + 1].equals(names[i][1]) && TTFamilyOrder[k + 2].equals(names[i][2])) {
                        familyName = names[i][3].toLowerCase();
                        k = TTFamilyOrder.length;
                        break;
                    }
                }
            }
            if (familyName != null) {
                String lastName = "";
                names = (String[][])allNames[2]; //full name
                // Register every full name under the chosen family, skipping
                // an immediate duplicate of the name registered just before.
                for (int i = 0; i < names.length; i++) {
                    for (int k = 0; k < TTFamilyOrder.length; k += 3) {
                        if (TTFamilyOrder[k].equals(names[i][0]) && TTFamilyOrder[k + 1].equals(names[i][1]) && TTFamilyOrder[k + 2].equals(names[i][2])) {
                            fullName = names[i][3];
                            if (fullName.equals(lastName))
                                continue;
                            lastName = fullName;
                            registerFamily(familyName, fullName, null);
                            break;
                        }
                    }
                }
            }
        }
        else if (path.toLowerCase().endsWith(".ttc")) {
            // Whole TrueType collection: recursively register each member as "path,index".
            if (alias != null)
                System.err.println("class FontFactory: You can't define an alias for a true type collection.");
            String[] names = BaseFont.enumerateTTCNames(path);
            for (int i = 0; i < names.length; i++) {
                register(path + "," + i);
            }
        }
        else if (path.toLowerCase().endsWith(".afm") || path.toLowerCase().endsWith(".pfm")) {
            // Type 1 font metrics: register postscript, full and family names.
            BaseFont bf = BaseFont.createFont(path, BaseFont.CP1252, false);
            String fullName = bf.getFullFontName()[0][3].toLowerCase();
            String familyName = bf.getFamilyFontName()[0][3].toLowerCase();
            String psName = bf.getPostscriptFontName().toLowerCase();
            registerFamily(familyName, fullName, null);
            trueTypeFonts.setProperty(psName, path);
            trueTypeFonts.setProperty(fullName, path);
        }
    }
    catch(DocumentException de) {
        // this shouldn't happen
        throw new ExceptionConverter(de);
    }
    catch(IOException ioe) {
        throw new ExceptionConverter(ioe);
    }
}
/** Register all the fonts in a directory, without descending into subdirectories.
 * @param dir the directory
 * @return the number of fonts registered
 */
public int registerDirectory(String dir) {
    final boolean scanSubdirectories = false;
    return registerDirectory(dir, scanSubdirectories);
}
/**
 * Register all the fonts in a directory and possibly its subdirectories.
 * Files that cannot be read or parsed are silently skipped so one bad font
 * cannot abort the whole scan.
 *
 * @param dir the directory
 * @param scanSubdirectories recursively scan subdirectories if <code>true</code>
 * @return the number of fonts registered
 * @since 2.1.2
 */
public int registerDirectory(String dir, boolean scanSubdirectories) {
    int count = 0;
    try {
        File file = new File(dir);
        if (!file.exists() || !file.isDirectory())
            return 0;
        String[] files = file.list();
        if (files == null)
            return 0;
        for (int k = 0; k < files.length; ++k) {
            try {
                file = new File(dir, files[k]);
                if (file.isDirectory()) {
                    if (scanSubdirectories) {
                        count += registerDirectory(file.getAbsolutePath(), true);
                    }
                } else {
                    // Match on the 4-character extension, lower-cased.
                    String name = file.getPath();
                    String suffix = name.length() < 4 ? null : name.substring(name.length() - 4).toLowerCase();
                    if (".afm".equals(suffix) || ".pfm".equals(suffix)) {
                        /* Only register Type 1 fonts with matching .pfb files */
                        File pfb = new File(name.substring(0, name.length() - 4) + ".pfb");
                        if (pfb.exists()) {
                            register(name, null);
                            ++count;
                        }
                    } else if (".ttf".equals(suffix) || ".otf".equals(suffix) || ".ttc".equals(suffix)) {
                        register(name, null);
                        ++count;
                    }
                }
            }
            catch (Exception e) {
                // Deliberately ignored: skip unreadable/corrupt font files and continue.
            }
        }
    }
    catch (Exception e) {
        // Deliberately ignored: directory problems simply yield a partial/zero count.
    }
    return count;
}
/** Register fonts in some probable directories. It usually works in Windows,
 * Linux and Solaris.
 * @return the number of fonts registered
 */
public int registerDirectories() {
    // Candidate locations, paired element-for-element with a recursion flag.
    final String[] paths = {
        "c:/windows/fonts",
        "c:/winnt/fonts",
        "d:/windows/fonts",
        "d:/winnt/fonts",
        "/usr/share/X11/fonts",
        "/usr/X/lib/X11/fonts",
        "/usr/openwin/lib/X11/fonts",
        "/usr/share/fonts",
        "/usr/X11R6/lib/X11/fonts",
        "/Library/Fonts",
        "/System/Library/Fonts"
    };
    final boolean[] recurse = {
        false, false, false, false,
        true, true, true, true, true,
        false, false
    };
    int count = 0;
    for (int i = 0; i < paths.length; i++) {
        count += registerDirectory(paths[i], recurse[i]);
    }
    return count;
}
/**
 * Gets a set of registered fontnames.
 * All names are stored lower-cased by the register methods.
 * @return a set of registered fonts
 */
public Set getRegisteredFonts() {
    return Utilities.getKeySet(trueTypeFonts);
}
/**
 * Gets a set of registered font family names.
 * @return a set of registered font families
 */
public Set getRegisteredFamilies() {
    return Utilities.getKeySet(fontFamilies);
}
/**
 * Checks if a certain font is registered.
 *
 * @param fontname the name of the font that has to be checked.
 * @return true if the font is found
 */
public boolean isRegistered(String fontname) {
    // Registrations are keyed by lower-cased names, so normalize before the lookup.
    final String key = fontname.toLowerCase();
    return trueTypeFonts.containsKey(key);
}
}
| lgpl-3.0 |
QualInsight/qualinsight-plugins-sonarqube-smell | plugin/src/main/java/com/qualinsight/plugins/sonarqube/smell/plugin/SmellPlugin.java | 2994 | /*
* qualinsight-plugins-sonarqube-smell
* Copyright (c) 2015, QualInsight
* http://www.qualinsight.com/
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, you can retrieve a copy
* from <http://www.gnu.org/licenses/>.
*/
package com.qualinsight.plugins.sonarqube.smell.plugin;
import org.sonar.api.Plugin;
import org.sonar.api.Properties;
import org.sonar.api.Property;
import org.sonar.api.utils.Version;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellChecksRegistrar;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellCountByTypeMeasuresComputer;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellCountTotalMeasureComputer;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellDebtComputer;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellMeasuresSensor;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellMetrics;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellRulesDefinition;
import com.qualinsight.plugins.sonarqube.smell.plugin.extension.SmellWidget;
/**
* Core Code Smells SonarPlugin class. It declares all extensions used by the plugin.
*
* @author Michel Pawlak
*/
@Properties({
    @Property(key = SmellPropertyKeys.WIDGET_TITLE_KEY, name = "Widget title", defaultValue = "Code Smells")
})
public final class SmellPlugin implements Plugin {

    /** First SonarQube version that no longer supports dashboard widgets. */
    private static final Version NO_MORE_WIDGETS_VERSION = Version.create(6, 1);

    /**
     * Declares all extensions used by the plugin. This method is executed at
     * runtime when the web server, the Compute Engine or a scanner starts.
     *
     * @param context the Context to which extensions have to be added to.
     */
    @Override
    public void define(final Context context) {
        context.addExtension(SmellChecksRegistrar.class);
        context.addExtension(SmellRulesDefinition.class);
        context.addExtension(SmellMetrics.class);
        context.addExtension(SmellMeasuresSensor.class);
        context.addExtension(SmellDebtComputer.class);
        context.addExtension(SmellCountByTypeMeasuresComputer.class);
        context.addExtension(SmellCountTotalMeasureComputer.class);
        // The widget extension is only meaningful on SonarQube versions that still render widgets.
        final boolean widgetsSupported = !context.getSonarQubeVersion()
            .isGreaterThanOrEqual(NO_MORE_WIDGETS_VERSION);
        if (widgetsSupported) {
            context.addExtension(SmellWidget.class);
        }
    }
}
| lgpl-3.0 |
simeshev/parabuild-ci | test/src/org/parabuild/ci/TestIndexRequest.java | 1776 | /*
* Parabuild CI licenses this file to You under the LGPL 2.1
* (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.gnu.org/licenses/lgpl-3.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parabuild.ci;
import java.io.*;
import org.apache.commons.logging.*;
import org.apache.lucene.document.*;
import org.parabuild.ci.configuration.*;
import org.parabuild.ci.object.*;
import org.parabuild.ci.search.*;
import org.parabuild.ci.services.*;
/**
 * Index request backed by fixed test fixtures: build run #1, step run #1,
 * step log #1 and a sample successful Ant build log.
 */
public class TestIndexRequest implements SearchService.IndexRequest {

    private static final Log log = LogFactory.getLog(TestIndexRequest.class);

    private final ConfigurationManager cm = ConfigurationManager.getInstance();

    /**
     * Builds the Lucene document to index from the test fixtures.
     *
     * @return the document produced by {@link LuceneDocumentFactory}
     * @throws IllegalStateException if the sample build log file cannot be found
     */
    public Document getDocumentToIndex() {
        Document document = null;
        try {
            if (log.isDebugEnabled()) log.debug("Prepare test document");
            // get params
            final BuildRun buildRun = cm.getBuildRun(1);
            final StepLog stepLog = cm.getStepLog(1);
            final StepRun stepRun = cm.getStepRun(1);
            // request doc from factory
            document = LuceneDocumentFactory.makeDocument(buildRun, stepRun,
                stepLog, TestHelper.getTestFile("test_ant_successful_build.log"));
        } catch (FileNotFoundException e) {
            // Keep the original message but preserve the cause so the stack trace survives.
            throw new IllegalStateException(e.toString(), e);
        }
        if (log.isDebugEnabled()) log.debug("document = " + document);
        return document;
    }
}
| lgpl-3.0 |
davidmc24/restwars | service/src/main/java/restwars/service/flight/impl/flighthandler/TransportFlightHandler.java | 2447 | package restwars.service.flight.impl.flighthandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import restwars.model.flight.Flight;
import restwars.model.planet.Planet;
import restwars.model.resource.Resources;
import restwars.service.event.EventService;
import restwars.service.flight.DetectedFlightDAO;
import restwars.service.flight.FlightDAO;
import restwars.service.infrastructure.RoundService;
import restwars.service.infrastructure.UUIDFactory;
import restwars.service.mechanics.ShipMechanics;
import restwars.service.planet.PlanetDAO;
import restwars.service.ship.HangarDAO;
import java.util.Optional;
/**
 * Finishes transport flights: delivers the cargo when the destination planet is
 * owned by the flight's player, otherwise sends ships (and cargo) back home.
 */
public class TransportFlightHandler extends AbstractFlightHandler {
    private static final Logger LOGGER = LoggerFactory.getLogger(TransportFlightHandler.class);

    public TransportFlightHandler(RoundService roundService, FlightDAO flightDAO, PlanetDAO planetDAO, HangarDAO hangarDAO, UUIDFactory uuidFactory, EventService eventService, DetectedFlightDAO detectedFlightDAO, ShipMechanics shipMechanics) {
        super(roundService, flightDAO, planetDAO, hangarDAO, uuidFactory, eventService, detectedFlightDAO, shipMechanics);
    }

    /**
     * Handles an arriving transport flight.
     * Three outcomes: cargo transferred to an own planet, return flight with
     * cargo from an enemy planet, or return flight from an uncolonized location.
     *
     * @param flight the arriving flight, never null
     * @param round  the current round number
     */
    @Override
    public void handle(Flight flight, long round) {
        assert flight != null;
        LOGGER.debug("Finishing transport flight");
        Planet planet = getPlanetDAO().findWithLocation(flight.getDestination()).orElse(null);
        Optional<Planet> planetOpt = getPlanetDAO().findWithLocation(flight.getDestination());
        if (planetOpt.isPresent()) {
            if (planetOpt.get().getOwnerId().equals(flight.getPlayerId())) {
                LOGGER.debug("Transfering cargo to planet {}", planetOpt.get());
                // Add the cargo to the planet's resources and persist the change.
                Planet updatedPlanet = planetOpt.get().withResources(planetOpt.get().getResources().plus(flight.getCargo()));
                getPlanetDAO().update(updatedPlanet);
                createReturnFlight(flight, flight.getShips(), Resources.NONE);
                // Create event
                getEventService().createTransportArrivedEvent(flight.getPlayerId(), updatedPlanet.getId());
            } else {
                // Foreign planet: the cargo travels back with the return flight.
                LOGGER.debug("Tried to transport to enemy planet {} , creating return flight", flight.getDestination());
                createReturnFlight(flight, flight.getShips(), flight.getCargo());
            }
        } else {
            LOGGER.debug("Planet {} isn't colonized, creating return flight", flight.getDestination());
            createReturnFlight(flight, flight.getShips(), flight.getCargo());
        }
    }
}
| lgpl-3.0 |
datacleaner/DataCleaner | components/machine-learning/src/main/java/org/datacleaner/components/machinelearning/MLClassificationTransformer.java | 5046 | /**
* DataCleaner (community edition)
* Copyright (C) 2014 Free Software Foundation, Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.datacleaner.components.machinelearning;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.List;
import javax.inject.Named;
import org.apache.commons.lang.SerializationUtils;
import org.datacleaner.api.Categorized;
import org.datacleaner.api.Configured;
import org.datacleaner.api.Description;
import org.datacleaner.api.FileProperty;
import org.datacleaner.api.FileProperty.FileAccessMode;
import org.datacleaner.api.Initialize;
import org.datacleaner.api.InputColumn;
import org.datacleaner.api.InputRow;
import org.datacleaner.api.OutputColumns;
import org.datacleaner.api.Transformer;
import org.datacleaner.api.Validate;
import org.datacleaner.components.machinelearning.api.MLClassification;
import org.datacleaner.components.machinelearning.api.MLClassificationRecord;
import org.datacleaner.components.machinelearning.api.MLClassifier;
import org.datacleaner.components.machinelearning.impl.MLClassificationRecordImpl;
import com.google.common.io.Files;
@Named("Apply classifier")
@Description("Applies a classifier to incoming records. Note that the classifier must first be trained using one of the analyzers found in the 'Machine Learning' category.")
@Categorized(MachineLearningCategory.class)
public class MLClassificationTransformer implements Transformer {

    /** How classification results are exposed as output columns. */
    public enum OutputFormat {
        /** Two columns: the winning class and its confidence. */
        WINNER_CLASS_AND_CONFIDENCE,
        /** One confidence column per known classification. */
        CONFIDENCE_MATRIX
    }

    @Configured
    InputColumn<?>[] features;

    @Configured
    @FileProperty(accessMode = FileAccessMode.OPEN, extension = ".model.ser")
    File modelFile = new File("classifier.model.ser");

    @Configured
    OutputFormat outputFormat = OutputFormat.WINNER_CLASS_AND_CONFIDENCE;

    private MLClassifier classifier;

    /**
     * Validates that the model file exists, deserializes into a classifier and
     * matches the configured feature columns.
     *
     * @throws IOException if the model file cannot be read
     */
    @Validate
    public void validate() throws IOException {
        if (!modelFile.exists()) {
            throw new IllegalArgumentException("Model file '" + modelFile + "' does not exist.");
        }
        classifier = loadClassifier();
        MLComponentUtils.validateClassifierMapping(classifier, features);
    }

    /** Reads and deserializes the classifier model from {@link #modelFile}. */
    private MLClassifier loadClassifier() throws IOException {
        final byte[] bytes = Files.toByteArray(modelFile);
        return (MLClassifier) SerializationUtils.deserialize(bytes);
    }

    @Initialize
    public void init() {
        try {
            classifier = loadClassifier();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    @Override
    public OutputColumns getOutputColumns() {
        if (classifier == null) {
            init();
        }
        if (outputFormat == OutputFormat.WINNER_CLASS_AND_CONFIDENCE) {
            // Derive a display name from the model file, stripping the ".model.ser" suffix.
            String modelName = modelFile.getName();
            if (modelName.toLowerCase().endsWith(".model.ser")) {
                modelName = modelName.substring(0, modelName.length() - ".model.ser".length());
            }
            final String[] columnNames = new String[] { modelName + " class", modelName + " confidence" };
            final Class<?>[] columnTypes =
                    new Class[] { classifier.getMetadata().getClassificationType(), Double.class };
            return new OutputColumns(columnNames, columnTypes);
        } else {
            final List<Object> classifications = classifier.getMetadata().getClassifications();
            final String[] columnNames = new String[classifications.size()];
            for (int i = 0; i < columnNames.length; i++) {
                // Bug fix: label each column with the i'th classification value,
                // not the whole list's toString().
                columnNames[i] = classifications.get(i).toString() + " confidence";
            }
            return new OutputColumns(Double.class, columnNames);
        }
    }

    /**
     * Classifies a single record. Returns either the winning class plus its
     * confidence, or one confidence per classification, depending on
     * {@link #outputFormat}.
     */
    @Override
    public Object[] transform(InputRow inputRow) {
        final MLClassificationRecord record = MLClassificationRecordImpl.forEvaluation(inputRow, features);
        final MLClassification classification = classifier.classify(record);
        if (outputFormat == OutputFormat.CONFIDENCE_MATRIX) {
            // Bug fix: previously the matrix format still returned only two values,
            // mismatching the column count declared in getOutputColumns().
            final int numClassifications = classifier.getMetadata().getClassifications().size();
            final Object[] confidences = new Object[numClassifications];
            for (int i = 0; i < numClassifications; i++) {
                confidences[i] = classification.getConfidence(i);
            }
            return confidences;
        }
        final int bestClassificationIndex = classification.getBestClassificationIndex();
        final double confidence = classification.getConfidence(bestClassificationIndex);
        final Object classificationValue = classifier.getMetadata().getClassification(bestClassificationIndex);
        return new Object[] { classificationValue, confidence };
    }
}
| lgpl-3.0 |
exoplatform/answers | service/src/test/java/org/exoplatform/faq/service/FAQEventQueryTestCase.java | 8328 | /*
* Copyright (C) 2003-2009 eXo Platform SAS.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, see<http://www.gnu.org/licenses/>.
*/
package org.exoplatform.faq.service;
import java.util.Arrays;
import java.util.Calendar;
import java.util.GregorianCalendar;
import junit.framework.TestCase;
import org.exoplatform.commons.utils.ISO8601;
/**
 * Unit tests verifying the XPath query strings produced by {@code FAQEventQuery}
 * for quick search, category search and question search.
 *
 * @author <a href="mailto:patrice.lamarque@exoplatform.com">Patrice Lamarque</a>
 * @version $Revision$
 */
public class FAQEventQueryTestCase extends TestCase {

    protected void setUp() throws Exception {
        super.setUp();
    }

    // Quick search over categories and questions: the visibility predicates
    // (approval, privacy, category filter) are only appended for non-admin users.
    public void testQuickSearch() throws Exception {
        FAQEventQuery queryObject = new FAQEventQuery();
        queryObject.setPath("/foo");
        queryObject.setType(FAQEventQuery.CATEGORY_AND_QUESTION);
        queryObject.setAdmin(true);
        String selector = "/jcr:root/foo//*";
        assertEquals(selector + "[]", queryObject.getQuery());
        queryObject.setText("bar");
        assertEquals(selector + "[ jcr:contains(., 'bar')]", queryObject.getQuery());
        queryObject.setAdmin(false);
        // Non-admin: only approved and non-private items are visible.
        String predicate = "jcr:contains(., 'bar') and ( not(@exo:isApproved) or @exo:isApproved='true' ) and ( not(@exo:userPrivate) or @exo:userPrivate='' )";
        assertEquals(selector + "[ " + predicate + " ]", queryObject.getQuery());
        //
        queryObject.setAdmin(true);
        assertEquals(selector + "[ jcr:contains(., 'bar')]", queryObject.getQuery());
        queryObject.setAdmin(false);
        queryObject.setUserId("zed");
        // A known user additionally sees his own unapproved items.
        predicate = "jcr:contains(., 'bar') and ( not(@exo:isApproved) or @exo:isApproved='true' or exo:author='zed' ) and ( not(@exo:userPrivate) or @exo:userPrivate='' )";
        assertEquals(selector + "[ " + predicate + " ]", queryObject.getQuery());
        //
        queryObject.setAdmin(true);
        assertEquals(selector + "[ jcr:contains(., 'bar')]", queryObject.getQuery());
        queryObject.setViewingCategories(Arrays.asList("cat1"));
        queryObject.setAdmin(false);
        predicate = "jcr:contains(., 'bar') and ( not(@exo:isApproved) or @exo:isApproved='true' or exo:author='zed' ) and (@exo:categoryId='cat1' or @exo:id='cat1') and ( not(@exo:userPrivate) or @exo:userPrivate='' )";
        assertEquals(selector + "[ " + predicate + " ]", queryObject.getQuery());
        //
        queryObject.setAdmin(true);
        assertEquals(selector + "[ jcr:contains(., 'bar') and (@exo:categoryId='cat1' or @exo:id='cat1')]", queryObject.getQuery());
    }

    // Category search: each setter contributes one predicate; for non-admins the
    // query is further restricted to public or user/moderator-visible categories.
    public void testBuildCategoryQuery() throws Exception {
        final FAQEventQuery queryObject = new FAQEventQuery();
        queryObject.setPath("/foo");
        queryObject.setType(FAQEventQuery.FAQ_CATEGORY);
        queryObject.setAdmin(true);
        String selector = "/jcr:root/foo//element(*,exo:faqCategory)[(@exo:isView='true') ";
        assertEquals(selector + "]", queryObject.getQuery());
        queryObject.setText("bar");
        String predicate = " and (jcr:contains(., 'bar'))";
        assertEquals(selector + predicate + "]", queryObject.getQuery());
        queryObject.setName("zed");
        predicate += " and (jcr:contains(@exo:name, 'zed'))";
        assertEquals(selector + predicate + "]", queryObject.getQuery());
        queryObject.setIsModeQuestion("blah");
        predicate += " and (@exo:isModerateQuestions='blah')";
        assertEquals(selector + predicate + "]", queryObject.getQuery());
        queryObject.setModerator("john");
        predicate += " and (jcr:contains(@exo:moderators, 'john'))";
        assertEquals(selector + predicate + "]", queryObject.getQuery());
        queryObject.setAdmin(false);
        queryObject.setUserMembers(Arrays.asList("jack", "jerry"));
        predicate += " and (not(@exo:userPrivate) or @exo:userPrivate='' or @exo:userPrivate='jack' or @exo:moderators='jack' or @exo:userPrivate='jerry' or @exo:moderators='jerry')";
        assertEquals(selector + predicate + "]", queryObject.getQuery());
    }

    // Question search with an explicit language: date bounds apply to creation,
    // response and comment dates; text conditions are scoped to the language.
    public void testBuildQuestionQuery() throws Exception {
        String selector = "/jcr:root/foo//* [(";
        String predicate;
        String orderBy = " order by @exo:title ascending";
        final FAQEventQuery eventQuery = new FAQEventQuery();
        eventQuery.setType(FAQEventQuery.FAQ_QUESTION);
        eventQuery.setPath("/foo");
        eventQuery.setAuthor("root");
        predicate = "jcr:contains(@exo:author, 'root')";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        eventQuery.setEmail("root@exoplatform");
        predicate += " and jcr:contains(@exo:email, 'root@exoplatform')";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        Calendar calendar = GregorianCalendar.getInstance();
        eventQuery.setFromDate(calendar);
        predicate += " and ((@exo:createdDate >= xs:dateTime('" + ISO8601.format(calendar) + "')) " + "or (@exo:dateResponse >= xs:dateTime('" + ISO8601.format(calendar) + "')) " + "or (@exo:dateComment >= xs:dateTime('" + ISO8601.format(calendar) + "')))";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        calendar = GregorianCalendar.getInstance();
        eventQuery.setToDate(calendar);
        predicate += " and ((@exo:createdDate <= xs:dateTime('" + ISO8601.format(calendar) + "')) " + "or (@exo:dateResponse <= xs:dateTime('" + ISO8601.format(calendar) + "')) " + "or (@exo:dateComment <= xs:dateTime('" + ISO8601.format(calendar) + "')))";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        eventQuery.setLanguage("English");
        eventQuery.setResponse("response");
        eventQuery.setAsteriskConditionSearch("*condition*");
        predicate += ") and (((@exo:responseLanguage='English') and (jcr:contains(@exo:responses,'response') or jcr:contains(@exo:responses,'*condition*')))";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        eventQuery.setComment("comment");
        predicate += " or ((@exo:commentLanguage='English') and (jcr:contains(@exo:comments,'comment') or jcr:contains(@exo:comments,'*condition*')))";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        eventQuery.setText("text");
        predicate += " or (jcr:contains(., 'text') and ( @exo:language='English' or @exo:commentLanguage='English' or @exo:responseLanguage='English'))";
        assertEquals(selector + predicate + ")]" + orderBy, eventQuery.getQuery().trim());
        eventQuery.setViewingCategories(Arrays.asList("categoryId1", "categoryId2"));
        predicate += ") and (exo:categoryId='categoryId1' or exo:categoryId='categoryId2')";
        assertEquals((selector + predicate + "]") + orderBy, eventQuery.getQuery().trim());
        eventQuery.setUserId("root");
        eventQuery.setAdmin(true);
    }

    // Same as above but with no language set: the language sub-predicates are omitted.
    public void testBuildQuestionQueryWithAllLanguage() throws Exception {
        String selector = "/jcr:root/foo//* [(";
        String predicate = "jcr:contains(@exo:author, 'root')";
        String orderBy = " order by @exo:title ascending";
        final FAQEventQuery eventQuery = new FAQEventQuery();
        eventQuery.setType(FAQEventQuery.FAQ_QUESTION);
        eventQuery.setPath("/foo");
        eventQuery.setAuthor("root");
        eventQuery.setResponse("response");
        eventQuery.setAsteriskConditionSearch("*condition*");
        predicate += ") and (((jcr:contains(@exo:responses,'response') or jcr:contains(@exo:responses,'*condition*')))";
        assertEquals((selector + predicate + ")]") + orderBy, eventQuery.getQuery());
        eventQuery.setComment("comment");
        predicate += " or ((jcr:contains(@exo:comments,'comment') or jcr:contains(@exo:comments,'*condition*')))";
        assertEquals((selector + predicate + ")]") + orderBy, eventQuery.getQuery());
        eventQuery.setText("text");
        predicate += " or (jcr:contains(., 'text'))";
        assertEquals((selector + predicate + ")]") + orderBy, eventQuery.getQuery());
    }
}
| lgpl-3.0 |
ecramer89/813Code | src/org/jgap/gp/impl/GPProgramInfo.java | 1787 | /*
* This file is part of JGAP.
*
* JGAP offers a dual license model containing the LGPL as well as the MPL.
*
* For licensing information please see the file license.txt included with JGAP
* or have a look at the top of class org.jgap.Chromosome which representatively
* includes the JGAP license policy applicable for any file delivered with JGAP.
*/
package org.jgap.gp.impl;
/**
 * Holds information about a program. Used for caching GP programs during GP
 * evolution. Equality, hashing and ordering are all based on the normalized
 * string representation of the program.
 *
 * @author Klaus Meffert
 * @since 3.2
 */
public class GPProgramInfo {
  /** String containing the CVS revision. Read out via reflection!*/
  private final static String CVS_REVISION = "$Revision: 1.3 $";

  // Fitness value captured from the program at construction time.
  private double m_fitnessValue;

  // Normalized string form of the program; the identity used by equals/compareTo.
  private String m_toStringNorm;

  // Flag indicating whether the program was found (e.g. in a cache).
  private boolean m_found;

  public GPProgramInfo(GPProgram a_prog, boolean a_found) {
    m_fitnessValue = a_prog.getFitnessValueDirectly();
    m_toStringNorm = a_prog.toStringNorm(0);
    m_found = a_found;
  }

  public String getToStringNorm() {
    return m_toStringNorm;
  }

  public double getFitnessValue() {
    return m_fitnessValue;
  }

  public boolean isFound() {
    return m_found;
  }

  public void setFound(boolean a_found) {
    m_found = a_found;
  }

  /**
   * Two infos are equal if their normalized string representations are equal.
   * Unlike the previous version this never throws on null or foreign types.
   */
  public boolean equals(Object a_other) {
    if (this == a_other) {
      return true;
    }
    if (!(a_other instanceof GPProgramInfo)) {
      // Also covers a_other == null.
      return false;
    }
    GPProgramInfo other = (GPProgramInfo) a_other;
    if (m_toStringNorm == null) {
      return other.m_toStringNorm == null;
    }
    return m_toStringNorm.equals(other.m_toStringNorm);
  }

  /**
   * Added to honor the equals/hashCode contract: equal objects must share a hash.
   */
  public int hashCode() {
    return m_toStringNorm == null ? 0 : m_toStringNorm.hashCode();
  }

  /**
   * Orders by the normalized string representation; null sorts before non-null.
   * Fixes an NPE that occurred when only this instance's representation was null.
   */
  public int compareTo(Object a_other) {
    GPProgramInfo other = (GPProgramInfo) a_other;
    if (m_toStringNorm == null) {
      return other.m_toStringNorm == null ? 0 : -1;
    }
    if (other.m_toStringNorm == null) {
      return 1;
    }
    return m_toStringNorm.compareTo(other.m_toStringNorm);
  }
}
| lgpl-3.0 |
uniba-dsg/betsy | peal/src/main/java/peal/identifier/EngineId.java | 778 | package peal.identifier;
import java.util.Objects;
import javax.xml.bind.annotation.XmlElement;
/**
 * Immutable identifier of an engine. Equality and hashing are based solely on
 * the wrapped string id.
 */
public class EngineId {

    @XmlElement
    private final String engineId;

    public EngineId(String engineId) {
        this.engineId = engineId;
    }

    public String getEngineId() {
        return engineId;
    }

    @Override
    public String toString() {
        return engineId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final EngineId other = (EngineId) o;
        return Objects.equals(engineId, other.engineId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(engineId);
    }
}
| lgpl-3.0 |
SergiyKolesnikov/fuji | examples/Bali/syntax/UmodSmDecl.java | 1627 | // Automatically generated code. Edit at your own risk!
// Generated by bali2jak v2002.09.03.
/**
 * AST node for an unmodified state-machine declaration:
 * {@code STATE_MACHINE QName [SmExtendsClause] [ImplementsClause] SmClassBody}.
 * NOTE: this class is generated by bali2jak; manual edits will be lost on regeneration.
 */
public class UmodSmDecl extends SmDeclaration {

    // Number of child AST arguments / tokens this node carries.
    final public static int ARG_LENGTH = 4 ;
    final public static int TOK_LENGTH = 1 ;

    // Optional child: returns null when the implements clause is absent.
    public ImplementsClause getImplementsClause () {
        AstNode node = arg[2].arg [0] ;
        return (node != null) ? (ImplementsClause) node : null ;
    }

    public QName getQName () {
        return (QName) arg [0] ;
    }

    public AstToken getSTATE_MACHINE () {
        return (AstToken) tok [0] ;
    }

    public SmClassBody getSmClassBody () {
        return (SmClassBody) arg [3] ;
    }

    // Optional child: returns null when the extends clause is absent.
    public SmExtendsClause getSmExtendsClause () {
        AstNode node = arg[1].arg [0] ;
        return (node != null) ? (SmExtendsClause) node : null ;
    }

    // Order mask over the token and the four arguments; presumably true means
    // "print the token first" — TODO confirm against the bali2jak runtime.
    public boolean[] printorder () {
        return new boolean[] {true, false, false, false, false} ;
    }

    // Wires up the token and child nodes, then initializes parent links.
    public UmodSmDecl setParms
        (AstToken tok0, QName arg0, AstOptNode arg1, AstOptNode arg2, SmClassBody arg3)
    {
        arg = new AstNode [ARG_LENGTH] ;
        tok = new AstTokenInterface [TOK_LENGTH] ;
        tok [0] = tok0 ; /* STATE_MACHINE */
        arg [0] = arg0 ; /* QName */
        arg [1] = arg1 ; /* [ SmExtendsClause ] */
        arg [2] = arg2 ; /* [ ImplementsClause ] */
        arg [3] = arg3 ; /* SmClassBody */
        InitChildren () ;
        return (UmodSmDecl) this ;
    }
}
| lgpl-3.0 |
ever-been/everBeen | persistence/src/main/java/cz/cuni/mff/d3s/been/persistence/EqAttributeFilter.java | 674 | package cz.cuni.mff.d3s.been.persistence;
import org.codehaus.jackson.annotate.JsonTypeInfo;
import static cz.cuni.mff.d3s.been.persistence.AttributeFilterType.EQUAL;
import static cz.cuni.mff.d3s.been.persistence.FilterValues.HARD_VALUE;
/**
 * Equality attribute filter: matches when the attribute value equals the
 * configured hard value.
 *
 * @author darklight
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "@class")
class EqAttributeFilter extends SkeletalAttributeFilter {

    // No-arg constructor — presumably required for Jackson deserialization; verify.
    public EqAttributeFilter() {
    }

    // Stores the comparison value under the HARD_VALUE key of the inherited map.
    EqAttributeFilter(Object value) {
        values.put(HARD_VALUE.getKey(), value);
    }

    @Override
    public AttributeFilterType getType() {
        return EQUAL;
    }
}
| lgpl-3.0 |
SoftwareEngineeringToolDemos/FSE-2011-EvoSuite | runtime/src/test/java/org/evosuite/runtime/mock/java/net/InetAddressTest.java | 1448 | /**
* Copyright (C) 2010-2015 Gordon Fraser, Andrea Arcuri and EvoSuite
* contributors
*
* This file is part of EvoSuite.
*
* EvoSuite is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser Public License as published by the
* Free Software Foundation, either version 3.0 of the License, or (at your
* option) any later version.
*
* EvoSuite is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License along
* with EvoSuite. If not, see <http://www.gnu.org/licenses/>.
*/
package org.evosuite.runtime.mock.java.net;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.junit.Assert;
import org.junit.Test;
/** Tests host-name resolution through {@code MockInetAddress}. */
public class InetAddressTest {

    @Test
    public void testGetByName() throws UnknownHostException{
        String googleAddr = "www.google.com";
        String evosuiteAddr = "www.evosuite.org";
        InetAddress google = MockInetAddress.getByName(googleAddr);
        InetAddress evosuite = MockInetAddress.getByName(evosuiteAddr);
        // Resolved addresses must retain their host names and map to distinct IPs.
        Assert.assertEquals(googleAddr, google.getHostName());
        Assert.assertEquals(evosuiteAddr, evosuite.getHostName());
        Assert.assertNotEquals(google.getHostAddress(), evosuite.getHostAddress());
    }
}
| lgpl-3.0 |
SoftwareEngineeringToolDemos/FSE-2011-EvoSuite | client/src/main/java/org/evosuite/symbolic/vm/string/Replace.java | 5248 | /**
* Copyright (C) 2010-2015 Gordon Fraser, Andrea Arcuri and EvoSuite
* contributors
*
* This file is part of EvoSuite.
*
* EvoSuite is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser Public License as published by the
* Free Software Foundation, either version 3.0 of the License, or (at your
* option) any later version.
*
* EvoSuite is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License along
* with EvoSuite. If not, see <http://www.gnu.org/licenses/>.
*/
package org.evosuite.symbolic.vm.string;
import java.util.ArrayList;
import java.util.Collections;
import org.evosuite.symbolic.expr.Expression;
import org.evosuite.symbolic.expr.Operator;
import org.evosuite.symbolic.expr.bv.IntegerValue;
import org.evosuite.symbolic.expr.str.StringMultipleExpression;
import org.evosuite.symbolic.expr.str.StringValue;
import org.evosuite.symbolic.vm.NonNullReference;
import org.evosuite.symbolic.vm.Reference;
import org.evosuite.symbolic.vm.SymbolicEnvironment;
import org.evosuite.symbolic.vm.SymbolicFunction;
import org.evosuite.symbolic.vm.SymbolicHeap;
/**
 * Symbolic-execution models for the {@code java.lang.String.replace(...)}
 * overloads. Each nested class mirrors one concrete overload:
 * {@link Replace_C} for {@code replace(char, char)} and {@link Replace_CS}
 * for {@code replace(CharSequence, CharSequence)}. On execution the model
 * reads the receiver's symbolic string value off the symbolic heap and
 * stores a {@link StringMultipleExpression} describing the replacement
 * result onto the heap entry of the concrete return value.
 */
public abstract class Replace extends SymbolicFunction {
// Name of the modeled java.lang.String method.
private static final String REPLACE = "replace";
/**
 * @param env  symbolic execution environment (provides the symbolic heap)
 * @param desc JVM method descriptor of the modeled overload
 */
public Replace(SymbolicEnvironment env, String desc) {
super(env, Types.JAVA_LANG_STRING, REPLACE, desc);
}
/** Models {@code String.replace(char oldChar, char newChar)}. */
public static final class Replace_C extends Replace {
public Replace_C(SymbolicEnvironment env) {
super(env, Types.CHAR_CHAR_TO_STR_DESCRIPTOR);
}
@Override
public Object executeFunction() {
// string receiver
NonNullReference symb_receiver = this.getSymbReceiver();
String conc_receiver = (String) this.getConcReceiver();
// old char (chars are modeled as integer expressions)
IntegerValue oldCharExpr = this.getSymbIntegerArgument(0);
// new char
IntegerValue newCharExpr = this.getSymbIntegerArgument(1);
// return value
Reference symb_ret_val = this.getSymbRetVal();
String conc_ret_val = (String) this.getConcRetVal();
// Symbolic expression for the receiver's current string value.
StringValue stringReceiverExpr = env.heap.getField(
Types.JAVA_LANG_STRING, SymbolicHeap.$STRING_VALUE,
conc_receiver, symb_receiver, conc_receiver);
// Only a non-null return reference carries a heap slot to update.
if (symb_ret_val instanceof NonNullReference) {
NonNullReference non_null_symb_ret_val = (NonNullReference) symb_ret_val;
// REPLACEC(receiver, oldChar, [newChar]) encodes the char replacement.
StringMultipleExpression symb_value = new StringMultipleExpression(
stringReceiverExpr, Operator.REPLACEC, oldCharExpr,
new ArrayList<Expression<?>>(Collections
.singletonList(newCharExpr)),
conc_ret_val);
env.heap.putField(Types.JAVA_LANG_STRING,
SymbolicHeap.$STRING_VALUE, conc_ret_val,
non_null_symb_ret_val, symb_value);
}
return this.getSymbRetVal();
}
}
/** Models {@code String.replace(CharSequence target, CharSequence replacement)}. */
public static final class Replace_CS extends Replace {
public Replace_CS(SymbolicEnvironment env) {
super(env, Types.CHARSEQ_CHARSEQ_TO_STR_DESCRIPTOR);
}
@Override
public Object executeFunction() {
// string receiver
NonNullReference symb_receiver = this.getSymbReceiver();
String conc_receiver = (String) this.getConcReceiver();
// old string (target to replace)
Reference symb_old_str = this.getSymbArgument(0);
CharSequence conc_old_char_seq = (CharSequence) this
.getConcArgument(0);
// new string (replacement)
Reference symb_new_str = this.getSymbArgument(1);
CharSequence conc_new_char_seq = (CharSequence) this
.getConcArgument(1);
// return value
Reference symb_ret_val = this.getSymbRetVal();
String conc_ret_val = (String) this.getConcRetVal();
StringValue stringReceiverExpr = env.heap.getField(
Types.JAVA_LANG_STRING, SymbolicHeap.$STRING_VALUE,
conc_receiver, symb_receiver, conc_receiver);
// All three references must be non-null before a symbolic result
// can be constructed and written back.
if (symb_old_str instanceof NonNullReference
&& symb_new_str instanceof NonNullReference
&& symb_ret_val instanceof NonNullReference) {
NonNullReference non_null_symb_old_str = (NonNullReference) symb_old_str;
NonNullReference non_null_symb_new_str = (NonNullReference) symb_new_str;
NonNullReference non_null_symb_ret_val = (NonNullReference) symb_ret_val;
// Only String arguments are handled here; other CharSequence
// implementations have no symbolic string value on the heap.
if (conc_old_char_seq instanceof String
&& conc_new_char_seq instanceof String) {
String conc_old_str = (String) conc_old_char_seq;
StringValue oldStringExpr = env.heap.getField(
Types.JAVA_LANG_STRING, SymbolicHeap.$STRING_VALUE,
conc_old_str, non_null_symb_old_str, conc_old_str);
String conc_new_str = (String) conc_new_char_seq;
StringValue newStringExpr = env.heap.getField(
Types.JAVA_LANG_STRING, SymbolicHeap.$STRING_VALUE,
conc_new_str, non_null_symb_new_str, conc_new_str);
StringMultipleExpression symb_value = new StringMultipleExpression(
stringReceiverExpr, Operator.REPLACECS,
oldStringExpr, new ArrayList<Expression<?>>(
Collections.singletonList(newStringExpr)),
conc_ret_val);
env.heap.putField(Types.JAVA_LANG_STRING,
SymbolicHeap.$STRING_VALUE, conc_ret_val,
non_null_symb_ret_val, symb_value);
}
}
return symb_ret_val;
}
}
}
| lgpl-3.0 |
Joccob/EnderIO | src/main/java/crazypants/enderio/item/ItemMagnet.java | 6570 | package crazypants.enderio.item;
import java.util.List;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
import baubles.api.BaubleType;
import baubles.api.IBauble;
import cofh.api.energy.ItemEnergyContainer;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.Optional;
import cpw.mods.fml.common.Optional.Method;
import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import crazypants.enderio.EnderIO;
import crazypants.enderio.EnderIOTab;
import crazypants.enderio.ModObject;
import crazypants.enderio.config.Config;
import crazypants.enderio.gui.IResourceTooltipProvider;
import crazypants.enderio.machine.power.PowerDisplayUtil;
import crazypants.util.BaublesUtil;
import crazypants.util.ItemUtil;
/**
 * An item that attracts nearby drops while active and carried/worn. Stores
 * its on/off state in the item's NBT and its charge via the RF energy
 * container superclass; item damage is (ab)used as a 0..16 charge meter for
 * the durability bar. Implements {@code IBauble} so it can be worn in a
 * Baubles amulet slot when that mod is present.
 */
@Optional.Interface(iface = "baubles.api.IBauble", modid = "Baubles|API")
public class ItemMagnet extends ItemEnergyContainer implements IResourceTooltipProvider, IBauble {
// NBT key under which the active flag is stored on the item stack.
private static final String ACTIVE_KEY = "magnetActive";
/** Sets the magnet's active flag in the stack's NBT (no-op for null stacks). */
public static void setActive(ItemStack item, boolean active) {
if(item == null) {
return;
}
NBTTagCompound nbt = ItemUtil.getOrCreateNBT(item);
nbt.setBoolean(ACTIVE_KEY, active);
}
/** Returns true only if the stack has NBT with the active flag set. */
public static boolean isActive(ItemStack item) {
if(item == null) {
return false;
}
if(item.stackTagCompound == null) {
return false;
}
if(!item.stackTagCompound.hasKey(ACTIVE_KEY)) {
return false;
}
return item.stackTagCompound.getBoolean(ACTIVE_KEY);
}
/** True if the stack has any stored energy left. */
public static boolean hasPower(ItemStack itemStack) {
return EnderIO.itemMagnet.getEnergyStored(itemStack) > 0;
}
/** Drains one second's worth of RF from the stack. */
public static void drainPerSecondPower(ItemStack itemStack) {
EnderIO.itemMagnet.extractEnergy(itemStack, Config.magnetPowerUsePerSecondRF, false);
}
// Shared controller doing the actual item-attraction work each tick.
static MagnetController controller = new MagnetController();
/** Factory: creates the item, registers it and hooks the controller into the event bus. */
public static ItemMagnet create() {
ItemMagnet result = new ItemMagnet();
result.init();
FMLCommonHandler.instance().bus().register(controller);
return result;
}
protected ItemMagnet() {
// Capacity from config; max receive/extract rate is 1% of capacity.
super(Config.magnetPowerCapacityRF, Config.magnetPowerCapacityRF / 100);
setCreativeTab(EnderIOTab.tabEnderIO);
setUnlocalizedName(ModObject.itemMagnet.unlocalisedName);
// 16 damage steps drive the charge-level durability bar (see updateDamage).
setMaxDamage(16);
setMaxStackSize(1);
setHasSubtypes(true);
}
protected void init() {
GameRegistry.registerItem(this, ModObject.itemMagnet.unlocalisedName);
}
@Override
@SideOnly(Side.CLIENT)
public void registerIcons(IIconRegister IIconRegister) {
itemIcon = IIconRegister.registerIcon("enderio:magnet");
}
/** Adds both a fully-charged and an empty variant to the creative tab. */
@Override
@SideOnly(Side.CLIENT)
public void getSubItems(Item item, CreativeTabs par2CreativeTabs, List par3List) {
ItemStack is = new ItemStack(this);
setFull(is);
par3List.add(is);
is = new ItemStack(this);
setEnergy(is, 0);
par3List.add(is);
}
/** Appends a "stored/max RF" line to the tooltip. */
@Override
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack itemStack, EntityPlayer par2EntityPlayer, List list, boolean par4) {
super.addInformation(itemStack, par2EntityPlayer, list, par4);
String str = PowerDisplayUtil.formatPower(getEnergyStored(itemStack)) + "/" + PowerDisplayUtil.formatPower(getMaxEnergyStored(itemStack)) + " "
+ PowerDisplayUtil.abrevation();
list.add(str);
}
/** Enchantment glint marks an active magnet. */
@Override
@SideOnly(Side.CLIENT)
public boolean hasEffect(ItemStack item, int pass) {
return isActive(item);
}
/** Crafted magnets start empty. */
@Override
public void onCreated(ItemStack itemStack, World world, EntityPlayer entityPlayer) {
setEnergy(itemStack, 0);
}
@Override
public int receiveEnergy(ItemStack container, int maxReceive, boolean simulate) {
int res = super.receiveEnergy(container, maxReceive, simulate);
// Keep the durability bar in sync whenever charge actually changes.
if(res != 0 && !simulate) {
updateDamage(container);
}
return res;
}
@Override
public int extractEnergy(ItemStack container, int maxExtract, boolean simulate) {
int res = super.extractEnergy(container, maxExtract, simulate);
// Keep the durability bar in sync whenever charge actually changes.
if(res != 0 && !simulate) {
updateDamage(container);
}
return res;
}
/** Directly writes the stored energy into NBT and refreshes the charge bar. */
void setEnergy(ItemStack container, int energy) {
if(container.stackTagCompound == null) {
container.stackTagCompound = new NBTTagCompound();
}
container.stackTagCompound.setInteger("Energy", energy);
updateDamage(container);
}
void setFull(ItemStack container) {
setEnergy(container, Config.magnetPowerCapacityRF);
}
/** Maps the charge ratio onto item damage: full charge = 0 damage, empty = 16. */
private void updateDamage(ItemStack stack) {
float r = (float) getEnergyStored(stack) / getMaxEnergyStored(stack);
int res = 16 - (int) (r * 16);
stack.setItemDamage(res);
}
/** Sneak-right-click toggles the magnet on/off. */
@Override
public ItemStack onItemRightClick(ItemStack equipped, World world, EntityPlayer player) {
if(player.isSneaking()) {
setActive(equipped, !isActive(equipped));
}
return equipped;
}
@Override
public String getUnlocalizedNameForTooltip(ItemStack stack) {
return getUnlocalizedName();
}
@Override
@Method(modid = "Baubles")
public BaubleType getBaubleType(ItemStack itemstack) {
return baubles.api.BaubleType.AMULET;
}
/** Called every tick while worn as a bauble: attracts items and drains power once per second. */
@Override
public void onWornTick(ItemStack itemstack, EntityLivingBase player) {
if(player instanceof EntityPlayer && hasPower(itemstack)) {
controller.doHoover((EntityPlayer) player);
// Server side only, once every 20 ticks (~1 second).
if(!player.worldObj.isRemote && player.worldObj.getTotalWorldTime() % 20 == 0) {
ItemMagnet.drainPerSecondPower(itemstack);
IInventory baubles = BaublesUtil.instance().getBaubles((EntityPlayer) player);
if(baubles != null) {
for (int i = 0; i < baubles.getSizeInventory(); i++) {
if(baubles.getStackInSlot(i) == itemstack) {
// Re-set the same stack — presumably to mark the
// baubles inventory dirty so the drain persists;
// TODO confirm against the Baubles API.
baubles.setInventorySlotContents(i, itemstack);
}
}
}
}
}
}
@Override
public void onEquipped(ItemStack itemstack, EntityLivingBase player) {
}
@Override
public void onUnequipped(ItemStack itemstack, EntityLivingBase player) {
}
/** Only active magnets may be equipped as a bauble. */
@Override
public boolean canEquip(ItemStack itemstack, EntityLivingBase player) {
return isActive(itemstack);
}
@Override
public boolean canUnequip(ItemStack itemstack, EntityLivingBase player) {
return true;
}
}
| unlicense |
sirmax/scala-maven-plugin | src/it/test_directory_with_class_name/src/main/java/example/Foo2.java | 131 | package example;
class Foo2 {

    /** Program entry point: writes a greeting to standard output. */
    public static void main(String[] args) {
        final String greeting = "Hello world";
        System.out.println(greeting);
    }
}
| unlicense |
AqD/JXTN | jxtn-jfx-makers/src/javafx/scene/control/ButtonBaseMakerExt.java | 641 | // @formatter:off
/*
* Unlicensed, generated by javafx.ftl
*/
package javafx.scene.control;
/**
 * {@link ButtonBase} maker extension point (for customization). Generated
 * code; intentionally empty until custom builder methods are needed.
 *
 * @author JarReflectionDataLoader-1.0.0
 * @version jfxrt.jar
 * @param <Z> the type of object being built (must extend {@link ButtonBase})
 * @param <B> the type of the maker itself (must extend {@link ButtonBaseMaker})
 */
@javax.annotation.Generated("Generated by javafx.ftl")
@SuppressWarnings("all")
public interface ButtonBaseMakerExt<Z extends ButtonBase, B extends ButtonBaseMaker<Z, B>>
extends javafx.scene.control.LabeledMakerExt<Z, B>
{
// nothing yet
}
| unlicense |
CenturyLinkCloud/mdw | mdw-workflow/assets/com/centurylink/mdw/slack/MessageMonitor.java | 3632 | package com.centurylink.mdw.slack;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import org.json.JSONObject;
import com.centurylink.mdw.annotations.RegisteredService;
import com.centurylink.mdw.app.ApplicationContext;
import com.centurylink.mdw.common.service.ServiceException;
import com.centurylink.mdw.model.Comment;
import com.centurylink.mdw.model.listener.Listener;
import com.centurylink.mdw.monitor.ServiceMonitor;
import com.centurylink.mdw.services.ServiceLocator;
import com.centurylink.mdw.util.HttpHelper;
/**
* Monitors for comment messages posted in MDW through its API(s).
*/
@RegisteredService(ServiceMonitor.class)
public class MessageMonitor implements ServiceMonitor {
/**
 * Intercepts comment-creation requests. When a comment is posted on a task
 * instance whose indexes record a Slack message timestamp, the comment text
 * is scrubbed to Slack markdown and posted as a broadcast thread reply via
 * the MDW cloud routing endpoint.
 *
 * @return always null — this monitor never short-circuits the request
 * @throws ServiceException if the Slack notification fails
 */
@Override
public Object onRequest(Object request, Map<String,String> headers) throws ServiceException {
// only POST currently (TODO: how to handle PUT, DELETE)
if ("Comments".equals(headers.get("RequestPath")) && "POST".equalsIgnoreCase(headers.get("HttpMethod"))) {
Comment comment = new Comment(new JSONObject(request.toString()));
if ("TASK_INSTANCE".equals(comment.getOwnerType())) {
Long instId = comment.getOwnerId();
Map<String,String> indexes = ServiceLocator.getTaskServices().getIndexes(instId);
// Only notify if this task was previously announced in Slack.
String messageTs = indexes.get("slack:message_ts");
if (messageTs != null) {
JSONObject json = new JSONObject();
json.put("thread_ts", indexes.get("slack:message_ts"));
json.put("reply_broadcast", true);
String text = new MarkdownScrubber(comment.getContent()).toSlack();
// Slack attachment text limit; truncate with an ellipsis.
if (text.length() > 1024)
text = text.substring(0, 1021) + "...";
json.put("text", text);
json.put("channel", "C85DLE1U7"); // TODO
json.put("as_user", false);
try {
HttpHelper helper = new HttpHelper(new URL(ApplicationContext.getMdwCloudRoutingUrl() + "/slack"));
Map<String,String> hdrs = new HashMap<>();
hdrs.put(Listener.METAINFO_CLOUD_ROUTING, "https://slack.com/api/chat.postMessage");
hdrs.put(Listener.METAINFO_MDW_APP_ID, ApplicationContext.getAppId());
hdrs.put(Listener.METAINFO_MDW_APP_TOKEN, System.getenv("MDW_APP_TOKEN")); // Add the application specific MDW provided token
hdrs.put("Content-Type", "application/json; charset=utf-8");
helper.setHeaders(hdrs);
String response = helper.post(json.toString());
// Slack signals errors in the body with ok=false, not via HTTP status.
JSONObject responseJson = new JSONObject(response);
if (!responseJson.getBoolean("ok"))
throw new IOException("Slack notification failed with response:" + responseJson);
}
catch (IOException ex) {
throw new ServiceException(ex.getMessage(), ex);
}
}
}
}
return null;
}
/** No-op; this monitor only reacts to incoming requests. */
@Override
public Object onHandle(Object request, Map<String,String> headers) throws ServiceException {
return null;
}
/** No-op; responses pass through unchanged. */
@Override
public Object onResponse(Object response, Map<String,String> headers) throws ServiceException {
return null;
}
/** No-op; errors are not intercepted. */
@Override
public Object onError(Throwable t, Map<String,String> headers) {
return null;
}
}
| apache-2.0 |
wangtuo/elasticsearch | core/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java | 10051 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine;
import com.carrotsearch.hppc.LongArrayList;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogDeletionPolicy;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import static java.util.Collections.singletonList;
import static org.elasticsearch.index.engine.EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG;
import static org.elasticsearch.index.engine.EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG;
import static org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code CombinedDeletionPolicy}: which Lucene index commits
 * it deletes versus retains relative to the global checkpoint, how it
 * interacts with {@code SnapshotDeletionPolicy}, how it treats legacy
 * (pre-sequence-number) commits, and how it drops commits whose translog
 * UUID does not match. Index commits are mocked; only their user data map
 * (max seq no, translog UUID, translog generation) matters.
 */
public class CombinedDeletionPolicyTests extends ESTestCase {
/**
 * Commits strictly before the newest commit whose max seq no is at or
 * below the global checkpoint must be deleted; that commit and all later
 * ones must be kept, and the translog policy must retain generations
 * back to the kept commit.
 */
public void testKeepCommitsAfterGlobalCheckpoint() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong();
TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy();
CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(OPEN_INDEX_AND_TRANSLOG, translogPolicy, globalCheckpoint::get);
final LongArrayList maxSeqNoList = new LongArrayList();
final LongArrayList translogGenList = new LongArrayList();
final List<IndexCommit> commitList = new ArrayList<>();
int totalCommits = between(2, 20);
long lastMaxSeqNo = 0;
long lastTranslogGen = 0;
final UUID translogUUID = UUID.randomUUID();
// Build a chain of commits with strictly increasing seq nos / translog gens.
for (int i = 0; i < totalCommits; i++) {
lastMaxSeqNo += between(1, 10000);
lastTranslogGen += between(1, 100);
commitList.add(mockIndexCommit(lastMaxSeqNo, translogUUID, lastTranslogGen));
maxSeqNoList.add(lastMaxSeqNo);
translogGenList.add(lastTranslogGen);
}
// Pick a commit to keep and set the checkpoint inside the range that
// makes it the newest "safe" commit.
int keptIndex = randomInt(commitList.size() - 1);
final long lower = maxSeqNoList.get(keptIndex);
final long upper = keptIndex == commitList.size() - 1 ?
Long.MAX_VALUE : Math.max(maxSeqNoList.get(keptIndex), maxSeqNoList.get(keptIndex + 1) - 1);
globalCheckpoint.set(randomLongBetween(lower, upper));
indexPolicy.onCommit(commitList);
for (int i = 0; i < commitList.size(); i++) {
if (i < keptIndex) {
verify(commitList.get(i), times(1)).delete();
} else {
verify(commitList.get(i), never()).delete();
}
}
assertThat(translogPolicy.getMinTranslogGenerationForRecovery(), equalTo(translogGenList.get(keptIndex)));
assertThat(translogPolicy.getTranslogGenerationOfLastCommit(), equalTo(lastTranslogGen));
}
/**
 * A commit pinned by a snapshot must not be deleted, but the translog
 * policy should still advance past it (snapshotted commits do not retain
 * translog generations).
 */
public void testIgnoreSnapshottingCommits() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong();
final UUID translogUUID = UUID.randomUUID();
TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy();
CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(OPEN_INDEX_AND_TRANSLOG, translogPolicy, globalCheckpoint::get);
long firstMaxSeqNo = randomLongBetween(0, Long.MAX_VALUE - 1);
long secondMaxSeqNo = randomLongBetween(firstMaxSeqNo + 1, Long.MAX_VALUE);
long lastTranslogGen = randomNonNegativeLong();
final IndexCommit firstCommit = mockIndexCommit(firstMaxSeqNo, translogUUID, randomLongBetween(0, lastTranslogGen));
final IndexCommit secondCommit = mockIndexCommit(secondMaxSeqNo, translogUUID, lastTranslogGen);
SnapshotDeletionPolicy snapshotDeletionPolicy = new SnapshotDeletionPolicy(indexPolicy);
snapshotDeletionPolicy.onInit(Arrays.asList(firstCommit));
snapshotDeletionPolicy.snapshot();
assertThat(snapshotDeletionPolicy.getSnapshots(), contains(firstCommit));
// SnapshotPolicy prevents the first commit from deleting, but CombinedPolicy does not retain its translog.
globalCheckpoint.set(randomLongBetween(secondMaxSeqNo, Long.MAX_VALUE));
snapshotDeletionPolicy.onCommit(Arrays.asList(firstCommit, secondCommit));
verify(firstCommit, never()).delete();
verify(secondCommit, never()).delete();
assertThat(translogPolicy.getMinTranslogGenerationForRecovery(), equalTo(lastTranslogGen));
assertThat(translogPolicy.getTranslogGenerationOfLastCommit(), equalTo(lastTranslogGen));
}
/**
 * A legacy commit (no max seq no in its user data) is retained until a
 * newer commit becomes safe (its max seq no is covered by the global
 * checkpoint), at which point the legacy commit is deleted.
 */
public void testLegacyIndex() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong();
final UUID translogUUID = UUID.randomUUID();
TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy();
CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(OPEN_INDEX_AND_TRANSLOG, translogPolicy, globalCheckpoint::get);
long legacyTranslogGen = randomNonNegativeLong();
IndexCommit legacyCommit = mockLegacyIndexCommit(translogUUID, legacyTranslogGen);
indexPolicy.onInit(singletonList(legacyCommit));
verify(legacyCommit, never()).delete();
assertThat(translogPolicy.getMinTranslogGenerationForRecovery(), equalTo(legacyTranslogGen));
assertThat(translogPolicy.getTranslogGenerationOfLastCommit(), equalTo(legacyTranslogGen));
long safeTranslogGen = randomLongBetween(legacyTranslogGen, Long.MAX_VALUE);
long maxSeqNo = randomLongBetween(1, Long.MAX_VALUE);
final IndexCommit freshCommit = mockIndexCommit(maxSeqNo, translogUUID, safeTranslogGen);
// Fresh commit not yet safe: both commits survive.
globalCheckpoint.set(randomLongBetween(0, maxSeqNo - 1));
indexPolicy.onCommit(Arrays.asList(legacyCommit, freshCommit));
verify(legacyCommit, times(0)).delete();
verify(freshCommit, times(0)).delete();
assertThat(translogPolicy.getMinTranslogGenerationForRecovery(), equalTo(legacyTranslogGen));
assertThat(translogPolicy.getTranslogGenerationOfLastCommit(), equalTo(safeTranslogGen));
// Make the fresh commit safe.
globalCheckpoint.set(randomLongBetween(maxSeqNo, Long.MAX_VALUE));
indexPolicy.onCommit(Arrays.asList(legacyCommit, freshCommit));
verify(legacyCommit, times(1)).delete();
verify(freshCommit, times(0)).delete();
assertThat(translogPolicy.getMinTranslogGenerationForRecovery(), equalTo(safeTranslogGen));
assertThat(translogPolicy.getTranslogGenerationOfLastCommit(), equalTo(safeTranslogGen));
}
/**
 * Commits whose translog UUID does not match the current translog must be
 * deleted regardless of the global checkpoint.
 */
public void testDeleteInvalidCommits() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong(randomNonNegativeLong());
TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy();
CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(OPEN_INDEX_CREATE_TRANSLOG, translogPolicy, globalCheckpoint::get);
final int invalidCommits = between(1, 10);
final List<IndexCommit> commitList = new ArrayList<>();
// Commits with random (mismatching) translog UUIDs are invalid.
for (int i = 0; i < invalidCommits; i++) {
commitList.add(mockIndexCommit(randomNonNegativeLong(), UUID.randomUUID(), randomNonNegativeLong()));
}
final UUID expectedTranslogUUID = UUID.randomUUID();
long lastTranslogGen = 0;
final int validCommits = between(1, 10);
for (int i = 0; i < validCommits; i++) {
lastTranslogGen += between(1, 1000);
commitList.add(mockIndexCommit(randomNonNegativeLong(), expectedTranslogUUID, lastTranslogGen));
}
// We should never keep invalid commits regardless of the value of the global checkpoint.
indexPolicy.onCommit(commitList);
for (int i = 0; i < invalidCommits - 1; i++) {
verify(commitList.get(i), times(1)).delete();
}
}
/** Mocks a commit whose user data carries max seq no, translog UUID and generation. */
IndexCommit mockIndexCommit(long maxSeqNo, UUID translogUUID, long translogGen) throws IOException {
final Map<String, String> userData = new HashMap<>();
userData.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(maxSeqNo));
userData.put(Translog.TRANSLOG_UUID_KEY, translogUUID.toString());
userData.put(Translog.TRANSLOG_GENERATION_KEY, Long.toString(translogGen));
final IndexCommit commit = mock(IndexCommit.class);
when(commit.getUserData()).thenReturn(userData);
return commit;
}
/** Mocks a legacy commit: translog metadata only, no max seq no entry. */
IndexCommit mockLegacyIndexCommit(UUID translogUUID, long translogGen) throws IOException {
final Map<String, String> userData = new HashMap<>();
userData.put(Translog.TRANSLOG_UUID_KEY, translogUUID.toString());
userData.put(Translog.TRANSLOG_GENERATION_KEY, Long.toString(translogGen));
final IndexCommit commit = mock(IndexCommit.class);
when(commit.getUserData()).thenReturn(userData);
return commit;
}
}
| apache-2.0 |
tigerforest/tiger-forest | demo-http/src/main/java/com/xhh/demo/http/sync/FavoriteFruit.java | 657 | package com.xhh.demo.http.sync;
/**
 * FavoriteFruit
 *
 * Demonstrates two threads invoking different listing methods on the same
 * shared {@link Fruit} instance concurrently.
 *
 * @author tiger
 * @version 1.0.0 createTime: 14-6-12
 * @since 1.6
 */
public class FavoriteFruit {
    public static void main(String[] args) {
        // Both threads operate on a single shared instance.
        final Fruit sharedFruit = new Fruit();
        Thread firstLister = new Thread(new Runnable() {
            @Override
            public void run() {
                sharedFruit.list1();
            }
        }, "thread-1");
        Thread secondLister = new Thread(new Runnable() {
            @Override
            public void run() {
                sharedFruit.list2();
            }
        }, "thread-2");
        // Start order does not guarantee execution order.
        firstLister.start();
        secondLister.start();
    }
}
| apache-2.0 |
makersoft/makereap | modules/test/src/main/java/org/makersoft/test/fixture/ConstructImport.java | 1144 | /*
* @(#)ConstructImport.java 2013-5-7 下午23:33:33
*
* Copyright (c) 2011-2013 Makersoft.org all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
*
*/
package org.makersoft.test.fixture;
import java.util.List;
import org.makersoft.core.collect.Lists;
import org.yaml.snakeyaml.constructor.AbstractConstruct;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.ScalarNode;
/**
 * SnakeYAML construct that handles "import" scalar nodes in fixture files.
 * Each distinct location is reported to the {@link Listener} exactly once;
 * duplicate imports of the same location are ignored.
 */
class ConstructImport extends AbstractConstruct {
// Callback notified when a new (not-yet-seen) location is imported.
private final Listener listener;
// Locations already imported; linear scan is fine for typical import counts.
private final List<String> importedPackages = Lists.newArrayList();
public ConstructImport(Listener listener) {
this.listener = listener;
}
/**
 * Reads the import location from the scalar node and, if unseen, records
 * it and notifies the listener. Always returns null — the import produces
 * no YAML object of its own.
 */
@Override
public Object construct(Node node) {
String location = ((ScalarNode) node).getValue();
if(!importedPackages.contains(location)) {
importedPackages.add(location);
listener.onLoadEntities(location);
}
return null;
}
/** Receives each newly imported fixture location. */
public interface Listener{
void onLoadEntities(String fileLocation);
}
}
| apache-2.0 |
codeprimate-software/cp-elements | src/test/java/org/cp/elements/io/InverseFileFilterTests.java | 3055 | /*
* Copyright 2011-Present Author or Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cp.elements.io;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileFilter;
import org.cp.elements.test.TestUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
/**
* Unit Tests for {@link InverseFileFilter}.
*
* @author John J. Blum
* @see java.io.File
* @see java.io.FileFilter
* @see org.junit.Test
* @see org.junit.runner.RunWith
* @see org.mockito.Mockito
* @see org.mockito.junit.MockitoJUnitRunner
* @see org.cp.elements.io.InverseFileFilter
* @since 1.0.0
*/
@RunWith(MockitoJUnitRunner.class)
public class InverseFileFilterTests {
@Mock
private File mockFile;
@Mock
private FileFilter mockFileFilter;
/** The constructor stores the delegate filter it inverts. */
@Test
public void constructInverseFileFilter() {
InverseFileFilter inverseFileFilter = new InverseFileFilter(this.mockFileFilter);
assertThat(inverseFileFilter.getDelegate()).isSameAs(this.mockFileFilter);
}
/** A null delegate is rejected with an IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void constructInverseFileFilterWithNullDelegate() {
TestUtils.doIllegalArgumentExceptionThrowingOperation(() -> new InverseFileFilter(null),
() -> "FileFilter must not be null");
}
/** A file the delegate rejects is accepted by the inverse filter. */
@Test
public void accept() {
InverseFileFilter inverseFileFilter = new InverseFileFilter(this.mockFileFilter);
when(this.mockFileFilter.accept(any(File.class))).thenReturn(false);
assertThat(inverseFileFilter.accept(this.mockFile)).isTrue();
verify(this.mockFileFilter, times(1)).accept(eq(this.mockFile));
verifyNoMoreInteractions(this.mockFileFilter);
verifyNoInteractions(this.mockFile);
}
/** A file the delegate accepts is rejected by the inverse filter. */
@Test
public void reject() {
InverseFileFilter inverseFileFilter = new InverseFileFilter(this.mockFileFilter);
when(this.mockFileFilter.accept(any(File.class))).thenReturn(true);
assertThat(inverseFileFilter.accept(this.mockFile)).isFalse();
verify(this.mockFileFilter, times(1)).accept(eq(this.mockFile));
verifyNoMoreInteractions(this.mockFileFilter);
verifyNoInteractions(this.mockFile);
}
}
| apache-2.0 |
ZikFat/wrml-prototype | src/main/java/org/wrml/core/service/ServiceEvent.java | 1279 | /**
* Copyright (C) 2011 WRML.org <mark@wrml.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wrml.core.service;
import java.net.URI;
import org.wrml.core.Model;
import org.wrml.core.util.observable.MapEvent;
import org.wrml.core.util.observable.ObservableMap;
/**
 * Event raised by a WRML service about a model keyed by its resource URI;
 * specializes {@link MapEvent} for {@code ObservableMap<URI, M>} maps.
 *
 * @param <M> the model type carried by the event
 */
public class ServiceEvent<M extends Model> extends MapEvent {
private static final long serialVersionUID = 1L;
/** Event with no specific resource — refers to the map as a whole. */
public ServiceEvent(ObservableMap<URI, M> map) {
super(map);
}
/**
 * Event describing a value change for a resource.
 * NOTE(review): the {@code cancelable} parameter is never forwarded to the
 * superclass and is silently dropped — confirm against the MapEvent API
 * whether a cancelable-aware constructor should be called here.
 */
public ServiceEvent(ObservableMap<URI, M> map, boolean cancelable, URI resourceId, M newValue, M oldValue) {
super(map, resourceId, newValue, oldValue);
}
/** Event referring to a single resource without value details. */
public ServiceEvent(ObservableMap<URI, M> map, URI resourceId) {
super(map, resourceId);
}
}
| apache-2.0 |
phrocker/accumulo-1 | core/src/test/java/org/apache/accumulo/core/clientImpl/TabletLocatorImplTest.java | 52332 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.clientImpl.TabletLocator.TabletLocation;
import org.apache.accumulo.core.clientImpl.TabletLocator.TabletLocations;
import org.apache.accumulo.core.clientImpl.TabletLocator.TabletServerMutations;
import org.apache.accumulo.core.clientImpl.TabletLocatorImpl.TabletLocationObtainer;
import org.apache.accumulo.core.clientImpl.TabletLocatorImpl.TabletServerLockChecker;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.metadata.MetadataLocationObtainer;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.RootTable;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.CurrentLocationColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.TabletColumnFamily;
import org.apache.hadoop.io.Text;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
public class TabletLocatorImplTest {
// Root tablet extent, and the single metadata tablet extent that follows it;
// these form the fixed upper levels of the locator hierarchy used by the tests.
private static final KeyExtent RTE = RootTable.EXTENT;
private static final KeyExtent MTE = new KeyExtent(MetadataTable.ID, null, RTE.endRow());
/** Shorthand factory: KeyExtent for table id {@code t} with the given end row and previous end row (null stays null). */
static KeyExtent nke(String t, String er, String per) {
  Text endRow = er == null ? null : new Text(er);
  Text prevEndRow = per == null ? null : new Text(per);
  return new KeyExtent(TableId.of(t), endRow, prevEndRow);
}
/** Shorthand factory: Range with explicit start/end inclusivity; null strings become unbounded ends. */
static Range nr(String k1, boolean si, String k2, boolean ei) {
  Text start = k1 == null ? null : new Text(k1);
  Text end = k2 == null ? null : new Text(k2);
  return new Range(start, si, end, ei);
}
/** Shorthand factory: inclusive Range between two row strings; null means unbounded. */
static Range nr(String k1, String k2) {
  Text start = k1 == null ? null : new Text(k1);
  Text end = k2 == null ? null : new Text(k2);
  return new Range(start, end);
}
/** Shorthand: fixed-size list view over the supplied ranges. */
static List<Range> nrl(Range... ranges) {
  return Arrays.asList(ranges);
}
/** Varargs passthrough: returns the caller's arguments as an Object[]. */
static Object[] nol(Object... objs) {
  return objs;
}
/**
 * Builds an expected binning structure from alternating pairs:
 * (server-location String, Object[] of alternating KeyExtent / List&lt;Range&gt;).
 */
@SuppressWarnings("unchecked")
static Map<String,Map<KeyExtent,List<Range>>> createExpectedBinnings(Object... data) {
  Map<String,Map<KeyExtent,List<Range>>> result = new HashMap<>();
  int i = 0;
  while (i < data.length) {
    String location = (String) data[i];
    Object[] perLocation = (Object[]) data[i + 1];
    i += 2;
    HashMap<KeyExtent,List<Range>> extentRanges = new HashMap<>();
    for (int j = 0; j < perLocation.length; j += 2) {
      extentRanges.put((KeyExtent) perLocation[j], (List<Range>) perLocation[j + 1]);
    }
    result.put(location, extentRanges);
  }
  return result;
}
/**
 * Builds an extent-keyed metadata cache from alternating (KeyExtent, location String)
 * pairs; every entry gets the fixed session id "1".
 */
static TreeMap<KeyExtent,TabletLocation> createMetaCacheKE(Object... data) {
  TreeMap<KeyExtent,TabletLocation> cache = new TreeMap<>();
  for (int i = 0; i < data.length; i += 2) {
    KeyExtent extent = (KeyExtent) data[i];
    String location = (String) data[i + 1];
    cache.put(extent, new TabletLocation(extent, location, "1"));
  }
  return cache;
}
/**
 * Builds an end-row-keyed metadata cache from the same alternating pair format as
 * {@link #createMetaCacheKE}; a null end row maps to TabletLocatorImpl.MAX_TEXT.
 */
static TreeMap<Text,TabletLocation> createMetaCache(Object... data) {
  TreeMap<KeyExtent,TabletLocation> byExtent = createMetaCacheKE(data);
  TreeMap<Text,TabletLocation> byEndRow = new TreeMap<>(TabletLocatorImpl.END_ROW_COMPARATOR);
  for (Entry<KeyExtent,TabletLocation> entry : byExtent.entrySet()) {
    Text endRow = entry.getKey().endRow();
    byEndRow.put(endRow == null ? TabletLocatorImpl.MAX_TEXT : endRow, entry.getValue());
  }
  return byEndRow;
}
/**
 * Wires up a two-level locator hierarchy (root -> metadata -> table) backed by the
 * in-memory TServers fixture, registers the given (extent, location) pairs in the
 * metadata tablet, and returns the table-level locator under test.
 */
static TabletLocatorImpl createLocators(TServers tservers, String rootTabLoc, String metaTabLoc,
    String table, TabletServerLockChecker tslc, Object... data) {
  TreeMap<KeyExtent,TabletLocation> metaEntries = createMetaCacheKE(data);
  TestTabletLocationObtainer obtainer = new TestTabletLocationObtainer(tservers);
  RootTabletLocator rootLocator = new TestRootTabletLocator();
  TabletLocatorImpl metaLocator =
      new TabletLocatorImpl(MetadataTable.ID, rootLocator, obtainer, new YesLockChecker());
  TabletLocatorImpl tableLocator =
      new TabletLocatorImpl(TableId.of(table), metaLocator, obtainer, tslc);
  // Root tablet points at the metadata tablet's server.
  setLocation(tservers, rootTabLoc, RTE, MTE, metaTabLoc);
  for (Entry<KeyExtent,TabletLocation> entry : metaEntries.entrySet()) {
    setLocation(tservers, metaTabLoc, MTE, entry.getKey(), entry.getValue().tablet_location);
  }
  return tableLocator;
}
/** Convenience overload: lock checks always succeed. */
static TabletLocatorImpl createLocators(TServers tservers, String rootTabLoc, String metaTabLoc,
    String table, Object... data) {
  return createLocators(tservers, rootTabLoc, metaTabLoc, table, new YesLockChecker(), data);
}
/** Convenience overload: fresh fixture with root on "tserver1" and metadata on "tserver2". */
static TabletLocatorImpl createLocators(String table, Object... data) {
  return createLocators(new TServers(), "tserver1", "tserver2", table, data);
}
// Mocked client context shared by every test; the root tablet is reported to be
// on "tserver1" and the instance id is fixed to "instance1".
private ClientContext context;
@Before
public void setUp() {
context = EasyMock.createMock(ClientContext.class);
EasyMock.expect(context.getRootTabletLocation()).andReturn("tserver1").anyTimes();
EasyMock.expect(context.getInstanceID()).andReturn("instance1").anyTimes();
replay(context);
}
/** Bin the ranges and verify against expected, with no failures expected. */
private void runTest(List<Range> ranges, TabletLocatorImpl tab1TabletCache,
    Map<String,Map<KeyExtent,List<Range>>> expected) throws Exception {
  runTest(ranges, tab1TabletCache, expected, Collections.emptyList());
}
/**
 * Bin the ranges through the locator, assert the binning matches expected, and
 * assert the set of failed ranges equals efailures (order-insensitive).
 */
private void runTest(List<Range> ranges, TabletLocatorImpl tab1TabletCache,
    Map<String,Map<KeyExtent,List<Range>>> expected, List<Range> efailures) throws Exception {
  Map<String,Map<KeyExtent,List<Range>>> actualBinned = new HashMap<>();
  List<Range> actualFailures = tab1TabletCache.binRanges(context, ranges, actualBinned);
  assertEquals(expected, actualBinned);
  // Failure order is unspecified, so compare as sets.
  assertEquals(new HashSet<>(efailures), new HashSet<>(actualFailures));
}
/** Shorthand: mutable HashSet over the supplied extents. */
static Set<KeyExtent> nkes(KeyExtent... extents) {
  return new HashSet<>(Arrays.asList(extents));
}
/**
 * Apply removeOverlapping(remove) to a copy of the cache and assert exactly the
 * expected extents remain. Copying lets callers reuse the same cache across tests.
 */
static void runTest(TreeMap<Text,TabletLocation> mc, KeyExtent remove, Set<KeyExtent> expected) {
  TreeMap<Text,TabletLocation> copy = new TreeMap<>(mc);
  TabletLocatorImpl.removeOverlapping(copy, remove);
  Set<KeyExtent> remaining = new HashSet<>();
  for (TabletLocation location : copy.values()) {
    remaining.add(location.tablet_extent);
  }
  assertEquals(expected, remaining);
}
/**
 * Shorthand mutation factory: each data element is "family:qualifier=value" and
 * becomes one column update on the given row.
 */
static Mutation nm(String row, String... data) {
  Mutation mutation = new Mutation(new Text(row));
  for (String spec : data) {
    String[] columnAndValue = spec.split("=");
    String[] familyAndQualifier = columnAndValue[0].split(":");
    mutation.put(familyAndQualifier[0], familyAndQualifier[1], columnAndValue[1]);
  }
  return mutation;
}
/** Shorthand: fixed-size list view over the supplied mutations. */
static List<Mutation> nml(Mutation... ma) {
  return Arrays.asList(ma);
}
/**
 * Bin the mutations through the locator, verify the binning matches emb, and
 * assert that exactly the expected rows failed to bin (order-insensitive).
 */
private void runTest(TabletLocatorImpl metaCache, List<Mutation> ml,
    Map<String,Map<KeyExtent,List<String>>> emb, String... efailures) throws Exception {
  Map<String,TabletServerMutations<Mutation>> binnedMutations = new HashMap<>();
  List<Mutation> afailures = new ArrayList<>();
  metaCache.binMutations(context, ml, binnedMutations, afailures);
  verify(emb, binnedMutations);
  ArrayList<String> afs = new ArrayList<>();
  ArrayList<String> efs = new ArrayList<>(Arrays.asList(efailures));
  for (Mutation mutation : afailures) {
    // Decode the row bytes explicitly as UTF-8: the bare new String(byte[])
    // constructor uses the platform default charset, which is not portable.
    afs.add(new String(mutation.getRow(), StandardCharsets.UTF_8));
  }
  Collections.sort(afs);
  Collections.sort(efs);
  assertEquals(efs, afs);
}
/**
 * Asserts that the actual binned mutations exactly match the expected
 * server -> extent -> rows structure, ignoring row order within an extent.
 */
private void verify(Map<String,Map<KeyExtent,List<String>>> expected,
    Map<String,TabletServerMutations<Mutation>> actual) {
  assertEquals(expected.keySet(), actual.keySet());
  for (String server : actual.keySet()) {
    TabletServerMutations<Mutation> atb = actual.get(server);
    Map<KeyExtent,List<String>> etb = expected.get(server);
    assertEquals(etb.keySet(), atb.getMutations().keySet());
    for (KeyExtent ke : etb.keySet()) {
      ArrayList<String> eRows = new ArrayList<>(etb.get(ke));
      ArrayList<String> aRows = new ArrayList<>();
      for (Mutation m : atb.getMutations().get(ke)) {
        // Decode rows explicitly as UTF-8; new String(byte[]) would use the
        // platform default charset and could differ across environments.
        aRows.add(new String(m.getRow(), StandardCharsets.UTF_8));
      }
      Collections.sort(eRows);
      Collections.sort(aRows);
      assertEquals(eRows, aRows);
    }
  }
}
/**
 * Creates an expected-mutation-binning structure. Each ol is {row, server, extent};
 * the result maps server -> extent -> list of rows.
 */
static Map<String,Map<KeyExtent,List<String>>> cemb(Object[]... ols) {
  Map<String,Map<KeyExtent,List<String>>> emb = new HashMap<>();
  for (Object[] entry : ols) {
    String row = (String) entry[0];
    String server = (String) entry[1];
    KeyExtent extent = (KeyExtent) entry[2];
    Map<KeyExtent,List<String>> byExtent = emb.computeIfAbsent(server, s -> new HashMap<>());
    byExtent.computeIfAbsent(extent, e -> new ArrayList<>()).add(row);
  }
  return emb;
}
// Exercises TabletLocatorImpl.removeOverlapping against caches that cover a whole
// table; each runTest call asserts which extents survive removal of the overlap.
@Test
public void testRemoveOverlapping1() {
// Single tablet covering everything: any overlapping removal empties the cache.
TreeMap<Text,TabletLocation> mc = createMetaCache(nke("0", null, null), "l1");
runTest(mc, nke("0", "a", null), nkes());
runTest(mc, nke("0", null, null), nkes());
runTest(mc, nke("0", null, "a"), nkes());
// Three tablets splitting the row space at "g" and "r".
mc = createMetaCache(nke("0", "g", null), "l1", nke("0", "r", "g"), "l1", nke("0", null, "r"),
"l1");
runTest(mc, nke("0", null, null), nkes());
runTest(mc, nke("0", "a", null), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "g", null), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "h", null), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "r", null), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "s", null), nkes());
runTest(mc, nke("0", "b", "a"), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "g", "a"), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "h", "a"), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "r", "a"), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "s", "a"), nkes());
runTest(mc, nke("0", "h", "g"), nkes(nke("0", "g", null), nke("0", null, "r")));
runTest(mc, nke("0", "r", "g"), nkes(nke("0", "g", null), nke("0", null, "r")));
runTest(mc, nke("0", "s", "g"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "i", "h"), nkes(nke("0", "g", null), nke("0", null, "r")));
runTest(mc, nke("0", "r", "h"), nkes(nke("0", "g", null), nke("0", null, "r")));
runTest(mc, nke("0", "s", "h"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "z", "f"), nkes());
runTest(mc, nke("0", "z", "g"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "z", "q"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "z", "r"), nkes(nke("0", "g", null), nke("0", "r", "g")));
runTest(mc, nke("0", "z", "s"), nkes(nke("0", "g", null), nke("0", "r", "g")));
// Removals with an unbounded (null) end row.
runTest(mc, nke("0", null, "f"), nkes());
runTest(mc, nke("0", null, "g"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", null, "q"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", null, "r"), nkes(nke("0", "g", null), nke("0", "r", "g")));
runTest(mc, nke("0", null, "s"), nkes(nke("0", "g", null), nke("0", "r", "g")));
}
// Exercises removeOverlapping when the cache holds only a subset of a table's
// tablets (a "hole" in the cached coverage).
@Test
public void testRemoveOverlapping2() {
// test removes when cache does not contain all tablets in a table
// Cache missing the first tablet (rows <= "g").
TreeMap<Text,TabletLocation> mc =
createMetaCache(nke("0", "r", "g"), "l1", nke("0", null, "r"), "l1");
runTest(mc, nke("0", "a", null), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "g", null), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "h", null), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "r", null), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "s", null), nkes());
runTest(mc, nke("0", "b", "a"), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "g", "a"), nkes(nke("0", "r", "g"), nke("0", null, "r")));
runTest(mc, nke("0", "h", "a"), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "r", "a"), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "s", "a"), nkes());
runTest(mc, nke("0", "h", "g"), nkes(nke("0", null, "r")));
// Cache missing the middle tablet ("g" < rows <= "r").
mc = createMetaCache(nke("0", "g", null), "l1", nke("0", null, "r"), "l1");
runTest(mc, nke("0", "h", "g"), nkes(nke("0", "g", null), nke("0", null, "r")));
runTest(mc, nke("0", "h", "a"), nkes(nke("0", null, "r")));
runTest(mc, nke("0", "s", "g"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "s", "a"), nkes());
// Cache missing the last tablet (rows > "r").
mc = createMetaCache(nke("0", "g", null), "l1", nke("0", "r", "g"), "l1");
runTest(mc, nke("0", "z", "f"), nkes());
runTest(mc, nke("0", "z", "g"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "z", "q"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", "z", "r"), nkes(nke("0", "g", null), nke("0", "r", "g")));
runTest(mc, nke("0", "z", "s"), nkes(nke("0", "g", null), nke("0", "r", "g")));
runTest(mc, nke("0", null, "f"), nkes());
runTest(mc, nke("0", null, "g"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", null, "q"), nkes(nke("0", "g", null)));
runTest(mc, nke("0", null, "r"), nkes(nke("0", "g", null), nke("0", "r", "g")));
runTest(mc, nke("0", null, "s"), nkes(nke("0", "g", null), nke("0", "r", "g")));
}
// In-memory stand-in for a fleet of tablet servers:
// server name -> hosted tablet extent -> that tablet's (metadata) key/value data.
static class TServers {
private final Map<String,Map<KeyExtent,SortedMap<Key,Value>>> tservers = new HashMap<>();
}
// TabletLocationObtainer backed by the TServers fixture instead of RPCs.
// Missing servers/tablets trigger the same parent-cache invalidations the real
// obtainer would perform, so locator error paths can be exercised in tests.
static class TestTabletLocationObtainer implements TabletLocationObtainer {
private final Map<String,Map<KeyExtent,SortedMap<Key,Value>>> tservers;
TestTabletLocationObtainer(TServers tservers) {
this.tservers = tservers.tservers;
}
// Looks up locations for [row, stopRow] in the metadata tablet src.
// Returns null (after invalidating the parent cache) when the server or
// tablet is absent, mimicking a dead/moved server.
@Override
public TabletLocations lookupTablet(ClientContext context, TabletLocation src, Text row,
Text stopRow, TabletLocator parent) {
// System.out.println("lookupTablet("+src+","+row+","+stopRow+","+ parent+")");
// System.out.println(tservers);
Map<KeyExtent,SortedMap<Key,Value>> tablets = tservers.get(src.tablet_location);
if (tablets == null) {
parent.invalidateCache(context, src.tablet_location);
return null;
}
SortedMap<Key,Value> tabletData = tablets.get(src.tablet_extent);
if (tabletData == null) {
parent.invalidateCache(src.tablet_extent);
return null;
}
// the following clip is done on a tablet, do it here to see if it throws exceptions
src.tablet_extent.toDataRange().clip(new Range(row, true, stopRow, true));
// Slice the tablet's data to rows in [row, stopRow] and decode the entries.
Key startKey = new Key(row);
Key stopKey = new Key(stopRow).followingKey(PartialKey.ROW);
SortedMap<Key,Value> results = tabletData.tailMap(startKey).headMap(stopKey);
return MetadataLocationObtainer.getMetadataLocationEntries(results);
}
// Batch lookup: collects entries for every (extent, ranges) pair hosted on
// tserver; extents not present are invalidated in the parent cache.
@Override
public List<TabletLocation> lookupTablets(ClientContext context, String tserver,
Map<KeyExtent,List<Range>> map, TabletLocator parent) {
ArrayList<TabletLocation> list = new ArrayList<>();
Map<KeyExtent,SortedMap<Key,Value>> tablets = tservers.get(tserver);
if (tablets == null) {
parent.invalidateCache(context, tserver);
return list;
}
TreeMap<Key,Value> results = new TreeMap<>();
Set<Entry<KeyExtent,List<Range>>> es = map.entrySet();
List<KeyExtent> failures = new ArrayList<>();
for (Entry<KeyExtent,List<Range>> entry : es) {
SortedMap<Key,Value> tabletData = tablets.get(entry.getKey());
if (tabletData == null) {
failures.add(entry.getKey());
continue;
}
List<Range> ranges = entry.getValue();
for (Range range : ranges) {
SortedMap<Key,Value> tm;
if (range.getStartKey() == null)
tm = tabletData;
else
tm = tabletData.tailMap(range.getStartKey());
// Scan forward until past the range's end, keeping contained keys.
for (Entry<Key,Value> de : tm.entrySet()) {
if (range.afterEndKey(de.getKey())) {
break;
}
if (range.contains(de.getKey())) {
results.put(de.getKey(), de.getValue());
}
}
}
}
if (!failures.isEmpty())
parent.invalidateCache(failures);
return MetadataLocationObtainer.getMetadataLocationEntries(results).getLocations();
}
}
// Lock checker that treats every tablet-server lock as held, letting tests
// bypass real lock verification; cache invalidation is a no-op.
static class YesLockChecker implements TabletServerLockChecker {
@Override
public boolean isLockHeld(String tserver, String session) {
return true;
}
@Override
public void invalidateCache(String server) {}
}
// Root tablet locator that asks the (mocked) ClientContext for the root tablet's
// location instead of consulting ZooKeeper; uses a fixed session id "1".
static class TestRootTabletLocator extends RootTabletLocator {
TestRootTabletLocator() {
super(new YesLockChecker());
}
@Override
protected TabletLocation getRootTabletLocation(ClientContext context) {
return new TabletLocation(RootTable.EXTENT, context.getRootTabletLocation(), "1");
}
// No-op: tests control server availability through the TServers fixture.
@Override
public void invalidateCache(ClientContext context, String server) {}
}
/**
 * Registers an empty tablet for the given server in the fixture; fails fast if a
 * non-empty tablet with the same extent already exists there.
 */
static void createEmptyTablet(TServers tservers, String server, KeyExtent tablet) {
  SortedMap<Key,Value> tabletData = tservers.tservers
      .computeIfAbsent(server, s -> new HashMap<>())
      .computeIfAbsent(tablet, t -> new TreeMap<>());
  if (!tabletData.isEmpty()) {
    throw new RuntimeException("Asked for empty tablet, but non empty tablet exists");
  }
}
/**
 * Removes the current-location entry (for the given session instance) of extent ke
 * from the metadata tablet hosted on server; silently does nothing if the server
 * or tablet is absent from the fixture.
 */
static void clearLocation(TServers tservers, String server, KeyExtent tablet, KeyExtent ke,
    String instance) {
  Map<KeyExtent,SortedMap<Key,Value>> tablets = tservers.tservers.get(server);
  SortedMap<Key,Value> tabletData = tablets == null ? null : tablets.get(tablet);
  if (tabletData != null) {
    Key locationKey = new Key(ke.toMetaRow(), CurrentLocationColumnFamily.NAME, new Text(instance));
    tabletData.remove(locationKey);
  }
}
/**
 * Writes the metadata entries describing extent ke into the metadata tablet hosted
 * on server: always the prev-row column, plus a current-location column when
 * location is non-null (a null instance is treated as the empty session id).
 */
static void setLocation(TServers tservers, String server, KeyExtent tablet, KeyExtent ke,
    String location, String instance) {
  SortedMap<Key,Value> tabletData = tservers.tservers
      .computeIfAbsent(server, s -> new HashMap<>())
      .computeIfAbsent(tablet, t -> new TreeMap<>());
  Text metaRow = ke.toMetaRow();
  if (location != null) {
    String session = instance == null ? "" : instance;
    tabletData.put(new Key(metaRow, CurrentLocationColumnFamily.NAME, new Text(session)),
        new Value(location));
  }
  Key prevRowKey = new Key(metaRow, TabletColumnFamily.PREV_ROW_COLUMN.getColumnFamily(),
      TabletColumnFamily.PREV_ROW_COLUMN.getColumnQualifier());
  tabletData.put(prevRowKey, TabletColumnFamily.encodePrevEndRow(ke.prevEndRow()));
}
/** Convenience overload: set a location with the empty session id. */
static void setLocation(TServers tservers, String server, KeyExtent tablet, KeyExtent ke,
    String location) {
  setLocation(tservers, server, tablet, ke, location, "");
}
/** Drops the server and all tablets it hosts from the fixture (simulates a dead server). */
static void deleteServer(TServers tservers, String server) {
  tservers.tservers.remove(server);
}
/**
 * Locates the tablet for row and asserts the result: a null expected extent means
 * the lookup must fail; otherwise both the extent and hosting server must match.
 */
private void locateTabletTest(TabletLocatorImpl cache, String row, boolean skipRow,
    KeyExtent expected, String server) throws Exception {
  TabletLocation tl = cache.locateTablet(context, new Text(row), skipRow, false);
  if (expected != null) {
    assertNotNull(tl);
    assertEquals(server, tl.tablet_location);
    assertEquals(expected, tl.tablet_extent);
  } else {
    if (tl != null) {
      // Print the unexpected location before the assert fires, to aid debugging.
      System.out.println("tl = " + tl);
    }
    assertNull(tl);
  }
}
/** Convenience overload: locate without skipping the row itself. */
private void locateTabletTest(TabletLocatorImpl cache, String row, KeyExtent expected,
    String server) throws Exception {
  locateTabletTest(cache, row, false, expected, server);
}
// End-to-end scenario test of the locator cache: builds the hierarchy by hand,
// then walks it through splits, migrations, server failures, metadata-table
// splits, and metadata holes, asserting lookups after each step.
@Test
public void test1() throws Exception {
TServers tservers = new TServers();
TestTabletLocationObtainer ttlo = new TestTabletLocationObtainer(tservers);
RootTabletLocator rtl = new TestRootTabletLocator();
TabletLocatorImpl rootTabletCache =
new TabletLocatorImpl(MetadataTable.ID, rtl, ttlo, new YesLockChecker());
TabletLocatorImpl tab1TabletCache =
new TabletLocatorImpl(TableId.of("tab1"), rootTabletCache, ttlo, new YesLockChecker());
// Nothing registered yet: lookups fail.
locateTabletTest(tab1TabletCache, "r1", null, null);
KeyExtent tab1e = nke("tab1", null, null);
setLocation(tservers, "tserver1", RTE, MTE, "tserver2");
setLocation(tservers, "tserver2", MTE, tab1e, "tserver3");
locateTabletTest(tab1TabletCache, "r1", tab1e, "tserver3");
locateTabletTest(tab1TabletCache, "r2", tab1e, "tserver3");
// simulate a split
KeyExtent tab1e1 = nke("tab1", "g", null);
KeyExtent tab1e2 = nke("tab1", null, "g");
setLocation(tservers, "tserver2", MTE, tab1e1, "tserver4");
setLocation(tservers, "tserver2", MTE, tab1e2, "tserver5");
// Stale cache still answers with the pre-split extent until invalidated.
locateTabletTest(tab1TabletCache, "r1", tab1e, "tserver3");
tab1TabletCache.invalidateCache(tab1e);
locateTabletTest(tab1TabletCache, "r1", tab1e2, "tserver5");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver4");
locateTabletTest(tab1TabletCache, "a", true, tab1e1, "tserver4");
locateTabletTest(tab1TabletCache, "g", tab1e1, "tserver4");
locateTabletTest(tab1TabletCache, "g", true, tab1e2, "tserver5");
// simulate a partial split
KeyExtent tab1e22 = nke("tab1", null, "m");
setLocation(tservers, "tserver2", MTE, tab1e22, "tserver6");
locateTabletTest(tab1TabletCache, "r1", tab1e2, "tserver5");
tab1TabletCache.invalidateCache(tab1e2);
locateTabletTest(tab1TabletCache, "r1", tab1e22, "tserver6");
// "h" falls into the (g, m] hole left by the partial split: no location.
locateTabletTest(tab1TabletCache, "h", null, null);
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver4");
KeyExtent tab1e21 = nke("tab1", "m", "g");
setLocation(tservers, "tserver2", MTE, tab1e21, "tserver7");
locateTabletTest(tab1TabletCache, "r1", tab1e22, "tserver6");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver7");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver4");
// simulate a migration
setLocation(tservers, "tserver2", MTE, tab1e21, "tserver8");
tab1TabletCache.invalidateCache(tab1e21);
locateTabletTest(tab1TabletCache, "r1", tab1e22, "tserver6");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver8");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver4");
// simulate a server failure
setLocation(tservers, "tserver2", MTE, tab1e21, "tserver9");
tab1TabletCache.invalidateCache(context, "tserver8");
locateTabletTest(tab1TabletCache, "r1", tab1e22, "tserver6");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver9");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver4");
// simulate all servers failing
deleteServer(tservers, "tserver1");
deleteServer(tservers, "tserver2");
tab1TabletCache.invalidateCache(context, "tserver4");
tab1TabletCache.invalidateCache(context, "tserver6");
tab1TabletCache.invalidateCache(context, "tserver9");
locateTabletTest(tab1TabletCache, "r1", null, null);
locateTabletTest(tab1TabletCache, "h", null, null);
locateTabletTest(tab1TabletCache, "a", null, null);
// Re-mock the context so the root tablet now reports on "tserver4".
EasyMock.verify(context);
context = EasyMock.createMock(ClientContext.class);
EasyMock.expect(context.getInstanceID()).andReturn("instance1").anyTimes();
EasyMock.expect(context.getRootTabletLocation()).andReturn("tserver4").anyTimes();
replay(context);
setLocation(tservers, "tserver4", RTE, MTE, "tserver5");
setLocation(tservers, "tserver5", MTE, tab1e1, "tserver1");
setLocation(tservers, "tserver5", MTE, tab1e21, "tserver2");
setLocation(tservers, "tserver5", MTE, tab1e22, "tserver3");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver1");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver2");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver3");
// simulate the metadata table splitting
KeyExtent mte1 = new KeyExtent(MetadataTable.ID, tab1e21.toMetaRow(), RTE.endRow());
KeyExtent mte2 = new KeyExtent(MetadataTable.ID, null, tab1e21.toMetaRow());
setLocation(tservers, "tserver4", RTE, mte1, "tserver5");
setLocation(tservers, "tserver4", RTE, mte2, "tserver6");
deleteServer(tservers, "tserver5");
setLocation(tservers, "tserver5", mte1, tab1e1, "tserver7");
setLocation(tservers, "tserver5", mte1, tab1e21, "tserver8");
setLocation(tservers, "tserver6", mte2, tab1e22, "tserver9");
tab1TabletCache.invalidateCache(tab1e1);
tab1TabletCache.invalidateCache(tab1e21);
tab1TabletCache.invalidateCache(tab1e22);
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver7");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver8");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver9");
// simulate metadata and regular server down and the reassigned
deleteServer(tservers, "tserver5");
tab1TabletCache.invalidateCache(context, "tserver7");
locateTabletTest(tab1TabletCache, "a", null, null);
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver8");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver9");
setLocation(tservers, "tserver4", RTE, mte1, "tserver10");
setLocation(tservers, "tserver10", mte1, tab1e1, "tserver7");
setLocation(tservers, "tserver10", mte1, tab1e21, "tserver8");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver7");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver8");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver9");
tab1TabletCache.invalidateCache(context, "tserver7");
setLocation(tservers, "tserver10", mte1, tab1e1, "tserver2");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver2");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver8");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver9");
// simulate a hole in the metadata, caused by a partial split
KeyExtent mte11 = new KeyExtent(MetadataTable.ID, tab1e1.toMetaRow(), RTE.endRow());
KeyExtent mte12 = new KeyExtent(MetadataTable.ID, tab1e21.toMetaRow(), tab1e1.toMetaRow());
deleteServer(tservers, "tserver10");
setLocation(tservers, "tserver4", RTE, mte12, "tserver10");
setLocation(tservers, "tserver10", mte12, tab1e21, "tserver12");
// at this point should be no table1 metadata
tab1TabletCache.invalidateCache(tab1e1);
tab1TabletCache.invalidateCache(tab1e21);
locateTabletTest(tab1TabletCache, "a", null, null);
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver12");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver9");
setLocation(tservers, "tserver4", RTE, mte11, "tserver5");
setLocation(tservers, "tserver5", mte11, tab1e1, "tserver13");
locateTabletTest(tab1TabletCache, "a", tab1e1, "tserver13");
locateTabletTest(tab1TabletCache, "h", tab1e21, "tserver12");
locateTabletTest(tab1TabletCache, "r", tab1e22, "tserver9");
}
// Verifies that a metadata entry without a location column ("null" location)
// makes the lookup fail until a location is later written for that extent.
@Test
public void test2() throws Exception {
TServers tservers = new TServers();
TabletLocatorImpl metaCache = createLocators(tservers, "tserver1", "tserver2", "foo");
KeyExtent ke1 = nke("foo", "m", null);
KeyExtent ke2 = nke("foo", null, "m");
// ke1 gets only a prev-row entry (no location); ke2 gets location "L1".
setLocation(tservers, "tserver2", MTE, ke1, null);
setLocation(tservers, "tserver2", MTE, ke2, "L1");
locateTabletTest(metaCache, "a", null, null);
locateTabletTest(metaCache, "r", ke2, "L1");
setLocation(tservers, "tserver2", MTE, ke1, "L2");
locateTabletTest(metaCache, "a", ke1, "L2");
locateTabletTest(metaCache, "r", ke2, "L1");
}
// binRanges with a single tablet covering the whole table: every range, bounded
// or not, bins to that tablet's location.
@Test
public void testBinRanges1() throws Exception {
TabletLocatorImpl metaCache = createLocators("foo", nke("foo", null, null), "l1");
List<Range> ranges = nrl(nr(null, null));
Map<String,Map<KeyExtent,List<Range>>> expected =
createExpectedBinnings("l1", nol(nke("foo", null, null), nrl(nr(null, null)))
);
runTest(ranges, metaCache, expected);
ranges = nrl(nr("a", null));
expected = createExpectedBinnings("l1", nol(nke("foo", null, null), nrl(nr("a", null)))
);
runTest(ranges, metaCache, expected);
ranges = nrl(nr(null, "b"));
expected = createExpectedBinnings("l1", nol(nke("foo", null, null), nrl(nr(null, "b")))
);
runTest(ranges, metaCache, expected);
}
// binRanges with two tablets: an unbounded range must bin to both locations.
@Test
public void testBinRanges2() throws Exception {
List<Range> ranges = nrl(nr(null, null));
TabletLocatorImpl metaCache =
createLocators("foo", nke("foo", "g", null), "l1", nke("foo", null, "g"), "l2");
Map<String,Map<KeyExtent,List<Range>>> expected =
createExpectedBinnings("l1", nol(nke("foo", "g", null), nrl(nr(null, null))), "l2",
nol(nke("foo", null, "g"), nrl(nr(null, null)))
);
runTest(ranges, metaCache, expected);
}
// binRanges across three tablets (split at "g" and "m"), including ranges whose
// start row coincides with a tablet end row under each inclusivity combination.
@Test
public void testBinRanges3() throws Exception {
// test with three tablets and a range that covers the whole table
List<Range> ranges = nrl(nr(null, null));
TabletLocatorImpl metaCache = createLocators("foo", nke("foo", "g", null), "l1",
nke("foo", "m", "g"), "l2", nke("foo", null, "m"), "l2");
Map<String,Map<KeyExtent,List<Range>>> expected = createExpectedBinnings("l1",
nol(nke("foo", "g", null), nrl(nr(null, null))), "l2",
nol(nke("foo", "m", "g"), nrl(nr(null, null)), nke("foo", null, "m"), nrl(nr(null, null)))
);
runTest(ranges, metaCache, expected);
// test with three tablets where one range falls within the first tablet and last two ranges
// fall within the last tablet
ranges = nrl(nr(null, "c"), nr("s", "y"), nr("z", null));
expected = createExpectedBinnings("l1", nol(nke("foo", "g", null), nrl(nr(null, "c"))), "l2",
nol(nke("foo", null, "m"), nrl(nr("s", "y"), nr("z", null)))
);
runTest(ranges, metaCache, expected);
// test is same as above, but has an additional range that spans the first two tablets
ranges = nrl(nr(null, "c"), nr("f", "i"), nr("s", "y"), nr("z", null));
expected =
createExpectedBinnings("l1", nol(nke("foo", "g", null), nrl(nr(null, "c"), nr("f", "i"))),
"l2", nol(nke("foo", "m", "g"), nrl(nr("f", "i")), nke("foo", null, "m"),
nrl(nr("s", "y"), nr("z", null)))
);
runTest(ranges, metaCache, expected);
// test where start of range is not inclusive and same as tablet endRow
ranges = nrl(nr("g", false, "m", true));
expected =
createExpectedBinnings("l2", nol(nke("foo", "m", "g"), nrl(nr("g", false, "m", true)))
);
runTest(ranges, metaCache, expected);
// test where start of range is inclusive and same as tablet endRow
ranges = nrl(nr("g", true, "m", true));
expected =
createExpectedBinnings("l1", nol(nke("foo", "g", null), nrl(nr("g", true, "m", true))),
"l2", nol(nke("foo", "m", "g"), nrl(nr("g", true, "m", true)))
);
runTest(ranges, metaCache, expected);
ranges = nrl(nr("g", true, "m", false));
expected =
createExpectedBinnings("l1", nol(nke("foo", "g", null), nrl(nr("g", true, "m", false))),
"l2", nol(nke("foo", "m", "g"), nrl(nr("g", true, "m", false)))
);
runTest(ranges, metaCache, expected);
ranges = nrl(nr("g", false, "m", false));
expected =
createExpectedBinnings("l2", nol(nke("foo", "m", "g"), nrl(nr("g", false, "m", false)))
);
runTest(ranges, metaCache, expected);
}
// binRanges with five tablets and key-level (not just row-level) range endpoints,
// checking which tablets a range touches under each inclusivity combination.
@Test
public void testBinRanges4() throws Exception {
List<Range> ranges = nrl(new Range(new Text("1")));
TabletLocatorImpl metaCache =
createLocators("foo", nke("foo", "0", null), "l1", nke("foo", "1", "0"), "l2",
nke("foo", "2", "1"), "l3", nke("foo", "3", "2"), "l4", nke("foo", null, "3"), "l5");
Map<String,Map<KeyExtent,List<Range>>> expected =
createExpectedBinnings("l2", nol(nke("foo", "1", "0"), nrl(new Range(new Text("1"))))
);
runTest(ranges, metaCache, expected);
// A range ending exclusively at followingKey(ROW) of "3" stays within tablet "3".
Key rowColKey = new Key(new Text("3"), new Text("cf1"), new Text("cq1"));
Range range =
new Range(rowColKey, true, new Key(new Text("3")).followingKey(PartialKey.ROW), false);
ranges = nrl(range);
Map<String,Map<KeyExtent,List<Range>>> expected4 =
createExpectedBinnings("l4", nol(nke("foo", "3", "2"), nrl(range))
);
runTest(ranges, metaCache, expected4, nrl());
// Making that end key inclusive pulls in the following tablet as well.
range = new Range(rowColKey, true, new Key(new Text("3")).followingKey(PartialKey.ROW), true);
ranges = nrl(range);
Map<String,Map<KeyExtent,List<Range>>> expected5 = createExpectedBinnings("l4",
nol(nke("foo", "3", "2"), nrl(range)), "l5", nol(nke("foo", null, "3"), nrl(range))
);
runTest(ranges, metaCache, expected5, nrl());
// All four inclusivity combinations for the row range ("2", "3").
range = new Range(new Text("2"), false, new Text("3"), false);
ranges = nrl(range);
Map<String,Map<KeyExtent,List<Range>>> expected6 =
createExpectedBinnings("l4", nol(nke("foo", "3", "2"), nrl(range))
);
runTest(ranges, metaCache, expected6, nrl());
range = new Range(new Text("2"), true, new Text("3"), false);
ranges = nrl(range);
Map<String,Map<KeyExtent,List<Range>>> expected7 = createExpectedBinnings("l3",
nol(nke("foo", "2", "1"), nrl(range)), "l4", nol(nke("foo", "3", "2"), nrl(range))
);
runTest(ranges, metaCache, expected7, nrl());
range = new Range(new Text("2"), false, new Text("3"), true);
ranges = nrl(range);
Map<String,Map<KeyExtent,List<Range>>> expected8 =
createExpectedBinnings("l4", nol(nke("foo", "3", "2"), nrl(range))
);
runTest(ranges, metaCache, expected8, nrl());
range = new Range(new Text("2"), true, new Text("3"), true);
ranges = nrl(range);
Map<String,Map<KeyExtent,List<Range>>> expected9 = createExpectedBinnings("l3",
nol(nke("foo", "2", "1"), nrl(range)), "l4", nol(nke("foo", "3", "2"), nrl(range))
);
runTest(ranges, metaCache, expected9, nrl());
}
// binRanges when the metadata has a hole (tablet ("1","2"] is missing): ranges
// touching the hole must come back as failures, others bin normally.
@Test
public void testBinRanges5() throws Exception {
// Test binning when there is a hole in the metadata
List<Range> ranges = nrl(new Range(new Text("1")));
TabletLocatorImpl metaCache = createLocators("foo", nke("foo", "0", null), "l1",
nke("foo", "1", "0"), "l2", nke("foo", "3", "2"), "l4", nke("foo", null, "3"), "l5");
Map<String,Map<KeyExtent,List<Range>>> expected1 =
createExpectedBinnings("l2", nol(nke("foo", "1", "0"), nrl(new Range(new Text("1"))))
);
runTest(ranges, metaCache, expected1);
// Both ranges fall into the hole: nothing bins, both fail.
ranges = nrl(new Range(new Text("2")), new Range(new Text("11")));
Map<String,Map<KeyExtent,List<Range>>> expected2 = createExpectedBinnings();
runTest(ranges, metaCache, expected2, ranges);
ranges = nrl(new Range(new Text("1")), new Range(new Text("2")));
runTest(ranges, metaCache, expected1, nrl(new Range(new Text("2"))));
ranges = nrl(nr("0", "2"), nr("3", "4"));
Map<String,Map<KeyExtent,List<Range>>> expected3 =
createExpectedBinnings("l4", nol(nke("foo", "3", "2"), nrl(nr("3", "4"))), "l5",
nol(nke("foo", null, "3"), nrl(nr("3", "4")))
);
runTest(ranges, metaCache, expected3, nrl(nr("0", "2")));
// Mixed batch: ranges that overlap the hole fail, the rest bin.
ranges =
nrl(nr("0", "1"), nr("0", "11"), nr("1", "2"), nr("0", "4"), nr("2", "4"), nr("21", "4"));
Map<String,Map<KeyExtent,List<Range>>> expected4 =
createExpectedBinnings("l1", nol(nke("foo", "0", null), nrl(nr("0", "1"))), "l2",
nol(nke("foo", "1", "0"), nrl(nr("0", "1"))), "l4",
nol(nke("foo", "3", "2"), nrl(nr("21", "4"))), "l5",
nol(nke("foo", null, "3"), nrl(nr("21", "4")))
);
runTest(ranges, metaCache, expected4,
nrl(nr("0", "11"), nr("1", "2"), nr("0", "4"), nr("2", "4")));
}
@Test
public void testBinMutations1() throws Exception {
    // Table with a single tablet spanning the entire row space.
    KeyExtent wholeTableExtent = nke("foo", null, null);
    TabletLocatorImpl locator = createLocators("foo", wholeTableExtent, "l1");

    // Mutations on two distinct rows both land on the lone tablet.
    List<Mutation> mutations =
        nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("c", "cf1:cq1=v3", "cf1:cq2=v4"));
    Map<String,Map<KeyExtent,List<String>>> expectedBinning =
        cemb(nol("a", "l1", wholeTableExtent), nol("c", "l1", wholeTableExtent));
    runTest(locator, mutations, expectedBinning);

    // A single mutation bins the same way.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"));
    expectedBinning = cemb(nol("a", "l1", wholeTableExtent));
    runTest(locator, mutations, expectedBinning);

    // Two mutations against the same row are both binned to the tablet.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("a", "cf1:cq3=v3"));
    expectedBinning =
        cemb(nol("a", "l1", wholeTableExtent), nol("a", "l1", wholeTableExtent));
    runTest(locator, mutations, expectedBinning);
}
@Test
public void testBinMutations2() throws Exception {
    // Table exists but has no tablets at all, so no mutation can be binned.
    TabletLocatorImpl locator = createLocators("foo");
    List<Mutation> mutations =
        nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("c", "cf1:cq1=v3", "cf1:cq2=v4"));
    Map<String,Map<KeyExtent,List<String>>> expectedBinning = cemb();
    // Both rows must come back as failures.
    runTest(locator, mutations, expectedBinning, "a", "c");
}
@Test
public void testBinMutations3() throws Exception {
    // Table split into three tablets: (-inf,"h"], ("h","t"], ("t",+inf).
    KeyExtent firstExtent = nke("foo", "h", null);
    KeyExtent middleExtent = nke("foo", "t", "h");
    KeyExtent lastExtent = nke("foo", null, "t");
    TabletLocatorImpl locator =
        createLocators("foo", firstExtent, "l1", middleExtent, "l2", lastExtent, "l3");

    // Rows in different tablets bin to their respective servers.
    List<Mutation> mutations =
        nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("i", "cf1:cq1=v3", "cf1:cq2=v4"));
    Map<String,Map<KeyExtent,List<String>>> expectedBinning =
        cemb(nol("a", "l1", firstExtent), nol("i", "l2", middleExtent));
    runTest(locator, mutations, expectedBinning);

    // Single mutation.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"));
    expectedBinning = cemb(nol("a", "l1", firstExtent));
    runTest(locator, mutations, expectedBinning);

    // Two mutations against the same row.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("a", "cf1:cq3=v3"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("a", "l1", firstExtent));
    runTest(locator, mutations, expectedBinning);

    // Rows in the first and last tablets.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("w", "cf1:cq3=v3"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("w", "l3", lastExtent));
    runTest(locator, mutations, expectedBinning);

    // Two rows in the last tablet.
    mutations =
        nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("w", "cf1:cq3=v3"), nm("z", "cf1:cq4=v4"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("w", "l3", lastExtent),
        nol("z", "l3", lastExtent));
    runTest(locator, mutations, expectedBinning);

    // Rows equal to the split points belong to the tablet they end.
    mutations = nml(nm("h", "cf1:cq1=v1", "cf1:cq2=v2"), nm("t", "cf1:cq1=v1", "cf1:cq2=v2"));
    expectedBinning = cemb(nol("h", "l1", firstExtent), nol("t", "l2", middleExtent));
    runTest(locator, mutations, expectedBinning);
}
@Test
public void testBinMutations4() throws Exception {
    // Three-tablet table with the middle tablet missing: rows in ("h","t"]
    // fall into a metadata hole and must be reported as failures.
    KeyExtent firstExtent = nke("foo", "h", null);
    KeyExtent lastExtent = nke("foo", null, "t");
    TabletLocatorImpl locator = createLocators("foo", firstExtent, "l1", lastExtent, "l3");

    // "a" bins normally; "i" sits in the hole and fails.
    List<Mutation> mutations =
        nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("i", "cf1:cq1=v3", "cf1:cq2=v4"));
    Map<String,Map<KeyExtent,List<String>>> expectedBinning = cemb(nol("a", "l1", firstExtent));
    runTest(locator, mutations, expectedBinning, "i");

    // Mutations that avoid the hole bin exactly as in the three-tablet case.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"));
    expectedBinning = cemb(nol("a", "l1", firstExtent));
    runTest(locator, mutations, expectedBinning);

    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("a", "cf1:cq3=v3"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("a", "l1", firstExtent));
    runTest(locator, mutations, expectedBinning);

    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("w", "cf1:cq3=v3"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("w", "l3", lastExtent));
    runTest(locator, mutations, expectedBinning);

    mutations =
        nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("w", "cf1:cq3=v3"), nm("z", "cf1:cq4=v4"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("w", "l3", lastExtent),
        nol("z", "l3", lastExtent));
    runTest(locator, mutations, expectedBinning);

    // Row "t" is the exclusive low bound of the last tablet, so it is in the hole.
    mutations = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("w", "cf1:cq3=v3"),
        nm("z", "cf1:cq4=v4"), nm("t", "cf1:cq5=v5"));
    expectedBinning = cemb(nol("a", "l1", firstExtent), nol("w", "l3", lastExtent),
        nol("z", "l3", lastExtent));
    runTest(locator, mutations, expectedBinning, "t");
}
@Test
public void testBinSplit() throws Exception {
    // try binning mutations and ranges when a tablet splits
    for (int i = 0; i < 3; i++) {
        // when i == 0 only test binning mutations
        // when i == 1 only test binning ranges
        // when i == 2 test both
        KeyExtent ke1 = nke("foo", null, null);
        TServers tservers = new TServers();
        TabletLocatorImpl metaCache =
            createLocators(tservers, "tserver1", "tserver2", "foo", ke1, "l1");
        // Before the split: everything bins to the single whole-table tablet on l1.
        List<Mutation> ml = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"),
            nm("m", "cf1:cq1=v3", "cf1:cq2=v4"), nm("z", "cf1:cq1=v5"));
        Map<String,Map<KeyExtent,List<String>>> emb =
            cemb(nol("a", "l1", ke1), nol("m", "l1", ke1), nol("z", "l1", ke1));
        if (i == 0 || i == 2)
            runTest(metaCache, ml, emb);
        List<Range> ranges =
            nrl(new Range(new Text("a")), new Range(new Text("m")), new Range(new Text("z")));
        Map<String,Map<KeyExtent,List<Range>>> expected1 =
            createExpectedBinnings("l1", nol(nke("foo", null, null), ranges)
            );
        if (i == 1 || i == 2)
            runTest(ranges, metaCache, expected1);
        // Split the tablet at "n"; only the high half (ke12) has a location so far,
        // so after invalidating the cache only "z" can be binned.
        KeyExtent ke11 = nke("foo", "n", null);
        KeyExtent ke12 = nke("foo", null, "n");
        setLocation(tservers, "tserver2", MTE, ke12, "l2");
        metaCache.invalidateCache(ke1);
        emb = cemb(nol("z", "l2", ke12));
        if (i == 0 || i == 2)
            runTest(metaCache, ml, emb, "a", "m");
        Map<String,Map<KeyExtent,List<Range>>> expected2 =
            createExpectedBinnings("l2", nol(nke("foo", null, "n"), nrl(new Range(new Text("z"))))
            );
        if (i == 1 || i == 2)
            runTest(ranges, metaCache, expected2,
                nrl(new Range(new Text("a")), new Range(new Text("m"))));
        // Once the low half (ke11) gets a location too, everything bins again.
        setLocation(tservers, "tserver2", MTE, ke11, "l3");
        emb = cemb(nol("a", "l3", ke11), nol("m", "l3", ke11), nol("z", "l2", ke12));
        if (i == 0 || i == 2)
            runTest(metaCache, ml, emb);
        Map<String,Map<KeyExtent,List<Range>>> expected3 = createExpectedBinnings("l2",
            nol(nke("foo", null, "n"), nrl(new Range(new Text("z")))), "l3",
            nol(nke("foo", "n", null), nrl(new Range(new Text("a")), new Range(new Text("m"))))
        );
        if (i == 1 || i == 2)
            runTest(ranges, metaCache, expected3);
    }
}
@Test
public void testBug1() throws Exception {
    // a bug that occurred while running continuous ingest
    // Metadata table is split at "0;0bc" into two metadata tablets.
    KeyExtent mte1 = new KeyExtent(MetadataTable.ID, new Text("0;0bc"), RTE.endRow());
    KeyExtent mte2 = new KeyExtent(MetadataTable.ID, null, new Text("0;0bc"));
    TServers tservers = new TServers();
    TestTabletLocationObtainer ttlo = new TestTabletLocationObtainer(tservers);
    RootTabletLocator rtl = new TestRootTabletLocator();
    // Two-level locator chain: table "0" locates via the metadata table,
    // which in turn locates via the root tablet.
    TabletLocatorImpl rootTabletCache =
        new TabletLocatorImpl(MetadataTable.ID, rtl, ttlo, new YesLockChecker());
    TabletLocatorImpl tab0TabletCache =
        new TabletLocatorImpl(TableId.of("0"), rootTabletCache, ttlo, new YesLockChecker());
    setLocation(tservers, "tserver1", RTE, mte1, "tserver2");
    setLocation(tservers, "tserver1", RTE, mte2, "tserver3");
    // create two tablets that straddle a metadata split point
    KeyExtent ke1 = new KeyExtent(TableId.of("0"), new Text("0bbf20e"), null);
    KeyExtent ke2 = new KeyExtent(TableId.of("0"), new Text("0bc0756"), new Text("0bbf20e"));
    setLocation(tservers, "tserver2", mte1, ke1, "tserver4");
    setLocation(tservers, "tserver3", mte2, ke2, "tserver5");
    // look up something that comes after the last entry in mte1
    locateTabletTest(tab0TabletCache, "0bbff", ke2, "tserver5");
}
@Test
public void testBug2() throws Exception {
    // a bug that occurred while running a functional test
    // Metadata table split at "~"; the second metadata tablet exists but is empty.
    KeyExtent mte1 = new KeyExtent(MetadataTable.ID, new Text("~"), RTE.endRow());
    KeyExtent mte2 = new KeyExtent(MetadataTable.ID, null, new Text("~"));
    TServers tservers = new TServers();
    TestTabletLocationObtainer ttlo = new TestTabletLocationObtainer(tservers);
    RootTabletLocator rtl = new TestRootTabletLocator();
    TabletLocatorImpl rootTabletCache =
        new TabletLocatorImpl(MetadataTable.ID, rtl, ttlo, new YesLockChecker());
    TabletLocatorImpl tab0TabletCache =
        new TabletLocatorImpl(TableId.of("0"), rootTabletCache, ttlo, new YesLockChecker());
    setLocation(tservers, "tserver1", RTE, mte1, "tserver2");
    setLocation(tservers, "tserver1", RTE, mte2, "tserver3");
    // create the ~ tablet so it exists
    Map<KeyExtent,SortedMap<Key,Value>> ts3 = new HashMap<>();
    ts3.put(mte2, new TreeMap<>());
    tservers.tservers.put("tserver3", ts3);
    // No user tablet entries exist anywhere, so the lookup must return null
    // rather than throw or loop.
    assertNull(tab0TabletCache.locateTablet(context, new Text("row_0000000000"), false, false));
}
// this test reproduces a problem where empty metadata tablets, that were created by user tablets
// being merged away, caused locating tablets to fail
@Test
public void testBug3() throws Exception {
    // Five metadata tablets; the first four are empty (their user tablets were
    // merged away) and only the last one holds the entry for table "1".
    KeyExtent mte1 = new KeyExtent(MetadataTable.ID, new Text("1;c"), RTE.endRow());
    KeyExtent mte2 = new KeyExtent(MetadataTable.ID, new Text("1;f"), new Text("1;c"));
    KeyExtent mte3 = new KeyExtent(MetadataTable.ID, new Text("1;j"), new Text("1;f"));
    KeyExtent mte4 = new KeyExtent(MetadataTable.ID, new Text("1;r"), new Text("1;j"));
    KeyExtent mte5 = new KeyExtent(MetadataTable.ID, null, new Text("1;r"));
    KeyExtent ke1 = new KeyExtent(TableId.of("1"), null, null);
    TServers tservers = new TServers();
    TestTabletLocationObtainer ttlo = new TestTabletLocationObtainer(tservers);
    RootTabletLocator rtl = new TestRootTabletLocator();
    TabletLocatorImpl rootTabletCache =
        new TabletLocatorImpl(MetadataTable.ID, rtl, ttlo, new YesLockChecker());
    TabletLocatorImpl tab0TabletCache =
        new TabletLocatorImpl(TableId.of("1"), rootTabletCache, ttlo, new YesLockChecker());
    setLocation(tservers, "tserver1", RTE, mte1, "tserver2");
    setLocation(tservers, "tserver1", RTE, mte2, "tserver3");
    setLocation(tservers, "tserver1", RTE, mte3, "tserver4");
    setLocation(tservers, "tserver1", RTE, mte4, "tserver5");
    setLocation(tservers, "tserver1", RTE, mte5, "tserver6");
    // The locator must skip over all four empty metadata tablets to find ke1.
    createEmptyTablet(tservers, "tserver2", mte1);
    createEmptyTablet(tservers, "tserver3", mte2);
    createEmptyTablet(tservers, "tserver4", mte3);
    createEmptyTablet(tservers, "tserver5", mte4);
    setLocation(tservers, "tserver6", mte5, ke1, "tserver7");
    locateTabletTest(tab0TabletCache, "a", ke1, "tserver7");
}
@Test
public void testAccumulo1248() {
    TServers tservers = new TServers();
    TabletLocatorImpl metaCache = createLocators(tservers, "tserver1", "tserver2", "foo");
    KeyExtent ke1 = nke("foo", null, null);
    // set two locations for a tablet, this is not supposed to happen. The metadata cache should
    // throw an exception if it sees this rather than caching one of
    // the locations.
    setLocation(tservers, "tserver2", MTE, ke1, "L1", "I1");
    setLocation(tservers, "tserver2", MTE, ke1, "L2", "I2");
    try {
        metaCache.locateTablet(context, new Text("a"), false, false);
        fail();
    } catch (Exception e) {
        // expected: conflicting locations for the same extent must not be cached
    }
}
@Test
public void testLostLock() throws Exception {
    // Simulates tablet servers losing their ZooKeeper locks: a location is only
    // usable while its "<server>:<session>" pair is in activeLocks.
    final HashSet<String> activeLocks = new HashSet<>();
    TServers tservers = new TServers();
    TabletLocatorImpl metaCache =
        createLocators(tservers, "tserver1", "tserver2", "foo", new TabletServerLockChecker() {
            @Override
            public boolean isLockHeld(String tserver, String session) {
                return activeLocks.contains(tserver + ":" + session);
            }

            @Override
            public void invalidateCache(String server) {}
        });
    KeyExtent ke1 = nke("foo", null, null);
    // Location L1 with session 5 is usable only while its lock is held.
    setLocation(tservers, "tserver2", MTE, ke1, "L1", "5");
    activeLocks.add("L1:5");
    locateTabletTest(metaCache, "a", ke1, "L1");
    locateTabletTest(metaCache, "a", ke1, "L1");
    // Lock lost: lookups return no location even though metadata still has L1.
    activeLocks.clear();
    locateTabletTest(metaCache, "a", null, null);
    locateTabletTest(metaCache, "a", null, null);
    locateTabletTest(metaCache, "a", null, null);
    // Replace the stale entry with L2/session 6 and hold its lock.
    clearLocation(tservers, "tserver2", MTE, ke1, "5");
    setLocation(tservers, "tserver2", MTE, ke1, "L2", "6");
    activeLocks.add("L2:6");
    locateTabletTest(metaCache, "a", ke1, "L2");
    locateTabletTest(metaCache, "a", ke1, "L2");
    // The cached L2 entry keeps serving even after the metadata entry changes,
    // because the lock for L2 is still held.
    clearLocation(tservers, "tserver2", MTE, ke1, "6");
    locateTabletTest(metaCache, "a", ke1, "L2");
    setLocation(tservers, "tserver2", MTE, ke1, "L3", "7");
    locateTabletTest(metaCache, "a", ke1, "L2");
    // Dropping the L2 lock flushes the stale cache entry; L3 serves once its
    // lock is held.
    activeLocks.clear();
    locateTabletTest(metaCache, "a", null, null);
    locateTabletTest(metaCache, "a", null, null);
    activeLocks.add("L3:7");
    locateTabletTest(metaCache, "a", ke1, "L3");
    locateTabletTest(metaCache, "a", ke1, "L3");
    // Mutation binning obeys the same lock rules.
    List<Mutation> ml = nml(nm("a", "cf1:cq1=v1", "cf1:cq2=v2"), nm("w", "cf1:cq3=v3"));
    Map<String,Map<KeyExtent,List<String>>> emb = cemb(nol("a", "L3", ke1), nol("w", "L3", ke1));
    runTest(metaCache, ml, emb);
    clearLocation(tservers, "tserver2", MTE, ke1, "7");
    runTest(metaCache, ml, emb);
    activeLocks.clear();
    emb.clear();
    runTest(metaCache, ml, emb, "a", "w");
    runTest(metaCache, ml, emb, "a", "w");
    // Split the table at "m"; each half only becomes usable when the lock for
    // its location is held.
    KeyExtent ke11 = nke("foo", "m", null);
    KeyExtent ke12 = nke("foo", null, "m");
    setLocation(tservers, "tserver2", MTE, ke11, "L1", "8");
    setLocation(tservers, "tserver2", MTE, ke12, "L2", "9");
    runTest(metaCache, ml, emb, "a", "w");
    activeLocks.add("L1:8");
    emb = cemb(nol("a", "L1", ke11));
    runTest(metaCache, ml, emb, "w");
    activeLocks.add("L2:9");
    emb = cemb(nol("a", "L1", ke11), nol("w", "L2", ke12));
    runTest(metaCache, ml, emb);
    // Range binning: ranges only bin against tablets whose location lock is held.
    List<Range> ranges = nrl(new Range("a"), nr("b", "o"), nr("r", "z"));
    Map<String,Map<KeyExtent,List<Range>>> expected =
        createExpectedBinnings("L1", nol(ke11, nrl(new Range("a"), nr("b", "o"))), "L2",
            nol(ke12, nrl(nr("b", "o"), nr("r", "z"))));
    runTest(ranges, metaCache, expected);
    activeLocks.remove("L2:9");
    expected = createExpectedBinnings("L1", nol(ke11, nrl(new Range("a"))));
    runTest(ranges, metaCache, expected, nrl(nr("b", "o"), nr("r", "z")));
    activeLocks.clear();
    expected = createExpectedBinnings();
    runTest(ranges, metaCache, expected, nrl(new Range("a"), nr("b", "o"), nr("r", "z")));
    // Move both halves to new locations; binning resumes per-half as each new
    // lock becomes active.
    clearLocation(tservers, "tserver2", MTE, ke11, "8");
    clearLocation(tservers, "tserver2", MTE, ke12, "9");
    setLocation(tservers, "tserver2", MTE, ke11, "L3", "10");
    setLocation(tservers, "tserver2", MTE, ke12, "L4", "11");
    runTest(ranges, metaCache, expected, nrl(new Range("a"), nr("b", "o"), nr("r", "z")));
    activeLocks.add("L3:10");
    expected = createExpectedBinnings("L3", nol(ke11, nrl(new Range("a"))));
    runTest(ranges, metaCache, expected, nrl(nr("b", "o"), nr("r", "z")));
    activeLocks.add("L4:11");
    expected = createExpectedBinnings("L3", nol(ke11, nrl(new Range("a"), nr("b", "o"))), "L4",
        nol(ke12, nrl(nr("b", "o"), nr("r", "z"))));
    runTest(ranges, metaCache, expected);
}
}
| apache-2.0 |
zeedeveloper/zee-jenkins | src/main/java/com/thed/service/soap/GetProjectsByCriteriaResponse.java | 1925 |
package com.thed.service.soap;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for getProjectsByCriteriaResponse complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="getProjectsByCriteriaResponse">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="return" type="{http://soap.service.thed.com/}remoteProject" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "getProjectsByCriteriaResponse", propOrder = {
    "_return"
})
public class GetProjectsByCriteriaResponse {

    /** Projects matched by the criteria; lazily instantiated on first access. */
    @XmlElement(name = "return")
    protected List<RemoteProject> _return;

    /**
     * Returns the live, mutable list that backs the {@code return} element.
     *
     * <p>
     * The returned list is not a snapshot: modifications made to it are
     * reflected directly in this JAXB object, which is why no corresponding
     * {@code set} method exists. To add an item, call
     * {@code getReturn().add(newItem)}.
     *
     * <p>
     * Elements are of type {@link RemoteProject}.
     *
     * @return the backing list, never {@code null}
     */
    public List<RemoteProject> getReturn() {
        if (this._return == null) {
            this._return = new ArrayList<RemoteProject>();
        }
        return this._return;
    }

}
| apache-2.0 |
vmatha002c/dawg | libraries/dawg-show/src/main/java/com/comcast/video/dawg/show/ViewConstants.java | 3185 | /**
* Copyright 2010 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.video.dawg.show;
/**
* Holds constants for UI views
*
* @author Kevin Pearson
*
*/
public interface ViewConstants {
    // NOTE(review): this is the constant-interface pattern; a final class with a
    // private constructor would be preferred, but converting it would break every
    // type that implements this interface, so it is left as-is.

    /** View Names */
    public static final String STB = "stb";
    public static final String SIMPLIFIED = "simplified";
    public static final String MULTI = "multi";
    public static final String NOSTB = "nostb";
    public static final String COMPARE_NOT_FOUND = "compareNotFound";
    public static final String META = "meta";
    public static final String BROWSERNOTSUPPORTED = "bns";
    public static final String SNAPPEDIMAGE = "snappedImage";
    public static final String SNAP_DOWNLOAD = "multiSnapDownloadManager";
    public static final String REMOTE_PLUGIN = "remote-plugin";
    public static final String PLUGIN_CONFIG = "plugin-config";

    /** View parameters (model attribute / request parameter names) */
    public static final String DEVICE_ID = "deviceId";
    public static final String STBS = "stbs";
    public static final String MODEL = "model";
    public static final String IPADDRESS = "ipAddress";
    public static final String STB_PARAM = "stb";
    public static final String REMOTE_MANAGER = "remoteManager";
    public static final String REMOTE = "remote";
    public static final String REMOTE_TYPES = "remoteTypes";
    public static final String SELECTED_REMOTE_TYPE = "selectedRemoteType";
    public static final String MOBILE = "mobile";
    public static final String VIDEO_URL = "videoUrl";
    public static final String VIDEO_CAMERA = "videoCamera";
    public static final String VIDEO_AVAILABLE = "videoAvailable";
    public static final String HD_VIDEO_URL = "hdVideoUrl";
    public static final String IR_AVAILABLE = "irAvailable";
    public static final String TRACE_AVAILABLE = "traceAvailable";
    public static final String TRACE_HOST = "traceHost";
    public static final String SUPPORTED = "supported";
    public static final String AUDIO_URL_OGG = "audioUrl_ogg";
    public static final String AUDIO_URL_MPEG = "audioUrl_mpeg";

    /** Represent generic remote keys */
    public static final String GENERIC_REMOTE_KEYS = "genericRemoteKeys";

    /** Represent map of image id with corresponding snapshot id */
    public static final String DEVICEID_IMAGEID_MAP = "deviceIdImageIdMap";

    /** Keys used inside the per-STB URL map exposed to views. */
    public static final String STB_URLS = "urls";
    public static final String STB_URLS_AUDIO_OGG = "audio_ogg";
    public static final String STB_URLS_AUDIO_MPEG = "audio_mpeg";
    public static final String STB_URLS_VIDEO = "video";
    public static final String IMAGE_CACHE = "imageCache";
}
| apache-2.0 |
incodehq/isis | core/applib/src/main/java/org/apache/isis/schema/utils/MemberExecutionDtoUtils.java | 4369 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.schema.utils;
import java.io.StringReader;
import java.io.StringWriter;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.namespace.QName;
import javax.xml.transform.stream.StreamSource;
import org.apache.isis.applib.util.JaxbUtil;
import org.apache.isis.schema.common.v1.DifferenceDto;
import org.apache.isis.schema.common.v1.PeriodDto;
import org.apache.isis.schema.ixn.v1.MemberExecutionDto;
import org.apache.isis.schema.ixn.v1.MetricsDto;
import org.apache.isis.schema.ixn.v1.ObjectCountsDto;
public final class MemberExecutionDtoUtils {

    /**
     * Returns a deep copy of the given {@link MemberExecutionDto}, produced by
     * marshalling it to XML and unmarshalling the result.
     *
     * @param dto the DTO to copy; must be marshallable by JAXB
     * @return a new instance with the same content as {@code dto}
     * @throws RuntimeException wrapping any {@link JAXBException} raised during the round-trip
     */
    public static <T extends MemberExecutionDto> T clone(final T dto) {
        // dto is a T, so its runtime class is Class<? extends T>; the cast is safe.
        // (Previously this was a raw (Class) cast, which silently disabled the type check.)
        @SuppressWarnings("unchecked")
        final Class<T> aClass = (Class<T>) dto.getClass();
        return clone(dto, aClass);
    }

    /** XML round-trip implementation shared by the public clone method. */
    private static <T> T clone(final T dto, final Class<T> dtoClass) {
        try {
            JAXBContext jaxbContext = jaxbContextFor(dtoClass);
            final Marshaller marshaller = jaxbContext.createMarshaller();

            // Wrap in a JAXBElement so types without @XmlRootElement can be marshalled.
            final QName name = new QName("", dtoClass.getSimpleName());
            final JAXBElement<T> jaxbElement = new JAXBElement<>(name, dtoClass, null, dto);

            final StringWriter stringWriter = new StringWriter();
            marshaller.marshal(jaxbElement, stringWriter);

            final StringReader reader = new StringReader(stringWriter.toString());
            final Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
            final JAXBElement<T> root = unmarshaller.unmarshal(new StreamSource(reader), dtoClass);
            return root.getValue();
        } catch (JAXBException e) {
            throw new RuntimeException(e);
        }
    }

    /** Delegates to JaxbUtil, which caches contexts per class. */
    private static <T> JAXBContext jaxbContextFor(final Class<T> dtoClass) {
        return JaxbUtil.jaxbContextFor(dtoClass);
    }

    /** Returns the execution's metrics, creating and attaching an empty one if absent. */
    public static MetricsDto metricsFor(final MemberExecutionDto executionDto) {
        MetricsDto metrics = executionDto.getMetrics();
        if (metrics == null) {
            metrics = new MetricsDto();
            executionDto.setMetrics(metrics);
        }
        return metrics;
    }

    /** Returns the metrics' timings, creating and attaching an empty one if absent. */
    public static PeriodDto timingsFor(final MetricsDto metricsDto) {
        PeriodDto timings = metricsDto.getTimings();
        if (timings == null) {
            timings = new PeriodDto();
            metricsDto.setTimings(timings);
        }
        return timings;
    }

    /** Returns the metrics' object counts, creating and attaching an empty one if absent. */
    public static ObjectCountsDto objectCountsFor(final MetricsDto metricsDto) {
        ObjectCountsDto objectCounts = metricsDto.getObjectCounts();
        if (objectCounts == null) {
            objectCounts = new ObjectCountsDto();
            metricsDto.setObjectCounts(objectCounts);
        }
        return objectCounts;
    }

    /** Returns the loaded-objects difference, creating and attaching an empty one if absent. */
    public static DifferenceDto numberObjectsLoadedFor(final ObjectCountsDto objectCountsDto) {
        DifferenceDto differenceDto = objectCountsDto.getLoaded();
        if (differenceDto == null) {
            differenceDto = new DifferenceDto();
            objectCountsDto.setLoaded(differenceDto);
        }
        return differenceDto;
    }

    /** Returns the dirtied-objects difference, creating and attaching an empty one if absent. */
    public static DifferenceDto numberObjectsDirtiedFor(final ObjectCountsDto objectCountsDto) {
        DifferenceDto differenceDto = objectCountsDto.getDirtied();
        if (differenceDto == null) {
            differenceDto = new DifferenceDto();
            objectCountsDto.setDirtied(differenceDto);
        }
        return differenceDto;
    }
}
| apache-2.0 |
bigal91/CSAR_Repository | src/main/java/org/opentosca/csarrepo/service/CreateUserService.java | 1196 | package org.opentosca.csarrepo.service;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opentosca.csarrepo.exception.PersistenceException;
import org.opentosca.csarrepo.model.User;
import org.opentosca.csarrepo.model.repository.UserRepository;
import org.opentosca.csarrepo.util.Hash;
/**
*
* @author Dennis Przytarski
*/
public class CreateUserService extends AbstractService {

    private static final Logger LOGGER = LogManager.getLogger(CreateUserService.class);

    // Id assigned by the repository to the newly created user; 0 if creation failed.
    long userId;

    /**
     * Creates a new user and persists it.
     *
     * @param userId ignored by this constructor (it is always overwritten by the
     *            repository-assigned id) — retained for interface compatibility;
     *            TODO confirm with callers whether it can be dropped
     * @param name the user's name
     * @param mail the user's mail address
     * @param password the user's clear-text password; stored as its SHA-256 hash
     */
    public CreateUserService(long userId, String name, String mail, String password) {
        super(0);
        UserRepository userRepository = new UserRepository();
        try {
            User user = new User();
            user.setName(name);
            user.setMail(mail);
            user.setPassword(Hash.sha256(password));
            // BUG FIX: assign to the field, not the shadowing constructor parameter.
            // Previously "userId = userRepository.save(user)" wrote to the parameter,
            // so getResult() always returned 0.
            this.userId = userRepository.save(user);
        } catch (PersistenceException e) {
            this.addError(e.getMessage());
            LOGGER.error(e);
        }
    }

    /**
     * @return user id of the created user
     */
    public long getResult() {
        super.logInvalidResultAccess("getResult");
        return this.userId;
    }
}
| apache-2.0 |
user234/setyon-guava-libraries-clone | guava/src/com/google/common/collect/ForwardingNavigableSet.java | 8152 | /*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.Beta;
import java.util.Iterator;
import java.util.NavigableSet;
import java.util.SortedSet;
/**
* A navigable set which forwards all its method calls to another navigable set. Subclasses should
* override one or more methods to modify the behavior of the backing set as desired per the <a
* href="http://en.wikipedia.org/wiki/Decorator_pattern">decorator pattern</a>.
*
* <p><i>Warning:</i> The methods of {@code ForwardingNavigableSet} forward <i>indiscriminately</i>
* to the methods of the delegate. For example, overriding {@link #add} alone <i>will not</i>
* change the behavior of {@link #addAll}, which can lead to unexpected behavior. In this case, you
* should override {@code addAll} as well, either providing your own implementation, or delegating
* to the provided {@code standardAddAll} method.
*
* <p>Each of the {@code standard} methods uses the set's comparator (or the natural ordering of
* the elements, if there is no comparator) to test element equality. As a result, if the
* comparator is not consistent with equals, some of the standard implementations may violate the
* {@code Set} contract.
*
* <p>The {@code standard} methods and the collection views they return are not guaranteed to be
* thread-safe, even when all of the methods that they depend on are thread-safe.
*
* @author Louis Wasserman
* @since 12.0
*/
@Beta
public abstract class ForwardingNavigableSet<E>
    extends ForwardingSortedSet<E> implements NavigableSet<E> {

  /** Constructor for use by subclasses. */
  protected ForwardingNavigableSet() {}

  @Override
  protected abstract NavigableSet<E> delegate();

  @Override
  public E lower(E e) {
    return delegate().lower(e);
  }

  /**
   * A sensible definition of {@link #lower} in terms of the {@code descendingIterator} method of
   * {@link #headSet(Object, boolean)}. If you override {@link #headSet(Object, boolean)}, you may
   * wish to override {@link #lower} to forward to this implementation.
   */
  protected E standardLower(E e) {
    return Iterators.getNext(headSet(e, false).descendingIterator(), null);
  }

  @Override
  public E floor(E e) {
    return delegate().floor(e);
  }

  /**
   * A sensible definition of {@link #floor} in terms of the {@code descendingIterator} method of
   * {@link #headSet(Object, boolean)}. If you override {@link #headSet(Object, boolean)}, you may
   * wish to override {@link #floor} to forward to this implementation.
   */
  protected E standardFloor(E e) {
    return Iterators.getNext(headSet(e, true).descendingIterator(), null);
  }

  @Override
  public E ceiling(E e) {
    return delegate().ceiling(e);
  }

  /**
   * A sensible definition of {@link #ceiling} in terms of the {@code iterator} method of
   * {@link #tailSet(Object, boolean)}. If you override {@link #tailSet(Object, boolean)}, you may
   * wish to override {@link #ceiling} to forward to this implementation.
   */
  protected E standardCeiling(E e) {
    return Iterators.getNext(tailSet(e, true).iterator(), null);
  }

  @Override
  public E higher(E e) {
    return delegate().higher(e);
  }

  /**
   * A sensible definition of {@link #higher} in terms of the {@code iterator} method of
   * {@link #tailSet(Object, boolean)}. If you override {@link #tailSet(Object, boolean)}, you may
   * wish to override {@link #higher} to forward to this implementation.
   */
  protected E standardHigher(E e) {
    return Iterators.getNext(tailSet(e, false).iterator(), null);
  }

  @Override
  public E pollFirst() {
    return delegate().pollFirst();
  }

  /**
   * A sensible definition of {@link #pollFirst} in terms of the {@code iterator} method. If you
   * override {@link #iterator} you may wish to override {@link #pollFirst} to forward to this
   * implementation.
   */
  protected E standardPollFirst() {
    return Iterators.pollNext(iterator());
  }

  @Override
  public E pollLast() {
    return delegate().pollLast();
  }

  /**
   * A sensible definition of {@link #pollLast} in terms of the {@code descendingIterator} method.
   * If you override {@link #descendingIterator} you may wish to override {@link #pollLast} to
   * forward to this implementation.
   */
  protected E standardPollLast() {
    return Iterators.pollNext(delegate().descendingIterator());
  }

  /**
   * A sensible definition of {@link #first} in terms of the {@code iterator} method. If you
   * override {@link #iterator} you may wish to override {@link #first} to forward to this
   * implementation. Throws {@code NoSuchElementException} when the set is empty.
   */
  protected E standardFirst() {
    return iterator().next();
  }

  /**
   * A sensible definition of {@link #last} in terms of the {@code descendingIterator} method. If
   * you override {@link #descendingIterator} you may wish to override {@link #last} to forward to
   * this implementation. Throws {@code NoSuchElementException} when the set is empty.
   */
  protected E standardLast() {
    return descendingIterator().next();
  }

  @Override
  public NavigableSet<E> descendingSet() {
    return delegate().descendingSet();
  }

  /**
   * A sensible implementation of {@link NavigableSet#descendingSet} in terms of the other methods
   * of {@link NavigableSet}, notably including {@link NavigableSet#descendingIterator}.
   *
   * <p>In many cases, you may wish to override {@link ForwardingNavigableSet#descendingSet} to
   * forward to this implementation or a subclass thereof.
   *
   * @since 12.0
   */
  @Beta
  protected class StandardDescendingSet extends Sets.DescendingSet<E> {
    /** Constructor for use by subclasses. */
    public StandardDescendingSet() {
      super(ForwardingNavigableSet.this);
    }
  }

  @Override
  public Iterator<E> descendingIterator() {
    return delegate().descendingIterator();
  }

  @Override
  public NavigableSet<E> subSet(
      E fromElement,
      boolean fromInclusive,
      E toElement,
      boolean toInclusive) {
    return delegate().subSet(fromElement, fromInclusive, toElement, toInclusive);
  }

  /**
   * A sensible definition of {@link #subSet(Object, boolean, Object, boolean)} in terms of the
   * {@code headSet} and {@code tailSet} methods. In many cases, you may wish to override
   * {@link #subSet(Object, boolean, Object, boolean)} to forward to this implementation.
   */
  protected NavigableSet<E> standardSubSet(
      E fromElement,
      boolean fromInclusive,
      E toElement,
      boolean toInclusive) {
    return tailSet(fromElement, fromInclusive).headSet(toElement, toInclusive);
  }

  /**
   * A sensible definition of {@link #subSet(Object, Object)} in terms of the
   * {@link #subSet(Object, boolean, Object, boolean)} method. If you override
   * {@link #subSet(Object, boolean, Object, boolean)}, you may wish to override
   * {@link #subSet(Object, Object)} to forward to this implementation.
   */
  @Override
  protected SortedSet<E> standardSubSet(E fromElement, E toElement) {
    return subSet(fromElement, true, toElement, false);
  }

  @Override
  public NavigableSet<E> headSet(E toElement, boolean inclusive) {
    return delegate().headSet(toElement, inclusive);
  }

  /**
   * A sensible definition of {@link #headSet(Object)} in terms of the
   * {@link #headSet(Object, boolean)} method. If you override
   * {@link #headSet(Object, boolean)}, you may wish to override
   * {@link #headSet(Object)} to forward to this implementation.
   */
  protected SortedSet<E> standardHeadSet(E toElement) {
    return headSet(toElement, false);
  }

  @Override
  public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
    return delegate().tailSet(fromElement, inclusive);
  }

  /**
   * A sensible definition of {@link #tailSet(Object)} in terms of the
   * {@link #tailSet(Object, boolean)} method. If you override
   * {@link #tailSet(Object, boolean)}, you may wish to override
   * {@link #tailSet(Object)} to forward to this implementation.
   */
  protected SortedSet<E> standardTailSet(E fromElement) {
    return tailSet(fromElement, true);
  }
}
| apache-2.0 |
yahoo/pulsar | managed-ledger/src/test/java/org/apache/bookkeeper/mledger/impl/ManagedLedgerTerminationTest.java | 5742 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bookkeeper.mledger.impl;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.Collections;
import java.util.List;
import org.apache.bookkeeper.mledger.Entry;
import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.ManagedLedger;
import org.apache.bookkeeper.mledger.ManagedLedgerException.ManagedLedgerTerminatedException;
import org.apache.bookkeeper.mledger.ManagedLedgerException.NoMoreEntriesToReadException;
import org.apache.bookkeeper.mledger.Position;
import org.apache.bookkeeper.test.MockedBookKeeperTestCase;
import org.testng.annotations.Test;
/**
 * Tests for the terminate() behavior of a managed ledger: after termination no
 * new entries may be added, the terminated state is persistent across reopen,
 * and cursors (durable or not) can still read everything written beforehand.
 */
public class ManagedLedgerTerminationTest extends MockedBookKeeperTestCase {

    /**
     * terminate() must return the position of the last entry written, and any
     * subsequent write must fail with {@link ManagedLedgerTerminatedException}.
     */
    @Test(timeOut = 20000)
    public void terminateSimple() throws Exception {
        ManagedLedger ledger = factory.open("my_test_ledger");
        Position p0 = ledger.addEntry("entry-0".getBytes());
        Position lastPosition = ledger.terminate();

        assertEquals(lastPosition, p0);

        try {
            ledger.addEntry("entry-1".getBytes());
            // Fix: this fail() was missing, so the test silently passed even
            // if the write was (incorrectly) accepted. All sibling tests in
            // this class already assert the exception this way.
            fail("Should have thrown exception");
        } catch (ManagedLedgerTerminatedException e) {
            // Expected
        }
    }

    /** The terminated state must survive a close/reopen cycle. */
    @Test(timeOut = 20000)
    public void terminateReopen() throws Exception {
        ManagedLedger ledger = factory.open("my_test_ledger");
        Position p0 = ledger.addEntry("entry-0".getBytes());
        Position lastPosition = ledger.terminate();

        assertEquals(lastPosition, p0);

        ledger.close();
        ledger = factory.open("my_test_ledger");

        try {
            ledger.addEntry("entry-1".getBytes());
            fail("Should have thrown exception");
        } catch (ManagedLedgerTerminatedException e) {
            // Expected
        }
    }

    /** A durable cursor can keep reading entries written before termination. */
    @Test(timeOut = 20000)
    public void terminateWithCursor() throws Exception {
        ManagedLedger ledger = factory.open("my_test_ledger");
        ManagedCursor c1 = ledger.openCursor("c1");

        Position p0 = ledger.addEntry("entry-0".getBytes());
        Position p1 = ledger.addEntry("entry-1".getBytes());

        List<Entry> entries = c1.readEntries(1);
        assertEquals(entries.size(), 1);
        assertEquals(entries.get(0).getPosition(), p0);
        entries.forEach(Entry::release);

        Position lastPosition = ledger.terminate();
        assertEquals(lastPosition, p1);

        // Cursor can keep reading
        entries = c1.readEntries(1);
        assertEquals(entries.size(), 1);
        assertEquals(entries.get(0).getPosition(), p1);
        entries.forEach(Entry::release);
    }

    /**
     * After termination a normal read drains remaining entries and then
     * returns empty, while readEntriesOrWait() must fail instead of blocking
     * forever (no more entries will ever arrive).
     */
    @Test(timeOut = 20000)
    public void terminateWithCursorReadOrWait() throws Exception {
        ManagedLedger ledger = factory.open("my_test_ledger");
        ManagedCursor c1 = ledger.openCursor("c1");

        Position p0 = ledger.addEntry("entry-0".getBytes());
        Position p1 = ledger.addEntry("entry-1".getBytes());

        assertFalse(ledger.isTerminated());
        Position lastPosition = ledger.terminate();
        assertTrue(ledger.isTerminated());
        assertEquals(lastPosition, p1);

        List<Entry> entries = c1.readEntries(10);
        assertEquals(entries.size(), 2);
        assertEquals(entries.get(0).getPosition(), p0);
        assertEquals(entries.get(1).getPosition(), p1);
        entries.forEach(Entry::release);

        // Normal read will just return no entries
        assertEquals(c1.readEntries(10), Collections.emptyList());

        // Read or wait will fail
        try {
            c1.readEntriesOrWait(10);
            fail("Should have thrown exception");
        } catch (NoMoreEntriesToReadException e) {
            // Expected
        }
    }

    /**
     * A non-durable cursor opened after termination can still read all the
     * entries, with the same read/read-or-wait semantics as a durable one.
     */
    @Test(timeOut = 20000)
    public void terminateWithNonDurableCursor() throws Exception {
        ManagedLedger ledger = factory.open("my_test_ledger");

        Position p0 = ledger.addEntry("entry-0".getBytes());
        Position p1 = ledger.addEntry("entry-1".getBytes());

        assertFalse(ledger.isTerminated());
        Position lastPosition = ledger.terminate();
        assertTrue(ledger.isTerminated());
        assertEquals(lastPosition, p1);

        ManagedCursor c1 = ledger.newNonDurableCursor(PositionImpl.earliest);

        List<Entry> entries = c1.readEntries(10);
        assertEquals(entries.size(), 2);
        assertEquals(entries.get(0).getPosition(), p0);
        assertEquals(entries.get(1).getPosition(), p1);
        entries.forEach(Entry::release);

        // Normal read will just return no entries
        assertEquals(c1.readEntries(10), Collections.emptyList());

        // Read or wait will fail
        try {
            c1.readEntriesOrWait(10);
            fail("Should have thrown exception");
        } catch (NoMoreEntriesToReadException e) {
            // Expected
        }
    }
}
| apache-2.0 |
jivesoftware/miru | miru-anomaly-deployable/src/main/java/com/jivesoftware/os/miru/anomaly/deployable/endpoints/AnomalyTrendsPluginEndpoints.java | 1998 | package com.jivesoftware.os.miru.anomaly.deployable.endpoints;
import com.google.common.base.Optional;
import com.jivesoftware.os.miru.anomaly.deployable.MiruAnomalyService;
import com.jivesoftware.os.miru.anomaly.deployable.region.AnomalyTrendsPluginRegion;
import com.jivesoftware.os.miru.anomaly.deployable.region.AnomalyTrendsPluginRegion.TrendingPluginRegionInput;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import javax.inject.Singleton;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
*
*/
@Singleton
@Path("/ui/anomaly/trends")
public class AnomalyTrendsPluginEndpoints {

    private static final MetricLogger LOG = MetricLoggerFactory.getLogger();

    private final MiruAnomalyService miruAnomalyService;
    private final AnomalyTrendsPluginRegion trendingPluginRegion;

    public AnomalyTrendsPluginEndpoints(@Context MiruAnomalyService miruAnomalyService, @Context AnomalyTrendsPluginRegion trendingPluginRegion) {
        this.miruAnomalyService = miruAnomalyService;
        this.trendingPluginRegion = trendingPluginRegion;
    }

    /**
     * Renders the anomaly-trends plugin page as HTML.
     *
     * @param type trend type, defaults to "counter"
     * @param service service name filter; a blank value means "no filter"
     * @return 200 with the rendered page, or 500 if rendering fails
     */
    @GET
    @Path("/")
    @Produces(MediaType.TEXT_HTML)
    public Response getTrends(@QueryParam("type") @DefaultValue("counter") String type,
        @QueryParam("service") @DefaultValue("") String service) {
        try {
            // Treat a blank service parameter as the absence of a filter.
            String serviceFilter = service.trim().isEmpty() ? null : service;
            TrendingPluginRegionInput input = new TrendingPluginRegionInput(type, serviceFilter);
            String html = miruAnomalyService.renderPlugin(trendingPluginRegion, Optional.of(input));
            return Response.ok(html).build();
        } catch (Throwable t) {
            LOG.error("Failed trends", t);
            return Response.serverError().build();
        }
    }
}
| apache-2.0 |
coding0011/elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Executable.java | 607 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.session;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.session.Cursor.Page;
import java.util.List;
/**
 * A unit of a SQL plan that can be executed against a session, producing
 * pages of results asynchronously.
 */
public interface Executable {

    /** The attributes (output columns) this executable produces. */
    List<Attribute> output();

    /**
     * Executes against the given session; the listener receives the first
     * {@link Page} of results, or the failure.
     */
    void execute(SqlSession session, ActionListener<Page> listener);
}
| apache-2.0 |
ThilankaBowala/andes | modules/andes-core/broker/src/main/java/org/wso2/andes/kernel/SubscriptionListener.java | 1756 | /*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.kernel;
import org.wso2.andes.subscription.LocalSubscription;
/**
* Subscription Listener Interface. This has methods related to local subscription changes
* and cluster subscription changes. Any handler handling a subscription change should implement
* this interface
*/
public interface SubscriptionListener {

    /** Kinds of subscription change events delivered to this listener. */
    static enum SubscriptionChange{
        /** A subscription was added. */
        ADDED,
        /** A subscription was deleted. */
        DELETED,
        /** A subscription was disconnected. */
        DISCONNECTED
    }

    /**
     * handle subscription changes in cluster
     * @param subscription subscription changed
     * @param changeType type of change happened
     * @throws AndesException
     */
    public void handleClusterSubscriptionsChanged(AndesSubscription subscription, SubscriptionChange changeType) throws AndesException;

    /**
     * handle local subscription changes
     * @param subscription subscription changed
     * @param changeType type of change happened
     * @throws AndesException
     */
    public void handleLocalSubscriptionsChanged(LocalSubscription subscription, SubscriptionChange changeType) throws AndesException;
}
| apache-2.0 |
B2M-Software/project-drahtlos-smg20 | actuatorclient.siemens.impl/src/main/java/ch/iec/_61400/ews/_1/GetDataDirectoryRequest.java | 2775 |
package ch.iec._61400.ews._1;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="DataRef" type="{http://iec.ch/61400/ews/1.0/}tDAReference"/>
* </sequence>
* <attribute name="UUID" type="{http://iec.ch/61400/ews/1.0/}tstring36" />
* <attribute name="AssocID" use="required" type="{http://iec.ch/61400/ews/1.0/}tAssocID" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated binding class (see the schema fragment in the class
// javadoc above); edit the schema / regenerate rather than hand-modifying.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "dataRef"
})
@XmlRootElement(name = "GetDataDirectoryRequest")
public class GetDataDirectoryRequest {

    // Maps the required <DataRef> element (schema type tDAReference).
    @XmlElement(name = "DataRef", required = true)
    protected String dataRef;
    // Maps the optional UUID attribute (schema type tstring36).
    @XmlAttribute(name = "UUID")
    protected String uuid;
    // Maps the required AssocID attribute (schema type tAssocID).
    @XmlAttribute(name = "AssocID", required = true)
    protected String assocID;

    /**
     * Gets the value of the dataRef property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getDataRef() {
        return dataRef;
    }

    /**
     * Sets the value of the dataRef property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setDataRef(String value) {
        this.dataRef = value;
    }

    /**
     * Gets the value of the uuid property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getUUID() {
        return uuid;
    }

    /**
     * Sets the value of the uuid property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setUUID(String value) {
        this.uuid = value;
    }

    /**
     * Gets the value of the assocID property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getAssocID() {
        return assocID;
    }

    /**
     * Sets the value of the assocID property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setAssocID(String value) {
        this.assocID = value;
    }
}
| apache-2.0 |
cdegroot/river | qa/src/com/sun/jini/test/spec/loader/pref/preferredClassLoader/GetClassAnnotation.java | 5596 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.test.spec.loader.pref.preferredClassLoader;
import java.util.logging.Level;
// com.sun.jini.qa.harness
import com.sun.jini.qa.harness.TestException;
import com.sun.jini.qa.harness.QAConfig;
// com.sun.jini.qa
import com.sun.jini.qa.harness.QATest;
import com.sun.jini.qa.harness.QAConfig;
// java.io
import java.io.IOException;
// java.net
import java.net.URL;
// java.util.logging
import java.util.logging.Logger;
import java.util.logging.Level;
// davis packages
import net.jini.loader.pref.PreferredClassLoader;
// instrumented preferred class loader
import com.sun.jini.test.spec.loader.util.Item;
import com.sun.jini.test.spec.loader.util.Util;
import com.sun.jini.test.spec.loader.util.QATestPreferredClassLoader;
// test base class
import com.sun.jini.test.spec.loader.pref.AbstractTestBase;
/**
*
* <b>Purpose</b><br><br>
*
* This test verifies the behavior of the<br>
* <code>public String getClassAnnotation()</code>
* method of the<br>
* <code>net.jini.loader.pref.PreferredClassLoader</code> class:
*
* <br><blockquote>
* Return the string to be annotated with all classes loaded from
* this class loader.
* </blockquote>
*
* <b>Test Description</b><br><br>
*
* This test iterates over a set of various parameters passing to
* {@link QATestPreferredClassLoader} constructors.
* All parameters are passing to the {@link #testCase} method.
* <ul><lh>Possible parameters are:</lh>
* <li>URL[] urls: http or file based url to qa1-loader-pref.jar file and
* qa1-loader-pref-NO_PREFERRED_LIST.jar</li>
* <li>ClassLoader parent: ClassLoader.getSystemClassLoader()</li>
* <li>String exportAnnotation: <code>null</code>,
* "Any export annotation string"</li>
* <li>boolean requireDlPerm: <code>true</code>, <code>false</code></li>
* </ul>
*
* <br><br>
* This test verifies returned string annotation for <code>null</code> and
* non-<code>null</code>exportAnnotation passing to
* {@link QATestPreferredClassLoader} constructors.
* <br><br>
*
* <b>Infrastructure</b><br><br>
*
* <ol><lh>This test requires the following infrastructure:</lh>
* <li> {@link QATestPreferredClassLoader} is an instrumented
* PreferredClassLoader using for davis.loader's and davis.loader.pref's
* testing.</li>
* </ol>
*
* <br>
*
* <b>Actions</b><br><br>
* <ol>
* <li> construct a {@link QATestPreferredClassLoader} with urls to
* the qa1-loader-pref.jar file and appropriate parameters.
* </li>
* <li> invoke loader.getClassAnnotation()
* and verify that we get expected result
* </li>
* </ol>
*
*/
public class GetClassAnnotation extends AbstractTestBase {

    /** String that indicates fail status */
    String message = "";

    /**
     * Run the test according <b>Test Description</b>
     */
    public void run() throws Exception {
        String annotation = super.annotation;
        // Exercise all four combinations of (http | file) codebase urls and
        // (null | non-null) export annotation.
        testCase(true, null);
        testCase(true, annotation);
        testCase(false, null);
        testCase(false, annotation);

        // NOTE(review): testCase() throws on the first mismatch ("fast fail"
        // below), so this aggregate check can only fire if that behavior is
        // ever relaxed; it is kept as a safeguard.
        if (message.length() > 0) {
            throw new TestException(message);
        }
    }

    /**
     * Reset setup parameters by passing parameters and create
     * {@link QATestPreferredClassLoader}.
     * <br><br>
     * Then run the test case according <b>Test Description</b>
     *
     * @param isHttp flag to define whether http or file url will be used
     *               for download preferred classes and resources
     * @param annotation the exportAnnotation string
     *
     * @throws TestException if could not create instrumented preferred class
     *         loader
     */
    public void testCase(boolean isHttp, String annotation)
            throws TestException {
        /*
         * Reset setup parameters by passing parameters.
         */
        super.isHttp = isHttp;
        super.annotation = annotation;

        /*
         * 1) construct a QATestPreferredClassLoader with urls
         *    to "qa1-loader-pref.jar file and
         *    qa1-loader-pref-NO_PREFERRED_LIST.jar files.
         */
        createLoader(Util.PREFERREDJarFile, Util.NOPREFERREDListJarFile);

        // expectedAnnotationString() derives the expected value from the
        // setup parameters assigned above (see AbstractTestBase).
        String expected = expectedAnnotationString();
        String returned = loader.getClassAnnotation();

        if (!expected.equals(returned)) {
            message += "\ngetClassAnnotation()\n"
                    + " returned:" + returned + "\n"
                    + " expected:" + expected;

            // Fast fail approach
            throw new TestException(message);
        } else {
            String msg = "getClassAnnotation()"
                    + " returned " + returned + " as expected";
            logger.log(Level.FINEST, msg);
        }
    }
}
| apache-2.0 |
brightchen/incubator-apex-core | bufferserver/src/main/java/com/datatorrent/bufferserver/internal/LogicalNode.java | 10843 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.bufferserver.internal;
import java.util.Collection;
import java.util.HashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datatorrent.bufferserver.internal.DataList.DataListIterator;
import com.datatorrent.bufferserver.packet.MessageType;
import com.datatorrent.bufferserver.packet.Tuple;
import com.datatorrent.bufferserver.policy.GiveAll;
import com.datatorrent.bufferserver.policy.Policy;
import com.datatorrent.bufferserver.util.BitVector;
import com.datatorrent.bufferserver.util.Codec;
import com.datatorrent.bufferserver.util.SerializedData;
import com.datatorrent.netlet.EventLoop;
import com.datatorrent.netlet.WriteOnlyClient;
/**
* LogicalNode represents a logical node in a DAG<p>
* <br>
* Logical node can be split into multiple physical nodes. The type of the logical node groups the multiple
* physical nodes together in a group.
* <br>
*
* @since 0.3.2
*/
public class LogicalNode implements DataListener
{
  /** Identifier of the downstream consumer this logical node feeds. */
  private final String identifier;
  /** Identifier of the upstream producer whose data is being consumed. */
  private final String upstream;
  /** Group name shared by all physical nodes of this logical node. */
  private final String group;
  /** Downstream physical connections that tuples are distributed to. */
  private final HashSet<PhysicalNode> physicalNodes;
  /** Partition masks this node subscribes to; empty means "all tuples". */
  private final HashSet<BitVector> partitions;
  private final Policy policy = GiveAll.getInstance();
  /** Cursor over the buffered data being replayed/streamed to this node. */
  private final DataListIterator iterator;
  /** Windows up to and including this id are skipped during catch up. */
  private final long skipWindowId;
  private final EventLoop eventloop;
  /** High 32 bits of the window id, refreshed from RESET_WINDOW tuples. */
  private long baseSeconds;
  /** True once catch up passed a BEGIN_WINDOW beyond skipWindowId. */
  private boolean caughtup;

  /**
   *
   * @param identifier
   * @param upstream
   * @param group
   * @param iterator
   * @param skipWindowId
   */
  public LogicalNode(String identifier, String upstream, String group, DataListIterator iterator, long skipWindowId, EventLoop eventloop)
  {
    this.identifier = identifier;
    this.upstream = upstream;
    this.group = group;
    this.physicalNodes = new HashSet<PhysicalNode>();
    this.partitions = new HashSet<BitVector>();
    this.iterator = iterator;
    this.skipWindowId = skipWindowId;
    this.eventloop = eventloop;
  }

  /**
   *
   * @return String
   */
  public String getGroup()
  {
    return group;
  }

  /**
   *
   * @return DataListIterator
   */
  public DataListIterator getIterator()
  {
    return iterator;
  }

  /**
   * Wraps the connection in a PhysicalNode and registers it (no-op if an
   * equal node is already registered).
   *
   * @param connection
   */
  public void addConnection(WriteOnlyClient connection)
  {
    PhysicalNode pn = new PhysicalNode(connection);
    if (!physicalNodes.contains(pn)) {
      physicalNodes.add(pn);
    }
  }

  /**
   * Removes the physical node backed by the given client, if any.
   *
   * @param client
   */
  public void removeChannel(WriteOnlyClient client)
  {
    for (PhysicalNode pn : physicalNodes) {
      if (pn.getClient() == client) {
        // Safe despite iterating: we break immediately after the removal.
        physicalNodes.remove(pn);
        break;
      }
    }
  }

  /**
   * Subscribes this node to tuples whose partition matches (partition, mask).
   *
   * @param partition
   * @param mask
   */
  public void addPartition(int partition, int mask)
  {
    partitions.add(new BitVector(partition, mask));
  }

  // Becomes false when some downstream connection cannot accept more data;
  // isReady() attempts to unblock all connections before resuming.
  boolean ready = true;

  public boolean isReady()
  {
    if (!ready) {
      ready = true;
      for (PhysicalNode pn : physicalNodes) {
        ready = pn.unblock() & ready;
      }
    }

    return ready;
  }

  // make it run a lot faster by tracking faster!
  /**
   * Fast-forwards the iterator past windows at or before skipWindowId,
   * counting (but not delivering) skipped payload tuples, while still
   * forwarding control tuples (reset/begin/checkpoint/codec/end-stream).
   */
  public void catchUp()
  {
    caughtup = false;
    if (isReady()) {
      logger.debug("catching up {}->{}", upstream, group);
      long lBaseSeconds = (long)iterator.getBaseSeconds() << 32;
      logger.debug("BaseSeconds = {} and lBaseSeconds = {}", Codec.getStringWindowId(baseSeconds),
          Codec.getStringWindowId(lBaseSeconds));
      if (lBaseSeconds > baseSeconds) {
        baseSeconds = lBaseSeconds;
      }
      logger.debug("Set the base seconds to {}", Codec.getStringWindowId(baseSeconds));
      int intervalMillis;

      int skippedPayloadTuples = 0;
      try {
        /*
         * fast forward to catch up with the windowId without consuming
         */
        outer:
        while (ready && iterator.hasNext()) {
          SerializedData data = iterator.next();
          switch (data.buffer[data.dataOffset]) {
            case MessageType.PAYLOAD_VALUE:
              // Payload before the target window: skip, just count it.
              ++skippedPayloadTuples;
              break;

            case MessageType.RESET_WINDOW_VALUE:
              Tuple tuple = Tuple.getTuple(data.buffer, data.dataOffset, data.length - data.dataOffset + data.offset);
              baseSeconds = (long)tuple.getBaseSeconds() << 32;
              intervalMillis = tuple.getWindowWidth();
              if (intervalMillis <= 0) {
                logger.warn("Interval value set to non positive value = {}", intervalMillis);
              }
              ready = GiveAll.getInstance().distribute(physicalNodes, data);
              break;

            case MessageType.BEGIN_WINDOW_VALUE:
              tuple = Tuple.getTuple(data.buffer, data.dataOffset, data.length - data.dataOffset + data.offset);
              logger.debug("{}->{} condition {} =? {}", upstream, group,
                  Codec.getStringWindowId(baseSeconds | tuple.getWindowId()), Codec.getStringWindowId(skipWindowId));
              // Full window id = baseSeconds (high bits) | windowId (low bits).
              if ((baseSeconds | tuple.getWindowId()) > skipWindowId) {
                logger.debug("caught up {}->{} skipping {} payload tuples", upstream, group, skippedPayloadTuples);
                ready = GiveAll.getInstance().distribute(physicalNodes, data);
                caughtup = true;
                break outer;
              }
              break;

            case MessageType.CHECKPOINT_VALUE:
            case MessageType.CODEC_STATE_VALUE:
            case MessageType.END_STREAM_VALUE:
              // Control tuples are always forwarded, even while skipping.
              ready = GiveAll.getInstance().distribute(physicalNodes, data);
              logger.debug("Message {} was distributed to {}", MessageType.valueOf(data.buffer[data.dataOffset]),
                  physicalNodes);
              break;

            default:
              logger.debug("Message {} was not distributed to {}", MessageType.valueOf(data.buffer[data.dataOffset]),
                  physicalNodes);
          }
        }
      } catch (Exception e) {
        logger.error("Disconnecting {}", this, e);
        boot();
      }

      // Anything left after catch up is regular data: hand off to addedData().
      if (iterator.hasNext()) {
        addedData();
      }
    }

    logger.debug("Exiting catch up because caughtup = {}", caughtup);
  }

  /**
   * Drains the iterator, distributing tuples downstream. If catch up has not
   * completed yet, delegates to catchUp() first. Returns true when
   * distribution was blocked (i.e. this node is not ready).
   */
  @Override
  public boolean addedData()
  {
    if (isReady()) {
      if (caughtup) {
        try {
          /*
           * consume as much data as you can before running out of steam
           */
          if (partitions.isEmpty()) {
            // No partition filter: every payload tuple goes through policy.
            while (ready && iterator.hasNext()) {
              SerializedData data = iterator.next();
              switch (data.buffer[data.dataOffset]) {
                case MessageType.PAYLOAD_VALUE:
                  ready = policy.distribute(physicalNodes, data);
                  break;

                case MessageType.NO_MESSAGE_VALUE:
                case MessageType.NO_MESSAGE_ODD_VALUE:
                  // Filler tuples: nothing to deliver.
                  break;

                case MessageType.RESET_WINDOW_VALUE:
                  final int length = data.length - data.dataOffset + data.offset;
                  Tuple resetWindow = Tuple.getTuple(data.buffer, data.dataOffset, length);
                  baseSeconds = (long)resetWindow.getBaseSeconds() << 32;
                  ready = GiveAll.getInstance().distribute(physicalNodes, data);
                  break;

                default:
                  //logger.debug("sending data of type {}", MessageType.valueOf(data.buffer[data.dataOffset]));
                  ready = GiveAll.getInstance().distribute(physicalNodes, data);
                  break;
              }
            }
          } else {
            // Partition filter present: deliver a payload tuple only if its
            // partition value matches one of the subscribed bit vectors.
            while (ready && iterator.hasNext()) {
              SerializedData data = iterator.next();
              final int length = data.length - data.dataOffset + data.offset;

              switch (data.buffer[data.dataOffset]) {
                case MessageType.PAYLOAD_VALUE:
                  Tuple tuple = Tuple.getTuple(data.buffer, data.dataOffset, length);
                  int value = tuple.getPartition();
                  for (BitVector bv : partitions) {
                    if (bv.matches(value)) {
                      ready = policy.distribute(physicalNodes, data);
                      break;
                    }
                  }
                  break;

                case MessageType.NO_MESSAGE_VALUE:
                case MessageType.NO_MESSAGE_ODD_VALUE:
                  break;

                case MessageType.RESET_WINDOW_VALUE:
                  tuple = Tuple.getTuple(data.buffer, data.dataOffset, length);
                  baseSeconds = (long)tuple.getBaseSeconds() << 32;
                  ready = GiveAll.getInstance().distribute(physicalNodes, data);
                  break;

                default:
                  ready = GiveAll.getInstance().distribute(physicalNodes, data);
                  break;
              }
            }
          }
        } catch (Exception e) {
          logger.error("Disconnecting {}", this, e);
          boot();
        }
      } else {
        catchUp();
      }
    }

    return !ready;
  }

  /**
   *
   * @param partitions
   * @return int
   */
  @Override
  public int getPartitions(Collection<BitVector> partitions)
  {
    partitions.addAll(this.partitions);
    return partitions.size();
  }

  /**
   *
   * @return int
   */
  public final int getPhysicalNodeCount()
  {
    return physicalNodes.size();
  }

  /**
   * @return the upstream
   */
  public String getUpstream()
  {
    return upstream;
  }

  /**
   *
   * @return the identifier
   */
  public String getIdentifier()
  {
    return identifier;
  }

  /** Disconnects all downstream clients and clears the physical node set. */
  public void boot()
  {
    for (PhysicalNode pn : physicalNodes) {
      eventloop.disconnect(pn.getClient());
    }

    physicalNodes.clear();
  }

  @Override
  public String toString()
  {
    // NOTE(review): output ends with '}' but there is no matching '{' (and no
    // separator) after the hex hash code — looks like a formatting oversight.
    return "LogicalNode@" + Integer.toHexString(hashCode()) +
        "identifier=" + identifier + ", upstream=" + upstream + ", group=" + group + ", partitions=" + partitions +
        ", iterator=" + iterator + '}';
  }

  private static final Logger logger = LoggerFactory.getLogger(LogicalNode.class);
}
| apache-2.0 |
jitsi/jitsi-videobridge | jvb/src/main/java/org/jitsi/videobridge/datachannel/DataChannelStack.java | 6500 | /*
* Copyright @ 2018 - present 8x8, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.videobridge.datachannel;
import org.jitsi.utils.logging2.*;
import org.jitsi.videobridge.datachannel.protocol.*;
import java.nio.*;
import java.util.*;
/**
* We need the stack to look at all incoming messages so that it can listen for an 'open channel'
* message from the remote side
*
* Handles DataChannel negotiation and the routing of all Data Channel messages
* to specific {@link DataChannel} instances.
*
* @author Brian Baldino
*/
//TODO: revisit thread safety in here
//TODO: add an arbitrary ID
public class DataChannelStack
{
    /**
     * Open channels keyed by SCTP stream id (sid). Guarded by itself:
     * createDataChannel() already synchronized on this map, so the incoming
     * path now does the same for consistency.
     */
    private final Map<Integer, DataChannel> dataChannels = new HashMap<>();
    private final DataChannelDataSender dataChannelDataSender;
    private final Logger logger;
    private DataChannelStackEventListener listener;

    /**
     * Initializes a new {@link DataChannelStack} with a specific sender.
     * @param dataChannelDataSender the sender.
     */
    public DataChannelStack(DataChannelDataSender dataChannelDataSender, Logger parentLogger)
    {
        this.dataChannelDataSender = dataChannelDataSender;
        logger = parentLogger.createChildLogger(DataChannelStack.class.getName());
    }

    /**
     * Handles a received packet: an OPEN message creates and registers a new
     * remotely-opened channel; any other message is routed to the channel
     * registered for the given sid (dropped with an error if none exists).
     */
    public void onIncomingDataChannelPacket(ByteBuffer data, int sid, int ppid)
    {
        logger.debug(() -> "Data channel stack received SCTP message");
        DataChannelMessage message = DataChannelProtocolMessageParser.parse(data.array(), ppid);
        if (message instanceof OpenChannelMessage)
        {
            logger.info("Received data channel open message");
            OpenChannelMessage openChannelMessage = (OpenChannelMessage)message;
            // Remote side wants to open a channel
            DataChannel dataChannel = new RemotelyOpenedDataChannel(
                    dataChannelDataSender,
                    logger,
                    openChannelMessage.channelType,
                    openChannelMessage.priority,
                    openChannelMessage.reliability,
                    sid,
                    openChannelMessage.label);
            // Fix: guard the map the same way createDataChannel() does; this
            // path previously mutated the HashMap without synchronization.
            synchronized (dataChannels)
            {
                dataChannels.put(sid, dataChannel);
            }
            // Notify outside the lock so a listener that opens another
            // channel from the callback cannot deadlock on the map monitor.
            listener.onDataChannelOpenedRemotely(dataChannel);
        }
        else
        {
            DataChannel dataChannel;
            synchronized (dataChannels)
            {
                dataChannel = dataChannels.get(sid);
            }
            if (dataChannel == null)
            {
                logger.error("Could not find data channel for sid " + sid);
                return;
            }
            dataChannel.onIncomingMsg(message);
        }
    }

    /** Registers the listener notified of stack-level events. */
    public void onDataChannelStackEvents(DataChannelStackEventListener listener)
    {
        this.listener = listener;
    }

    /**
     * Opens new WebRTC data channel using specified parameters.
     * @param channelType channel type as defined in control protocol description.
     *                    Use 0 for "reliable".
     * @param priority channel priority. The higher the number, the lower
     *                 the priority.
     * @param reliability Reliability Parameter<br/>
     *
     * This field is ignored if a reliable channel is used.
     * If a partial reliable channel with limited number of
     * retransmissions is used, this field specifies the number of
     * retransmissions.  If a partial reliable channel with limited
     * lifetime is used, this field specifies the maximum lifetime in
     * milliseconds.  The following table summarizes this:<br/></br>

    +------------------------------------------------+------------------+
    | Channel Type                                   |   Reliability    |
    |                                                |    Parameter     |
    +------------------------------------------------+------------------+
    | DATA_CHANNEL_RELIABLE                          |     Ignored      |
    | DATA_CHANNEL_RELIABLE_UNORDERED                |     Ignored      |
    | DATA_CHANNEL_PARTIAL_RELIABLE_REXMIT           |  Number of RTX   |
    | DATA_CHANNEL_PARTIAL_RELIABLE_REXMIT_UNORDERED |  Number of RTX   |
    | DATA_CHANNEL_PARTIAL_RELIABLE_TIMED            |  Lifetime in ms  |
    | DATA_CHANNEL_PARTIAL_RELIABLE_TIMED_UNORDERED  |  Lifetime in ms  |
    +------------------------------------------------+------------------+
     * @param sid SCTP stream id that will be used by new channel
     *            (it must not be already used).
     * @param label text label for the channel.
     * @return new instance of <tt>WebRtcDataStream</tt> that represents opened
     *         WebRTC data channel.
     */
    public DataChannel createDataChannel(int channelType, int priority, long reliability, int sid, String label)
    {
        synchronized (dataChannels) {
            DataChannel dataChannel = new DataChannel(
                    dataChannelDataSender, logger, channelType, priority, reliability, sid, label);
            dataChannels.put(sid, dataChannel);
            return dataChannel;
        }
    }

    /**
     * TODO: these 2 feel a bit awkward since they are so similar, but we use
     * a different one for a remote channel (fired by the stack) and a
     * locally-created channel (fired by the data channel itself).
     */
    public interface DataChannelStackEventListener
    {
        /**
         * The data channel was opened by the remote side.
         */
        void onDataChannelOpenedRemotely(DataChannel dataChannel);
    }

    public interface DataChannelEventListener
    {
        /**
         * The data channel was opened.
         */
        void onDataChannelOpened();
    }

    public interface DataChannelMessageListener
    {
        /**
         * A message received.
         */
        void onDataChannelMessage(DataChannelMessage dataChannelMessage);
    }

    public interface DataChannelDataSender
    {
        /**
         * Sends a message.
         */
        int send(ByteBuffer data, int sid, int ppid);
    }
}
| apache-2.0 |
JavaMicroService/rapidpm-microservice | modules/core-testutils/src/main/java/org/rapidpm/microservice/test/ServletUtils.java | 2128 | /**
* Copyright © 2013 Sven Ruppert (sven.ruppert@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rapidpm.microservice.test;
import org.rapidpm.dependencies.core.net.PortUtils;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
/**
 * Test helpers for configuring and resolving the servlet host/port used by
 * the microservice under test via system properties.
 */
public class ServletUtils {

    public static final String SERVLET_PORT_PROPERTY = "org.rapidpm.microservice.servlet.port";
    public static final String SERVLET_HOST_PROPERTY = "org.rapidpm.microservice.servlet.host";

    /** Sets both the servlet host and port system properties. */
    public void setServletPropertys(String host, String port) {
        System.setProperty(SERVLET_HOST_PROPERTY, host);
        System.setProperty(SERVLET_PORT_PROPERTY, port);
    }

    /** Sets only the servlet port system property. */
    public void setServletPropertys(String port) {
        System.setProperty(SERVLET_PORT_PROPERTY, port);
    }

    /** Points the servlet properties at 127.0.0.1 on a free test port. */
    public void setAllForLocalHost() {
        System.setProperty(SERVLET_HOST_PROPERTY, "127.0.0.1");
        System.setProperty(SERVLET_PORT_PROPERTY, new PortUtils().nextFreePortForTest() + "");
    }

    /**
     * Builds the base request URL for a servlet from the configured host/port,
     * the given servlet path, and the first url pattern of the servlet's
     * {@code @WebServlet} annotation.
     *
     * @throws RuntimeException if the class carries no {@code @WebServlet} annotation
     */
    public String generateBasicReqURL(Class<? extends HttpServlet> servletClass, String servletPath) {
        if (!servletClass.isAnnotationPresent(WebServlet.class)) {
            throw new RuntimeException("Class without WebServlet Annotation " + servletClass);
        }
        final String urlPattern = servletClass.getAnnotation(WebServlet.class).urlPatterns()[0];
        return "http://"
                + System.getProperty(SERVLET_HOST_PROPERTY) + ":"
                + System.getProperty(SERVLET_PORT_PROPERTY)
                + servletPath
                + urlPattern;
    }
}
| apache-2.0 |
Sage-Bionetworks/SynapseWebClient | src/test/java/org/sagebionetworks/web/unitclient/widget/entity/controller/StorageLocationWidgetTest.java | 19460 | package org.sagebionetworks.web.unitclient.widget.entity.controller;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.entitybundle.v2.EntityBundle;
import org.sagebionetworks.repo.model.file.ExternalGoogleCloudUploadDestination;
import org.sagebionetworks.repo.model.file.ExternalObjectStoreUploadDestination;
import org.sagebionetworks.repo.model.file.ExternalS3UploadDestination;
import org.sagebionetworks.repo.model.file.ExternalUploadDestination;
import org.sagebionetworks.repo.model.file.UploadDestination;
import org.sagebionetworks.repo.model.file.UploadType;
import org.sagebionetworks.repo.model.project.ExternalGoogleCloudStorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalObjectStorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalStorageLocationSetting;
import org.sagebionetworks.repo.model.project.StorageLocationSetting;
import org.sagebionetworks.web.client.DisplayUtils;
import org.sagebionetworks.web.client.SynapseClientAsync;
import org.sagebionetworks.web.client.SynapseJavascriptClient;
import org.sagebionetworks.web.client.SynapseProperties;
import org.sagebionetworks.web.client.cookie.CookieProvider;
import org.sagebionetworks.web.client.events.EntityUpdatedEvent;
import org.sagebionetworks.web.client.widget.entity.controller.StorageLocationWidget;
import org.sagebionetworks.web.client.widget.entity.controller.StorageLocationWidgetView;
import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert;
import org.sagebionetworks.web.shared.WebConstants;
import org.sagebionetworks.web.test.helper.AsyncMockStubber;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.user.client.rpc.AsyncCallback;
@RunWith(MockitoJUnitRunner.class)
/**
 * Unit tests for {@code StorageLocationWidget}: verifies view wiring on configure,
 * loading/rendering of each supported storage location type (Synapse default,
 * external S3, Google Cloud, SFTP, external object store), and validation plus
 * persistence behavior of {@code onSave()}.
 */
@RunWith(MockitoJUnitRunner.class)
public class StorageLocationWidgetTest {
  @Mock
  StorageLocationWidgetView mockView;
  @Mock
  SynapseClientAsync mockSynapseClient;
  @Mock
  SynapseJavascriptClient mockJsClient;
  @Mock
  SynapseProperties mockSynapseProperties;
  StorageLocationWidget widget;
  @Mock
  SynapseAlert mockSynAlert;
  List<String> locationSettingBanners;
  @Mock
  EntityBundle mockBundle;
  Folder folder;
  @Mock
  CookieProvider mockCookies;
  @Mock
  EventBus mockEventBus;
  @Captor
  ArgumentCaptor<StorageLocationSetting> locationSettingCaptor;
  public static final String DEFAULT_STORAGE_ID = "424242";

  @Before
  public void setup() {
    // No test-website cookie by default, i.e. not in "alpha" mode.
    when(mockCookies.getCookie(eq(DisplayUtils.SYNAPSE_TEST_WEBSITE_COOKIE_KEY))).thenReturn(null);
    widget = new StorageLocationWidget(mockView, mockSynapseClient, mockJsClient, mockSynAlert, mockSynapseProperties, mockCookies, mockEventBus);
    folder = new Folder();
    folder.setId("syn420");
    when(mockBundle.getEntity()).thenReturn(folder);
    locationSettingBanners = Arrays.asList("Banner 1", "Banner 2");
    widget.configure(mockBundle);
    when(mockSynapseProperties.getSynapseProperty(WebConstants.DEFAULT_STORAGE_ID_PROPERTY_KEY)).thenReturn(DEFAULT_STORAGE_ID);
  }

  @Test
  public void testConfigure() {
    // configure() (called in setup) must wire the view/alert and reset to a loading state.
    verify(mockView).setSynAlertWidget(mockSynAlert);
    verify(mockView).setPresenter(widget);
    verify(mockSynAlert).clear();
    verify(mockView).clear();
    verify(mockView).setLoading(true);
  }

  @Test
  public void testGetMyLocationSettingBanners() {
    AsyncMockStubber.callSuccessWith(locationSettingBanners).when(mockSynapseClient).getMyLocationSettingBanners(any(AsyncCallback.class));
    widget.getMyLocationSettingBanners();
    verify(mockView).setBannerDropdownVisible(true);
    verify(mockView).setBannerSuggestions(anyList());
  }

  @Test
  public void testGetMyLocationSettingBannersEmpty() {
    // With no banners, the dropdown should be hidden.
    locationSettingBanners = Collections.emptyList();
    AsyncMockStubber.callSuccessWith(locationSettingBanners).when(mockSynapseClient).getMyLocationSettingBanners(any(AsyncCallback.class));
    widget.getMyLocationSettingBanners();
    verify(mockView).setBannerDropdownVisible(false);
    verify(mockView).setBannerSuggestions(anyList());
  }

  @Test
  public void testGetMyLocationSettingBannersFailure() {
    String error = "An service error that should be shown to the user";
    AsyncMockStubber.callFailureWith(new Exception(error)).when(mockSynapseClient).getMyLocationSettingBanners(any(AsyncCallback.class));
    widget.getMyLocationSettingBanners();
    verify(mockView).showErrorMessage(error);
    verify(mockView).hide();
  }

  @Test
  public void testGetStorageLocationSettingNull() {
    UploadDestination entityStorageLocationSetting = null;
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    reset(mockView);
    widget.getStorageLocationSetting();
    // should remain set to the default config
    verify(mockView).setSFTPVisible(anyBoolean());
    verify(mockView).setLoading(false);
    verify(mockView).setS3StsVisible(false);
    verifyNoMoreInteractions(mockView);
  }

  @Test
  public void testGetStorageLocationSettingFailure() {
    String error = "An service error that should be shown to the user";
    AsyncMockStubber.callFailureWith(new Exception(error)).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).showErrorMessage(error);
    verify(mockView).setLoading(false);
  }

  @Test
  public void testNullBanner() {
    // A null banner from the backend should render as the empty string.
    ExternalS3UploadDestination entityStorageLocationSetting = new ExternalS3UploadDestination();
    String baseKey = "key";
    String bucket = "a.bucket ";
    String banner = null;
    entityStorageLocationSetting.setBanner(banner);
    entityStorageLocationSetting.setBucket(bucket);
    entityStorageLocationSetting.setBaseKey(baseKey);
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).setExternalS3Banner("");
  }

  @Test
  public void testGetStorageLocationSettingExternalS3() {
    // Simulate alpha mode so the STS checkbox becomes visible.
    when(mockCookies.getCookie(eq(DisplayUtils.SYNAPSE_TEST_WEBSITE_COOKIE_KEY))).thenReturn("true");
    ExternalS3UploadDestination entityStorageLocationSetting = new ExternalS3UploadDestination();
    String baseKey = "key";
    String bucket = "a.bucket ";
    String banner = "upload to a.bucket";
    Boolean isStsEnabled = false;
    entityStorageLocationSetting.setBanner(banner);
    entityStorageLocationSetting.setBucket(bucket);
    entityStorageLocationSetting.setBaseKey(baseKey);
    entityStorageLocationSetting.setStsEnabled(isStsEnabled);
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).setS3BaseKey(baseKey);
    verify(mockView).setS3Bucket(bucket.trim());
    verify(mockView).setExternalS3Banner(banner);
    verify(mockView).selectExternalS3Storage();
    verify(mockView).setSFTPVisible(true);
    verify(mockView).setS3StsVisible(true); // because we're in alpha mode
    verify(mockView).setS3StsEnabled(isStsEnabled);
  }

  @Test
  public void testS3StsEnabled() {
    // STS already enabled on the location forces the STS controls visible even outside alpha.
    ExternalS3UploadDestination entityStorageLocationSetting = new ExternalS3UploadDestination();
    entityStorageLocationSetting.setBanner("");
    entityStorageLocationSetting.setBucket("");
    entityStorageLocationSetting.setBaseKey("");
    entityStorageLocationSetting.setStsEnabled(true);
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).selectExternalS3Storage();
    verify(mockView).setS3StsVisible(true);
    verify(mockView).setS3StsEnabled(true);
  }

  // This test can be deleted once STS is out of alpha mode
  @Test
  public void testS3StsNotEnabledNotInAlpha() {
    ExternalS3UploadDestination entityStorageLocationSetting = new ExternalS3UploadDestination();
    entityStorageLocationSetting.setBanner("");
    entityStorageLocationSetting.setBucket("");
    entityStorageLocationSetting.setBaseKey("");
    entityStorageLocationSetting.setStsEnabled(false);
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).selectExternalS3Storage();
    verify(mockView).setS3StsVisible(false);
    verify(mockView).setS3StsEnabled(false);
  }

  @Test
  public void testGetStorageLocationSettingHideSFTP() {
    // Outside alpha mode the SFTP option must never be shown.
    ExternalS3UploadDestination entityStorageLocationSetting = new ExternalS3UploadDestination();
    entityStorageLocationSetting.setBanner("");
    entityStorageLocationSetting.setBucket("");
    entityStorageLocationSetting.setBaseKey("");
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).selectExternalS3Storage();
    verify(mockView, never()).setSFTPVisible(true);
  }

  @Test
  public void testGetStorageLocationSettingSFTP() {
    ExternalUploadDestination entityStorageLocationSetting = new ExternalUploadDestination();
    String url = "sftp://tcgaftps.nnn.mmm.gov";
    String banner = "upload to a sftp site";
    entityStorageLocationSetting.setBanner(banner);
    entityStorageLocationSetting.setUrl(url);
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).setSFTPBanner(banner);
    verify(mockView).setSFTPUrl(url);
    verify(mockView).selectSFTPStorage();
    verify(mockView, atLeast(1)).setSFTPVisible(true);
  }

  @Test
  public void testGetStorageLocationSettingGoogleCloud() {
    ExternalGoogleCloudUploadDestination entityStorageLocationSetting = new ExternalGoogleCloudUploadDestination();
    String bucket = "my-bucket";
    String baseKey = "key.txt";
    String banner = "upload to a google cloud bucket";
    entityStorageLocationSetting.setBucket(bucket);
    entityStorageLocationSetting.setBaseKey(baseKey);
    entityStorageLocationSetting.setBanner(banner);
    AsyncMockStubber.callSuccessWith(entityStorageLocationSetting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).setGoogleCloudBucket(bucket);
    verify(mockView).setGoogleCloudBaseKey(baseKey);
    verify(mockView).setExternalGoogleCloudBanner(banner);
    verify(mockView).selectExternalGoogleCloudStorage();
    verify(mockView, atLeast(1)).setGoogleCloudVisible(true);
  }

  @Test
  public void testGetStorageLocationSettingExternalObjectStore() {
    ExternalObjectStoreUploadDestination setting = new ExternalObjectStoreUploadDestination();
    String endpointUrl = "something.gov";
    String bucket = "mybucket";
    String banner = "upload to a sftp site";
    setting.setBanner(banner);
    setting.setBucket(bucket);
    setting.setEndpointUrl(endpointUrl);
    AsyncMockStubber.callSuccessWith(setting).when(mockJsClient).getDefaultUploadDestination(anyString(), any(AsyncCallback.class));
    widget.getStorageLocationSetting();
    verify(mockView).setExternalObjectStoreBanner(banner);
    verify(mockView).setExternalObjectStoreBucket(bucket);
    verify(mockView).setExternalObjectStoreEndpointUrl(endpointUrl);
    verify(mockView).selectExternalObjectStore();
  }

  @Test
  public void testShow() {
    widget.show();
    verify(mockView).show();
  }

  @Test
  public void testHide() {
    widget.hide();
    verify(mockView).hide();
  }

  @Test
  public void testOnSaveSynapseStorage() {
    // Neither S3 nor SFTP selected -> default Synapse storage is saved.
    when(mockView.isExternalS3StorageSelected()).thenReturn(false);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    AsyncMockStubber.callSuccessWith(null).when(mockSynapseClient).createStorageLocationSetting(anyString(), any(StorageLocationSetting.class), any(AsyncCallback.class));
    widget.onSave();
    verify(mockSynapseClient).createStorageLocationSetting(anyString(), any(StorageLocationSetting.class), any(AsyncCallback.class));
    verify(mockEventBus).fireEvent(any(EntityUpdatedEvent.class));
    verify(mockView).hide();
  }

  @Test
  public void testOnSaveExternalObjectStore() {
    when(mockView.isExternalS3StorageSelected()).thenReturn(false);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    when(mockView.isSynapseStorageSelected()).thenReturn(false);
    when(mockView.isExternalObjectStoreSelected()).thenReturn(true);
    String banner = "hello object store";
    String bucket = "need a bucket";
    String endpointUrl = "http://test";
    when(mockView.getExternalObjectStoreBanner()).thenReturn(banner);
    when(mockView.getExternalObjectStoreBucket()).thenReturn(bucket);
    when(mockView.getExternalObjectStoreEndpointUrl()).thenReturn(endpointUrl);
    AsyncMockStubber.callSuccessWith(null).when(mockSynapseClient).createStorageLocationSetting(anyString(), any(StorageLocationSetting.class), any(AsyncCallback.class));
    widget.onSave();
    verify(mockSynapseClient).createStorageLocationSetting(anyString(), locationSettingCaptor.capture(), any(AsyncCallback.class));
    ExternalObjectStorageLocationSetting setting = (ExternalObjectStorageLocationSetting) locationSettingCaptor.getValue();
    assertEquals(banner, setting.getBanner());
    assertEquals(endpointUrl, setting.getEndpointUrl());
    assertEquals(bucket, setting.getBucket());
    assertEquals(UploadType.S3, setting.getUploadType());
    verify(mockEventBus).fireEvent(any(EntityUpdatedEvent.class));
    verify(mockView).hide();
  }

  @Test
  public void testOnSaveSynapseStorageFailure() {
    when(mockView.isExternalS3StorageSelected()).thenReturn(false);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    Exception e = new Exception("oh nos!");
    AsyncMockStubber.callFailureWith(e).when(mockSynapseClient).createStorageLocationSetting(anyString(), any(StorageLocationSetting.class), any(AsyncCallback.class));
    widget.onSave();
    verify(mockSynapseClient).createStorageLocationSetting(anyString(), any(StorageLocationSetting.class), any(AsyncCallback.class));
    verify(mockSynAlert).handleException(e);
  }

  @Test
  public void testOnSaveExternalS3() {
    // User-entered values are expected to be trimmed before persisting.
    when(mockView.isExternalS3StorageSelected()).thenReturn(true);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    String baseKey = " key";
    String bucket = "a.bucket ";
    String banner = " upload to a.bucket";
    when(mockView.getExternalS3Banner()).thenReturn(banner);
    when(mockView.getS3Bucket()).thenReturn(bucket);
    when(mockView.getS3BaseKey()).thenReturn(baseKey);
    widget.onSave();
    ArgumentCaptor<StorageLocationSetting> captor = ArgumentCaptor.forClass(StorageLocationSetting.class);
    verify(mockSynapseClient).createStorageLocationSetting(anyString(), captor.capture(), any(AsyncCallback.class));
    ExternalS3StorageLocationSetting capturedSetting = (ExternalS3StorageLocationSetting) captor.getValue();
    assertEquals(baseKey.trim(), capturedSetting.getBaseKey());
    assertEquals(bucket.trim(), capturedSetting.getBucket());
    assertEquals(banner.trim(), capturedSetting.getBanner());
  }

  @Test
  public void testOnSaveExternalS3Invalid() {
    when(mockView.isExternalS3StorageSelected()).thenReturn(true);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    when(mockView.getExternalS3Banner()).thenReturn("banner");
    // invalid bucket
    when(mockView.getS3Bucket()).thenReturn(" ");
    when(mockView.getS3BaseKey()).thenReturn("base key");
    widget.onSave();
    verify(mockSynAlert).showError(anyString());
  }

  @Test
  public void testOnSaveExternalGoogleCloud() {
    when(mockView.isExternalGoogleCloudStorageSelected()).thenReturn(true);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    String baseKey = " key";
    String bucket = "a.bucket ";
    String banner = " upload to a.bucket";
    when(mockView.getExternalGoogleCloudBanner()).thenReturn(banner);
    when(mockView.getGoogleCloudBucket()).thenReturn(bucket);
    when(mockView.getGoogleCloudBaseKey()).thenReturn(baseKey);
    widget.onSave();
    ArgumentCaptor<StorageLocationSetting> captor = ArgumentCaptor.forClass(StorageLocationSetting.class);
    verify(mockSynapseClient).createStorageLocationSetting(anyString(), captor.capture(), any(AsyncCallback.class));
    ExternalGoogleCloudStorageLocationSetting capturedSetting = (ExternalGoogleCloudStorageLocationSetting) captor.getValue();
    assertEquals(baseKey.trim(), capturedSetting.getBaseKey());
    assertEquals(bucket.trim(), capturedSetting.getBucket());
    assertEquals(banner.trim(), capturedSetting.getBanner());
  }

  @Test
  public void testOnSaveExternalGoogleCloudInvalid() {
    when(mockView.isExternalGoogleCloudStorageSelected()).thenReturn(true);
    when(mockView.isSFTPStorageSelected()).thenReturn(false);
    when(mockView.getExternalGoogleCloudBanner()).thenReturn("banner");
    // invalid bucket
    when(mockView.getGoogleCloudBucket()).thenReturn(" ");
    when(mockView.getGoogleCloudBaseKey()).thenReturn("base key");
    widget.onSave();
    verify(mockSynAlert).showError(anyString());
  }

  @Test
  public void testOnSaveSFTP() {
    when(mockView.isExternalS3StorageSelected()).thenReturn(false);
    when(mockView.isSFTPStorageSelected()).thenReturn(true);
    String url = "sftp://tcgaftps.nnn.mmm.gov";
    String banner = "a sftp site";
    when(mockView.getSFTPUrl()).thenReturn(url);
    when(mockView.getSFTPBanner()).thenReturn(banner);
    widget.onSave();
    ArgumentCaptor<StorageLocationSetting> captor = ArgumentCaptor.forClass(StorageLocationSetting.class);
    verify(mockSynapseClient).createStorageLocationSetting(anyString(), captor.capture(), any(AsyncCallback.class));
    ExternalStorageLocationSetting capturedSetting = (ExternalStorageLocationSetting) captor.getValue();
    assertEquals(url.trim(), capturedSetting.getUrl());
    assertEquals(banner.trim(), capturedSetting.getBanner());
  }

  @Test
  public void testOnSaveSFTPInvalid1() {
    // Wrong scheme: an SFTP location must not accept an https URL.
    when(mockView.isExternalS3StorageSelected()).thenReturn(false);
    when(mockView.isSFTPStorageSelected()).thenReturn(true);
    when(mockView.getSFTPUrl()).thenReturn("https://fjkdsljfdsl");
    when(mockView.getSFTPBanner()).thenReturn("banner");
    widget.onSave();
    verify(mockSynAlert).showError(anyString());
  }

  @Test
  public void testOnSaveSFTPInvalid2() {
    when(mockView.isExternalS3StorageSelected()).thenReturn(false);
    when(mockView.isSFTPStorageSelected()).thenReturn(true);
    // empty
    when(mockView.getSFTPUrl()).thenReturn(" ");
    when(mockView.getSFTPBanner()).thenReturn("banner");
    widget.onSave();
    verify(mockSynAlert).showError(anyString());
  }
}
| apache-2.0 |
DwayneJengSage/Bridge-Exporter-1 | src/test/java/org/sagebionetworks/bridge/exporter/synapse/SynapseHelperSerializeTest.java | 11790 | package org.sagebionetworks.bridge.exporter.synapse;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.io.File;
import java.math.BigDecimal;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.BooleanNode;
import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.LongNode;
import com.fasterxml.jackson.databind.node.NullNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.collect.Multiset;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.sagebionetworks.bridge.config.Config;
import org.sagebionetworks.bridge.exporter.metrics.Metrics;
import org.sagebionetworks.bridge.exporter.util.BridgeExporterUtil;
import org.sagebionetworks.bridge.json.DefaultObjectMapper;
import org.sagebionetworks.bridge.rest.model.UploadFieldDefinition;
import org.sagebionetworks.bridge.rest.model.UploadFieldType;
import org.sagebionetworks.bridge.s3.S3Helper;
// Tests for SynapseHelper.serializeToSynapseType()
// Tests for SynapseHelper.serializeToSynapseType()
public class SynapseHelperSerializeTest {
  // Shared Mockito mock used where a real temp directory is never touched by the code under test.
  private static final File MOCK_TEMP_DIR = mock(File.class);
  private static final String TEST_ATTACHMENTS_BUCKET = "attachments-bucket";
  private static final String TEST_PROJECT_ID = "test-project-id";
  private static final String TEST_RECORD_ID = "test-record-id";
  private static final String TEST_FIELD_NAME = "test-field-name";
  private static final String TEST_STUDY_ID = "test-study-id";

  /**
   * Table of (field type, input JSON node, expected serialized string) cases covering the
   * scalar field types. A null expectation means serialization should yield no value.
   */
  @DataProvider(name = "testSerializeProvider")
  public Object[][] testSerializeProvider() {
    // fieldType, input, expected
    return new Object[][] {
        { UploadFieldType.STRING, null, null },
        { UploadFieldType.STRING, NullNode.instance, null },
        { UploadFieldType.BOOLEAN, BooleanNode.TRUE, "true" },
        { UploadFieldType.BOOLEAN, BooleanNode.FALSE, "false" },
        // We don't parse JSON strings. We only accept JSON booleans.
        { UploadFieldType.BOOLEAN, new TextNode("true"), null },
        { UploadFieldType.CALENDAR_DATE, new TextNode("2015-12-01"), "2015-12-01" },
        { UploadFieldType.DURATION_V2, new TextNode("PT1H"), "PT1H" },
        { UploadFieldType.TIME_V2, new TextNode("13:07:56.123"), "13:07:56.123" },
        { UploadFieldType.FLOAT, new DecimalNode(new BigDecimal("3.14")), "3.14" },
        { UploadFieldType.FLOAT, new IntNode(42), "42" },
        // We don't parse JSON strings. We only accept numeric JSON values.
        { UploadFieldType.FLOAT, new TextNode("3.14"), null },
        { UploadFieldType.INT, new IntNode(42), "42" },
        // This simply calls longValue() on the node, which causes truncation instead of rounding.
        { UploadFieldType.INT, new DecimalNode(new BigDecimal("-13.9")), "-13" },
        // We don't parse JSON strings. We only accept numeric JSON values.
        { UploadFieldType.INT, new TextNode("-13"), null },
        { UploadFieldType.SINGLE_CHOICE, new TextNode("foobarbaz"), "foobarbaz" },
        { UploadFieldType.STRING, new TextNode("foobarbaz"), "foobarbaz" },
        // Test removing HTML
        { UploadFieldType.SINGLE_CHOICE, new TextNode("<a href=\"sagebase.org\">link</a>"), "link" },
        { UploadFieldType.STRING, new TextNode("<a href=\"sagebase.org\">link</a>"), "link" },
        // These types are not supported by serialize() because they serialize into multiple columns.
        // This test is mainly for branch coverage. This code path should never be hit in real life.
        { UploadFieldType.MULTI_CHOICE, new TextNode("value doesn't matter"), null },
        { UploadFieldType.TIMESTAMP, new TextNode("value doesn't matter"), null },
    };
  }

  /** Runs every scalar case from the data provider through the shared helper. */
  @Test(dataProvider = "testSerializeProvider")
  public void testSerialize(UploadFieldType fieldType, JsonNode input, String expected) throws Exception {
    testHelper(new Metrics(), fieldDefForType(fieldType), input, expected);
  }

  /** Inline JSON blobs are copied verbatim; verify by re-parsing the serialized text. */
  @Test
  public void inlineJsonBlob() throws Exception {
    // based on real JSON blobs
    String jsonText = "[1, 3, 5, 7, \"this is a string\"]";
    JsonNode originalNode = DefaultObjectMapper.INSTANCE.readTree(jsonText);
    // serialize, which basically just copies the JSON text as is
    String retVal = new SynapseHelper().serializeToSynapseType(new Metrics(), MOCK_TEMP_DIR, TEST_PROJECT_ID,
        TEST_RECORD_ID, TEST_STUDY_ID, fieldDefForType(UploadFieldType.INLINE_JSON_BLOB), originalNode);
    // parse back into JSON and compare
    JsonNode reparsedNode = DefaultObjectMapper.INSTANCE.readTree(retVal);
    assertTrue(reparsedNode.isArray());
    assertEquals(reparsedNode.size(), 5);
    assertEquals(reparsedNode.get(0).intValue(), 1);
    assertEquals(reparsedNode.get(1).intValue(), 3);
    assertEquals(reparsedNode.get(2).intValue(), 5);
    assertEquals(reparsedNode.get(3).intValue(), 7);
    assertEquals(reparsedNode.get(4).textValue(), "this is a string");
  }

  /** Both string-like field types, used to exercise shared string-handling branches. */
  @DataProvider(name = "stringTypeProvider")
  public Object[][] stringTypeProvider() {
    return new Object[][] {
        { UploadFieldType.SINGLE_CHOICE },
        { UploadFieldType.STRING },
    };
  }

  @Test(dataProvider = "stringTypeProvider")
  public void stringSanitized(UploadFieldType stringType) throws Exception {
    // Use an extra short field def.
    UploadFieldDefinition fieldDef = new UploadFieldDefinition().name(TEST_FIELD_NAME).type(stringType)
        .maxLength(10);
    // String value has newlines and tabs that need to be stripped out.
    // Newlines turned into spaces, string truncated to length 10
    testHelper(new Metrics(), fieldDef, new TextNode("asdf\njkl;\tlorem ipsum dolor"), "asdf jkl; ");
  }

  // branch coverage
  @Test(dataProvider = "stringTypeProvider")
  public void stringUnboundedTrue(UploadFieldType stringType) throws Exception {
    UploadFieldDefinition fieldDef = new UploadFieldDefinition().name(TEST_FIELD_NAME).type(stringType).
        unboundedText(true);
    testHelper(new Metrics(), fieldDef, new TextNode("unbounded text not really"), "unbounded text not really");
  }

  // branch coverage
  @Test(dataProvider = "stringTypeProvider")
  public void stringUnboundedFalse(UploadFieldType stringType) throws Exception {
    UploadFieldDefinition fieldDef = new UploadFieldDefinition().name(TEST_FIELD_NAME).type(stringType)
        .unboundedText(false);
    testHelper(new Metrics(), fieldDef, new TextNode("not really unbounded text"), "not really unbounded text");
  }

  /** All attachment-style field types, which serialize via a Synapse file handle. */
  @DataProvider(name = "attachmentTypeProvider")
  public Object[][] attachmentTypeProvider() {
    return new Object[][] {
        { UploadFieldType.ATTACHMENT_BLOB },
        { UploadFieldType.ATTACHMENT_CSV },
        { UploadFieldType.ATTACHMENT_JSON_BLOB },
        { UploadFieldType.ATTACHMENT_JSON_TABLE },
        { UploadFieldType.ATTACHMENT_V2 },
    };
  }

  @Test(dataProvider = "attachmentTypeProvider")
  public void attachmentInvalidType(UploadFieldType attachmentType) throws Exception {
    // Attachments are strings, which is the attachment ID.
    Metrics metrics = new Metrics();
    testHelper(metrics, fieldDefForType(attachmentType), new LongNode(1234567890L), null);
    // Validate metrics - There were no attachments.
    Multiset<String> counterMap = metrics.getCounterMap();
    assertEquals(counterMap.count("numAttachments"), 0);
  }

  @Test(dataProvider = "attachmentTypeProvider")
  public void attachment(UploadFieldType attachmentType) throws Exception {
    UploadFieldDefinition attachmentFieldDef = fieldDefForType(attachmentType);
    // Spy uploadFromS3ToSynapseFileHandle(). This has some complex logic that is tested elsewhere. For simplicity
    // of tests, just mock it out.
    SynapseHelper synapseHelper = spy(new SynapseHelper());
    doReturn("dummy-filehandle-id").when(synapseHelper).uploadFromS3ToSynapseFileHandle(
        TEST_PROJECT_ID, "dummy-attachment-id");
    // execute
    Metrics metrics = new Metrics();
    String retVal = synapseHelper.serializeToSynapseType(metrics, MOCK_TEMP_DIR, TEST_PROJECT_ID, TEST_RECORD_ID,
        TEST_STUDY_ID, attachmentFieldDef, new TextNode("dummy-attachment-id"));
    assertEquals(retVal, "dummy-filehandle-id");
    // Validate metrics
    Multiset<String> counterMap = metrics.getCounterMap();
    assertEquals(counterMap.count("numAttachments"), 1);
  }

  @Test
  public void largeTextAttachment() throws Exception {
    // mock config
    Config mockConfig = mock(Config.class);
    when(mockConfig.get(BridgeExporterUtil.CONFIG_KEY_ATTACHMENT_S3_BUCKET)).thenReturn(TEST_ATTACHMENTS_BUCKET);
    when(mockConfig.getInt(SynapseHelper.CONFIG_KEY_SYNAPSE_ASYNC_INTERVAL_MILLIS)).thenReturn(0);
    when(mockConfig.getInt(SynapseHelper.CONFIG_KEY_SYNAPSE_ASYNC_TIMEOUT_LOOPS)).thenReturn(2);
    // Set a very high number for rate limiting, since we don't want the rate limiter to interfere with our tests.
    when(mockConfig.getInt(SynapseHelper.CONFIG_KEY_SYNAPSE_RATE_LIMIT_PER_SECOND)).thenReturn(1000);
    when(mockConfig.getInt(SynapseHelper.CONFIG_KEY_SYNAPSE_GET_COLUMN_MODELS_RATE_LIMIT_PER_MINUTE)).thenReturn(
        1000);
    // Mock S3Helper.
    S3Helper mockS3Helper = mock(S3Helper.class);
    when(mockS3Helper.readS3FileAsString(TEST_ATTACHMENTS_BUCKET, "my-large-text-attachment-id")).thenReturn(
        "This is my <b>large text</b> attachment");
    // Create SynapseHelper.
    SynapseHelper synapseHelper = new SynapseHelper();
    synapseHelper.setConfig(mockConfig);
    synapseHelper.setS3Helper(mockS3Helper);
    // Create inputs.
    UploadFieldDefinition fieldDef = fieldDefForType(UploadFieldType.LARGE_TEXT_ATTACHMENT);
    Metrics metrics = new Metrics();
    // Test case 1: attachment with sanitization
    String retVal1 = synapseHelper.serializeToSynapseType(metrics, MOCK_TEMP_DIR, TEST_PROJECT_ID, TEST_RECORD_ID,
        TEST_STUDY_ID, fieldDef, new TextNode("my-large-text-attachment-id"));
    assertEquals(retVal1, "This is my large text attachment");
    // Test case 2: wrong type
    String retVal2 = synapseHelper.serializeToSynapseType(metrics, MOCK_TEMP_DIR, TEST_PROJECT_ID, TEST_RECORD_ID,
        TEST_STUDY_ID, fieldDef, new LongNode(1234567890L));
    assertNull(retVal2);
  }

  /** Builds a minimal field definition of the given type using the shared test field name. */
  private static UploadFieldDefinition fieldDefForType(UploadFieldType type) {
    return new UploadFieldDefinition().name(TEST_FIELD_NAME).type(type);
  }

  /**
   * Serializes {@code input} under {@code fieldDef} with a fresh SynapseHelper and asserts
   * the resulting string equals {@code expected}.
   */
  private static void testHelper(Metrics metrics, UploadFieldDefinition fieldDef, JsonNode input,
      String expected) throws Exception {
    String retVal = new SynapseHelper().serializeToSynapseType(metrics, MOCK_TEMP_DIR, TEST_PROJECT_ID,
        TEST_RECORD_ID, TEST_STUDY_ID, fieldDef, input);
    assertEquals(retVal, expected);
  }
}
| apache-2.0 |
locationtech/geowave | extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/NumericHistogramStatisticsTest.java | 8611 | /**
* Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.vector.stats;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.apache.commons.math.util.MathUtils;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
public class NumericHistogramStatisticsTest {
private SimpleFeatureType schema;
FeatureDataAdapter dataAdapter;
GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));
@Before
public void setup() throws SchemaException, CQLException, ParseException {
  // Feature type whose numeric "pop" attribute is the column the histogram statistic reads;
  // the remaining attributes exist only so features can be built with realistic schemas.
  schema =
      DataUtilities.createType(
          "sp.geostuff",
          "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,somewhere:Polygon,pid:String");
  dataAdapter = new FeatureDataAdapter(schema);
}
private SimpleFeature create(final Double val) {
final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();
final Object[] defaults = new Object[descriptors.size()];
int p = 0;
for (final AttributeDescriptor descriptor : descriptors) {
defaults[p++] = descriptor.getDefaultValue();
}
final SimpleFeature newFeature =
SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());
newFeature.setAttribute("pop", val);
newFeature.setAttribute("pid", UUID.randomUUID().toString());
newFeature.setAttribute("when", new Date());
newFeature.setAttribute("whennot", new Date());
newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
return newFeature;
}
@Test
public void testPositive() {
final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
final NumericHistogramValue statValue = stat.createEmpty();
final Random rand = new Random(7777);
statValue.entryIngested(dataAdapter, create(100.0));
statValue.entryIngested(dataAdapter, create(101.0));
statValue.entryIngested(dataAdapter, create(2.0));
double next = 1;
for (int i = 0; i < 10000; i++) {
next = next + (Math.round(rand.nextDouble()));
statValue.entryIngested(dataAdapter, create(next));
}
final NumericHistogramValue statValue2 = stat.createEmpty();
final double start2 = next;
double max = 0;
for (long i = 0; i < 10000; i++) {
final double val = next + (1000 * rand.nextDouble());
statValue2.entryIngested(dataAdapter, create(val));
max = Math.max(val, max);
}
final double skewvalue = next + (1000 * rand.nextDouble());
final SimpleFeature skewedFeature = create(skewvalue);
for (int i = 0; i < 10000; i++) {
statValue2.entryIngested(dataAdapter, skewedFeature);
// skewedFeature.setAttribute("pop", Long.valueOf(next + (long)
// (1000 * rand.nextDouble())));
}
final byte[] b = statValue2.toBinary();
statValue2.fromBinary(b);
assertEquals(1.0, statValue2.cdf(max + 1), 0.00001);
statValue.merge(statValue2);
assertEquals(1.0, statValue.cdf(max + 1), 0.00001);
assertEquals(0.33, statValue.cdf(start2), 0.01);
assertEquals(30003, sum(statValue.count(10)));
final double r = statValue.percentPopulationOverRange(skewvalue - 1, skewvalue + 1);
assertTrue((r > 0.3) && (r < 0.35));
}
@Test
public void testRapidIncreaseInRange() {
final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
final NumericHistogramValue statValue = stat.createEmpty();
final Random rand = new Random(7777);
double next = 1;
for (int i = 0; i < 100; i++) {
next = next + (rand.nextDouble() * 100.0);
statValue.entryIngested(dataAdapter, create(next));
}
for (long i = 0; i < 100; i++) {
final NumericHistogramValue statValue2 = stat.createEmpty();
for (int j = 0; j < 100; j++) {
statValue2.entryIngested(
dataAdapter,
create(4839000434.547854578 * rand.nextDouble() * rand.nextGaussian()));
}
byte[] b = statValue2.toBinary();
statValue2.fromBinary(b);
b = statValue.toBinary();
statValue.fromBinary(b);
statValue.merge(statValue2);
}
}
@Test
public void testNegative() {
final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
final NumericHistogramValue statValue = stat.createEmpty();
final Random rand = new Random(7777);
statValue.entryIngested(dataAdapter, create(-100.0));
statValue.entryIngested(dataAdapter, create(-101.0));
statValue.entryIngested(dataAdapter, create(-2.0));
double next = -1;
for (int i = 0; i < 10000; i++) {
next = next - (Math.round(rand.nextDouble()));
statValue.entryIngested(dataAdapter, create(next));
}
final NumericHistogramValue statValue2 = stat.createEmpty();
final double start2 = next;
double min = 0;
for (long i = 0; i < 10000; i++) {
final double val = next - (long) (1000 * rand.nextDouble());
statValue2.entryIngested(dataAdapter, create(val));
min = Math.min(val, min);
}
final double skewvalue = next - (1000 * rand.nextDouble());
final SimpleFeature skewedFeature = create(skewvalue);
for (int i = 0; i < 10000; i++) {
statValue2.entryIngested(dataAdapter, skewedFeature);
}
assertEquals(1.0, statValue2.cdf(0), 0.00001);
final byte[] b = statValue2.toBinary();
statValue2.fromBinary(b);
assertEquals(0.0, statValue2.cdf(min), 0.00001);
statValue.merge(statValue2);
assertEquals(1.0, statValue.cdf(0), 0.00001);
assertEquals(0.66, statValue.cdf(start2), 0.01);
assertEquals(30003, sum(statValue.count(10)));
final double r = statValue.percentPopulationOverRange(skewvalue - 1, skewvalue + 1);
assertTrue((r > 0.3) && (r < 0.35));
}
@Test
public void testMix() {
final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
final NumericHistogramValue statValue = stat.createEmpty();
final Random rand = new Random(7777);
double min = 0;
double max = 0;
double next = 0;
for (int i = 1; i < 300; i++) {
final NumericHistogramValue statValue2 = stat.createEmpty();
final double m = 10000.0 * Math.pow(10.0, ((i / 100) + 1));
if (i == 50) {
next = 0.0;
} else if (i == 100) {
next = Double.NaN;
} else if (i == 150) {
next = Double.MAX_VALUE;
} else if (i == 200) {
next = Integer.MAX_VALUE;
} else if (i == 225) {
next = Integer.MIN_VALUE;
} else {
next = (m * rand.nextDouble() * MathUtils.sign(rand.nextGaussian()));
}
statValue2.entryIngested(dataAdapter, create(next));
if (!Double.isNaN(next)) {
max = Math.max(next, max);
min = Math.min(next, min);
stat.fromBinary(stat.toBinary());
statValue2.fromBinary(statValue2.toBinary());
statValue.merge(statValue2);
}
}
assertEquals(0.5, statValue.cdf(0), 0.1);
assertEquals(0.0, statValue.cdf(min), 0.00001);
assertEquals(1.0, statValue.cdf(max), 0.00001);
assertEquals(298, sum(statValue.count(10)));
}
private long sum(final long[] list) {
long result = 0;
for (final long v : list) {
result += v;
}
return result;
}
}
| apache-2.0 |
mdogan/hazelcast | hazelcast/src/main/java/com/hazelcast/internal/eviction/EvictionPolicyEvaluatorProvider.java | 4600 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.eviction;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.internal.eviction.impl.comparator.LFUEvictionPolicyComparator;
import com.hazelcast.internal.eviction.impl.comparator.LRUEvictionPolicyComparator;
import com.hazelcast.internal.eviction.impl.comparator.RandomEvictionPolicyComparator;
import com.hazelcast.internal.eviction.impl.evaluator.EvictionPolicyEvaluator;
import com.hazelcast.internal.nio.ClassLoaderUtil;
import com.hazelcast.spi.eviction.EvictionPolicyComparator;
import static com.hazelcast.internal.util.ExceptionUtil.rethrow;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;
import static com.hazelcast.internal.util.StringUtil.isNullOrEmpty;
/**
 * Provider to get any kind ({@link EvictionPolicy}) of {@link EvictionPolicyEvaluator}.
 */
public final class EvictionPolicyEvaluatorProvider {

    /** Static utility class; never instantiated. */
    private EvictionPolicyEvaluatorProvider() {
    }

    /**
     * Gets the {@link EvictionPolicyEvaluator} implementation specified with {@code evictionPolicy}.
     *
     * @param evictionConfig {@link EvictionConfiguration} for
     *                       requested {@link EvictionPolicyEvaluator} implementation
     * @param classLoader    the {@link java.lang.ClassLoader} to be
     *                       used while creating custom {@link EvictionPolicyComparator}
     *                       if it is specified in the config
     * @return the requested {@link EvictionPolicyEvaluator} implementation
     */
    public static <A, E extends Evictable> EvictionPolicyEvaluator<A, E>
            getEvictionPolicyEvaluator(EvictionConfiguration evictionConfig, ClassLoader classLoader) {
        checkNotNull(evictionConfig);
        EvictionPolicyComparator comparator = getEvictionPolicyComparator(evictionConfig, classLoader);
        return new EvictionPolicyEvaluator<>(comparator);
    }

    /**
     * Resolves the comparator for the given eviction configuration.
     *
     * @param evictionConfig {@link EvictionConfiguration} for
     *                       requested {@link EvictionPolicyEvaluator} implementation
     * @param classLoader    the {@link java.lang.ClassLoader} to be
     *                       used while creating custom {@link EvictionPolicyComparator}
     *                       if it is specified in the config
     * @return {@link EvictionPolicyComparator} instance if one is defined, otherwise
     *         {@code null} to indicate there is no comparator defined
     */
    public static EvictionPolicyComparator getEvictionPolicyComparator(EvictionConfiguration evictionConfig,
                                                                       ClassLoader classLoader) {
        // 1. A configured comparator class name takes precedence over everything else.
        String comparatorClassName = evictionConfig.getComparatorClassName();
        if (!isNullOrEmpty(comparatorClassName)) {
            try {
                return ClassLoaderUtil.newInstance(classLoader, comparatorClassName);
            } catch (Exception e) {
                throw rethrow(e);
            }
        }

        // 2. Next in line: a comparator instance set directly on the configuration.
        EvictionPolicyComparator configuredComparator = evictionConfig.getComparator();
        if (configuredComparator != null) {
            return configuredComparator;
        }

        // 3. Fall back to one of the built-in comparator implementations.
        return pickOutOfTheBoxComparator(evictionConfig.getEvictionPolicy());
    }

    private static EvictionPolicyComparator pickOutOfTheBoxComparator(EvictionPolicy policy) {
        switch (policy) {
            case LFU:
                return LFUEvictionPolicyComparator.INSTANCE;
            case LRU:
                return LRUEvictionPolicyComparator.INSTANCE;
            case RANDOM:
                return RandomEvictionPolicyComparator.INSTANCE;
            case NONE:
                // NONE deliberately maps to "no comparator": eviction is disabled.
                return null;
            default:
                throw new IllegalArgumentException("Unsupported eviction policy: " + policy);
        }
    }
}
| apache-2.0 |
zogwei/zeus3 | web/src/main/java/com/taobao/zeus/web/platform/client/app/report/chart/OwnerJobTrend.java | 4940 | package com.taobao.zeus.web.platform.client.app.report.chart;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.moxieapps.gwt.highcharts.client.Chart;
import org.moxieapps.gwt.highcharts.client.Legend;
import org.moxieapps.gwt.highcharts.client.Series;
import org.moxieapps.gwt.highcharts.client.ToolTip;
import org.moxieapps.gwt.highcharts.client.ToolTipData;
import org.moxieapps.gwt.highcharts.client.ToolTipFormatter;
import org.moxieapps.gwt.highcharts.client.Legend.VerticalAlign;
import org.moxieapps.gwt.highcharts.client.plotOptions.ColumnPlotOptions;
import com.google.gwt.event.logical.shared.AttachEvent;
import com.google.gwt.event.logical.shared.AttachEvent.Handler;
import com.google.gwt.i18n.shared.DateTimeFormat;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
import com.sencha.gxt.widget.core.client.button.TextButton;
import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer;
import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer.HorizontalLayoutData;
import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer;
import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData;
import com.sencha.gxt.widget.core.client.form.DateField;
import com.sencha.gxt.widget.core.client.form.FieldLabel;
import com.sencha.gxt.widget.core.client.event.SelectEvent;
import com.sencha.gxt.widget.core.client.event.SelectEvent.SelectHandler;
import com.taobao.zeus.web.platform.client.util.RPCS;
import com.taobao.zeus.web.platform.client.util.async.AbstractAsyncCallback;
/**
 * Per-owner job statistics chart.
 *
 * <p>Mainly ranks the owners with the most failed jobs for a selected day, to encourage
 * them to improve.
 *
 * @author zhoufang
 */
public class OwnerJobTrend implements IsWidget{
  // Day to report on; defaults to "today" (set in the constructor).
  private DateField date;
  // Query ("查询") button: fetches per-owner failure counts for the selected day and
  // (re)draws the column chart on success.
  private TextButton submit=new TextButton("查询",new SelectHandler(){
    public void onSelect(SelectEvent event) {
      if(!date.validate()){
        return;
      }
      RPCS.getReportService().ownerFailJobs(date.getValue(), new AbstractAsyncCallback<List<Map<String,String>>>() {
        private DateTimeFormat format=DateTimeFormat.getFormat("yyyy年MM月dd日");
        public void onSuccess(final List<Map<String, String>> result) {
          // One x-axis category and one count per owner; prefer the display name
          // ("uname") and fall back to the raw user id ("uid") when absent.
          final String[] categories=new String[result.size()];
          Number[] numbers=new Number[result.size()];
          for(int i=0;i<result.size();i++){
            Map<String, String> map=result.get(i);
            categories[i]=map.get("uname")==null?map.get("uid"):map.get("uname");
            numbers[i]=Integer.valueOf(map.get("count"));
          }
          final Chart chart=new Chart();
          chart.setType(Series.Type.COLUMN);
          chart.setChartTitleText(format.format(date.getValue())+"责任人失败任务统计图");
          chart.setColumnPlotOptions(new ColumnPlotOptions()
              .setPointPadding(0.2).setBorderWidth(0))
              .setLegend(new Legend()
                  .setLayout(Legend.Layout.VERTICAL)
                  .setAlign(Legend.Align.LEFT)
                  .setVerticalAlign(VerticalAlign.TOP)
                  .setX(100)
                  .setY(70)
                  .setFloating(true)
                  .setBackgroundColor("#FFFFFF")
                  .setShadow(true))
              .setToolTip(new ToolTip()
                  .setFormatter(new ToolTipFormatter() {
                    public String format(ToolTipData toolTipData) {
                      // Recover the row index from the hovered x-axis label so the
                      // tooltip can list the owner's failure-history entries.
                      int index=0;
                      for(int i=0;i<categories.length;i++){
                        if(categories[i].equals(toolTipData.getXAsString())){
                          index=i;
                          break;
                        }
                      }
                      String value= toolTipData.getYAsLong()+"个任务<br/>";
                      int count=Integer.valueOf(result.get(index).get("count"));
                      if(count>0){
                        // Append each history line ("history0".."historyN-1").
                        for(int i=0;i<count;i++){
                          value+=result.get(index).get("history"+i)+"<br/>";
                        }
                      }
                      // NOTE(review): this increment has no effect — 'value' is returned
                      // immediately afterwards; consider removing it.
                      index++;
                      return value;
                    }
                  }));
          chart.getXAxis().setCategories(categories);
          chart.getYAxis().setAxisTitleText("失败任务数");
          chart.addSeries(chart.createSeries().setName("失败的任务")
              .setPoints(numbers));
          // Remove any previously rendered chart before adding the fresh one.
          for(int i=0;i<container.getWidgetCount();i++){
            if(container.getWidget(i) instanceof Chart){
              container.remove(container.getWidget(i));
              break;
            }
          }
          container.add(chart,new VerticalLayoutData(1, -1));
        }
      });
    }
  });
  // Vertical layout holding the date/submit form on top and the chart beneath it.
  private VerticalLayoutContainer container=new VerticalLayoutContainer();

  public OwnerJobTrend(){
    date=new DateField();
    date.setEditable(false);
    date.setAllowBlank(false);
    date.setValue(new Date());
    HorizontalLayoutContainer form=new HorizontalLayoutContainer();
    form.add(new FieldLabel(date,"日期"),new HorizontalLayoutData());
    form.add(submit,new HorizontalLayoutData());
    container.add(form,new VerticalLayoutData(1,30));
    // Trigger the query once as soon as the widget is attached to the DOM.
    container.addAttachHandler(new Handler() {
      public void onAttachOrDetach(AttachEvent event) {
        submit.fireEvent(new SelectEvent());
      }
    });
  }

  @Override
  public Widget asWidget() {
    return container;
  }
}
| apache-2.0 |
lburgazzoli/apache-activemq-artemis | examples/features/ha/scale-down/src/main/java/org/apache/activemq/artemis/jms/example/ScaleDownExample.java | 5527 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.example;
import org.apache.activemq.artemis.util.ServerUtil;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.naming.InitialContext;
import java.util.Hashtable;
/**
 * A simple example that demonstrates scale-down: two live servers are started, messages are sent
 * through both, server #1 is crashed, and all messages (including those that belonged to the
 * crashed server) are then consumed through server #0.
 */
public class ScaleDownExample {

   private static Process server0;

   private static Process server1;

   public static void main(final String[] args) throws Exception {
      final int numMessages = 30;
      Connection connection = null;
      Connection connection1 = null;
      InitialContext initialContext = null;
      InitialContext initialContext1 = null;
      try {
         server0 = ServerUtil.startServer(args[0], ScaleDownExample.class.getSimpleName() + "0", 0, 5000);
         server1 = ServerUtil.startServer(args[1], ScaleDownExample.class.getSimpleName() + "1", 1, 5000);

         // Step 1. Get an initial context for looking up JNDI for both servers
         Hashtable<String, Object> properties = new Hashtable<>();
         properties.put("java.naming.factory.initial", "org.apache.activemq.artemis.jndi.ActiveMQInitialContextFactory");
         properties.put("connectionFactory.ConnectionFactory", "tcp://localhost:61616?ha=true&retryInterval=1000&retryIntervalMultiplier=1.0&reconnectAttempts=-1");
         properties.put("queue.queue/exampleQueue", "exampleQueue");
         initialContext = new InitialContext(properties);

         properties = new Hashtable<>();
         properties.put("java.naming.factory.initial", "org.apache.activemq.artemis.jndi.ActiveMQInitialContextFactory");
         properties.put("connectionFactory.ConnectionFactory", "tcp://localhost:61617?ha=true&retryInterval=1000&retryIntervalMultiplier=1.0&reconnectAttempts=-1");
         initialContext1 = new InitialContext(properties);

         // Step 2. Look up the JMS resources from JNDI
         Queue queue = (Queue) initialContext.lookup("queue/exampleQueue");
         ConnectionFactory connectionFactory = (ConnectionFactory) initialContext.lookup("ConnectionFactory");
         ConnectionFactory connectionFactory1 = (ConnectionFactory) initialContext1.lookup("ConnectionFactory");

         // Step 3. Create the JMS Connections
         connection = connectionFactory.createConnection();
         connection1 = connectionFactory1.createConnection();

         // Step 4. Create *non-transacted* JMS Sessions with automatic acknowledgement
         Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
         Session session1 = connection1.createSession(false, Session.AUTO_ACKNOWLEDGE);

         // Step 5. Create the JMS MessageProducers
         MessageProducer producer = session.createProducer(queue);
         MessageProducer producer1 = session1.createProducer(queue);

         // Step 6. Send some messages to both servers
         for (int i = 0; i < numMessages; i++) {
            TextMessage message = session.createTextMessage("This is text message " + i);
            producer.send(message);
            System.out.println("Sent message: " + message.getText());
            message = session1.createTextMessage("This is another text message " + i);
            producer1.send(message);
            System.out.println("Sent message: " + message.getText());
         }

         // Step 7. Crash server #1
         ServerUtil.killServer(server1);

         // Step 8. start the connection ready to receive messages
         connection.start();

         // Step 9. create a consumer
         MessageConsumer consumer = session.createConsumer(queue);

         // Step 10. Receive and acknowledge all of the sent messages, the backup server that is
         // colocated with server 1 will have become live and is now handling messages for server 0.
         TextMessage message0 = null;
         for (int i = 0; i < numMessages * 2; i++) {
            message0 = (TextMessage) consumer.receive(5000);
            // receive(long) returns null once the timeout expires; fail with a clear error
            // instead of the NullPointerException the original code would have thrown.
            if (message0 == null) {
               throw new IllegalStateException("Expected to receive message " + i + " but timed out after 5 seconds");
            }
            System.out.println("Got message: " + message0.getText());
         }
         message0.acknowledge();
      } finally {
         // Step 11. Be sure to close our resources! Each cleanup action is guarded so that a
         // failure in one of them cannot prevent the remaining resources (and the forked server
         // processes) from being cleaned up.
         closeQuietly(connection);
         closeQuietly(initialContext);
         closeQuietly(connection1);
         closeQuietly(initialContext1);
         ServerUtil.killServer(server0);
         ServerUtil.killServer(server1);
      }
   }

   /** Closes a JMS connection, logging (not propagating) failures; {@code null} is a no-op. */
   private static void closeQuietly(final Connection connection) {
      if (connection != null) {
         try {
            connection.close();
         } catch (Exception e) {
            System.err.println("Failed to close connection: " + e);
         }
      }
   }

   /** Closes a JNDI context, logging (not propagating) failures; {@code null} is a no-op. */
   private static void closeQuietly(final InitialContext context) {
      if (context != null) {
         try {
            context.close();
         } catch (Exception e) {
            System.err.println("Failed to close initial context: " + e);
         }
      }
   }
}
| apache-2.0 |
ServiceComb/java-chassis | transports/transport-rest/transport-rest-vertx/src/main/java/org/apache/servicecomb/transport/rest/vertx/VertxRestTransport.java | 3856 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.transport.rest.vertx;
import java.util.List;
import org.apache.servicecomb.core.Const;
import org.apache.servicecomb.core.Invocation;
import org.apache.servicecomb.core.transport.AbstractTransport;
import org.apache.servicecomb.foundation.common.net.NetUtils;
import org.apache.servicecomb.foundation.common.net.URIEndpointObject;
import org.apache.servicecomb.foundation.common.utils.BeanUtils;
import org.apache.servicecomb.foundation.common.utils.SPIServiceUtils;
import org.apache.servicecomb.foundation.vertx.SimpleJsonObject;
import org.apache.servicecomb.foundation.vertx.VertxUtils;
import org.apache.servicecomb.swagger.invocation.AsyncResponse;
import org.apache.servicecomb.transport.rest.client.RestTransportClient;
import org.apache.servicecomb.transport.rest.client.RestTransportClientManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.vertx.core.DeploymentOptions;
import io.vertx.core.VertxOptions;
public class VertxRestTransport extends AbstractTransport {
  private static final Logger LOGGER = LoggerFactory.getLogger(VertxRestTransport.class);

  private RestTransportClient restClient;

  @Override
  public String getName() {
    return Const.RESTFUL;
  }

  @Override
  public int getOrder() {
    return -1000;
  }

  @Override
  public boolean canInit() {
    setListenAddressWithoutSchema(TransportConfig.getAddress());

    URIEndpointObject endpointAddress = (URIEndpointObject) getEndpoint().getAddress();
    if (endpointAddress == null) {
      // No listen address configured, so there is no port that could conflict.
      return true;
    }

    if (NetUtils.canTcpListen(endpointAddress.getSocketAddress().getAddress(), endpointAddress.getPort())) {
      return true;
    }

    LOGGER.warn(
        "Can not start VertxRestTransport, the port:{} may have been occupied. You can ignore this message if you are using a web container like tomcat.",
        endpointAddress.getPort());
    return false;
  }

  @Override
  public boolean init() throws Exception {
    restClient = RestTransportClientManager.INSTANCE.getRestClient();

    // Configuration handed to every deployed server verticle instance.
    SimpleJsonObject verticleConfig = new SimpleJsonObject();
    verticleConfig.put(ENDPOINT_KEY, getEndpoint());
    verticleConfig.put(RestTransportClient.class.getName(), restClient);

    // Deploy the transport server.
    DeploymentOptions deploymentOptions = new DeploymentOptions()
        .setInstances(TransportConfig.getThreadCount())
        .setConfig(verticleConfig)
        .setWorkerPoolName("pool-worker-transport-rest")
        .setWorkerPoolSize(VertxOptions.DEFAULT_WORKER_POOL_SIZE);

    prepareBlockResource();
    return VertxUtils.blockDeploy(transportVertx, TransportConfig.getRestServerVerticle(), deploymentOptions);
  }

  private void prepareBlockResource() {
    // blockDeploy loads resources on the event loop, but bean auto-wiring can only be done on
    // the main thread, so the dispatchers are loaded and registered up front here.
    List<VertxHttpDispatcher> httpDispatchers = SPIServiceUtils.getOrLoadSortedService(VertxHttpDispatcher.class);
    BeanUtils.addBeans(VertxHttpDispatcher.class, httpDispatchers);
  }

  @Override
  public void send(Invocation invocation, AsyncResponse asyncResp) throws Exception {
    restClient.send(invocation, asyncResp);
  }
}
| apache-2.0 |
Ztiany/CodeRepository | Android/AndroidAspectj/AndroidAspectj02/app/src/main/java/com/ztiany/androidaspectj02/AppAspectTools.java | 601 | package com.ztiany.androidaspectj02;
import android.util.Log;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
@Aspect
public class AppAspectTools {

    private static final String TAG = AppAspectTools.class.getSimpleName();

    /**
     * Runs before every {@code on*} lifecycle callback of any {@link android.app.Activity}
     * subclass and logs the signature of the intercepted method.
     */
    @Before("execution(* android.app.Activity.on*(..))")
    public void onActivityMethodBefore(JoinPoint joinPoint) throws Throwable {
        Signature methodSignature = joinPoint.getSignature();
        Log.d(TAG, "----------------" + methodSignature.toString());
    }
}
| apache-2.0 |
Forexware/quickfixj | src/main/java/quickfix/field/LegCouponRate.java | 1152 | /*******************************************************************************
* Copyright (c) quickfixengine.org All rights reserved.
*
* This file is part of the QuickFIX FIX Engine
*
* This file may be distributed under the terms of the quickfixengine.org
* license as defined by quickfixengine.org and appearing in the file
* LICENSE included in the packaging of this file.
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING
* THE WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* See http://www.quickfixengine.org/LICENSE for licensing information.
*
* Contact ask@quickfixengine.org if any conditions of this licensing
* are not clear to you.
******************************************************************************/
package quickfix.field;
import quickfix.DoubleField;
/**
 * FIX field 615 ({@code LegCouponRate}): the coupon rate of an instrument leg, carried as a
 * double value.
 */
public class LegCouponRate extends DoubleField
{
   static final long serialVersionUID = 20050617;

   /** The FIX tag number of this field (615). */
   public static final int FIELD = 615;

   /** Creates the field with no value set. */
   public LegCouponRate()
   {
      // Use the FIELD constant instead of repeating the literal tag number.
      super(FIELD);
   }

   /** Creates the field initialized to {@code data}. */
   public LegCouponRate(double data)
   {
      super(FIELD, data);
   }
}
| apache-2.0 |
lburgazzoli/spring-boot | spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/jmx/JmxEndpointsSupplier.java | 988 | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.endpoint.jmx;
import org.springframework.boot.actuate.endpoint.EndpointsSupplier;
/**
 * {@link EndpointsSupplier} for {@link ExposableJmxEndpoint JMX endpoints}.
 *
 * <p>Implementations supply the collection of endpoints that should be exposed over JMX.
 *
 * @author Phillip Webb
 * @since 2.0.0
 */
@FunctionalInterface
public interface JmxEndpointsSupplier extends EndpointsSupplier<ExposableJmxEndpoint> {

}
| apache-2.0 |
damienmg/bazel | src/main/java/com/google/devtools/build/lib/rules/android/ResourceFilterFactory.java | 34234 | // Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.android.ide.common.resources.configuration.DensityQualifier;
import com.android.ide.common.resources.configuration.FolderConfiguration;
import com.android.ide.common.resources.configuration.VersionQualifier;
import com.android.resources.Density;
import com.android.resources.ResourceFolderType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.PatchTransition;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.RuleErrorConsumer;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.OptionsParsingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
/**
* Filters resources based on their qualifiers.
*
* <p>This includes filtering resources based on both the "resource_configuration_filters" and
* "densities" attributes.
*
* <p>Whenever a new field is added to this class, be sure to add it to the {@link #equals(Object)}
* and {@link #hashCode()} methods. Failure to do so isn't just bad practice; it could seriously
* interfere with Bazel's caching performance.
*/
public class ResourceFilterFactory {
public static final String RESOURCE_CONFIGURATION_FILTERS_NAME = "resource_configuration_filters";
public static final String DENSITIES_NAME = "densities";
/**
* Locales used for pseudolocation.
*
* <p>These are special resources that can be used to test how apps handles special cases (such as
* particularly long text, accents, or left-to-right text). These resources are not provided like
* other resources; instead, when the appropriate filters are passed in, aapt generates them based
* on the default resources.
*
* <p>When these locales are specified in the configuration filters, even if we are filtering in
* analysis, we need to pass *all* configuration filters to aapt - the pseudolocalization filters
* themselves to trigger pseudolocalization, and the other filters to prevent aapt from filtering
* matching resources out.
*/
@VisibleForTesting
static enum FilterBehavior {
  /**
   * Resources will be filtered in execution. This class will just pass the filtering parameters
   * to the appropriate resource processing actions.
   */
  FILTER_IN_EXECUTION,

  /**
   * Resources will be filtered in analysis. In android_binary targets, all resources will be
   * filtered by this class, and only resources that are accepted will be passed to resource
   * processing actions.
   */
  FILTER_IN_ANALYSIS,

  /**
   * Resources will be filtered in each android target in analysis. Filter settings will be
   * extracted from android_binary targets and passed to all their dependencies using dynamic
   * configuration. Only resources that are accepted by filtering will be passed to resource
   * processing actions or to reverse dependencies.
   */
  FILTER_IN_ANALYSIS_WITH_DYNAMIC_CONFIGURATION;

  /** Parses command-line values into {@link FilterBehavior} constants for the options parser. */
  private static final class Converter extends EnumConverter<FilterBehavior> {
    Converter() {
      super(FilterBehavior.class, "resource filter behavior");
    }
  }
}
static final FilterBehavior DEFAULT_BEHAVIOR = FilterBehavior.FILTER_IN_EXECUTION;
/**
* The value of the {@link #RESOURCE_CONFIGURATION_FILTERS_NAME} attribute, as a list of qualifier
* strings.
*/
private final ImmutableList<String> configFilters;
/** The value of the {@link #DENSITIES_NAME} attribute, as a list of qualifier strings. */
private final ImmutableList<String> densities;
/** A builder for a set of strings representing resources that were filtered using this class. */
private final ImmutableSet.Builder<String> filteredResources = ImmutableSet.builder();
private final FilterBehavior filterBehavior;
/**
 * Constructor.
 *
 * <p>The filter lists are stored as-is; callers are expected to pass already-parsed qualifier
 * strings (one qualifier per list element, no commas).
 *
 * @param configFilters the resource configuration filters, as a list of strings.
 * @param densities the density filters, as a list of strings.
 * @param filterBehavior the behavior of this filter.
 */
@VisibleForTesting
ResourceFilterFactory(
    ImmutableList<String> configFilters,
    ImmutableList<String> densities,
    FilterBehavior filterBehavior) {
  this.configFilters = configFilters;
  this.densities = densities;
  this.filterBehavior = filterBehavior;
}
/** Returns true if the given list-valued attribute was explicitly set to at least one value. */
private static boolean hasAttr(AttributeMap attrs, String attrName) {
  if (attrs.isAttributeValueExplicitlySpecified(attrName)) {
    List<String> values = attrs.get(attrName, Type.STRING_LIST);
    return values != null && !values.isEmpty();
  }
  // Unset (or only default-valued) attributes never count as filters.
  return false;
}
  /** Returns true if the given rule explicitly declares any resource or density filters. */
  static boolean hasFilters(RuleContext ruleContext) {
    return hasFilters(ruleContext.attributes());
  }
  /** Returns true if the attributes declare either configuration filters or densities. */
  static boolean hasFilters(AttributeMap attrs) {
    return hasAttr(attrs, RESOURCE_CONFIGURATION_FILTERS_NAME) || hasAttr(attrs, DENSITIES_NAME);
  }
/**
* Extracts filters from an AttributeMap, as a list of strings.
*
* <p>In BUILD files, string lists can be represented as a list of strings, a single
* comma-separated string, or a combination of both. This method outputs a single list of
* individual string values, which can then be passed directly to resource processing actions.
*
* @return the values of this attribute contained in the {@link AttributeMap}, as a list.
*/
private static ImmutableList<String> extractFilters(AttributeMap attrs, String attrName) {
if (!hasAttr(attrs, attrName)) {
return ImmutableList.<String>of();
}
/*
* To deal with edge cases involving placement of whitespace and multiple strings inside a
* single item of the given list, manually build the list here rather than call something like
* {@link RuleContext#getTokenizedStringListAttr}.
*
* Filter out all empty values, even those that were explicitly provided. Paying attention to
* empty values is never helpful: even if code handled them correctly (and not all of it does)
* empty filter values result in all resources matching the empty filter, meaning that filtering
* does nothing (even if non-empty filters were also provided).
*/
List<String> rawValues = attrs.get(attrName, Type.STRING_LIST);
// Use an ImmutableSet to remove duplicate values
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
for (String rawValue : rawValues) {
if (rawValue.contains(",")) {
for (String token : rawValue.split(",")) {
if (!token.trim().isEmpty()) {
builder.add(token.trim());
}
}
} else if (!rawValue.isEmpty()) {
builder.add(rawValue);
}
}
// Create a sorted copy so that ResourceFilterFactory objects with the same filters are treated
// the same regardless of the ordering of those filters.
return ImmutableList.sortedCopyOf(builder.build());
}
  /**
   * Builds a factory from the rule's android configuration fragment, overridden by any filters
   * declared directly on the rule's attributes.
   */
  static ResourceFilterFactory fromRuleContext(RuleContext ruleContext) {
    Preconditions.checkNotNull(ruleContext);
    if (!ruleContext.isLegalFragment(AndroidConfiguration.class)) {
      // Without the android fragment there is no configuration-level factory to inherit from.
      return empty(DEFAULT_BEHAVIOR);
    }
    return forBaseAndAttrs(
        ruleContext.getFragment(AndroidConfiguration.class).getResourceFilterFactory(),
        ruleContext.attributes());
  }
  /** Combines a base (configuration-level) factory with the filters from a rule's attributes. */
  @VisibleForTesting
  static ResourceFilterFactory forBaseAndAttrs(ResourceFilterFactory base, AttributeMap attrs) {
    return base.withAttrsFrom(attrs);
  }
/**
* Creates a new {@link ResourceFilterFactory} based on this object's properties, overridden by
* any filters specified in the passed {@link AttributeMap}.
*
* <p>A new object will always be returned, as returning the same object across multiple rules (as
* would be done with {@link FilterBehavior#FILTER_IN_ANALYSIS_WITH_DYNAMIC_CONFIGURATION}) causes
* problems.
*/
ResourceFilterFactory withAttrsFrom(AttributeMap attrs) {
if (!hasFilters(attrs)) {
return new ResourceFilterFactory(configFilters, densities, filterBehavior);
}
return new ResourceFilterFactory(
extractFilters(attrs, RESOURCE_CONFIGURATION_FILTERS_NAME),
extractFilters(attrs, DENSITIES_NAME),
filterBehavior);
}
ResourceFilterFactory withoutDynamicConfiguration() {
if (!usesDynamicConfiguration()) {
return this;
}
return empty(FilterBehavior.FILTER_IN_ANALYSIS);
}
  /**
   * Parses the configured filter strings into {@link FolderConfiguration} objects, reporting an
   * attribute error (via {@code addIfNotNull}) for any string that does not parse.
   */
  private ImmutableList<FolderConfiguration> getConfigurationFilters(
      RuleErrorConsumer ruleErrorConsumer) {
    ImmutableList.Builder<FolderConfiguration> filterBuilder = ImmutableList.builder();
    for (String filter : configFilters) {
      addIfNotNull(
          getFolderConfiguration(ruleErrorConsumer, filter),
          filter,
          filterBuilder,
          ruleErrorConsumer,
          RESOURCE_CONFIGURATION_FILTERS_NAME);
    }
    return filterBuilder.build();
  }
  /**
   * Parses a single filter string, first rewriting deprecated qualifier spellings (warning the
   * user). May return null when the string is not a valid qualifier; the caller reports that.
   */
  private FolderConfiguration getFolderConfiguration(
      RuleErrorConsumer ruleErrorConsumer, String filter) {
    // Clean up deprecated representations of resource qualifiers that FolderConfiguration can't
    // handle.
    for (DeprecatedQualifierHandler handler : deprecatedQualifierHandlers) {
      filter = handler.fixAttributeIfNeeded(ruleErrorConsumer, filter);
    }
    return FolderConfiguration.getConfigForQualifierString(filter);
  }
  /**
   * Rewrites one deprecated spelling of a resource qualifier into its supported form, warning at
   * most once per context (filter attribute vs. resource folder name) to avoid spamming users.
   */
  private static final class DeprecatedQualifierHandler {
    private final Pattern pattern;
    private final String replacement;
    private final String description;
    // Warn-once flags; each of the two fix methods warns at most once per target.
    private boolean warnedForAttribute = false;
    private boolean warnedForResources = false;
    private DeprecatedQualifierHandler(String pattern, String replacement, String description) {
      this.pattern = Pattern.compile(pattern);
      this.replacement = replacement;
      this.description = description;
    }
    /** Fixes a qualifier from the filter attribute; returns it unchanged if it does not match. */
    private String fixAttributeIfNeeded(RuleErrorConsumer ruleErrorConsumer, String qualifier) {
      Matcher matcher = pattern.matcher(qualifier);
      if (!matcher.matches()) {
        return qualifier;
      }
      String fixed = matcher.replaceFirst(replacement);
      // We don't want to spam users. Only warn about this kind of issue once per target.
      // TODO(asteinb): Will this cause problems when settings are propagated via dynamic
      // configuration?
      if (!warnedForAttribute) {
        ruleErrorConsumer.attributeWarning(
            RESOURCE_CONFIGURATION_FILTERS_NAME,
            String.format(
                "When referring to %s, use of qualifier '%s' is deprecated. Use '%s' instead.",
                description, matcher.group(), fixed));
        warnedForAttribute = true;
      }
      return fixed;
    }
    /** Fixes a qualifier from a resource folder name; returns it unchanged if it does not match. */
    private String fixResourceIfNeeded(
        RuleErrorConsumer ruleErrorConsumer, String qualifier, String resourceFolder) {
      Matcher matcher = pattern.matcher(qualifier);
      if (!matcher.matches()) {
        return qualifier;
      }
      String fixed = matcher.replaceFirst(replacement);
      // We don't want to spam users. Only warn about this kind of issue once per target.
      // TODO(asteinb): Will this cause problems when settings are propagated via dynamic
      // configuration?
      if (!warnedForResources) {
        warnedForResources = true;
        ruleErrorConsumer.ruleWarning(
            String.format(
                "For resource folder %s, when referring to %s, use of qualifier '%s' is deprecated."
                    + " Use '%s' instead.",
                resourceFolder, description, matcher.group(), fixed));
      }
      return fixed;
    }
  }
  /** List of deprecated qualifiers that should currently be handled with a warning. */
  private final List<DeprecatedQualifierHandler> deprecatedQualifierHandlers =
      ImmutableList.of(
          /*
           * Aapt used to expect locale configurations of form 'en_US'. It now also supports the
           * correct 'en-rUS' format. For backwards comparability, use a regex to convert filters
           * with locales in the old format to filters with locales of the correct format.
           *
           * The correct format for locales is defined at
           * https://developer.android.com/guide/topics/resources/providing-resources.html#LocaleQualifier
           *
           * TODO(bazel-team): Migrate consumers away from the old Aapt locale format, then remove
           * this replacement.
           *
           * The regex is a bit complicated to avoid modifying potential new qualifiers that contain
           * underscores. Specifically, it searches for the entire beginning of the resource
           * qualifier, including (optionally) MCC and MNC, and then the locale itself.
           */
          new DeprecatedQualifierHandler(
              "^((mcc[0-9]{3}-(mnc[0-9]{3}-)?)?[a-z]{2})_([A-Z]{2}).*",
              "$1-r$4", "locale qualifiers with regions"),
          new DeprecatedQualifierHandler(
              "sr[_\\-]r?Latn.*", "b+sr+Latn", "Serbian in Latin characters"),
          new DeprecatedQualifierHandler(
              "es[_\\-]419.*", "b+es+419", "Spanish for Latin America and the Caribbean"));
  /**
   * Parses the configured density strings into {@link Density} values, reporting an attribute
   * error (via {@code addIfNotNull}) for any string that is not a recognized density.
   */
  private ImmutableList<Density> getDensities(RuleErrorConsumer ruleErrorConsumer) {
    ImmutableList.Builder<Density> densityBuilder = ImmutableList.builder();
    for (String density : densities) {
      addIfNotNull(
          Density.getEnum(density), density, densityBuilder, ruleErrorConsumer, DENSITIES_NAME);
    }
    return densityBuilder.build();
  }
/** Reports an attribute error if the given item is null, and otherwise adds it to the builder. */
private static <T> void addIfNotNull(
T item,
String itemString,
ImmutableList.Builder<T> builder,
RuleErrorConsumer ruleErrorConsumer,
String attrName) {
if (item == null) {
ruleErrorConsumer.attributeError(
attrName, "String '" + itemString + "' is not a valid value for " + attrName);
} else {
builder.add(item);
}
}
  /** Returns a filterless factory that keeps the behavior configured for the given rule. */
  static ResourceFilterFactory empty(RuleContext ruleContext) {
    return empty(fromRuleContext(ruleContext).filterBehavior);
  }
  /** Returns a factory with no configuration or density filters and the given behavior. */
  @VisibleForTesting
  static ResourceFilterFactory empty(FilterBehavior filterBehavior) {
    return new ResourceFilterFactory(
        ImmutableList.<String>of(), ImmutableList.<String>of(), filterBehavior);
  }
/**
* Filters a NestedSet of resource containers that contain dependencies of the current rule. This
* may be a no-op if this filter is empty or if resource prefiltering is disabled.
*/
NestedSet<ResourceContainer> filterDependencyContainers(
RuleErrorConsumer ruleErrorConsumer, NestedSet<ResourceContainer> resources) {
if (!shouldFilterDependencies()) {
return resources;
}
NestedSetBuilder<ResourceContainer> builder = new NestedSetBuilder<>(resources.getOrder());
for (ResourceContainer resource : resources) {
builder.add(resource.filter(ruleErrorConsumer, this));
}
return builder.build();
}
  /**
   * Filters a NestedSet of artifact dependencies of the current rule. Returns a filtered copy of
   * the input, or the input itself if no filtering needs to be done.
   */
  NestedSet<Artifact> filterDependencies(
      RuleErrorConsumer ruleErrorConsumer, NestedSet<Artifact> resources) {
    if (!shouldFilterDependencies()) {
      return resources;
    }
    // Flatten, filter, and re-wrap; the original traversal order is preserved.
    return NestedSetBuilder.wrap(
        resources.getOrder(), filter(ruleErrorConsumer, ImmutableList.copyOf(resources)));
  }
private boolean shouldFilterDependencies() {
if (!isPrefiltering() || usesDynamicConfiguration()) {
/*
* If the filter is empty, resource prefiltering is disabled, or the resources of dependencies
* have already been filtered thanks to dynamic configuration, just return the original,
* rather than make a copy.
*
* Resources should only be prefiltered in top-level android targets (such as android_binary).
* The output of resource processing, which includes the input NestedSet<ResourceContainer>
* returned by this method, is exposed to other actions via the AndroidResourcesProvider. If
* this method did a no-op copy and collapse in those cases, rather than just return the
* original NestedSet, we would lose all of the advantages around memory and time that
* NestedSets provide: each android_library target would have to copy the resources provided
* by its dependencies into a new NestedSet rather than just create a NestedSet pointing at
* its dependencies's NestedSets.
*/
return false;
}
return true;
}
  /**
   * Filters the given artifacts, keeping only those that match the configuration filters and, for
   * density-filterable artifacts, only the best match for each requested density.
   *
   * <p>Output order follows input order. In {@link FilterBehavior#FILTER_IN_ANALYSIS}, artifacts
   * removed here are also recorded in {@link #filteredResources} so that execution-time resource
   * processing can ignore references to them.
   */
  ImmutableList<Artifact> filter(
      RuleErrorConsumer ruleErrorConsumer, ImmutableList<Artifact> artifacts) {
    if (!isPrefiltering()) {
      return artifacts;
    }
    // One tracker per requested density; each keeps the single best artifact for every
    // (path, non-density qualifiers, filename) combination.
    List<BestArtifactsForDensity> bestArtifactsForAllDensities = new ArrayList<>();
    for (Density density : getDensities(ruleErrorConsumer)) {
      bestArtifactsForAllDensities.add(new BestArtifactsForDensity(ruleErrorConsumer, density));
    }
    ImmutableList<FolderConfiguration> folderConfigs = getConfigurationFilters(ruleErrorConsumer);
    Set<Artifact> keptArtifactsNotFilteredByDensity = new HashSet<>();
    for (Artifact artifact : artifacts) {
      FolderConfiguration config = getConfigForArtifact(ruleErrorConsumer, artifact);
      // aapt explicitly ignores the version qualifier; duplicate this behavior here.
      config.setVersionQualifier(VersionQualifier.getQualifier(""));
      if (!matchesConfigurationFilters(folderConfigs, config)) {
        continue;
      }
      if (!shouldFilterByDensity(artifact)) {
        // Matched the config filters and is exempt from density filtering: always kept.
        keptArtifactsNotFilteredByDensity.add(artifact);
        continue;
      }
      for (BestArtifactsForDensity bestArtifactsForDensity : bestArtifactsForAllDensities) {
        bestArtifactsForDensity.maybeAddArtifact(artifact);
      }
    }
    // Build the output by iterating through the input so that contents of both have the same order.
    ImmutableList.Builder<Artifact> builder = ImmutableList.builder();
    for (Artifact artifact : artifacts) {
      boolean kept = false;
      if (keptArtifactsNotFilteredByDensity.contains(artifact)) {
        builder.add(artifact);
        kept = true;
      } else {
        for (BestArtifactsForDensity bestArtifactsForDensity : bestArtifactsForAllDensities) {
          if (bestArtifactsForDensity.contains(artifact)) {
            builder.add(artifact);
            kept = true;
            break;
          }
        }
      }
      // In FilterBehavior.FILTER_IN_ANALYSIS, this class needs to record any resources that were
      // filtered out so that resource processing ignores references to them in symbols files of
      // dependencies.
      if (!kept && !usesDynamicConfiguration()) {
        String parentDir = artifact.getPath().getParentDirectory().getBaseName();
        filteredResources.add(parentDir + "/" + artifact.getFilename());
      }
    }
    // TODO(asteinb): We should only build a new list if some artifacts were filtered out. If
    // nothing was filtered, we can be more efficient by returning the original list instead.
    return builder.build();
  }
  /**
   * Tracks the best artifact for a desired density for each combination of filename and non-density
   * qualifiers.
   */
  private class BestArtifactsForDensity {
    private final RuleErrorConsumer ruleErrorConsumer;
    private final Density desiredDensity;
    // Key: resourceDir/non-density-qualifiers/filename; value: best-matching artifact so far.
    private final Map<String, Artifact> nameAndConfigurationToBestArtifact = new HashMap<>();
    public BestArtifactsForDensity(RuleErrorConsumer ruleErrorConsumer, Density density) {
      this.ruleErrorConsumer = ruleErrorConsumer;
      desiredDensity = density;
    }
    /**
     * @param artifact if this artifact is a better match for this object's desired density than any
     *     other artifacts with the same name and non-density configuration, adds it to this object.
     */
    public void maybeAddArtifact(Artifact artifact) {
      FolderConfiguration config = getConfigForArtifact(ruleErrorConsumer, artifact);
      // We want to find a single best artifact for each combination of non-density qualifiers and
      // filename. Combine those two values to create a single unique key.
      // We also need to include the path to the resource, otherwise resource conflicts (multiple
      // resources with the same name but different locations) might accidentally get resolved here
      // (possibly incorrectly). Resource conflicts should be resolved during merging in execution
      // instead.
      config.setDensityQualifier(null);
      Path qualifierDir = artifact.getPath().getParentDirectory();
      String resourceDir = qualifierDir.getParentDirectory().toString();
      String nameAndConfiguration =
          Joiner.on('/').join(resourceDir, config.getUniqueKey(), artifact.getFilename());
      Artifact currentBest = nameAndConfigurationToBestArtifact.get(nameAndConfiguration);
      // Lower affinity scores are better matches.
      if (currentBest == null || computeAffinity(artifact) < computeAffinity(currentBest)) {
        nameAndConfigurationToBestArtifact.put(nameAndConfiguration, artifact);
      }
    }
    public boolean contains(Artifact artifact) {
      return nameAndConfigurationToBestArtifact.containsValue(artifact);
    }
    /**
     * Compute how well this artifact matches the {@link #desiredDensity}.
     *
     * <p>Various different codebases have different and sometimes contradictory methods for which
     * resources are better in different situations. All of them agree that an exact match is best,
     * but:
     *
     * <p>The android common code (see {@link FolderConfiguration#getDensityQualifier()} treats
     * larger densities as better than non-matching smaller densities.
     *
     * <p>aapt code to filter assets by density prefers the smallest density that is larger than or
     * the same as the desired density, or, lacking that, the largest available density.
     *
     * <p>Other implementations of density filtering include Gradle (to filter which resources
     * actually get built into apps) and Android code itself (for the device to decide which
     * resource to use).
     *
     * <p>This particular implementation is based on {@link
     * com.google.devtools.build.android.DensitySpecificResourceFilter}, which filters resources by
     * density during execution. It prefers to use exact matches when possible, then tries to find
     * resources with exactly double the desired density for particularly easy downsizing, and
     * otherwise prefers resources that are closest to the desired density, relative to the smaller
     * of the available and desired densities.
     *
     * <p>Once we always filter resources during analysis, we should be able to completely remove
     * that code.
     *
     * @return a score for how well the artifact matches. Lower scores indicate better matches.
     */
    private double computeAffinity(Artifact artifact) {
      DensityQualifier resourceQualifier =
          getConfigForArtifact(ruleErrorConsumer, artifact).getDensityQualifier();
      if (resourceQualifier == null) {
        // No density qualifier at all: worst possible match.
        return Double.MAX_VALUE;
      }
      int resourceDensity = resourceQualifier.getValue().getDpiValue();
      int density = desiredDensity.getDpiValue();
      if (resourceDensity == density) {
        // Exact match is the best.
        return -2;
      }
      if (resourceDensity == 2 * density) {
        // It's very efficient to downsample an image that's exactly twice the screen
        // density, so we prefer that over other non-perfect matches.
        return -1;
      }
      // Find the ratio between the larger and smaller of the available and desired densities.
      double densityRatio =
          Math.max(density, resourceDensity) / (double) Math.min(density, resourceDensity);
      if (density < resourceDensity) {
        return densityRatio;
      }
      // Apply a slight bias against resources that are smaller than those of the desired density.
      // This becomes relevant only when we are considering multiple resources with the same ratio.
      return densityRatio + 0.01;
    }
  }
  /**
   * Parses the artifact's containing folder name into a {@link FolderConfiguration}, first
   * rewriting deprecated qualifier spellings. Reports a rule error and falls back to the empty
   * configuration if the folder's qualifiers are invalid.
   */
  private FolderConfiguration getConfigForArtifact(
      RuleErrorConsumer ruleErrorConsumer, Artifact artifact) {
    String containingFolder = getContainingFolder(artifact);
    if (containingFolder.contains("-")) {
      // Split into folder type prefix (e.g. "drawable") and the qualifier portion.
      String[] parts = containingFolder.split("-", 2);
      String prefix = parts[0];
      String qualifiers = parts[1];
      for (DeprecatedQualifierHandler handler : deprecatedQualifierHandlers) {
        qualifiers = handler.fixResourceIfNeeded(ruleErrorConsumer, qualifiers, containingFolder);
      }
      containingFolder = String.format("%s-%s", prefix, qualifiers);
    }
    FolderConfiguration config = FolderConfiguration.getConfigForFolder(containingFolder);
    if (config == null) {
      ruleErrorConsumer.ruleError(
          "Resource folder '" + containingFolder + "' has invalid resource qualifiers");
      // Fall back to the empty configuration so filtering can proceed after the error.
      return FolderConfiguration.getConfigForQualifierString("");
    }
    return config;
  }
/**
* Checks if we should filter this artifact by its density.
*
* <p>We filter by density if there are densities to filter by, the artifact is in a Drawable
* directory, and the artifact is not an XML file.
*
* <p>Similarly-named XML files may contain different resource definitions, so it's impossible to
* ensure that all required resources will be provided without that XML file unless we parse it.
*/
private boolean shouldFilterByDensity(Artifact artifact) {
return !densities.isEmpty()
&& !artifact.getExtension().equals("xml")
&& ResourceFolderType.getFolderType(getContainingFolder(artifact))
== ResourceFolderType.DRAWABLE;
}
  /** Returns the base name of the artifact's parent directory, e.g. {@code drawable-hdpi}. */
  private static String getContainingFolder(Artifact artifact) {
    return artifact.getPath().getParentDirectory().getBaseName();
  }
private static boolean matchesConfigurationFilters(
ImmutableList<FolderConfiguration> folderConfigs, FolderConfiguration config) {
for (FolderConfiguration filter : folderConfigs) {
if (config.isMatchFor(filter)) {
return true;
}
}
return folderConfigs.isEmpty();
}
  /**
   * Returns if this object contains a non-empty resource configuration filter.
   *
   * <p>Note that non-empty filters are not guaranteed to filter resources during the analysis
   * phase.
   */
  boolean hasConfigurationFilters() {
    return !configFilters.isEmpty();
  }
  /** Returns the configuration filters as a single comma-separated string. */
  String getConfigurationFilterString() {
    return Joiner.on(',').join(configFilters);
  }
  /**
   * Returns if this object contains a non-empty density filter.
   *
   * <p>Note that non-empty filters are not guaranteed to filter resources during the analysis
   * phase.
   */
  boolean hasDensities() {
    return !densities.isEmpty();
  }
  /** Returns the density filters as a single comma-separated string. */
  String getDensityString() {
    return Joiner.on(',').join(densities);
  }
  /** Returns the density filters; the list is immutable, so exposing it directly is safe. */
  List<String> getDensities() {
    return densities;
  }
  /** Returns whether resources will actually be filtered before execution. */
  boolean isPrefiltering() {
    return hasFilters() && filterBehavior != FilterBehavior.FILTER_IN_EXECUTION;
  }
  /** Returns whether any configuration or density filters are present. */
  boolean hasFilters() {
    return hasConfigurationFilters() || hasDensities();
  }
  /**
   * Returns a suffix distinguishing the output directories of differently-filtered targets, or
   * null when no filters are set — callers must handle the null.
   */
  public String getOutputDirectorySuffix() {
    if (!hasFilters()) {
      return null;
    }
    return getConfigurationFilterString() + "_" + getDensityString();
  }
  /** Returns whether filter settings are propagated to dependencies via dynamic configuration. */
  boolean usesDynamicConfiguration() {
    return filterBehavior == FilterBehavior.FILTER_IN_ANALYSIS_WITH_DYNAMIC_CONFIGURATION;
  }
  /*
   * TODO: Stop tracking these once {@link FilterBehavior#FILTER_IN_ANALYSIS} is fully replaced by
   * {@link FilterBehavior#FILTER_IN_ANALYSIS_WITH_DYNAMIC_CONFIGURATION}.
   *
   * <p>Currently, when using {@link FilterBehavior#FILTER_IN_ANALYSIS}, android_library targets do
   * no filtering, and all resources are built into their symbol files. The android_binary target
   * filters out these resources in analysis. However, the filtered resources must be passed to
   * resource processing at execution time so the code knows to ignore resources that were filtered
   * out. Without this, resource processing code would see references to those resources in
   * dependencies's symbol files, but then be unable to follow those references or know whether they
   * were missing due to resource filtering or a bug.
   */
  /** Returns the resources removed in analysis that execution-time processing must ignore. */
  ImmutableList<String> getResourcesToIgnoreInExecution() {
    return filteredResources.build().asList();
  }
  /**
   * {@inheritDoc}
   *
   * <p>ResourceFilterFactory requires an accurately overridden equals() method to work correctly
   * with Bazel's caching and dynamic configuration.
   *
   * <p>NOTE(review): equality includes {@code filteredResources.build()}, which changes as {@code
   * filter} accumulates results — confirm no caller compares instances across a filtering run.
   */
  @Override
  public boolean equals(Object object) {
    // instanceof handles null, so no separate null check is needed.
    if (!(object instanceof ResourceFilterFactory)) {
      return false;
    }
    ResourceFilterFactory other = (ResourceFilterFactory) object;
    return filterBehavior == other.filterBehavior
        && configFilters.equals(other.configFilters)
        && densities.equals(other.densities)
        && filteredResources.build().equals(other.filteredResources.build());
  }
  @Override
  public int hashCode() {
    // Must stay consistent with equals(): same four components.
    return Objects.hashCode(filterBehavior, configFilters, densities, filteredResources.build());
  }
  /**
   * Converts command line settings for the filter behavior into an empty {@link
   * ResourceFilterFactory} object.
   */
  public static final class Converter
      implements com.google.devtools.common.options.Converter<ResourceFilterFactory> {
    // Delegates parsing of the behavior name to the enum's own converter.
    private final FilterBehavior.Converter filterEnumConverter = new FilterBehavior.Converter();
    @Override
    public ResourceFilterFactory convert(String input) throws OptionsParsingException {
      return empty(filterEnumConverter.convert(input));
    }
    @Override
    public String getTypeDescription() {
      return filterEnumConverter.getTypeDescription();
    }
  }
  // Transitions for dealing with dynamically configured resource filtering:
  /**
   * Returns the configuration transition to apply at a top-level target, or null when dynamic
   * configuration is not in use.
   */
  @Nullable
  PatchTransition getTopLevelPatchTransition(String ruleClass, AttributeMap attrs) {
    if (!usesDynamicConfiguration()) {
      // We're not using dynamic configuration, so we don't need to make a transition
      return null;
    }
    if (!ruleClass.equals("android_binary") || !hasFilters(attrs)) {
      // This target doesn't specify any filtering settings, so dynamically configured resource
      // filtering would be a waste of time.
      // If the target's dependencies include android_binary targets, those dependencies might
      // specify filtering settings, but we don't apply them dynamically since the chances of
      // encountering differing settings (leading to splitting the build graph and poor overall
      // performance) are high.
      return REMOVE_DYNAMICALLY_CONFIGURED_RESOURCE_FILTERING_TRANSITION;
    }
    // Continue using dynamically configured resource filtering, and propagate this target's
    // filtering settings.
    return new AddDynamicallyConfiguredResourceFilteringTransition(attrs);
  }
  /** Shared transition that strips dynamically configured filtering from the configuration. */
  public static final PatchTransition REMOVE_DYNAMICALLY_CONFIGURED_RESOURCE_FILTERING_TRANSITION =
      new RemoveDynamicallyConfiguredResourceFilteringTransition();
  private static final class RemoveDynamicallyConfiguredResourceFilteringTransition
      extends BaseDynamicallyConfiguredResourceFilteringTransition {
    @Override
    ResourceFilterFactory getNewResourceFilter(ResourceFilterFactory oldResourceFilterFactory) {
      return oldResourceFilterFactory.withoutDynamicConfiguration();
    }
  }
  /** Transition that merges a target's filter attributes into the configuration's factory. */
  @VisibleForTesting
  static final class AddDynamicallyConfiguredResourceFilteringTransition
      extends BaseDynamicallyConfiguredResourceFilteringTransition {
    private final AttributeMap attrs;
    AddDynamicallyConfiguredResourceFilteringTransition(AttributeMap attrs) {
      this.attrs = attrs;
    }
    @Override
    ResourceFilterFactory getNewResourceFilter(ResourceFilterFactory oldResourceFilterFactory) {
      return oldResourceFilterFactory.withAttrsFrom(attrs);
    }
    @VisibleForTesting
    AttributeMap getAttrs() {
      return attrs;
    }
  }
  /**
   * Base transition: clones the build options and replaces the android configuration's resource
   * filter factory with the one produced by the subclass.
   */
  private abstract static class BaseDynamicallyConfiguredResourceFilteringTransition
      implements PatchTransition {
    @Override
    public BuildOptions apply(BuildOptions options) {
      // Clone first; BuildOptions instances must not be mutated in place.
      BuildOptions newOptions = options.clone();
      AndroidConfiguration.Options androidOptions =
          newOptions.get(AndroidConfiguration.Options.class);
      androidOptions.resourceFilterFactory =
          getNewResourceFilter(androidOptions.resourceFilterFactory);
      return newOptions;
    }
    abstract ResourceFilterFactory getNewResourceFilter(
        ResourceFilterFactory oldResourceFilterFactory);
  }
}
| apache-2.0 |
sergecodd/FireFox-OS | B2G/gecko/mobile/android/base/sync/delegates/InfoCollectionsDelegate.java | 549 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.sync.delegates;
import org.mozilla.gecko.sync.InfoCollections;
import org.mozilla.gecko.sync.net.SyncStorageResponse;
/** Callback interface receiving the outcome of an info/collections request. */
public interface InfoCollectionsDelegate {
  // Called with the parsed InfoCollections on success.
  public void handleSuccess(InfoCollections global);
  // Called when the server responded unsuccessfully; the raw storage response is provided.
  public void handleFailure(SyncStorageResponse response);
  // Called when the request itself failed — presumably a local/network error; TODO confirm.
  public void handleError(Exception e);
}
| apache-2.0 |
bazelbuild/bazel | src/main/java/com/google/devtools/build/skyframe/ParallelEvaluatorContext.java | 8757 | // Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import com.github.benmanes.caffeine.cache.Cache;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.collect.nestedset.NestedSetVisitor;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.events.ExtendedEventHandler.Postable;
import com.google.devtools.build.skyframe.MemoizingEvaluator.EmittedEventState;
import com.google.devtools.build.skyframe.QueryableGraph.Reason;
import com.google.devtools.build.skyframe.SkyFunction.Environment.SkyKeyComputeState;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Context object holding sufficient information for {@link SkyFunctionEnvironment} to perform its
* duties. Shared among all {@link SkyFunctionEnvironment} instances, which should regard this
* object as a read-only collection of data.
*
* <p>Also used during cycle detection.
*/
class ParallelEvaluatorContext {
  // The graph being evaluated and the version this evaluation runs at.
  private final QueryableGraph graph;
  private final Version graphVersion;
  // Registered SkyFunctions, keyed by function name.
  private final ImmutableMap<SkyFunctionName, SkyFunction> skyFunctions;
  private final ExtendedEventHandler reporter;
  // Replay stored events/postables exactly once, deduplicated via the shared emitted-event state.
  private final NestedSetVisitor<TaggedEvents> replayingNestedSetEventVisitor;
  private final NestedSetVisitor<Postable> replayingNestedSetPostableVisitor;
  private final boolean keepGoing;
  private final DirtyTrackingProgressReceiver progressReceiver;
  private final EventFilter storedEventFilter;
  private final ErrorInfoManager errorInfoManager;
  private final GraphInconsistencyReceiver graphInconsistencyReceiver;
  private final boolean mergingSkyframeAnalysisExecutionPhases;
  // Per-key compute state shared across restarts of a SkyFunction.
  private final Cache<SkyKey, SkyKeyComputeState> stateCache;
  /**
   * The visitor managing the thread pool. Used to enqueue parents when an entry is finished, and,
   * during testing, to block until an exception is thrown if a node builder requests that.
   * Initialized after construction to avoid the overhead of the caller's creating a threadpool in
   * cases where it is not needed.
   */
  private final Supplier<NodeEntryVisitor> visitorSupplier;
  /**
   * Returns a {@link Runnable} given a {@code key} to evaluate and an {@code evaluationPriority}
   * indicating whether it should be scheduled for evaluation soon (higher is better). The returned
   * {@link Runnable} is a {@link ComparableRunnable} so that it can be ordered by {@code
   * evaluationPriority} in a priority queue if needed.
   */
  interface RunnableMaker {
    ComparableRunnable make(SkyKey key, int evaluationPriority);
  }
  /** A runnable orderable by evaluation priority, for use in priority queues. */
  interface ComparableRunnable extends Runnable, Comparable<ComparableRunnable> {}
  /**
   * Creates the shared, read-only evaluation context. The {@code visitorSupplier} is memoized so
   * the thread pool is only created on first use.
   */
  public ParallelEvaluatorContext(
      QueryableGraph graph,
      Version graphVersion,
      ImmutableMap<SkyFunctionName, SkyFunction> skyFunctions,
      ExtendedEventHandler reporter,
      EmittedEventState emittedEventState,
      boolean keepGoing,
      DirtyTrackingProgressReceiver progressReceiver,
      EventFilter storedEventFilter,
      ErrorInfoManager errorInfoManager,
      GraphInconsistencyReceiver graphInconsistencyReceiver,
      Supplier<NodeEntryVisitor> visitorSupplier,
      boolean mergingSkyframeAnalysisExecutionPhases,
      Cache<SkyKey, SkyKeyComputeState> stateCache) {
    this.graph = graph;
    this.graphVersion = graphVersion;
    this.skyFunctions = skyFunctions;
    this.reporter = reporter;
    this.graphInconsistencyReceiver = graphInconsistencyReceiver;
    // The replaying visitors share emittedEventState so each event/postable is replayed only once.
    this.replayingNestedSetEventVisitor =
        new NestedSetVisitor<>(new NestedSetEventReceiver(reporter), emittedEventState.eventState);
    this.replayingNestedSetPostableVisitor =
        new NestedSetVisitor<>(
            new NestedSetPostableReceiver(reporter), emittedEventState.postableState);
    this.keepGoing = keepGoing;
    this.progressReceiver = Preconditions.checkNotNull(progressReceiver);
    this.storedEventFilter = storedEventFilter;
    this.errorInfoManager = errorInfoManager;
    // Memoize so the underlying threadpool is created lazily, at most once.
    this.visitorSupplier = Suppliers.memoize(visitorSupplier);
    this.mergingSkyframeAnalysisExecutionPhases = mergingSkyframeAnalysisExecutionPhases;
    this.stateCache = stateCache;
  }
  /** Fetches the node entries for the given keys from the graph in a single batch lookup. */
  Map<SkyKey, ? extends NodeEntry> getBatchValues(
      @Nullable SkyKey requestor, Reason reason, Iterable<? extends SkyKey> keys)
      throws InterruptedException {
    return graph.getBatch(requestor, reason, keys);
  }
/**
* Signals all parents that this node is finished.
*
* <p>Calling this method indicates that we are building this node after the main build aborted,
* so skips signalling any parents that are already done (that can happen with cycles).
*/
void signalParentsOnAbort(SkyKey skyKey, Iterable<SkyKey> parents, Version version)
throws InterruptedException {
Map<SkyKey, ? extends NodeEntry> batch = getBatchValues(skyKey, Reason.SIGNAL_DEP, parents);
for (SkyKey parent : parents) {
NodeEntry entry = Preconditions.checkNotNull(batch.get(parent), parent);
if (!entry.isDone()) { // In cycles, we can have parents that are already done.
entry.signalDep(version, skyKey);
}
}
}
/**
 * Signals all parents that this node is finished and enqueues any parents that are ready at the
 * given evaluation priority.
 */
void signalParentsAndEnqueueIfReady(
    SkyKey skyKey, Iterable<SkyKey> parents, Version version, int evaluationPriority)
    throws InterruptedException {
  Map<SkyKey, ? extends NodeEntry> parentEntries =
      getBatchValues(skyKey, Reason.SIGNAL_DEP, parents);
  for (SkyKey parent : parents) {
    NodeEntry parentEntry = Preconditions.checkNotNull(parentEntries.get(parent), parent);
    boolean parentIsReady = parentEntry.signalDep(version, skyKey);
    if (parentIsReady) {
      getVisitor().enqueueEvaluation(parent, evaluationPriority);
    }
  }
}
/** Returns the graph being evaluated. */
QueryableGraph getGraph() {
    return graph;
  }
/** Returns the version at which this evaluation is taking place. */
Version getGraphVersion() {
    return graphVersion;
  }
/** Whether the evaluation should continue past errors (keep-going mode). */
boolean keepGoing() {
    return keepGoing;
  }
/** Returns the node-entry visitor; the supplier is memoized, so all callers share one instance. */
NodeEntryVisitor getVisitor() {
    return visitorSupplier.get();
  }
/** Returns the (non-null) progress receiver for this evaluation. */
DirtyTrackingProgressReceiver getProgressReceiver() {
    return progressReceiver;
  }
/** Returns the receiver notified of graph inconsistencies. */
GraphInconsistencyReceiver getGraphInconsistencyReceiver() {
    return graphInconsistencyReceiver;
  }
/** Returns the visitor that replays stored events through the reporter. */
NestedSetVisitor<TaggedEvents> getReplayingNestedSetEventVisitor() {
    return replayingNestedSetEventVisitor;
  }
/** Returns the visitor that replays stored postables through the reporter. */
NestedSetVisitor<Postable> getReplayingNestedSetPostableVisitor() {
    return replayingNestedSetPostableVisitor;
  }
/** Returns the event handler used to report events/postables. */
ExtendedEventHandler getReporter() {
    return reporter;
  }
/** Returns the map from function name to SkyFunction implementation. */
ImmutableMap<SkyFunctionName, SkyFunction> getSkyFunctions() {
    return skyFunctions;
  }
/** Returns the filter deciding which events are stored with node values. */
EventFilter getStoredEventFilter() {
    return storedEventFilter;
  }
/** Returns the manager used to construct/aggregate error info. */
ErrorInfoManager getErrorInfoManager() {
    return errorInfoManager;
  }
/** Delegates to the inconsistency receiver's restart policy. */
boolean restartPermitted() {
    return graphInconsistencyReceiver.restartPermitted();
  }
/** Whether analysis and execution phases are being merged in this evaluation. */
boolean mergingSkyframeAnalysisExecutionPhases() {
    return mergingSkyframeAnalysisExecutionPhases;
  }
/** Returns the cache of per-key compute state. */
Cache<SkyKey, SkyKeyComputeState> stateCache() {
    return stateCache;
  }
/** Receives the events from the NestedSet and delegates to the reporter. */
private static final class NestedSetEventReceiver
    implements NestedSetVisitor.Receiver<TaggedEvents> {

  private final ExtendedEventHandler reporter;

  NestedSetEventReceiver(ExtendedEventHandler reporter) {
    this.reporter = reporter;
  }

  @Override
  public void accept(TaggedEvents events) {
    // Replay each stored event through the live reporter.
    events.getEvents().forEach(reporter::handle);
  }
}
/** Receives the postables from the NestedSet and delegates to the reporter. */
private static final class NestedSetPostableReceiver
      implements NestedSetVisitor.Receiver<Postable> {
    // Live handler that re-posts replayed postables.
    private final ExtendedEventHandler reporter;
    NestedSetPostableReceiver(ExtendedEventHandler reporter) {
      this.reporter = reporter;
    }
    @Override
    public void accept(Postable post) {
      // Forward each stored postable to the reporter.
      reporter.post(post);
    }
  }
}
| apache-2.0 |
apache/geronimo | framework/modules/geronimo-management/src/main/java/org/apache/geronimo/management/stats/JMSSessionStatsImpl.java | 3632 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.management.stats;
import javax.management.j2ee.statistics.CountStatistic;
import javax.management.j2ee.statistics.JMSConsumerStats;
import javax.management.j2ee.statistics.JMSProducerStats;
import javax.management.j2ee.statistics.JMSSessionStats;
import javax.management.j2ee.statistics.TimeStatistic;
/**
 * Geronimo implementation of the JSR-77 JMSSessionStats interface.
 *
 * <p>The count/time statistics are created eagerly and registered with the base
 * {@code StatsImpl}; the producer/consumer arrays are populated via the setters.
 *
 * @version $Rev: 476049 $ $Date: 2006-11-16 20:35:17 -0800 (Thu, 16 Nov 2006) $
 */
public class JMSSessionStatsImpl extends StatsImpl implements JMSSessionStats {
    private JMSProducerStats[] producersStats;
    private JMSConsumerStats[] consumersStats;
    private final CountStatisticImpl messageCount;
    private final CountStatisticImpl pendingMessageCount;
    private final CountStatisticImpl expiredMessageCount;
    private final CountStatisticImpl durableSubscriptionCount;
    private final TimeStatisticImpl messageWaitTime;

    public JMSSessionStatsImpl() {
        messageCount = new CountStatisticImpl("Message Count",
                StatisticImpl.UNIT_COUNT, "Number of messages exchanged");
        pendingMessageCount = new CountStatisticImpl("Pending Message Count",
                StatisticImpl.UNIT_COUNT, "Number of pending messages");
        expiredMessageCount = new CountStatisticImpl("Expired Message Count",
                StatisticImpl.UNIT_COUNT, "Number of expired messages");
        durableSubscriptionCount = new CountStatisticImpl(
                "Durable Subscription Count", StatisticImpl.UNIT_COUNT,
                "Number of durable subscriptions");
        // NOTE(review): UNIT_COUNT looks wrong for a time statistic — presumably one of
        // StatisticImpl's time units was intended; confirm before changing.
        messageWaitTime = new TimeStatisticImpl("Message Wait Time",
                StatisticImpl.UNIT_COUNT,
                "Time spent by a message before being delivered");
        addStat("MessageCount", messageCount);
        addStat("PendingMessageCount", pendingMessageCount);
        addStat("ExpiredMessageCount", expiredMessageCount);
        addStat("DurableSubscriptionCount", durableSubscriptionCount);
        addStat("MessageWaitTime", messageWaitTime);
    }

    public JMSProducerStats[] getProducers() {
        return producersStats;
    }

    public JMSConsumerStats[] getConsumers() {
        return consumersStats;
    }

    /** Sets the per-producer statistics; previously these fields could never be assigned. */
    public void setProducers(JMSProducerStats[] producersStats) {
        this.producersStats = producersStats;
    }

    /** Sets the per-consumer statistics; previously these fields could never be assigned. */
    public void setConsumers(JMSConsumerStats[] consumersStats) {
        this.consumersStats = consumersStats;
    }

    public CountStatistic getMessageCount() {
        return messageCount;
    }

    public CountStatistic getPendingMessageCount() {
        return pendingMessageCount;
    }

    public CountStatistic getExpiredMessageCount() {
        // Bug fix: previously returned messageCount (copy/paste error).
        return expiredMessageCount;
    }

    public CountStatistic getDurableSubscriptionCount() {
        return durableSubscriptionCount;
    }

    public TimeStatistic getMessageWaitTime() {
        return messageWaitTime;
    }
}
| apache-2.0 |
evandor/skysail | skysail.server/src/io/skysail/server/services/MessageQueueHandler.java | 202 | package io.skysail.server.services;
/** Abstraction over a topic-based message queue: publish messages and subscribe listeners. */
public interface MessageQueueHandler {
    /** Publishes {@code message} on the given {@code topic}. */
    void send(String topic, String message);
    /** Registers {@code listener} to be notified of messages published on {@code topic}. */
    void addMessageListener(String topic, SkysailMessageListener listener);
}
| apache-2.0 |
grgrzybek/karaf | main/src/test/java/org/apache/karaf/main/MockLock.java | 2143 | package org.apache.karaf.main;
import org.apache.felix.utils.properties.Properties;
import java.util.logging.Logger;
import org.apache.karaf.main.lock.Lock;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Test double for the Karaf {@link Lock}: lock/alive results are scripted via
 * {@link #setLock(boolean)} and {@link #setIsAlive(boolean)}, and {@link #waitForLock()}
 * lets a test block until {@link #lock()} is next invoked.
 */
public class MockLock implements Lock {

    private static final Logger LOG = Logger.getLogger(MockLock.class.getName());

    /** Monitor used to wake threads blocked in waitForLock() whenever lock() is called. */
    private final Object lockMonitor = new Object();

    private boolean lock = true;
    private boolean isAlive = true;

    public MockLock(Properties props) {
        /* KARAF-5798: allow tests to simulate slave instances */
        lock = Boolean.parseBoolean(System.getProperty("test.karaf.mocklock.initiallyLocked", "true"));
    }

    public boolean lock() throws Exception {
        synchronized (lockMonitor) {
            LOG.fine("lock = " + lock);
            lockMonitor.notifyAll();
        }
        return lock;
    }

    public void release() throws Exception {
        LOG.fine("release");
    }

    public boolean isAlive() throws Exception {
        LOG.fine("isAlive = " + isAlive);
        return isAlive;
    }

    public void setLock(boolean lock) {
        this.lock = lock;
    }

    public void setIsAlive(boolean isAlive) {
        this.isAlive = isAlive;
    }

    /** Blocks until lock() is next called, or until a 5-minute safety timeout expires. */
    public void waitForLock() throws InterruptedException {
        synchronized (lockMonitor) {
            lockMonitor.wait(5 * 60 * 1000);
        }
    }
}
| apache-2.0 |
google/truth | core/src/main/java/com/google/common/truth/PrimitiveCharArraySubject.java | 1245 | /*
* Copyright (c) 2014 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.truth;
import com.google.common.primitives.Chars;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * A Subject for {@code char[]}.
 *
 * @author Christian Gruber (cgruber@israfil.net)
 */
public final class PrimitiveCharArraySubject extends AbstractArraySubject {
  // The array under test; may be null when the assertion is about nullness itself.
  private final char[] actual;
  PrimitiveCharArraySubject(
      FailureMetadata metadata, char @Nullable [] o, @Nullable String typeDescription) {
    super(metadata, o, typeDescription);
    this.actual = o;
  }
  /** Views the array as a {@code List<Character>} so the richer iterable assertions apply. */
  public IterableSubject asList() {
    return checkNoNeedToDisplayBothValues("asList()").that(Chars.asList(actual));
  }
}
| apache-2.0 |
lucasponce/hawkular-alerts | api/src/main/java/org/hawkular/alerts/api/model/condition/ExternalConditionEval.java | 5048 | /*
* Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.alerts.api.model.condition;
import java.util.Objects;

import org.hawkular.alerts.api.doc.DocModel;
import org.hawkular.alerts.api.doc.DocModelProperty;
import org.hawkular.alerts.api.model.condition.Condition.Type;
import org.hawkular.alerts.api.model.data.Data;
import org.hawkular.alerts.api.model.event.Event;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
 * An evaluation state for an external condition. Note that external conditions may report a <code>Data</code> value
 * or an <code>Event</code>.
 *
 * @author Jay Shaughnessy
 * @author Lucas Ponce
 */
@DocModel(description = "An evaluation state for an external condition. + \n" +
        "Note that external conditions may report a Data value or an Event.")
public class ExternalConditionEval extends ConditionEval {

    private static final long serialVersionUID = 1L;

    @DocModelProperty(description = "External condition linked with this state.",
            position = 0)
    @JsonInclude(Include.NON_NULL)
    private ExternalCondition condition;

    @DocModelProperty(description = "String value used for dataId.",
            position = 1)
    @JsonInclude(Include.NON_NULL)
    private String value;

    @DocModelProperty(description = "Event value used for dataId.",
            position = 2)
    @JsonInclude(Include.NON_NULL)
    private Event event;

    /** No-arg constructor for serialization frameworks; produces a non-matching eval. */
    public ExternalConditionEval() {
        super(Type.EXTERNAL, false, 0, null);
        this.condition = null;
        this.value = null;
        this.event = null;
    }

    /** Creates an eval state for an external condition reported as an {@code Event}. */
    public ExternalConditionEval(ExternalCondition condition, Event event) {
        super(Type.EXTERNAL, condition.match(event.getText()), event.getCtime(), event.getContext());
        this.condition = condition;
        this.event = event;
    }

    /** Creates an eval state for an external condition reported as a {@code Data} value. */
    public ExternalConditionEval(ExternalCondition condition, Data data) {
        super(Type.EXTERNAL, condition.match(data.getValue()), data.getTimestamp(), data.getContext());
        this.condition = condition;
        this.value = data.getValue();
    }

    public ExternalCondition getCondition() {
        return condition;
    }

    public void setCondition(ExternalCondition condition) {
        this.condition = condition;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public Event getEvent() {
        return event;
    }

    public void setEvent(Event event) {
        this.event = event;
    }

    @Override
    public String getTenantId() {
        return condition.getTenantId();
    }

    @Override
    public String getTriggerId() {
        return condition.getTriggerId();
    }

    @Override
    public int getConditionSetSize() {
        return condition.getConditionSetSize();
    }

    @Override
    public int getConditionSetIndex() {
        return condition.getConditionSetIndex();
    }

    @Override
    public void updateDisplayString() {
        // Prefer the data value; fall back to the event's toString() for event-based evals.
        String s = String.format("External[%s]: %s[%s] matches [%s]", condition.getAlerterId(),
                condition.getDataId(), (value != null ? value : event.toString()), condition.getExpression());
        setDisplayString(s);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        if (!super.equals(o)) return false;

        ExternalConditionEval that = (ExternalConditionEval) o;

        // Objects.equals replaces the previous hand-rolled null-safe comparisons.
        return Objects.equals(condition, that.condition)
                && Objects.equals(value, that.value)
                && Objects.equals(event, that.event);
    }

    @Override
    public int hashCode() {
        // Same 31-based arithmetic as before: Objects.hashCode(x) == (x == null ? 0 : x.hashCode()),
        // so existing hash-based distributions are preserved.
        int result = super.hashCode();
        result = 31 * result + Objects.hashCode(condition);
        result = 31 * result + Objects.hashCode(value);
        result = 31 * result + Objects.hashCode(event);
        return result;
    }

    @Override
    public String toString() {
        return "ExternalConditionEval{" +
                "condition=" + condition +
                ", value='" + value + '\'' +
                ", event=" + event +
                '}';
    }
}
| apache-2.0 |
bitraten/arx | src/main/org/deidentifier/arx/metric/v2/MetricMDNUNMNormalizedEntropyPrecomputed.java | 5791 | /*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.metric.v2;
import java.util.Arrays;
import org.deidentifier.arx.ARXConfiguration;
import org.deidentifier.arx.DataDefinition;
import org.deidentifier.arx.framework.check.groupify.HashGroupify;
import org.deidentifier.arx.framework.check.groupify.HashGroupifyEntry;
import org.deidentifier.arx.framework.data.Data;
import org.deidentifier.arx.framework.data.DataManager;
import org.deidentifier.arx.framework.data.GeneralizationHierarchy;
import org.deidentifier.arx.framework.lattice.Transformation;
import org.deidentifier.arx.metric.MetricConfiguration;
/**
* This class provides an efficient implementation of normalized non-uniform entropy
*
* @author Fabian Prasser
* @author Florian Kohlmayer
*/
public class MetricMDNUNMNormalizedEntropyPrecomputed extends MetricMDNUNMEntropyPrecomputed {
/** SVUID. */
private static final long serialVersionUID = -2384411534214262365L;
/** Upper bounds */
private double[] upper;
/**
* Creates a new instance.
*
* @param function
*/
public MetricMDNUNMNormalizedEntropyPrecomputed(AggregateFunction function) {
super(function);
}
/**
* Creates a new instance.
*/
protected MetricMDNUNMNormalizedEntropyPrecomputed() {
super();
}
/**
* Returns the configuration of this metric.
*
* @return
*/
public MetricConfiguration getConfiguration() {
return new MetricConfiguration(false, // monotonic
0.5d, // gs-factor
true, // precomputed
1.0d, // precomputation threshold
this.getAggregateFunction() // aggregate function
);
}
@Override
public String toString() {
return "Normalized non-uniform entropy";
}
@Override
public String getName() {
return "Normalized non-uniform entropy";
}
@Override
protected ILMultiDimensionalWithBound getInformationLossInternal(Transformation node, HashGroupifyEntry entry) {
return super.getInformationLossInternal(node, entry);
}
@Override
protected ILMultiDimensionalWithBound getInformationLossInternal(final Transformation node, final HashGroupify g) {
ILMultiDimensionalWithBound result = super.getInformationLossInternal(node, g);
double[] loss = result.getInformationLoss() != null ? result.getInformationLoss().getValues() : null;
double[] bound = result.getLowerBound() != null ?result.getLowerBound().getValues() : null;
// Switch sign bit and round
for (int column = 0; column < loss.length; column++) {
if (loss != null) loss[column] /= upper[column];
if (bound != null) bound[column] /= upper[column];
}
// Return
return new ILMultiDimensionalWithBound(super.createInformationLoss(loss),
super.createInformationLoss(bound));
}
@Override
protected AbstractILMultiDimensional getLowerBoundInternal(Transformation node) {
AbstractILMultiDimensional result = super.getLowerBoundInternal(node);
if (result == null) return null;
double[] loss = result.getValues();
// Switch sign bit and round
for (int column = 0; column < loss.length; column++) {
loss[column] /= upper[column];
}
// Return
return super.createInformationLoss(loss);
}
@Override
protected AbstractILMultiDimensional getLowerBoundInternal(Transformation node,
HashGroupify groupify) {
AbstractILMultiDimensional result = super.getLowerBoundInternal(node, groupify);
if (result == null) return null;
double[] loss = result.getValues();
// Switch sign bit and round
for (int column = 0; column < loss.length; column++) {
loss[column] /= upper[column];
}
// Return
return super.createInformationLoss(loss);
}
@Override
protected void initializeInternal(final DataManager manager,
final DataDefinition definition,
final Data input,
final GeneralizationHierarchy[] hierarchies,
final ARXConfiguration config) {
super.initializeInternal(manager, definition, input, hierarchies, config);
this.upper = super.getUpperBounds();
// Compute a reasonable min & max
double[] min = new double[hierarchies.length];
Arrays.fill(min, 0d);
double[] max = new double[hierarchies.length];
Arrays.fill(max, 1d);
super.setMax(max);
super.setMin(min);
}
} | apache-2.0 |
iacdingping/closure-templates | java/tests/com/google/template/soy/soyparse/SourceLocationTest.java | 12050 | /*
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.soyparse;
import com.google.common.base.Joiner;
import com.google.template.soy.SoyFileSetParserBuilder;
import com.google.template.soy.base.SourceLocation;
import com.google.template.soy.base.internal.FixedIdGenerator;
import com.google.template.soy.base.internal.SoyFileKind;
import com.google.template.soy.base.internal.SoyFileSupplier;
import com.google.template.soy.error.ExplodingErrorReporter;
import com.google.template.soy.soytree.AbstractSoyNodeVisitor;
import com.google.template.soy.soytree.SoyFileSetNode;
import com.google.template.soy.soytree.SoyNode;
import com.google.template.soy.soytree.SoyNode.ParentSoyNode;
import com.google.template.soy.soytree.TemplateNode;
import com.google.template.soy.types.SoyTypeRegistry;
import junit.framework.TestCase;
import java.io.StringReader;
/**
 * Tests that the Soy file and template parsers properly embed source locations.
 */
public final class SourceLocationTest extends TestCase {
  // Basic case: raw text, print, and call nodes across two templates in one file.
  public void testLocationsInParsedContent() throws Exception {
    assertSourceLocations(
        Joiner.on('\n').join(
            "SoyFileSetNode @ unknown",
            "  SoyFileNode @ /example/file.soy",
            "    TemplateBasicNode @ /example/file.soy:2:1",
            "      RawTextNode @ /example/file.soy:3:1",
            "      PrintNode @ /example/file.soy:5:3",
            "      RawTextNode @ /example/file.soy:6:1",
            "      CallBasicNode @ /example/file.soy:8:3",
            "    TemplateBasicNode @ /example/file.soy:10:1",
            "      RawTextNode @ /example/file.soy:11:1",
            ""),
        Joiner.on('\n').join(
            "{namespace ns}",
            "{template .foo autoescape=\"deprecated-noncontextual\"}", // 1
            "  Hello", // 2
            "  {lb}", // 3
            "  {print $world}", // 4
            "  {rb}!", // 5
            "", // 6
            "  {call bar /}", // 7
            "{/template}", // 8
            "{template .bar autoescape=\"deprecated-noncontextual\"}", // 9
            "  Gooodbye", // 10
            "{/template}" // 11
        )
    );
  }
  // Case/default nodes should point at the {case ...}/{default} commands themselves.
  public void testSwitches() throws Exception {
    assertSourceLocations(
        Joiner.on('\n').join(
            "SoyFileSetNode @ unknown",
            "  SoyFileNode @ /example/file.soy",
            "    TemplateBasicNode @ /example/file.soy:2:1",
            "      RawTextNode @ /example/file.soy:3:1",
            "      SwitchNode @ /example/file.soy:4:3",
            "        SwitchCaseNode @ /example/file.soy:5:5",
            "          RawTextNode @ /example/file.soy:6:1",
            "        SwitchCaseNode @ /example/file.soy:7:5",
            "          RawTextNode @ /example/file.soy:8:1",
            "        SwitchCaseNode @ /example/file.soy:9:5",
            "          RawTextNode @ /example/file.soy:10:1",
            "        SwitchDefaultNode @ /example/file.soy:11:5",
            "          RawTextNode @ /example/file.soy:12:1",
            "      RawTextNode @ /example/file.soy:14:1",
            ""),
        Joiner.on('\n').join(
            "{namespace ns}",
            "{template .foo autoescape=\"deprecated-noncontextual\"}", // 1
            "  Hello,", // 2
            "  {switch $i}", // 3
            "    {case 0}", // 4
            "      Mercury", // 5
            "    {case 1}", // 6
            "      Venus", // 7
            "    {case 2}", // 8
            "      Mars", // 9
            "    {default}", // 10
            "      Gassy", // 11
            "  {/switch}", // 12
            "  !", // 13
            "{/template}", // 14
            "")
    );
  }
  // {for ...} loops: children carry locations of the loop body, not the command.
  public void testForLoop() throws Exception {
    assertSourceLocations(
        Joiner.on('\n').join(
            "SoyFileSetNode @ unknown",
            "  SoyFileNode @ /example/file.soy",
            "    TemplateBasicNode @ /example/file.soy:2:1",
            "      RawTextNode @ /example/file.soy:3:1",
            "      ForNode @ /example/file.soy:4:3",
            "        RawTextNode @ /example/file.soy:5:1",
            "        PrintNode @ /example/file.soy:6:5",
            "      RawTextNode @ /example/file.soy:8:1",
            ""),
        Joiner.on('\n').join(
            "{namespace ns}",
            "{template .foo autoescape=\"deprecated-noncontextual\"}", // 1
            "  Hello", // 2
            "  {for $i in range($s, $e, $t)}", // 3
            "    ,", // 4
            "    {print $planet[$i]}", // 5
            "  {/for}", // 6
            "  !", // 7
            "{/template}", // 8
            "")
    );
  }
  // {foreach ...}: nonempty/ifempty branches get their own nodes and locations.
  public void testForeachLoop() throws Exception {
    assertSourceLocations(
        Joiner.on('\n').join(
            "SoyFileSetNode @ unknown",
            "  SoyFileNode @ /example/file.soy",
            "    TemplateBasicNode @ /example/file.soy:2:1",
            "      RawTextNode @ /example/file.soy:3:1",
            "      ForeachNode @ /example/file.soy:4:3",
            "        ForeachNonemptyNode @ /example/file.soy:4:3",
            "          RawTextNode @ /example/file.soy:5:1",
            "          PrintNode @ /example/file.soy:6:5",
            "        ForeachIfemptyNode @ /example/file.soy:7:3",
            "          RawTextNode @ /example/file.soy:8:1",
            "      RawTextNode @ /example/file.soy:10:1",
            ""),
        Joiner.on('\n').join(
            "{namespace ns}",
            "{template .foo autoescape=\"deprecated-noncontextual\"}", // 1
            "  Hello", // 2
            "  {foreach $planet in $planets}", // 3
            "    ,", // 4
            "    {print $planet[$i]}", // 5
            "  {ifempty}", // 6
            "    lifeless interstellar void", // 7
            "  {/foreach}", // 8
            "  !", // 9
            "{/template}", // 10
            "")
    );
  }
  // {if}/{elseif}/{else}: each branch node points at its own command.
  public void testConditional() throws Exception {
    assertSourceLocations(
        Joiner.on('\n').join(
            "SoyFileSetNode @ unknown",
            "  SoyFileNode @ /example/file.soy",
            "    TemplateBasicNode @ /example/file.soy:2:1",
            "      RawTextNode @ /example/file.soy:3:1",
            "      IfNode @ /example/file.soy:4:3",
            "        IfCondNode @ /example/file.soy:4:3",
            "          RawTextNode @ /example/file.soy:5:1",
            "        IfCondNode @ /example/file.soy:6:3",
            "          RawTextNode @ /example/file.soy:7:1",
            "        IfElseNode @ /example/file.soy:8:3",
            "          RawTextNode @ /example/file.soy:9:1",
            "      RawTextNode @ /example/file.soy:11:1",
            ""),
        Joiner.on('\n').join(
            "{namespace ns}",
            "{template .foo autoescape=\"deprecated-noncontextual\"}", // 1
            "  Hello,", // 2
            "  {if $skyIsBlue}", // 3
            "    Earth", // 4
            "  {elseif $isReallyReallyHot}", // 5
            "    Venus", // 6
            "  {else}", // 7
            "    Cincinatti", // 8
            "  {/if}", // 9
            "  !", // 10
            "{/template}", // 11
            "")
    );
  }
  public void testDoesntAccessPastEnd() {
    // Make sure that if we have a token stream that ends abruptly, we don't
    // look for a line number and break in a way that suppresses the real error
    // message.
    // JavaCC is pretty good about never using null as a token value.
    try {
      SoyFileSetParserBuilder.forSuppliers(
          SoyFileSupplier.Factory.create(
              "{template t autoescape=\"deprecated-noncontextual\"}\nHello, World!\n",
              SoyFileKind.SRC, "borken.soy"))
          .doRunInitialParsingPasses(false)
          .parse();
      fail();
    } catch (IllegalStateException e) {
      // Expected.
    }
  }
  // A template's location should span from its open tag through its close tag.
  public void testAdditionalSourceLocationInfo() throws Exception {
    String template =
        "{namespace ns}\n"
        + "{template .t}\n"
        + "  hello, world\n"
        + "{/template}\n";
    TemplateNode templateNode = new SoyFileParser(
        new SoyTypeRegistry(),
        new FixedIdGenerator(),
        new StringReader(template),
        SoyFileKind.SRC,
        "/example/file.soy",
        ExplodingErrorReporter.get())
        .parseSoyFile()
        .getChild(0);
    SourceLocation location = templateNode.getSourceLocation();
    assertEquals(2, location.getLineNumber());
    assertEquals(1, location.getBeginColumn());
    assertEquals(4, location.getEndLine());
    assertEquals(11, location.getEndColumn());
  }
  // Parses the given Soy source and compares the rendered node/location tree.
  private void assertSourceLocations(String asciiArtExpectedOutput, String soySourceCode) {
    SoyFileSetNode soyTree =
        SoyFileSetParserBuilder.forSuppliers(
            SoyFileSupplier.Factory.create(soySourceCode, SoyFileKind.SRC, "/example/file.soy"))
            .doRunInitialParsingPasses(false)
            .parse()
            .fileSet();
    String actual = new AsciiArtVisitor().exec(soyTree);
    assertEquals(asciiArtExpectedOutput, actual);
  }
  /**
   * Generates a concise readable summary of a soy tree and its source locations.
   */
  private static class AsciiArtVisitor extends AbstractSoyNodeVisitor<String> {
    final StringBuilder sb = new StringBuilder();
    int depth;
    @Override public String exec(SoyNode node) {
      visit(node);
      return sb.toString();
    }
    @Override protected void visitSoyNode(SoyNode node) {
      // Output a header like:
      // <indent> <node class> @ <location>
      // where indent is 2 spaces per level, and the @ sign is indented to the 31st column.
      for (int indent = depth; --indent >= 0;) {
        sb.append("  ");
      }
      String typeName = node.getClass().getSimpleName();
      sb.append(typeName);
      int pos = typeName.length() + 2 * depth;
      while (pos < 30) {
        sb.append(' ');
        ++pos;
      }
      sb.append(" @ ").append(node.getSourceLocation()).append('\n');
      if (node instanceof ParentSoyNode<?>) {
        ++depth;
        visitChildren((ParentSoyNode<?>) node);
        --depth;
      }
    }
  }
}
| apache-2.0 |
psiroky/droolsjbpm-knowledge | kie-internal/src/main/java/org/kie/event/io/ResourceChangeListener.java | 1432 | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.event.io;
import org.kie.ChangeSet;
/**
 * Interface implemented by parties that wish to be informed of resource changes,
 * via the ChangeSet interface.
 *
 * This interface, as well as ChangeSet, ResourceChangeNotifier, ResourceChangeMonitor and ResourceChangeScanner are still considered subject to change.
 * Use the XML format change-set, as
 * part of the ResourceType api when adding to KnowledgeBuilder, which is considered stable. KnowledgeBuilder currently ignores Added/Modified xml elements;
 * the KnowledgeAgent will use them, when rebuilding the KnowledgeBase.
 */
public interface ResourceChangeListener {
    /**
     * Called by the ResourceChangeNotifier when resources have changed; implementations
     * react to the supplied change set.
     *
     * @param changeSet the set of added/modified/removed resources
     */
    void resourcesChanged(ChangeSet changeSet);
}
| apache-2.0 |
jwren/intellij-community | jps/model-api/src/com/intellij/openapi/fileTypes/FileNameMatcherEx.java | 1259 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileTypes;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
/**
 * @author peter
 * @deprecated please implement {@link FileNameMatcher} directly and define {@link #acceptsCharSequence(CharSequence)} there.
 */
@Deprecated
@ApiStatus.ScheduledForRemoval
public abstract class FileNameMatcherEx implements FileNameMatcher {
  /**
   * Static delegation kept only for binary compatibility.
   *
   * @deprecated call {@link #acceptsCharSequence(CharSequence)} on the matcher instead
   */
  @Deprecated
  public static boolean acceptsCharSequence(@NotNull FileNameMatcher matcher, @NotNull CharSequence fileName) {
    return matcher.acceptsCharSequence(fileName);
  }
}
| apache-2.0 |
titimoby/golo-lang | src/main/java/fr/insalyon/citi/golo/runtime/OperatorType.java | 1240 | /*
* Copyright 2012-2015 Institut National des Sciences Appliquées de Lyon (INSA-Lyon)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.insalyon.citi.golo.runtime;
/**
 * The operators understood by the Golo runtime, each paired with its textual
 * representation as written in Golo source code.
 */
public enum OperatorType {

    PLUS("+"),
    MINUS("-"),
    TIMES("*"),
    DIVIDE("/"),
    MODULO("%"),

    EQUALS("=="),
    NOTEQUALS("!="),
    LESS("<"),
    LESSOREQUALS("<="),
    MORE(">"),
    MOREOREQUALS(">="),

    AND("and"),
    OR("or"),
    NOT("not"),

    IS("is"),
    ISNT("isnt"),
    OFTYPE("oftype"),
    ORIFNULL("orIfNull"),

    ANON_CALL(""),
    METHOD_CALL(":"),
    ELVIS_METHOD_CALL("?:");

    /** The source-level spelling of the operator (empty for anonymous calls). */
    private final String representation;

    OperatorType(String representation) {
        this.representation = representation;
    }

    /** Returns the operator's source-level spelling, e.g. {@code "+"} for {@code PLUS}. */
    @Override
    public String toString() {
        return representation;
    }
}
| apache-2.0 |
irontable/genie | genie-web/src/test/java/com/netflix/genie/web/configs/aws/package-info.java | 763 | /*
*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* Tests for AWS config classes.
*
* @author tgianos
* @since 3.0.0
*/
package com.netflix.genie.web.configs.aws;
| apache-2.0 |
smulikHakipod/zb4osgi | zb4o-basedriver/src/main/java/it/cnr/isti/zigbee/basedriver/discovery/LQINetworkBrowserThread.java | 17002 | /*
Copyright 2012-2013 CNR-ISTI, http://isti.cnr.it
Institute of Information Science and Technologies
of the Italian National Research Council
See the NOTICE file distributed with this work for additional
information regarding copyright ownership
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package it.cnr.isti.zigbee.basedriver.discovery;
import gnu.trove.TShortObjectHashMap;
import it.cnr.isti.primitivetypes.util.Integers;
import it.cnr.isti.thread.RunnableThread;
import it.cnr.isti.thread.ThreadUtils;
import it.cnr.isti.zigbee.api.ZigBeeNode;
import it.cnr.isti.zigbee.basedriver.Activator;
import it.cnr.isti.zigbee.basedriver.api.impl.ZigBeeNodeImpl;
import it.cnr.isti.zigbee.dongle.api.SimpleDriver;
import java.util.ArrayList;
import java.util.List;
import org.aaloa.zb4osgi.api.monitor.ZigBeeDiscoveryMonitor;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.itaca.ztool.api.ZToolAddress16;
import com.itaca.ztool.api.zdo.ZDO_IEEE_ADDR_REQ;
import com.itaca.ztool.api.zdo.ZDO_IEEE_ADDR_RSP;
import com.itaca.ztool.api.zdo.ZDO_MGMT_LQI_REQ;
import com.itaca.ztool.api.zdo.ZDO_MGMT_LQI_RSP;
import com.itaca.ztool.api.zdo.ZDO_MGMT_LQI_RSP.NeighborLqiListItemClass;
/**
*
* @author <a href="mailto:stefano.lenzi@isti.cnr.it">Stefano "Kismet" Lenzi</a>
* @author <a href="mailto:francesco.furfari@isti.cnr.it">Francesco Furfari</a>
* @author <a href="mailto:manlio.bacco@isti.cnr.it">Manlio Bacco</a>
* @version $LastChangedRevision: 67 $ ($LastChangedDate: 2010-10-01 04:08:24 +0200 (ven, 01 ott 2010) $)
* @since 0.7.0
*
*/
public class LQINetworkBrowserThread extends RunnableThread {
private static final Logger logger = LoggerFactory.getLogger(LQINetworkBrowserThread.class);
private static final short COORDINATOR_NWK_ADDRESS = 0;
private static final short LQI_START_INDEX = 0;
private final ImportingQueue queue;
final SimpleDriver driver;
final ArrayList<NetworkAddressNodeItem> toInspect = new ArrayList<NetworkAddressNodeItem>();
final TShortObjectHashMap<NetworkAddressNodeItem> alreadyInspected = new TShortObjectHashMap<NetworkAddressNodeItem>();
private List<NetworkAddressNodeItem> connectedNodesFound = new ArrayList<NetworkAddressNodeItem>();
private class NetworkAddressNodeItem {
final NetworkAddressNodeItem parent;
final short address;
ZigBeeNodeImpl node = null;
NetworkAddressNodeItem(NetworkAddressNodeItem addressTreeParent, short networkAddress){
parent = addressTreeParent;
address = networkAddress;
}
public String toString(){
if ( parent != null ) {
return "<" + parent.address + " / " + parent.node + "," + address + " / " + node + ">";
} else {
return "< NULL ," + address + " / " + node + ">";
}
}
}
public LQINetworkBrowserThread(ImportingQueue queue, SimpleDriver driver) {
this.queue = queue;
this.driver = driver;
}
private NetworkAddressNodeItem getIEEEAddress(short nwkAddress){
NetworkAddressNodeItem node = new NetworkAddressNodeItem(null, nwkAddress);
ZDO_IEEE_ADDR_RSP ieee_addr_resp = driver.sendZDOIEEEAddressRequest(
new ZDO_IEEE_ADDR_REQ(nwkAddress, ZDO_IEEE_ADDR_REQ.REQ_TYPE.SINGLE_DEVICE_RESPONSE,(byte) 0)
);
if( ieee_addr_resp == null) {
logger.debug("No ZDO_IEEE_ADDR_RSP from #{}", nwkAddress);
return null;
} else {
logger.debug(
"ZDO_IEEE_ADDR_RSP from {} with {} associated",
ieee_addr_resp.getIEEEAddress(), ieee_addr_resp.getAssociatedDeviceCount()
);
node.node = new ZigBeeNodeImpl(node.address, ieee_addr_resp.getIEEEAddress());
ZToolAddress16 nwk = new ZToolAddress16(
Integers.getByteAsInteger(node.address, 1),
Integers.getByteAsInteger(node.address, 0)
);
queue.push(nwk, ieee_addr_resp.getIEEEAddress());
announceNode(node);
return node;
}
}
private void announceNodes(List<NetworkAddressNodeItem> nodes){
if(nodes != null)
for(int i = 0; i < nodes.size(); i++)
announceNode(nodes.get(i));
}
private void announceNode(NetworkAddressNodeItem node){
if(node != null){
notifyBrowsedNode(node);
}
}
private List<NetworkAddressNodeItem> lqiRequestToNode(NetworkAddressNodeItem node, int index){
if(alreadyInspected.get(node.address) == null){
if(index == 0)
connectedNodesFound.clear();
short nwk = node.address;
ZToolAddress16 nwk16 = new ZToolAddress16(
Integers.getByteAsInteger(node.address, 1),
Integers.getByteAsInteger(node.address, 0)
);
logger.debug("ZDO_MGMT_LQI_REQ to {} from index {}", node.address, index);
ZDO_MGMT_LQI_RSP lqi_resp = driver.sendLQIRequest(new ZDO_MGMT_LQI_REQ(nwk16, index));
if( lqi_resp == null) {
logger.debug("No LQI answer from #{}", nwk);
return null;
} else {
logger.debug(
"Found {} neighbors on node {}",
lqi_resp.getNeighborLQICount(), node.address);
NeighborLqiListItemClass[] neighbors = (NeighborLqiListItemClass[]) lqi_resp.getNeighborLqiList();
if(neighbors != null){
for(int i = 0; i < neighbors.length; i++){
NeighborLqiListItemClass neighbor = (NeighborLqiListItemClass) neighbors[i];
logger.info("Node #{} visible from node #{} with LQI value {}", new Object[]{neighbor.NetworkAddress.get16BitValue(), nwk, neighbor.RxLQI});
NetworkAddressNodeItem result = getIEEEAddress( (short) neighbor.NetworkAddress.get16BitValue() );
NetworkAddressNodeItem newNode;
if(result != null) {
newNode = new NetworkAddressNodeItem(node, result.address);
connectedNodesFound.add(newNode);
} else {
newNode = new NetworkAddressNodeItem(node, (short)neighbor.NetworkAddress.get16BitValue());
connectedNodesFound.add(newNode);
logger.info("No response to ZDO_IEEE_ADDR_REQ from node {}", neighbor.NetworkAddress.get16BitValue());
}
}
}
// NeighborLQICount: neighbors IN THIS RESPONSE
// NeighborLQIEntries: all available neighbors
if ( lqi_resp.getNeighborLQIEntries() > ( lqi_resp.getNeighborLQICount() + index + 1 ) ) {
logger.debug("ZDO_MGMT_LQI_REQ new request to {} because of too many entries for a single request," +
" restarting from index {}", node.address, lqi_resp.getNeighborLQICount() + index + 1 );
lqiRequestToNode( node, lqi_resp.getNeighborLQICount() + index + 1 );
}
alreadyInspected.put( node.address, node );
return connectedNodesFound;
}
}
else{
logger.debug("Node {} inspected few seconds ago, request delayed", node.address);
return null;
}
}
private void inspectQueue(ArrayList<NetworkAddressNodeItem> toInspectTemp){
for(int i = 0; i < toInspect.size(); i++){
List<NetworkAddressNodeItem> children = new ArrayList<NetworkAddressNodeItem>();
NetworkAddressNodeItem node = toInspect.get(i);
if(node != null){
children = lqiRequestToNode(node, LQI_START_INDEX);
if(children != null){
toInspectTemp.addAll(children);
announceNodes(children);
}
}
}
}
public void task(){
final String threadName = Thread.currentThread().getName();
logger.info("{} STARTED Succesfully", threadName);
while( ! isDone() ){
cleanUpWalkingTree();
logger.info("Inspecting ZigBee network for new nodes");
try{
NetworkAddressNodeItem coordinator = getIEEEAddress(COORDINATOR_NWK_ADDRESS);
if(coordinator != null){
//gt = new GraphThread();
List<NetworkAddressNodeItem> coordinatorChildren = lqiRequestToNode(coordinator, LQI_START_INDEX);
if(coordinatorChildren != null)
toInspect.addAll(coordinatorChildren);
ArrayList<NetworkAddressNodeItem> toInspectTemp = new ArrayList<NetworkAddressNodeItem>();
while(!toInspect.isEmpty()){
inspectQueue(toInspectTemp);
toInspect.clear();
if(!toInspectTemp.isEmpty())
for(int i = 0; i < toInspectTemp.size(); i++)
toInspect.add(toInspectTemp.get(i));
toInspectTemp.clear();
}
toInspect.clear();
}
long wakeUpTime = System.currentTimeMillis() + Activator.getCurrentConfiguration().getNetworkBrowsingPeriod();
if ( ! isDone() ) ThreadUtils.waitingUntil( wakeUpTime );
logger.info("Network browsing completed, waiting until {}", wakeUpTime);
//gt.run();
}
catch(Exception e){
e.printStackTrace();
}
}
//gt.end();
logger.info("{} TERMINATED Succesfully", threadName);
}
private void cleanUpWalkingTree() {
alreadyInspected.clear();
toInspect.clear();
}
/*
private ArrayList<NetworkAddressNodeItem> addChildrenNodesToInspectingQueue(NetworkAddressNodeItem inspecting, ZDO_IEEE_ADDR_RSP result) {
int start = 0;
final ArrayList<NetworkAddressNodeItem> adding = new ArrayList<NetworkAddressNodeItem>();
do{
short[] toAdd = result.getAssociatedDeviceList();
for (int i = 0; i < toAdd.length; i++) {
logger.info("Found node #{} associated to node #{}",toAdd[i],inspecting.address);
final NetworkAddressNodeItem next = new NetworkAddressNodeItem(inspecting, toAdd[i]);
final NetworkAddressNodeItem found = alreadyInspected.get(toAdd[i]);
if( found != null ) {
//NOTE Logging this wrong behavior but doing nothing
logger.error(
"BROKEN ZIGBEE UNDERSTANDING (while walking address-tree): " +
"found twice the same node with network address {} ", toAdd[i]
);
logger.debug("Previus node data was {} while current has parent {}", found, inspecting);
} else {
adding.add(next);
}
}
if( toAdd.length + result.getStartIndex() >= result.getAssociatedDeviceCount() ) {
//NOTE No more node connected to inspecting
return adding;
}
start += toAdd.length;
logger.info(
"Node #{} as many too many device connected to it received only {} out of {}, " +
"we need to inspect it once more", new Object[]{
inspecting.address, toAdd.length, result.getAssociatedDeviceCount()
});
result = driver.sendZDOIEEEAddressRequest(
new ZDO_IEEE_ADDR_REQ(inspecting.address,ZDO_IEEE_ADDR_REQ.REQ_TYPE.EXTENDED,(byte) start )
);
if ( result == null ){
logger.error("Faild to further inspect connected device to node #{}", inspecting.address);
}
}while(result != null);
return adding;
}
private ArrayList<NetworkAddressNodeItem> addChildrenNodesToInspectingQueue(NetworkAddressNodeItem inspecting, ZDO_MGMT_LQI_RSP result) {
//int start = 0;
final ArrayList<NetworkAddressNodeItem> adding = new ArrayList<NetworkAddressNodeItem>();
//do{
NeighborLqiListItemClass[] list = (NeighborLqiListItemClass[]) result.getNeighborLqiList();
List<ZToolAddress16> toAdd = new ArrayList<ZToolAddress16>();
for(int i = 0; i < list.length; i++)
toAdd.add(list[i].NetworkAddress);
//List<ZToolAddress16> toAdd = result.getNeighborAddressList();
for (int i = 0; i < toAdd.size(); i++) {
logger.info("Found node #{} associated to node #{}", toAdd.get(i), inspecting.address);
final NetworkAddressNodeItem next = new NetworkAddressNodeItem(inspecting, (short)toAdd.get(i).get16BitValue());
final NetworkAddressNodeItem found = alreadyInspected.get((short)toAdd.get(i).get16BitValue());
if( found != null ) {
//NOTE Logging this wrong behavior but doing nothing
logger.error(
"BROKEN ZIGBEE UNDERSTANDING (while walking address-tree): " +
"found twice the same node with network address {} ", (short)toAdd.get(i).get16BitValue()
);
logger.debug("Previus node data was {} while current has parent {}", found, inspecting);
} else {
adding.add(next);
}
}
//if( toAdd.size() + result.getStartIndex() >= result.getAssociatedDeviceCount() ) {
//NOTE No more node connected to inspecting
return adding;
// }
// start += toAdd.length;
//
// logger.info(
// "Node #{} as many too many device connected to it received only {} out of {}, " +
// "we need to inspect it once more", new Object[]{
// inspecting.address, toAdd.length, result.getAssociatedDeviceCount()
// });
// result = driver.sendZDOIEEEAddressRequest(
// new ZDO_IEEE_ADDR_REQ(inspecting.address,ZDO_IEEE_ADDR_REQ.REQ_TYPE.EXTENDED,(byte) start )
// );
// if ( result == null ){
// logger.error("Faild to further inspect connected device to node #{}", inspecting.address);
// }
// }while(result != null);
//
// return adding;
}
*/
private void notifyBrowsedNode(NetworkAddressNodeItem item) {
ServiceReference[] refs = null;
try {
refs = Activator.getBundleContext().getServiceReferences(ZigBeeDiscoveryMonitor.class.getName(), null);
} catch (InvalidSyntaxException ex) {
logger.error( "CODE BROKEN we need to recompile and fix", ex );
}
if ( refs == null ){
return ;
}
final ZigBeeNode child = item.node;
final ZigBeeNode parent;
if ( item.parent == null ){
//Notifying the root node
parent = null;
}else if( item.parent.node == null ){
//This should not happen
logger.error("BROKEN CODE: Found a parent node that is null, but it has a parent");
parent = null;
}else{
parent = item.parent.node;
}
for (int i = 0; i < refs.length; i++) {
final ZigBeeDiscoveryMonitor listener;
try{
listener = (ZigBeeDiscoveryMonitor) Activator.getBundleContext().getService(refs[i]);
listener.browsedNode( parent, child );
}catch(Exception ex) {
logger.error("Handled excepetion during notification", ex);
}
}
}
} | apache-2.0 |
rekhajoshm/pigfork | src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java | 16174 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.mapReduceLayer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.HashSet;
import java.util.HashMap;
import java.util.Comparator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.pig.ExecType;
import org.apache.pig.FuncSpec;
import org.apache.pig.IndexableLoadFunc;
import org.apache.pig.LoadFunc;
import org.apache.pig.PigException;
import org.apache.pig.CollectableLoadFunc;
import org.apache.pig.OrderedLoadFunc;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
import org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.Pair;
import org.apache.pig.impl.util.UDFContext;
/**
 * {@link InputFormat} used by Pig map-reduce jobs. It wraps the InputFormats
 * of the {@link LoadFunc}s declared by the Pig script: getSplits() asks each
 * input's underlying InputFormat for its splits and wraps them into
 * {@link PigSplit}s (optionally combining small splits), while
 * createRecordReader() rebuilds the input-specific configuration and
 * delegates to the wrapped InputFormat's record reader.
 */
public class PigInputFormat extends InputFormat<Text, Tuple> {
    public static final Log log = LogFactory
            .getLog(PigInputFormat.class);
    // Accepts only non-hidden paths (names not starting with "_" or ".").
    // NOTE(review): not referenced anywhere in this class as shown — possibly
    // kept for subclasses or historic reasons; confirm before removing.
    private static final PathFilter hiddenFileFilter = new PathFilter() {
        public boolean accept(Path p) {
            String name = p.getName();
            return !name.startsWith("_") && !name.startsWith(".");
        }
    };
    // Conf key under which the serialized list of input FileSpecs is stored.
    public static final String PIG_INPUTS = "pig.inputs";
    /**
     * @deprecated Use {@link UDFContext} instead in the following way to get
     * the job's {@link Configuration}:
     * <pre>UdfContext.getUdfContext().getJobConf()</pre>
     */
    @Deprecated
    public static Configuration sJob;
    /**
     * Creates the record reader for the given split by rebuilding the
     * input-specific configuration that was used in getSplits() to produce the
     * split, and then delegating to the load function's own InputFormat.
     *
     * @see org.apache.hadoop.mapreduce.InputFormat#createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext)
     */
    @Override
    public org.apache.hadoop.mapreduce.RecordReader<Text, Tuple> createRecordReader(
            org.apache.hadoop.mapreduce.InputSplit split,
            TaskAttemptContext context) throws IOException,
            InterruptedException {
        // We need to create a TaskAttemptContext based on the Configuration which
        // was used in the getSplits() to produce the split supplied here. For
        // this, let's find out the input of the script which produced the split
        // supplied here and then get the corresponding Configuration and setup
        // TaskAttemptContext based on it and then call the real InputFormat's
        // createRecordReader() method
        PigSplit pigSplit = (PigSplit)split;
        activeSplit = pigSplit;
        // XXX hadoop 20 new API integration: get around a hadoop 20 bug by
        // passing total # of splits to each split so it can be retrieved
        // here and set it to the configuration object. This number is needed
        // by PoissonSampleLoader to compute the number of samples
        int n = pigSplit.getTotalSplits();
        context.getConfiguration().setInt("pig.mapsplits.count", n);
        Configuration conf = context.getConfiguration();
        LoadFunc loadFunc = getLoadFunc(pigSplit.getInputIndex(), conf);
        // Pass loader signature to LoadFunc and to InputFormat through
        // the conf
        passLoadSignature(loadFunc, pigSplit.getInputIndex(), conf);
        // merge entries from split specific conf into the conf we got
        PigInputFormat.mergeSplitSpecificConf(loadFunc, pigSplit, conf);
        // for backward compatibility
        PigInputFormat.sJob = conf;
        InputFormat inputFormat = loadFunc.getInputFormat();
        // Per-input record limits (from LIMIT clauses); the one matching this
        // split's input index is handed to the record reader.
        List<Long> inpLimitLists =
            (ArrayList<Long>)ObjectSerializer.deserialize(
                    conf.get("pig.inpLimits"));
        return new PigRecordReader(inputFormat, pigSplit, loadFunc, context, inpLimitLists.get(pigSplit.getInputIndex()));
    }
    /**
     * get the corresponding configuration for the input on which the split
     * is based and merge it with the Conf supplied
     *
     * package level access so that this is not publicly used elsewhere
     *
     * @param loadFunc the load function of the split's input
     * @param pigSplit the split whose input-specific conf should be merged
     * @param originalConf the configuration to merge into (modified in place)
     * @throws IOException if setting the load location fails
     */
    static void mergeSplitSpecificConf(LoadFunc loadFunc, PigSplit pigSplit, Configuration originalConf)
    throws IOException {
        // set up conf with entries from input specific conf
        Job job = new Job(originalConf);
        loadFunc.setLocation(getLoadLocation(pigSplit.getInputIndex(),
                originalConf), job);
        // The above setLocation call could write to the conf within
        // the job - merge that updated conf with original conf
        ConfigurationUtil.mergeConf(originalConf, job.getConfiguration());
    }
    /**
     * Instantiates the {@link LoadFunc} declared for the given input.
     *
     * @param inputIndex index of the input within the serialized "pig.inputs" list
     * @param conf configuration holding the serialized input list
     * @return a freshly instantiated LoadFunc for that input
     * @throws IOException if the input list cannot be deserialized
     */
    @SuppressWarnings("unchecked")
    private static LoadFunc getLoadFunc(int inputIndex, Configuration conf) throws IOException {
        ArrayList<FileSpec> inputs =
            (ArrayList<FileSpec>) ObjectSerializer.deserialize(
                    conf.get(PIG_INPUTS));
        FuncSpec loadFuncSpec = inputs.get(inputIndex).getFuncSpec();
        return (LoadFunc) PigContext.instantiateFuncFromSpec(loadFuncSpec);
    }
    /**
     * Looks up the location (file name/URI) of the given input.
     *
     * @param inputIndex index of the input within the serialized "pig.inputs" list
     * @param conf configuration holding the serialized input list
     * @return the input's location string
     * @throws IOException if the input list cannot be deserialized
     */
    @SuppressWarnings("unchecked")
    private static String getLoadLocation(int inputIndex, Configuration conf) throws IOException {
        ArrayList<FileSpec> inputs =
            (ArrayList<FileSpec>) ObjectSerializer.deserialize(
                    conf.get(PIG_INPUTS));
        return inputs.get(inputIndex).getFileName();
    }
    /**
     * Pass loader signature to LoadFunc and to InputFormat through
     * the conf
     * @param loadFunc the Loadfunc to set the signature on
     * @param inputIndex the index of the input corresponding to the loadfunc
     * @param conf the Configuration object into which the signature should be
     * set
     * @throws IOException on failure
     */
    @SuppressWarnings("unchecked")
    static void passLoadSignature(LoadFunc loadFunc, int inputIndex,
            Configuration conf) throws IOException {
        List<String> inpSignatureLists =
                (ArrayList<String>)ObjectSerializer.deserialize(
                        conf.get("pig.inpSignatures"));
        // signature can be null for intermediate jobs where it will not
        // be required to be passed down
        if(inpSignatureLists.get(inputIndex) != null) {
            loadFunc.setUDFContextSignature(inpSignatureLists.get(inputIndex));
            conf.set("pig.loader.signature", inpSignatureLists.get(inputIndex));
        }
        MapRedUtil.setupUDFContext(conf);
    }
    /**
     * Computes the splits for the whole job: for every input declared in
     * "pig.inputs", instantiates its LoadFunc, asks the underlying InputFormat
     * for splits, and wraps them into {@link PigSplit}s (combined when small
     * and combinable). The total split count and multi-input flag are stamped
     * onto every split for the back-end.
     *
     * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(org.apache.hadoop.mapreduce.JobContext)
     */
    @SuppressWarnings("unchecked")
    @Override
    public List<InputSplit> getSplits(JobContext jobcontext)
            throws IOException, InterruptedException {
        Configuration conf = jobcontext.getConfiguration();
        ArrayList<FileSpec> inputs;
        ArrayList<ArrayList<OperatorKey>> inpTargets;
        PigContext pigContext;
        try {
            inputs = (ArrayList<FileSpec>) ObjectSerializer
                    .deserialize(conf.get("pig.inputs"));
            inpTargets = (ArrayList<ArrayList<OperatorKey>>) ObjectSerializer
                    .deserialize(conf.get("pig.inpTargets"));
            pigContext = (PigContext) ObjectSerializer.deserialize(conf
                    .get("pig.pigContext"));
            PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(conf.get("udf.import.list")));
            MapRedUtil.setupUDFContext(conf);
        } catch (Exception e) {
            int errCode = 2094;
            String msg = "Unable to deserialize object.";
            throw new ExecException(msg, errCode, PigException.BUG, e);
        }
        ArrayList<InputSplit> splits = new ArrayList<InputSplit>();
        for (int i = 0; i < inputs.size(); i++) {
            try {
                Path path = new Path(inputs.get(i).getFileName());
                FileSystem fs;
                boolean isFsPath = true;
                try {
                    fs = path.getFileSystem(conf);
                } catch (Exception e) {
                    // If an application specific
                    // scheme was used
                    // (e.g.: "hbase://table") we will fail
                    // getting the file system. That's
                    // ok, we just use the dfs in that case.
                    fs = new Path("/").getFileSystem(conf);
                    isFsPath = false;
                }
                // if the execution is against Mapred DFS, set
                // working dir to /user/<userid>
                if(!pigContext.getExecType().isLocal()) {
                    fs.setWorkingDirectory(jobcontext.getWorkingDirectory());
                }
                // first pass input location to the loader - for this send a
                // clone of the configuration we have - this is so that if the
                // loader (or the inputformat of the loader) decide to store the
                // input location into the configuration (for example,
                // FileInputFormat stores this in mapred.input.dir in the conf),
                // then for different inputs, the loader's don't end up
                // over-writing the same conf.
                FuncSpec loadFuncSpec = inputs.get(i).getFuncSpec();
                LoadFunc loadFunc = (LoadFunc) PigContext.instantiateFuncFromSpec(
                        loadFuncSpec);
                // Splits are not combinable for indexed/ordered loaders, and can
                // also be disabled globally via "pig.noSplitCombination".
                boolean combinable = !(loadFunc instanceof MergeJoinIndexer
                        || loadFunc instanceof IndexableLoadFunc
                        || (loadFunc instanceof CollectableLoadFunc && loadFunc instanceof OrderedLoadFunc));
                if (combinable)
                    combinable = !conf.getBoolean("pig.noSplitCombination", false);
                Configuration confClone = new Configuration(conf);
                Job inputSpecificJob = new Job(confClone);
                // Pass loader signature to LoadFunc and to InputFormat through
                // the conf
                passLoadSignature(loadFunc, i, inputSpecificJob.getConfiguration());
                loadFunc.setLocation(inputs.get(i).getFileName(),
                        inputSpecificJob);
                // The above setLocation call could write to the conf within
                // the inputSpecificJob - use this updated conf
                // get the InputFormat from it and ask for splits
                InputFormat inpFormat = loadFunc.getInputFormat();
                List<InputSplit> oneInputSplits = inpFormat.getSplits(
                        HadoopShims.createJobContext(inputSpecificJob.getConfiguration(),
                                jobcontext.getJobID()));
                List<InputSplit> oneInputPigSplits = getPigSplits(
                        oneInputSplits, i, inpTargets.get(i),
                        HadoopShims.getDefaultBlockSize(fs, isFsPath? path: fs.getWorkingDirectory()),
                        combinable, confClone);
                splits.addAll(oneInputPigSplits);
            } catch (ExecException ee) {
                throw ee;
            } catch (Exception e) {
                int errCode = 2118;
                String msg = "Unable to create input splits for: " + inputs.get(i).getFileName();
                if(e.getMessage() !=null && (!e.getMessage().isEmpty()) ){
                    throw new ExecException(e.getMessage(), errCode, PigException.BUG, e);
                }else{
                    throw new ExecException(msg, errCode, PigException.BUG, e);
                }
            }
        }
        // XXX hadoop 20 new API integration: get around a hadoop 20 bug by
        // passing total # of splits to each split so that it can be retrieved
        // in the RecordReader method when called by mapreduce framework later.
        int n = splits.size();
        // also passing the multi-input flag to the back-end so that
        // the multi-input record counters can be created
        int m = inputs.size();
        boolean disableCounter = conf.getBoolean("pig.disable.counter", false);
        if ((m > 1) && disableCounter) {
            log.info("Disable Pig custom input counters");
        }
        for (InputSplit split : splits) {
            ((PigSplit) split).setTotalSplits(n);
            if (m > 1) ((PigSplit) split).setMultiInputs(true);
            ((PigSplit) split).setDisableCounter(disableCounter);
        }
        return splits;
    }
    /**
     * Wraps the raw splits of one input into {@link PigSplit}s. When
     * combinable, small splits are merged up to "pig.maxCombinedSplitSize"
     * (defaulting to the filesystem block size); otherwise each raw split
     * becomes exactly one PigSplit.
     *
     * @param oneInputSplits the raw splits returned by the input's InputFormat
     * @param inputIndex index of the input these splits belong to
     * @param targetOps operator keys of the plan operators fed by this input
     * @param blockSize default block size, used as the combine-size fallback
     * @param combinable whether split combination is allowed for this input
     * @param conf configuration to attach to each resulting PigSplit
     * @return the wrapped (and possibly combined) splits
     */
    protected List<InputSplit> getPigSplits(List<InputSplit> oneInputSplits,
            int inputIndex, ArrayList<OperatorKey> targetOps, long blockSize, boolean combinable, Configuration conf)
            throws IOException, InterruptedException {
        ArrayList<InputSplit> pigSplits = new ArrayList<InputSplit>();
        if (!combinable) {
            int splitIndex = 0;
            for (InputSplit inputSplit : oneInputSplits) {
                PigSplit pigSplit = new PigSplit(new InputSplit[] {inputSplit}, inputIndex, targetOps,
                        splitIndex++);
                pigSplit.setConf(conf);
                pigSplits.add(pigSplit);
            }
            return pigSplits;
        } else {
            long maxCombinedSplitSize = conf.getLong("pig.maxCombinedSplitSize", 0);
            if (maxCombinedSplitSize== 0)
                // default is the block size
                maxCombinedSplitSize = blockSize;
            List<List<InputSplit>> combinedSplits =
                    MapRedUtil.getCombinePigSplits(oneInputSplits, maxCombinedSplitSize, conf);
            for (int i = 0; i < combinedSplits.size(); i++)
                pigSplits.add(createPigSplit(combinedSplits.get(i), inputIndex, targetOps, i, conf));
            return pigSplits;
        }
    }
    // Builds a single PigSplit out of a group of combined raw splits.
    private InputSplit createPigSplit(List<InputSplit> combinedSplits,
            int inputIndex, ArrayList<OperatorKey> targetOps, int splitIndex, Configuration conf)
    {
        PigSplit pigSplit = new PigSplit(combinedSplits.toArray(new InputSplit[0]), inputIndex, targetOps, splitIndex);
        pigSplit.setConf(conf);
        return pigSplit;
    }
    /**
     * @return the split currently being read by this task, as recorded by
     * createRecordReader(); may be null before any reader has been created.
     */
    public static PigSplit getActiveSplit() {
        return activeSplit;
    }
    // Last split handed to createRecordReader() in this JVM (see getActiveSplit).
    private static PigSplit activeSplit;
    }
| apache-2.0 |
BigAppOS/BigApp_Discuz_Android | Clan/Clan/src/com/youzu/clan/threadandarticle/model/ShareData.java | 1279 | package com.youzu.clan.threadandarticle.model;
import com.youzu.android.framework.json.annotation.JSONField;
/**
* Created by Zhao on 15/11/13.
*/
public class ShareData {

    /** Image shown alongside the shared content (JSON key: share_image). */
    private String shareImage;

    /** Address of the page being shared (JSON key: share_url). */
    private String shareUrl;

    /** Subject / title of the shared page (JSON key: share_subject). */
    private String shareSubject;

    /** Short description of the shared page (JSON key: share_abstract). */
    private String shareAbstract;

    public String getShareImage() {
        return this.shareImage;
    }

    @JSONField(name = "share_image")
    public void setShareImage(String image) {
        this.shareImage = image;
    }

    public String getShareUrl() {
        return this.shareUrl;
    }

    @JSONField(name = "share_url")
    public void setShareUrl(String url) {
        this.shareUrl = url;
    }

    public String getShareSubject() {
        return this.shareSubject;
    }

    @JSONField(name = "share_subject")
    public void setShareSubject(String subject) {
        this.shareSubject = subject;
    }

    public String getShareAbstract() {
        return this.shareAbstract;
    }

    @JSONField(name = "share_abstract")
    public void setShareAbstract(String abstractText) {
        this.shareAbstract = abstractText;
    }
}
| apache-2.0 |
buehner/shogun2 | src/shogun-core-main/src/main/java/de/terrestris/shoguncore/model/Role.java | 2580 | package de.terrestris.shoguncore.model;
import javax.persistence.Cacheable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
/**
* @author Nils Bühner
*/
@Entity
@Table
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class Role extends PersistentObject {

    private static final long serialVersionUID = 1L;

    /** Unique, mandatory name identifying this role. */
    @Column(unique = true, nullable = false)
    private String name;

    /** Optional human-readable description of the role. */
    @Column
    private String description;

    /**
     * Default constructor (required by the persistence framework).
     */
    public Role() {
    }

    /**
     * Creates a role with the given name.
     *
     * @param roleName the unique name of the new role
     */
    public Role(String roleName) {
        this.name = roleName;
    }

    /**
     * @return the name
     */
    public String getName() {
        return this.name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the description
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @see java.lang.Object#hashCode()
     * <p>
     * Only getter methods are used here, as recommended for Hibernate-managed
     * entities (see http://stackoverflow.com/questions/27581/overriding-equals
     * -and-hashcode-in-java).
     */
    @Override
    public int hashCode() {
        // two randomly chosen prime numbers
        return new HashCodeBuilder(13, 53)
                .appendSuper(super.hashCode())
                .append(getName())
                .append(getDescription())
                .toHashCode();
    }

    /**
     * @see java.lang.Object#equals(java.lang.Object)
     * <p>
     * Only getter methods are used here, as recommended for Hibernate-managed
     * entities (see http://stackoverflow.com/questions/27581/overriding-equals
     * -and-hashcode-in-java).
     */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Role)) {
            return false;
        }
        Role that = (Role) obj;
        return new EqualsBuilder()
                .appendSuper(super.equals(that))
                .append(getName(), that.getName())
                .append(getDescription(), that.getDescription())
                .isEquals();
    }
}
| apache-2.0 |
apache/axis1-java | axis-rt-core/src/main/java/org/apache/axis/utils/IdentityHashMap.java | 1958 | /**
* Created by IntelliJ IDEA.
* User: srida01
* Date: Dec 2, 2002
* Time: 10:38:46 AM
* To change this template use Options | File Templates.
*/
package org.apache.axis.utils;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* IdentityHashMap similar to JDK1.4's java.util.IdentityHashMap
* @author Davanum Srinivas (dims@yahoo.com)
*/
public class IdentityHashMap extends HashMap
{
    /**
     * Constructor for IdentityHashMap.
     * @param initialCapacity initial capacity of the backing map
     * @param loadFactor load factor of the backing map
     */
    public IdentityHashMap(int initialCapacity, float loadFactor)
    {
        super(initialCapacity, loadFactor);
    }
    /**
     * Constructor for IdentityHashMap.
     * @param initialCapacity initial capacity of the backing map
     */
    public IdentityHashMap(int initialCapacity)
    {
        super(initialCapacity);
    }
    /**
     * Constructor for IdentityHashMap.
     */
    public IdentityHashMap()
    {
        super();
    }
    /**
     * Constructor for IdentityHashMap.
     * @param t map whose mappings are copied into this identity map
     */
    public IdentityHashMap(Map t)
    {
        super();
        // FIX: the original delegated to super(t), which stored the RAW keys
        // of t. All lookups (get/containsKey/remove) wrap keys in IDKey, so
        // entries copied that way could never be found again. Copy through
        // putAll() so every key gets wrapped consistently.
        putAll(t);
    }
    /**
     * @see Map#get(Object)
     */
    public Object get(Object key)
    {
        return super.get(new IDKey(key));
    }
    /**
     * @see Map#put(Object, Object)
     */
    public Object put(Object key, Object value)
    {
        return super.put(new IDKey(key), value);
    }
    /**
     * Copies all mappings of <code>t</code>, wrapping each key in an IDKey.
     * Overridden because the inherited implementation would store raw keys
     * and break identity lookups (same defect the copy constructor had).
     *
     * @see Map#putAll(Map)
     */
    public void putAll(Map t)
    {
        Iterator entries = t.entrySet().iterator();
        while (entries.hasNext())
        {
            Map.Entry entry = (Map.Entry) entries.next();
            put(entry.getKey(), entry.getValue());
        }
    }
    /**
     * adds an object to the Map. new Identity(obj) is used as key
     *
     * @param value the value to store, keyed by its own identity
     * @return the result of the underlying put when the value was absent,
     *         otherwise <code>null</code> (value already present)
     */
    public Object add(Object value)
    {
        Object key = new IDKey(value);
        if (! super.containsKey(key))
        {
            return super.put(key, value);
        }
        else return null;
    }
    /**
     * @see Map#remove(Object)
     */
    public Object remove(Object key)
    {
        return super.remove(new IDKey(key));
    }
    /**
     * @see Map#containsKey(Object)
     */
    public boolean containsKey(Object key)
    {
        return super.containsKey(new IDKey(key));
    }
    // NOTE(review): keySet(), entrySet() and values() are inherited and will
    // expose the internal IDKey wrappers rather than the original keys —
    // confirm no caller iterates keys before tightening this further.
}
| apache-2.0 |
Kromzem/gdx-facebook | ios-moe/src/de/tomgrill/gdxfacebook/iosmoe/bindings/sdk/login/fbsdkloginkit/FBSDKTooltipView.java | 17105 | package de.tomgrill.gdxfacebook.iosmoe.bindings.sdk.login.fbsdkloginkit;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.c.ann.Variadic;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NFloat;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
import apple.NSObject;
import apple.coregraphics.struct.CGPoint;
import apple.coregraphics.struct.CGRect;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSDate;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.uikit.UITraitCollection;
import apple.uikit.UIView;
/**
 * Machine-generated Multi-OS Engine (Nat/J) binding for the Objective-C class
 * {@code FBSDKTooltipView} from the FBSDKLoginKit framework. Most members are
 * pass-through bindings of the inherited {@code UIView} API; the
 * tooltip-specific surface is: {@link #initWithTaglineMessageColorStyle},
 * {@link #presentFromView}, {@link #presentInViewWithArrowPositionDirection},
 * {@link #dismiss}, and the tagline/message/colorStyle/displayDuration
 * accessors.
 *
 * NOTE: generated code -- do not edit by hand; regenerate from the framework
 * headers instead.
 */
@Generated
@Library("FBSDKLoginKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class FBSDKTooltipView extends UIView {
    static {
        // Registers this binding class with the Nat/J runtime when loaded.
        NatJ.register();
    }

    @Generated
    protected FBSDKTooltipView(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Selector("addKeyframeWithRelativeStartTime:relativeDuration:animations:")
    public static native void addKeyframeWithRelativeStartTimeRelativeDurationAnimations(
            double frameStartTime,
            double frameDuration,
            @ObjCBlock(name = "call_addKeyframeWithRelativeStartTimeRelativeDurationAnimations") UIView.Block_addKeyframeWithRelativeStartTimeRelativeDurationAnimations animations);

    @Generated
    @Owned
    @Selector("alloc")
    public static native FBSDKTooltipView alloc();

    @Generated
    @Selector("allocWithZone:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object allocWithZone(VoidPtr zone);

    @Generated
    @Selector("animateKeyframesWithDuration:delay:options:animations:completion:")
    public static native void animateKeyframesWithDurationDelayOptionsAnimationsCompletion(
            double duration,
            double delay,
            @NUInt long options,
            @ObjCBlock(name = "call_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_3") UIView.Block_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_3 animations,
            @ObjCBlock(name = "call_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_4") UIView.Block_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("animateWithDuration:animations:")
    public static native void animateWithDurationAnimations(
            double duration,
            @ObjCBlock(name = "call_animateWithDurationAnimations") UIView.Block_animateWithDurationAnimations animations);

    @Generated
    @Selector("animateWithDuration:animations:completion:")
    public static native void animateWithDurationAnimationsCompletion(
            double duration,
            @ObjCBlock(name = "call_animateWithDurationAnimationsCompletion_1") UIView.Block_animateWithDurationAnimationsCompletion_1 animations,
            @ObjCBlock(name = "call_animateWithDurationAnimationsCompletion_2") UIView.Block_animateWithDurationAnimationsCompletion_2 completion);

    @Generated
    @Selector("animateWithDuration:delay:options:animations:completion:")
    public static native void animateWithDurationDelayOptionsAnimationsCompletion(
            double duration,
            double delay,
            @NUInt long options,
            @ObjCBlock(name = "call_animateWithDurationDelayOptionsAnimationsCompletion_3") UIView.Block_animateWithDurationDelayOptionsAnimationsCompletion_3 animations,
            @ObjCBlock(name = "call_animateWithDurationDelayOptionsAnimationsCompletion_4") UIView.Block_animateWithDurationDelayOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("animateWithDuration:delay:usingSpringWithDamping:initialSpringVelocity:options:animations:completion:")
    public static native void animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion(
            double duration,
            double delay,
            @NFloat double dampingRatio,
            @NFloat double velocity,
            @NUInt long options,
            @ObjCBlock(name = "call_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_5") UIView.Block_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_5 animations,
            @ObjCBlock(name = "call_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_6") UIView.Block_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_6 completion);

    @Generated
    @Selector("appearance")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearance();

    @Generated
    @ProtocolClassMethod("appearance")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearance() {
        return appearance();
    }

    @Generated
    @Selector("appearanceForTraitCollection:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceForTraitCollection(
            UITraitCollection trait);

    @Generated
    @ProtocolClassMethod("appearanceForTraitCollection")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceForTraitCollection(UITraitCollection trait) {
        return appearanceForTraitCollection(trait);
    }

    @Generated
    @Variadic()
    @Deprecated
    @Selector("appearanceForTraitCollection:whenContainedIn:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceForTraitCollectionWhenContainedIn(
            UITraitCollection trait,
            @Mapped(ObjCObjectMapper.class) Object ContainerClass,
            Object... varargs);

    @Generated
    @Deprecated
    @ProtocolClassMethod("appearanceForTraitCollectionWhenContainedIn")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceForTraitCollectionWhenContainedIn(
            UITraitCollection trait,
            @Mapped(ObjCObjectMapper.class) Object ContainerClass,
            Object... varargs) {
        return appearanceForTraitCollectionWhenContainedIn(trait,
                ContainerClass, varargs);
    }

    @Generated
    @Selector("appearanceForTraitCollection:whenContainedInInstancesOfClasses:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceForTraitCollectionWhenContainedInInstancesOfClasses(
            UITraitCollection trait, NSArray<?> containerTypes);

    @Generated
    @ProtocolClassMethod("appearanceForTraitCollectionWhenContainedInInstancesOfClasses")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceForTraitCollectionWhenContainedInInstancesOfClasses(
            UITraitCollection trait, NSArray<?> containerTypes) {
        return appearanceForTraitCollectionWhenContainedInInstancesOfClasses(
                trait, containerTypes);
    }

    @Generated
    @Variadic()
    @Deprecated
    @Selector("appearanceWhenContainedIn:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceWhenContainedIn(
            @Mapped(ObjCObjectMapper.class) Object ContainerClass,
            Object... varargs);

    @Generated
    @Deprecated
    @ProtocolClassMethod("appearanceWhenContainedIn")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceWhenContainedIn(
            @Mapped(ObjCObjectMapper.class) Object ContainerClass,
            Object... varargs) {
        return appearanceWhenContainedIn(ContainerClass, varargs);
    }

    @Generated
    @Selector("appearanceWhenContainedInInstancesOfClasses:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceWhenContainedInInstancesOfClasses(
            NSArray<?> containerTypes);

    @Generated
    @ProtocolClassMethod("appearanceWhenContainedInInstancesOfClasses")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceWhenContainedInInstancesOfClasses(
            NSArray<?> containerTypes) {
        return appearanceWhenContainedInInstancesOfClasses(containerTypes);
    }

    @Generated
    @Selector("areAnimationsEnabled")
    public static native boolean areAnimationsEnabled();

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("beginAnimations:context:")
    public static native void beginAnimationsContext(String animationID,
            VoidPtr context);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(
            @Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("class")
    public static native Class class_objc_static();

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("clearTextInputContextIdentifier:")
    public static native void clearTextInputContextIdentifier(String identifier);

    // FBSDK-specific: color style of the tooltip (maps to the native
    // FBSDKTooltipColorStyle enum, passed as an unsigned integer).
    @Generated
    @Selector("colorStyle")
    @NUInt
    public native long colorStyle();

    @Generated
    @Selector("commitAnimations")
    public static native void commitAnimations();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    // FBSDK-specific: dismisses the tooltip.
    @Generated
    @Selector("dismiss")
    public native void dismiss();

    // FBSDK-specific: how long the tooltip is displayed, in seconds.
    @Generated
    @Selector("displayDuration")
    public native double displayDuration();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("inheritedAnimationDuration")
    public static native double inheritedAnimationDuration();

    @Generated
    @Selector("init")
    public native FBSDKTooltipView init();

    @Generated
    @Selector("initWithCoder:")
    public native FBSDKTooltipView initWithCoder(NSCoder aDecoder);

    @Generated
    @Selector("initWithFrame:")
    public native FBSDKTooltipView initWithFrame(@ByValue CGRect frame);

    // FBSDK-specific: designated initializer with tagline, message and color
    // style.
    @Generated
    @Selector("initWithTagline:message:colorStyle:")
    public native FBSDKTooltipView initWithTaglineMessageColorStyle(
            String tagline, String message, @NUInt long colorStyle);

    @Generated
    @Selector("initialize")
    public static native void initialize();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(
            SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(
            SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(
            String key);

    @Generated
    @Selector("layerClass")
    public static native Class layerClass();

    @Generated
    @Selector("load")
    public static native void load_objc_static();

    // FBSDK-specific: the message text shown in the tooltip.
    @Generated
    @Selector("message")
    public native String message();

    @Generated
    @Owned
    @Selector("new")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object new_objc();

    @Generated
    @Selector("performSystemAnimation:onViews:options:animations:completion:")
    public static native void performSystemAnimationOnViewsOptionsAnimationsCompletion(
            @NUInt long animation,
            NSArray<? extends UIView> views,
            @NUInt long options,
            @ObjCBlock(name = "call_performSystemAnimationOnViewsOptionsAnimationsCompletion_3") UIView.Block_performSystemAnimationOnViewsOptionsAnimationsCompletion_3 parallelAnimations,
            @ObjCBlock(name = "call_performSystemAnimationOnViewsOptionsAnimationsCompletion_4") UIView.Block_performSystemAnimationOnViewsOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("performWithoutAnimation:")
    public static native void performWithoutAnimation(
            @ObjCBlock(name = "call_performWithoutAnimation") UIView.Block_performWithoutAnimation actionsWithoutAnimation);

    // FBSDK-specific: presents the tooltip anchored to the given view.
    @Generated
    @Selector("presentFromView:")
    public native void presentFromView(UIView anchorView);

    // FBSDK-specific: presents the tooltip inside a view with an explicit
    // arrow position and direction.
    @Generated
    @Selector("presentInView:withArrowPosition:direction:")
    public native void presentInViewWithArrowPositionDirection(UIView view,
            @ByValue CGPoint arrowPosition, @NUInt long arrowDirection);

    @Generated
    @Selector("requiresConstraintBasedLayout")
    public static native boolean requiresConstraintBasedLayout();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setAnimationBeginsFromCurrentState:")
    public static native void setAnimationBeginsFromCurrentState(
            boolean fromCurrentState);

    @Generated
    @Selector("setAnimationCurve:")
    public static native void setAnimationCurve(@NInt long curve);

    @Generated
    @Selector("setAnimationDelay:")
    public static native void setAnimationDelay(double delay);

    @Generated
    @Selector("setAnimationDelegate:")
    public static native void setAnimationDelegate(
            @Mapped(ObjCObjectMapper.class) Object delegate);

    @Generated
    @Selector("setAnimationDidStopSelector:")
    public static native void setAnimationDidStopSelector(SEL selector);

    @Generated
    @Selector("setAnimationDuration:")
    public static native void setAnimationDuration_static(double duration);

    @Generated
    @Selector("setAnimationRepeatAutoreverses:")
    public static native void setAnimationRepeatAutoreverses(
            boolean repeatAutoreverses);

    @Generated
    @Selector("setAnimationRepeatCount:")
    public static native void setAnimationRepeatCount_static(float repeatCount);

    @Generated
    @Selector("setAnimationStartDate:")
    public static native void setAnimationStartDate(NSDate startDate);

    @Generated
    @Selector("setAnimationTransition:forView:cache:")
    public static native void setAnimationTransitionForViewCache(
            @NInt long transition, UIView view, boolean cache);

    @Generated
    @Selector("setAnimationWillStartSelector:")
    public static native void setAnimationWillStartSelector(SEL selector);

    @Generated
    @Selector("setAnimationsEnabled:")
    public static native void setAnimationsEnabled(boolean enabled);

    // FBSDK-specific setters for the tooltip properties above.
    @Generated
    @Selector("setColorStyle:")
    public native void setColorStyle(@NUInt long value);

    @Generated
    @Selector("setDisplayDuration:")
    public native void setDisplayDuration(double value);

    @Generated
    @Selector("setMessage:")
    public native void setMessage(String value);

    @Generated
    @Selector("setTagline:")
    public native void setTagline(String value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    // FBSDK-specific: the short tagline shown above the message.
    @Generated
    @Selector("tagline")
    public native String tagline();

    @Generated
    @Selector("transitionFromView:toView:duration:options:completion:")
    public static native void transitionFromViewToViewDurationOptionsCompletion(
            UIView fromView,
            UIView toView,
            double duration,
            @NUInt long options,
            @ObjCBlock(name = "call_transitionFromViewToViewDurationOptionsCompletion") UIView.Block_transitionFromViewToViewDurationOptionsCompletion completion);

    @Generated
    @Selector("transitionWithView:duration:options:animations:completion:")
    public static native void transitionWithViewDurationOptionsAnimationsCompletion(
            UIView view,
            double duration,
            @NUInt long options,
            @ObjCBlock(name = "call_transitionWithViewDurationOptionsAnimationsCompletion_3") UIView.Block_transitionWithViewDurationOptionsAnimationsCompletion_3 animations,
            @ObjCBlock(name = "call_transitionWithViewDurationOptionsAnimationsCompletion_4") UIView.Block_transitionWithViewDurationOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("userInterfaceLayoutDirectionForSemanticContentAttribute:")
    @NInt
    public static native long userInterfaceLayoutDirectionForSemanticContentAttribute(
            @NInt long attribute);

    @Generated
    @Selector("userInterfaceLayoutDirectionForSemanticContentAttribute:relativeToLayoutDirection:")
    @NInt
    public static native long userInterfaceLayoutDirectionForSemanticContentAttributeRelativeToLayoutDirection(
            @NInt long semanticContentAttribute, @NInt long layoutDirection);

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
scouter-project/scouter | scouter.agent.java/src/main/java/scouter/agent/counter/anotation/Counter.java | 927 | /*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scouter.agent.counter.anotation;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Marks a method as a performance-counter producer for the Scouter agent.
 * Retained at runtime so the agent can discover annotated methods via
 * reflection.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface Counter {
    // Collection interval in milliseconds -- presumably the period at which
    // the agent invokes the annotated counter method; TODO confirm against
    // the agent's scheduler.
    int interval() default 2000; //default interval 2000 ms
}
mdogan/hazelcast | hazelcast/src/test/java/com/hazelcast/client/cache/DummyClientCachePartitionIteratorTest.java | 2547 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.cache;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientNetworkConfig;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.cluster.Address;
import com.hazelcast.config.Config;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.test.HazelcastParallelParametersRunnerFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Before;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static com.hazelcast.cache.CacheTestSupport.createClientCachingProvider;
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class DummyClientCachePartitionIteratorTest extends AbstractClientCachePartitionIteratorTest {

    /**
     * Boots a two-member cluster and connects a non-smart ("dummy") client to
     * the first member; the caching provider used by the inherited tests is
     * backed by that client.
     */
    @Before
    public void setup() {
        factory = new TestHazelcastFactory();
        Config memberConfig = getConfig();
        server = factory.newHazelcastInstance(memberConfig);
        factory.newHazelcastInstance(memberConfig);
        cachingProvider = createClientCachingProvider(
                factory.newHazelcastClient(getClientConfig(server)));
    }

    /** Builds a client config that targets only the given member, with smart routing off. */
    private static ClientConfig getClientConfig(HazelcastInstance instance) {
        Address memberAddress = instance.getCluster().getLocalMember().getAddress();
        String hostPort = memberAddress.getHost() + ":" + memberAddress.getPort();

        ClientNetworkConfig networkConfig = new ClientNetworkConfig();
        networkConfig.addAddress(hostPort);
        networkConfig.setSmartRouting(false);

        ClientConfig clientConfig = new ClientConfig();
        clientConfig.setNetworkConfig(networkConfig);
        return clientConfig;
    }
}
| apache-2.0 |
EvilMcJerkface/crate | server/src/main/java/org/elasticsearch/search/profile/Timer.java | 3575 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.profile;
/** Helps measure how much time is spent running some methods.
 *  The {@link #start()} and {@link #stop()} methods should typically be called
 *  in a try/finally clause with {@link #start()} being called right before the
 *  try block and {@link #stop()} being called at the beginning of the finally
 *  block:
 *  <pre>
 *  timer.start();
 *  try {
 *    // code to time
 *  } finally {
 *    timer.stop();
 *  }
 *  </pre>
 *  <p>Not thread-safe: a Timer instance must only be used by one thread.
 *  To keep overhead low, only a decaying fraction of calls is actually
 *  measured with {@link System#nanoTime()}; the rest are extrapolated in
 *  {@link #getApproximateTiming()}.
 */
public class Timer {

    // Whether the start/stop pair currently in flight is being measured.
    private boolean doTiming;
    // Accumulated (extrapolated) time in nanoseconds over the timed calls.
    private long timing;
    // Total number of start() invocations so far.
    private long count;
    // Value of count at the end of the last *timed* call; the gap
    // (count - lastCount) is the number of untimed calls since then.
    private long lastCount;
    // nanoTime() captured by the current timed start(); 0 while idle.
    private long start;

    /** pkg-private for testing: overridable clock source. */
    long nanoTime() {
        return System.nanoTime();
    }

    /** Start the timer. Must be balanced by a matching {@link #stop()}. */
    public final void start() {
        assert start == 0 : "#start call misses a matching #stop call";
        // We measure the timing of each method call for the first 256
        // calls, then 1/2 call up to 512 then 1/3 up to 768, etc. with
        // a maximum interval of 1024, which is reached for 1024*2^8 ~= 262000
        // This allows to not slow down things too much because of calls
        // to System.nanoTime() when methods are called millions of time
        // in tight loops, while still providing useful timings for methods
        // that are only called a couple times per search execution.
        doTiming = (count - lastCount) >= Math.min(lastCount >>> 8, 1024);
        if (doTiming) {
            start = nanoTime();
        }
        count++;
    }

    /** Stop the timer. */
    public final void stop() {
        if (doTiming) {
            // Credit the elapsed time (at least 1ns) to every call since the
            // last timed one, then remember this call as the last timed call.
            timing += (count - lastCount) * Math.max(nanoTime() - start, 1L);
            lastCount = count;
            start = 0;
        }
    }

    /** Return the number of times that {@link #start()} has been called. */
    public final long getCount() {
        if (start != 0) {
            throw new IllegalStateException("#start call misses a matching #stop call");
        }
        return count;
    }

    /** Return an approximation of the total time spent between consecutive calls of #start and #stop. */
    public final long getApproximateTiming() {
        if (start != 0) {
            throw new IllegalStateException("#start call misses a matching #stop call");
        }
        // We don't have timings for the last `count-lastCount` method calls
        // so we assume that they had the same timing as the lastCount first
        // calls. This approximation is ok since at most 1/256th of method
        // calls have not been timed.
        long timing = this.timing;
        if (count > lastCount) {
            assert lastCount > 0;
            timing += (count - lastCount) * timing / lastCount;
        }
        return timing;
    }
}
| apache-2.0 |
atlassian/httpclient | httpclient/src/main/java/org/apache/http/client/protocol/ClientContext.java | 4355 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.client.protocol;
/**
 * {@link org.apache.http.protocol.HttpContext} attribute names used during
 * client side HTTP protocol processing. Each constant is the string key under
 * which the corresponding object is stored in the execution context.
 *
 * @since 4.0
 */
public interface ClientContext {

    /**
     * The {@link org.apache.http.conn.routing.RouteInfo} describing the
     * actual connection route.
     *
     * @since 4.3
     */
    String ROUTE = "http.route";

    /** The actual {@link org.apache.http.conn.scheme.Scheme} registry. */
    String SCHEME_REGISTRY = "http.scheme-registry";

    /** The actual {@link org.apache.http.cookie.CookieSpecRegistry cookie specification registry}. */
    String COOKIESPEC_REGISTRY = "http.cookiespec-registry";

    /** The {@link org.apache.http.cookie.CookieSpec} in effect. */
    String COOKIE_SPEC = "http.cookie-spec";

    /** The {@link org.apache.http.cookie.CookieOrigin} describing the origin server. */
    String COOKIE_ORIGIN = "http.cookie-origin";

    /** The actual {@link org.apache.http.client.CookieStore}. */
    String COOKIE_STORE = "http.cookie-store";

    /** The actual {@link org.apache.http.auth.AuthSchemeRegistry authentication scheme registry}. */
    String AUTHSCHEME_REGISTRY = "http.authscheme-registry";

    /** The actual {@link org.apache.http.client.CredentialsProvider}. */
    String CREDS_PROVIDER = "http.auth.credentials-provider";

    /** The {@link org.apache.http.client.AuthCache auth scheme cache}. */
    String AUTH_CACHE = "http.auth.auth-cache";

    /** The {@link org.apache.http.auth.AuthState} of the target host. */
    String TARGET_AUTH_STATE = "http.auth.target-scope";

    /** The {@link org.apache.http.auth.AuthState} of the proxy. */
    String PROXY_AUTH_STATE = "http.auth.proxy-scope";

    /**
     * @deprecated (4.1) do not use
     */
    @Deprecated
    String AUTH_SCHEME_PREF = "http.auth.scheme-pref";

    /**
     * The user identity, e.g. a {@link java.security.Principal}.
     */
    String USER_TOKEN = "http.user-token";
}
| apache-2.0 |
anupcshan/bazel | src/main/java/com/google/devtools/build/lib/query2/AbstractBlazeQueryEnvironment.java | 9083 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.query2;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.events.ErrorSensingEventHandler;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.DependencyFilter;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.profiler.AutoProfiler;
import com.google.devtools.build.lib.query2.engine.Callback;
import com.google.devtools.build.lib.query2.engine.QueryEnvironment;
import com.google.devtools.build.lib.query2.engine.QueryEvalResult;
import com.google.devtools.build.lib.query2.engine.QueryException;
import com.google.devtools.build.lib.query2.engine.QueryExpression;
import com.google.devtools.build.lib.query2.engine.QueryUtil.AggregateAllCallback;
import com.google.devtools.build.lib.util.Preconditions;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Logger;
/**
* {@link QueryEnvironment} that can evaluate queries to produce a result, and implements as much
* of QueryEnvironment as possible while remaining mostly agnostic as to the objects being stored.
*/
public abstract class AbstractBlazeQueryEnvironment<T> implements QueryEnvironment<T> {
protected final ErrorSensingEventHandler eventHandler;
private final Map<String, Set<T>> letBindings = new HashMap<>();
protected final boolean keepGoing;
protected final boolean strictScope;
protected final DependencyFilter dependencyFilter;
private final Predicate<Label> labelFilter;
private final Set<Setting> settings;
private final List<QueryFunction> extraFunctions;
private static final Logger LOG = Logger.getLogger(AbstractBlazeQueryEnvironment.class.getName());
protected AbstractBlazeQueryEnvironment(boolean keepGoing,
    boolean strictScope,
    Predicate<Label> labelFilter,
    EventHandler eventHandler,
    Set<Setting> settings,
    Iterable<QueryFunction> extraFunctions) {
  // Wrap the handler so we can later ask whether any error was reported.
  this.eventHandler = new ErrorSensingEventHandler(eventHandler);
  this.keepGoing = keepGoing;
  this.strictScope = strictScope;
  this.labelFilter = labelFilter;
  this.settings = Sets.immutableEnumSet(settings);
  this.extraFunctions = ImmutableList.copyOf(extraFunctions);
  // Derive the edge filter from the query settings (host/implicit/nodep deps).
  this.dependencyFilter = constructDependencyFilter(settings);
}
/** Builds the dependency-edge filter implied by the given query settings. */
private static DependencyFilter constructDependencyFilter(
    Set<Setting> settings) {
  DependencyFilter filter;
  if (settings.contains(Setting.NO_HOST_DEPS)) {
    filter = DependencyFilter.NO_HOST_DEPS;
  } else {
    filter = DependencyFilter.ALL_DEPS;
  }
  if (settings.contains(Setting.NO_IMPLICIT_DEPS)) {
    filter = DependencyFilter.and(filter, DependencyFilter.NO_IMPLICIT_DEPS);
  }
  if (settings.contains(Setting.NO_NODEP_DEPS)) {
    filter = DependencyFilter.and(filter, DependencyFilter.NO_NODEP_ATTRIBUTES);
  }
  return filter;
}
/**
 * Evaluate the specified query expression in this environment.
 *
 * @return a {@link QueryEvalResult} object that contains the resulting set of targets and a bit
 *     to indicate whether errors occurred during evaluation; note that the
 *     success status can only be false if {@code --keep_going} was in effect
 * @throws QueryException if the evaluation failed and {@code --nokeep_going} was in
 *     effect
 * @throws InterruptedException if evaluation was interrupted
 */
public QueryEvalResult evaluateQuery(QueryExpression expr, final Callback<T> callback)
    throws QueryException, InterruptedException {
  // Latches to false as soon as any non-empty partial result is seen.
  final AtomicBoolean empty = new AtomicBoolean(true);
  try (final AutoProfiler p = AutoProfiler.logged("evaluating query", LOG)) {
    // In the --nokeep_going case, errors are reported in the order in which the patterns are
    // specified; using a linked hash set here makes sure that the left-most error is reported.
    Set<String> targetPatternSet = new LinkedHashSet<>();
    expr.collectTargetPatterns(targetPatternSet);
    try {
      preloadOrThrow(expr, targetPatternSet);
    } catch (TargetParsingException e) {
      // Unfortunately, by evaluating the patterns in parallel, we lose some location information.
      throw new QueryException(expr, e.getMessage());
    }
    try {
      // Forward each partial result to the caller's callback, tracking emptiness.
      this.eval(expr, new Callback<T>() {
        @Override
        public void process(Iterable<T> partialResult)
            throws QueryException, InterruptedException {
          // Only updates while still true, so a single non-empty batch is final.
          empty.compareAndSet(true, Iterables.isEmpty(partialResult));
          callback.process(partialResult);
        }
      });
    } catch (QueryException e) {
      // Re-wrap so the failing top-level expression is attached to the error.
      throw new QueryException(e, expr);
    }
  }
  if (eventHandler.hasErrors()) {
    if (!keepGoing) {
      // This case represents loading-phase errors reported during evaluation
      // of target patterns that don't cause evaluation to fail per se.
      throw new QueryException("Evaluation of query \"" + expr
          + "\" failed due to BUILD file errors");
    } else {
      eventHandler.handle(Event.warn("--keep_going specified, ignoring errors. "
          + "Results may be inaccurate"));
    }
  }
  return new QueryEvalResult(!eventHandler.hasErrors(), empty.get());
}
  /**
   * Hook invoked after the enclosing command finishes. No-op by default;
   * subclasses may override to release per-command state.
   */
  public void afterCommand() {
  }
  /**
   * Gives subclasses a chance to rewrite a parsed query before evaluation;
   * the default implementation returns the expression unchanged.
   */
  public QueryExpression transformParsedQuery(QueryExpression queryExpression) {
    return queryExpression;
  }
  /**
   * Parses {@code query} in this environment and evaluates it; see
   * {@link #evaluateQuery(QueryExpression, Callback)} for result semantics.
   */
  public QueryEvalResult evaluateQuery(String query, Callback<T> callback)
      throws QueryException, InterruptedException {
    return evaluateQuery(QueryExpression.parse(query, this), callback);
  }
@Override
public void reportBuildFileError(QueryExpression caller, String message) throws QueryException {
if (!keepGoing) {
throw new QueryException(caller, message);
} else {
// Keep consistent with evaluateQuery() above.
eventHandler.handle(Event.error("Evaluation of query \"" + caller + "\" failed: " + message));
}
}
  /** Looks up and returns the target named by {@code label}. */
  public abstract Target getTarget(Label label) throws TargetNotFoundException, QueryException;
  /** Returns the binding of query variable {@code name}, or null if unbound. */
  @Override
  public Set<T> getVariable(String name) {
    return letBindings.get(name);
  }
  /**
   * Binds {@code value} to query variable {@code name}; returns the previous
   * binding, or null if there was none (mirrors {@code Map.put}).
   */
  @Override
  public Set<T> setVariable(String name, Set<T> value) {
    return letBindings.put(name, value);
  }
protected boolean validateScope(Label label, boolean strict) throws QueryException {
if (!labelFilter.apply(label)) {
String error = String.format("target '%s' is not within the scope of the query", label);
if (strict) {
throw new QueryException(error);
} else {
eventHandler.handle(Event.warn(error + ". Skipping"));
return false;
}
}
return true;
}
  /**
   * Evaluates the target pattern {@code pattern} on behalf of {@code caller}
   * and returns the matched targets. Parse failures go through
   * {@link #reportBuildFileError}, so under --keep_going a bad pattern is
   * reported and skipped rather than failing the whole query.
   */
  public Set<T> evalTargetPattern(QueryExpression caller, String pattern)
      throws QueryException {
    try {
      preloadOrThrow(caller, ImmutableList.of(pattern));
    } catch (TargetParsingException e) {
      // Will skip the target and keep going if -k is specified.
      reportBuildFileError(caller, e.getMessage());
    }
    AggregateAllCallback<T> aggregatingCallback = new AggregateAllCallback<>();
    getTargetsMatchingPattern(caller, pattern, aggregatingCallback);
    return aggregatingCallback.getResult();
  }
  /**
   * Perform any work that should be done ahead of time to resolve the target patterns in the
   * query. Implementations may choose to cache the results of resolving the patterns, cache
   * intermediate work, or not cache and resolve patterns on the fly.
   *
   * @throws TargetParsingException if a pattern in {@code patterns} cannot be resolved
   * @throws QueryException on other evaluation failures
   */
  protected abstract void preloadOrThrow(QueryExpression caller, Collection<String> patterns)
      throws QueryException, TargetParsingException;
@Override
public boolean isSettingEnabled(Setting setting) {
return settings.contains(Preconditions.checkNotNull(setting));
}
@Override
public Iterable<QueryFunction> getFunctions() {
ImmutableList.Builder<QueryFunction> builder = ImmutableList.builder();
builder.addAll(DEFAULT_QUERY_FUNCTIONS);
builder.addAll(extraFunctions);
return builder.build();
}
}
| apache-2.0 |
codescale/logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/rolling/action/IfAccumulatedFileSize.java | 4872 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.appender.rolling.action;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Core;
import org.apache.logging.log4j.core.appender.rolling.FileSize;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.status.StatusLogger;
/**
 * PathCondition that accepts paths after the accumulated file size threshold is exceeded during the file tree walk.
 */
@Plugin(name = "IfAccumulatedFileSize", category = Core.CATEGORY_NAME, printObject = true)
public final class IfAccumulatedFileSize implements PathCondition {

    private static final Logger LOGGER = StatusLogger.getLogger();

    /** Size in bytes at which this condition starts accepting paths. */
    private final long thresholdBytes;
    /** Running total of the sizes of all files visited in the current walk. */
    private long accumulatedSize;
    /** Conditions that must also hold once the threshold has been crossed. */
    private final PathCondition[] nestedConditions;

    private IfAccumulatedFileSize(final long thresholdSize, final PathCondition[] nestedConditions) {
        if (thresholdSize <= 0) {
            throw new IllegalArgumentException("Count must be a positive integer but was " + thresholdSize);
        }
        this.thresholdBytes = thresholdSize;
        this.nestedConditions = nestedConditions == null ? new PathCondition[0] : nestedConditions.clone();
    }

    /** Returns the configured threshold in bytes. */
    public long getThresholdBytes() {
        return thresholdBytes;
    }

    /** Returns an unmodifiable view of the nested conditions. */
    public List<PathCondition> getNestedConditions() {
        return Collections.unmodifiableList(Arrays.asList(nestedConditions));
    }

    /**
     * Adds this file's size to the running total and accepts the path once
     * the total exceeds the threshold, provided all nested conditions also
     * accept it.
     */
    @Override
    public boolean accept(final Path basePath, final Path relativePath, final BasicFileAttributes attrs) {
        accumulatedSize += attrs.size();
        final boolean exceeded = accumulatedSize > thresholdBytes;
        LOGGER.trace("IfAccumulatedFileSize {}: {} accumulated size '{}' {} thresholdBytes '{}'",
                exceeded ? "ACCEPTED" : "REJECTED", relativePath, accumulatedSize,
                exceeded ? ">" : "<=", thresholdBytes);
        return exceeded && IfAll.accept(nestedConditions, basePath, relativePath, attrs);
    }

    /** Resets the running total at the start of each file tree walk. */
    @Override
    public void beforeFileTreeWalk() {
        accumulatedSize = 0;
        IfAll.beforeFileTreeWalk(nestedConditions);
    }

    /**
     * Create an IfAccumulatedFileSize condition.
     *
     * @param size The threshold accumulated file size from which files will be deleted.
     * @param nestedConditions Conditions evaluated once the threshold is exceeded.
     * @return An IfAccumulatedFileSize condition.
     */
    @PluginFactory
    public static IfAccumulatedFileSize createFileSizeCondition(
            // @formatter:off
            @PluginAttribute("exceeds") final String size,
            @PluginElement("PathConditions") final PathCondition... nestedConditions) {
            // @formatter:on
        if (size == null) {
            LOGGER.error("IfAccumulatedFileSize missing mandatory size threshold.");
        }
        // Missing size degrades to an effectively-infinite threshold rather than failing.
        final long threshold = size == null ? Long.MAX_VALUE : FileSize.parse(size, Long.MAX_VALUE);
        return new IfAccumulatedFileSize(threshold, nestedConditions);
    }

    @Override
    public String toString() {
        final StringBuilder buf = new StringBuilder("IfAccumulatedFileSize(exceeds=").append(thresholdBytes);
        if (nestedConditions.length > 0) {
            buf.append(" AND ").append(Arrays.toString(nestedConditions));
        }
        return buf.append(')').toString();
    }
}
| apache-2.0 |
Hope6537/hope-tactical-equipment | hope-share-module/hope-service/src/main/java/org/hope6537/service/ClassesService.java | 2572 | package org.hope6537.service;
import org.hope6537.dto.ClassesDto;
import org.hope6537.entity.ResultSupport;
import java.util.List;
/**
 * Service interface for the Classes entity.
 * Created by hope6537 on 16/1/30.
 */
public interface ClassesService {

    // Presumably generates `count` class records and returns their new ids —
    // TODO(review): confirm against the implementation.
    ResultSupport<List<Integer>> generatorClasses(int count);

    /**
     * Standard template method - inserts a single record into the database.
     *
     * @param classesDto data transfer object to insert
     * @return ResultSupport.getData = id of the newly inserted record
     */
    ResultSupport<Integer> addClasses(ClassesDto classesDto);

    /**
     * Standard template method - inserts a single record built from the given field values.
     *
     * @param name name field of the new record
     * @return ResultSupport.getData = number of affected rows
     */
    ResultSupport<Integer> addClasses(String name);

    /**
     * Standard template method - updates a single record in the database.
     *
     * @param classesDto data transfer object carrying the new values
     * @return ResultSupport.getData = number of updated rows
     */
    ResultSupport<Integer> modifyClasses(ClassesDto classesDto);

    /**
     * Standard template method - updates multiple records in the database.
     *
     * @param classesDto data transfer object carrying the new values
     * @param idList     ids of the records to update
     * @return ResultSupport.getData = number of updated rows
     */
    ResultSupport<Integer> batchModifyClasses(ClassesDto classesDto, List<Integer> idList);

    /**
     * Standard template method - deletes a single record from the database.
     *
     * @param id id of the record to delete
     * @return ResultSupport.getData = number of affected rows
     */
    ResultSupport<Integer> removeClasses(Integer id);

    /**
     * Standard template method - deletes multiple records from the database.
     *
     * @param idList ids of the records to delete
     * @return ResultSupport.getData = number of affected rows
     */
    ResultSupport<Integer> batchRemoveClasses(List<Integer> idList);

    /**
     * Standard template method - fetches a single record by id.
     *
     * @param id id of the record to fetch
     * @return ResultSupport.getData = the matching record
     */
    ResultSupport<ClassesDto> getClassesById(Integer id);

    /**
     * Standard template method - fetches multiple records by their ids.
     *
     * @param idList ids of the records to fetch
     * @return ResultSupport.getData = all matching records
     */
    ResultSupport<List<ClassesDto>> getClassesListByIdList(List<Integer> idList);

    /**
     * Standard template method - fetches the records matching the given query object.
     *
     * @param query query object describing the filter
     * @return ResultSupport.getData = all matching records
     */
    ResultSupport<List<ClassesDto>> getClassesListByQuery(ClassesDto query);
}
| apache-2.0 |
shelsonjava/box-java-sdk-v2 | BoxJavaLibraryV2/tst/com/box/boxjavalibv2/dao/BoxFileVersionTest.java | 979 | package com.box.boxjavalibv2.dao;
import java.io.File;
import java.io.IOException;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import org.junit.Test;
import com.box.boxjavalibv2.testutils.TestUtils;
import com.box.restclientv2.exceptions.BoxRestException;
/**
 * Tests that a BoxFileVersion survives a write/read round trip through the
 * parceling mechanism.
 */
public class BoxFileVersionTest {

    /**
     * Builds a BoxFileVersion from fixture JSON (substituting the
     * $modified_by placeholder with a user payload), writes it to a
     * TestParcel, reconstructs it from that parcel, and checks that the name
     * field is preserved (expected value comes from testdata/fileversion.json).
     */
    @Test
    public void testParcelRoundTrip() throws IOException, BoxRestException {
        String userJson = FileUtils.readFileToString(new File("testdata/user.json"));
        String fileJson = FileUtils.readFileToString(new File("testdata/fileversion.json"));
        fileJson = fileJson.replace("$modified_by", userJson);
        BoxFileVersion original = (BoxFileVersion) TestUtils.getFromJSON(fileJson, BoxFileVersion.class);
        TestParcel parcel = new TestParcel();
        original.writeToParcel(parcel, 0);
        BoxFileVersion version = new BoxFileVersion(parcel);
        Assert.assertEquals("testname", version.getName());
    }
}
| apache-2.0 |
reallyserg/k_serg | chapter_005/generic/src/main/java/kochetov/Base.java | 419 | package kochetov;
/**
 * Base entity carrying a string identifier shared by concrete items.
 * Created by Сергей on 05.04.2017.
 */
public abstract class Base {
    /** Identifier of this item; null until assigned. */
    private String id;

    /**
     * Returns the identifier of this item.
     * @return the id, or {@code null} if none has been set.
     */
    public String getId() {
        return id;
    }

    /**
     * Assigns the identifier of this item.
     * @param id identifier to store.
     */
    public void setId(String id) {
        this.id = id;
    }
}
| apache-2.0 |
nezirus/elasticsearch | core/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java | 19445 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.create;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.cluster.ClusterInfoService;
import org.elasticsearch.cluster.InternalClusterInfoService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import java.util.Arrays;
import java.util.Collection;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString;
/**
 * Integration tests for the shrink-index API: shrinking to fewer shards in
 * one or two steps, replica handling, manual recovery from a failed target
 * allocation, and carrying the index sort over to the shrunken index.
 */
public class ShrinkIndexIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // InternalSettingsPlugin registers test-only index settings
        // (e.g. the index.version.created override used below).
        return Arrays.asList(InternalSettingsPlugin.class);
    }

    /**
     * Shrinks a source index twice in sequence (e.g. 8 -> 4 -> 2 shards) and
     * checks that documents stay searchable and updatable after every step.
     */
    public void testCreateShrinkIndexToN() {
        int[][] possibleShardSplits = new int[][] {{8,4,2}, {9, 3, 1}, {4, 2, 1}, {15,5,1}};
        int[] shardSplits = randomFrom(possibleShardSplits);
        // Each shard count must divide evenly by the next one in the chain.
        assertEquals(shardSplits[0], (shardSplits[0] / shardSplits[1]) * shardSplits[1]);
        assertEquals(shardSplits[1], (shardSplits[1] / shardSplits[2]) * shardSplits[2]);
        internalCluster().ensureAtLeastNumDataNodes(2);
        prepareCreate("source").setSettings(Settings.builder().put(indexSettings()).put("number_of_shards", shardSplits[0])).get();
        for (int i = 0; i < 20; i++) {
            client().prepareIndex("source", "t1", Integer.toString(i))
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        ImmutableOpenMap<String, DiscoveryNode> dataNodes = client().admin().cluster().prepareState().get().getState().nodes()
            .getDataNodes();
        assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2);
        DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class);
        String mergeNode = discoveryNodes[0].getName();
        // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node
        // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due
        // to the require._name below.
        ensureGreen();
        // relocate all shards to one node such that we can merge it.
        client().admin().indices().prepareUpdateSettings("source")
            .setSettings(Settings.builder()
                .put("index.routing.allocation.require._name", mergeNode)
                .put("index.blocks.write", true)).get();
        ensureGreen();
        // now merge source into a 4 shard index
        assertAcked(client().admin().indices().prepareShrinkIndex("source", "first_shrink")
            .setSettings(Settings.builder()
                .put("index.number_of_replicas", 0)
                .put("index.number_of_shards", shardSplits[1]).build()).get());
        ensureGreen();
        assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        for (int i = 0; i < 20; i++) { // now update
            client().prepareIndex("first_shrink", "t1", Integer.toString(i))
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        flushAndRefresh();
        assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        // relocate all shards to one node such that we can merge it.
        client().admin().indices().prepareUpdateSettings("first_shrink")
            .setSettings(Settings.builder()
                .put("index.routing.allocation.require._name", mergeNode)
                .put("index.blocks.write", true)).get();
        ensureGreen();
        // now merge source into a 2 shard index
        assertAcked(client().admin().indices().prepareShrinkIndex("first_shrink", "second_shrink")
            .setSettings(Settings.builder()
                .put("index.number_of_replicas", 0)
                .put("index.number_of_shards", shardSplits[2]).build()).get());
        ensureGreen();
        assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        // let it be allocated anywhere and bump replicas
        client().admin().indices().prepareUpdateSettings("second_shrink")
            .setSettings(Settings.builder()
                .putNull("index.routing.allocation.include._id")
                .put("index.number_of_replicas", 1)).get();
        ensureGreen();
        assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        for (int i = 0; i < 20; i++) { // now update
            client().prepareIndex("second_shrink", "t1", Integer.toString(i))
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        flushAndRefresh();
        assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
    }

    /**
     * Shrinks a randomly sized source index into a single-shard target and
     * verifies doc counts, replica bumping, and that the source's
     * index.version.created setting is carried over to the target.
     */
    public void testCreateShrinkIndex() {
        internalCluster().ensureAtLeastNumDataNodes(2);
        Version version = VersionUtils.randomVersion(random());
        prepareCreate("source").setSettings(Settings.builder().put(indexSettings())
            .put("number_of_shards", randomIntBetween(2, 7))
            .put("index.version.created", version)
        ).get();
        for (int i = 0; i < 20; i++) {
            client().prepareIndex("source", "type")
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        ImmutableOpenMap<String, DiscoveryNode> dataNodes = client().admin().cluster().prepareState().get().getState().nodes()
            .getDataNodes();
        assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2);
        DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class);
        String mergeNode = discoveryNodes[0].getName();
        // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node
        // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due
        // to the require._name below.
        ensureGreen();
        // relocate all shards to one node such that we can merge it.
        client().admin().indices().prepareUpdateSettings("source")
            .setSettings(Settings.builder()
                .put("index.routing.allocation.require._name", mergeNode)
                .put("index.blocks.write", true)).get();
        ensureGreen();
        // now merge source into a single shard index
        final boolean createWithReplicas = randomBoolean();
        assertAcked(client().admin().indices().prepareShrinkIndex("source", "target")
            .setSettings(Settings.builder().put("index.number_of_replicas", createWithReplicas ? 1 : 0).build()).get());
        ensureGreen();
        assertHitCount(client().prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        if (createWithReplicas == false) {
            // bump replicas
            client().admin().indices().prepareUpdateSettings("target")
                .setSettings(Settings.builder()
                    .put("index.number_of_replicas", 1)).get();
            ensureGreen();
            assertHitCount(client().prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        }
        for (int i = 20; i < 40; i++) {
            client().prepareIndex("target", "type")
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        flushAndRefresh();
        assertHitCount(client().prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 40);
        assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
        GetSettingsResponse target = client().admin().indices().prepareGetSettings("target").get();
        assertEquals(version, target.getIndexToSettings().get("target").getAsVersion("index.version.created", null));
    }

    /**
     * Tests that we can manually recover from a failed allocation due to shards being moved away etc.
     */
    public void testCreateShrinkIndexFails() throws Exception {
        internalCluster().ensureAtLeastNumDataNodes(2);
        prepareCreate("source").setSettings(Settings.builder().put(indexSettings())
            .put("number_of_shards", randomIntBetween(2, 7))
            .put("number_of_replicas", 0)).get();
        for (int i = 0; i < 20; i++) {
            client().prepareIndex("source", "type")
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        ImmutableOpenMap<String, DiscoveryNode> dataNodes = client().admin().cluster().prepareState().get().getState().nodes()
            .getDataNodes();
        assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2);
        DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class);
        String spareNode = discoveryNodes[0].getName();
        String mergeNode = discoveryNodes[1].getName();
        // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node
        // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due
        // to the require._name below.
        ensureGreen();
        // relocate all shards to one node such that we can merge it.
        client().admin().indices().prepareUpdateSettings("source")
            .setSettings(Settings.builder().put("index.routing.allocation.require._name", mergeNode)
                .put("index.blocks.write", true)).get();
        ensureGreen();
        // now merge source into a single shard index
        client().admin().indices().prepareShrinkIndex("source", "target")
            .setWaitForActiveShards(ActiveShardCount.NONE)
            .setSettings(Settings.builder()
                .put("index.routing.allocation.exclude._name", mergeNode) // we manually exclude the merge node to force the allocation to fail
                .put("index.number_of_replicas", 0)
                .put("index.allocation.max_retries", 1).build()).get();
        client().admin().cluster().prepareHealth("target").setWaitForEvents(Priority.LANGUID).get();
        // now we move all shards away from the merge node
        client().admin().indices().prepareUpdateSettings("source")
            .setSettings(Settings.builder().put("index.routing.allocation.require._name", spareNode)
                .put("index.blocks.write", true)).get();
        ensureGreen("source");
        client().admin().indices().prepareUpdateSettings("target") // remove the deliberate misconfiguration from above
            .setSettings(Settings.builder().putNull("index.routing.allocation.exclude._name")).get();
        // wait until it fails
        assertBusy(() -> {
            ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().get();
            RoutingTable routingTables = clusterStateResponse.getState().routingTable();
            assertTrue(routingTables.index("target").shard(0).getShards().get(0).unassigned());
            assertEquals(UnassignedInfo.Reason.ALLOCATION_FAILED,
                routingTables.index("target").shard(0).getShards().get(0).unassignedInfo().getReason());
            assertEquals(1,
                routingTables.index("target").shard(0).getShards().get(0).unassignedInfo().getNumFailedAllocations());
        });
        client().admin().indices().prepareUpdateSettings("source") // now relocate them all to the right node
            .setSettings(Settings.builder()
                .put("index.routing.allocation.require._name", mergeNode)).get();
        ensureGreen("source");
        final InternalClusterInfoService infoService = (InternalClusterInfoService) internalCluster().getInstance(ClusterInfoService.class,
            internalCluster().getMasterName());
        infoService.refresh();
        // kick off a retry and wait until it's done!
        ClusterRerouteResponse clusterRerouteResponse = client().admin().cluster().prepareReroute().setRetryFailed(true).get();
        long expectedShardSize = clusterRerouteResponse.getState().routingTable().index("target")
            .shard(0).getShards().get(0).getExpectedShardSize();
        // we support the expected shard size in the allocator to sum up over the source index shards
        assertTrue("expected shard size must be set but wasn't: " + expectedShardSize, expectedShardSize > 0);
        ensureGreen();
        assertHitCount(client().prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20);
    }

    /**
     * Verifies that the source's index sort is applied to the shrunken index,
     * that the sort cannot be overridden on the target, and that segments
     * remain sorted after further indexing into the target.
     */
    public void testCreateShrinkWithIndexSort() throws Exception {
        SortField expectedSortField = new SortedSetSortField("id", true, SortedSetSelector.Type.MAX);
        expectedSortField.setMissingValue(SortedSetSortField.STRING_FIRST);
        Sort expectedIndexSort = new Sort(expectedSortField);
        internalCluster().ensureAtLeastNumDataNodes(2);
        prepareCreate("source")
            .setSettings(
                Settings.builder()
                    .put(indexSettings())
                    .put("sort.field", "id")
                    .put("sort.order", "desc")
                    .put("number_of_shards", 8)
                    .put("number_of_replicas", 0)
            )
            .addMapping("type", "id", "type=keyword,doc_values=true")
            .get();
        for (int i = 0; i < 20; i++) {
            client().prepareIndex("source", "type", Integer.toString(i))
                .setSource("{\"foo\" : \"bar\", \"id\" : " + i + "}", XContentType.JSON).get();
        }
        ImmutableOpenMap<String, DiscoveryNode> dataNodes = client().admin().cluster().prepareState().get().getState().nodes()
            .getDataNodes();
        assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2);
        DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class);
        String mergeNode = discoveryNodes[0].getName();
        // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node
        // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due
        // to the require._name below.
        ensureGreen();
        flushAndRefresh();
        assertSortedSegments("source", expectedIndexSort);
        // relocate all shards to one node such that we can merge it.
        client().admin().indices().prepareUpdateSettings("source")
            .setSettings(Settings.builder()
                .put("index.routing.allocation.require._name", mergeNode)
                .put("index.blocks.write", true)).get();
        ensureGreen();
        // check that index sort cannot be set on the target index
        IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
            () -> client().admin().indices().prepareShrinkIndex("source", "target")
                .setSettings(Settings.builder()
                    .put("index.number_of_replicas", 0)
                    .put("index.number_of_shards", "2")
                    .put("index.sort.field", "foo")
                    .build()).get());
        assertThat(exc.getMessage(), containsString("can't override index sort when shrinking index"));
        // check that the index sort order of `source` is correctly applied to the `target`
        assertAcked(client().admin().indices().prepareShrinkIndex("source", "target")
            .setSettings(Settings.builder()
                .put("index.number_of_replicas", 0)
                .put("index.number_of_shards", "2").build()).get());
        ensureGreen();
        flushAndRefresh();
        GetSettingsResponse settingsResponse =
            client().admin().indices().prepareGetSettings("target").execute().actionGet();
        assertEquals(settingsResponse.getSetting("target", "index.sort.field"), "id");
        assertEquals(settingsResponse.getSetting("target", "index.sort.order"), "desc");
        assertSortedSegments("target", expectedIndexSort);
        // ... and that the index sort is also applied to updates
        for (int i = 20; i < 40; i++) {
            client().prepareIndex("target", "type")
                .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
        }
        flushAndRefresh();
        assertSortedSegments("target", expectedIndexSort);
    }
}
| apache-2.0 |
surlymo/dubbo | dubbo-test/dubbo-test-examples/src/main/java/com/alibaba/dubbo/examples/heartbeat/HeartbeatServer.java | 3559 | /*
* Copyright 1999-2012 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.examples.heartbeat;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.common.utils.NetUtils;
import com.alibaba.dubbo.remoting.Transporters;
import com.alibaba.dubbo.remoting.exchange.ExchangeClient;
import com.alibaba.dubbo.remoting.exchange.ExchangeHandler;
import com.alibaba.dubbo.remoting.exchange.ExchangeServer;
import com.alibaba.dubbo.remoting.exchange.support.ExchangeHandlerAdapter;
import com.alibaba.dubbo.remoting.exchange.support.header.HeaderExchangeClient;
import com.alibaba.dubbo.remoting.exchange.support.header.HeaderExchangeServer;
/**
 * Demonstrates the exchange-layer heartbeat: starts a server configured with
 * a 1000 ms heartbeat parameter, connects a client, waits, and verifies the
 * client observed at least one server-initiated heartbeat.
 *
 * @author <a href="mailto:gang.lvg@alibaba-inc.com">kimi</a>
 */
public class HeartbeatServer {

    // Endpoint shared by server bind and client connect; the "exchange" codec
    // parameter selects the request/response exchange protocol.
    private static final URL clientUrl = URL.valueOf(
            new StringBuilder( 32 )
                    .append( "netty://" )
                    .append( NetUtils.getLocalHost() )
                    .append( ":9999" ).toString() )
            .addParameter( Constants.CODEC_KEY, "exchange" );

    // Business handler with no behaviour of its own; heartbeats are counted
    // by the wrapping HeartBeatExchangeHandler instances below.
    private static final ExchangeHandler handler = new ExchangeHandlerAdapter() {
    };

    private static ExchangeServer exchangeServer;

    // Flipped by the server thread once the bind succeeds; polled by main().
    private static volatile boolean serverStarted = false;

    public static void main( String[] args ) throws Exception {
        final HeartBeatExchangeHandler serverHandler = new HeartBeatExchangeHandler( handler );
        // Bind on a daemon thread so a startup failure is printed rather than
        // blocking the main thread inside the bind call.
        Thread serverThread = new Thread( new Runnable() {
            public void run() {
                try {
                    exchangeServer = new HeaderExchangeServer(
                            Transporters.bind(
                                    clientUrl.addParameter( Constants.HEARTBEAT_KEY, 1000 ),
                                    serverHandler ) );
                    serverStarted = true;
                } catch ( Exception e ) {
                    e.printStackTrace();
                }
            }
        } );
        serverThread.setDaemon( true );
        serverThread.start();
        // Busy-wait (1 s polls) until the server is up before connecting.
        while ( !serverStarted ) {
            Thread.sleep( 1000 );
        }
        HeartBeatExchangeHandler clientHandler = new HeartBeatExchangeHandler( handler );
        ExchangeClient exchangeClient = new HeaderExchangeClient(
                Transporters.connect( clientUrl, clientHandler ) );
        // Give the server ~10 seconds to emit heartbeats; dots show progress.
        for ( int i = 0; i < 10; i++ ) {
            Thread.sleep( 1000 );
            System.out.print( "." );
        }
        System.out.println();
        if ( clientHandler.getHeartBeatCount() > 0 ) {
            System.out.printf( "Client receives %d heartbeats",
                    clientHandler.getHeartBeatCount() );
        } else {
            throw new Exception( "Server heartbeat does not work." );
        }
        exchangeClient.close();
        exchangeServer.close();
    }
}
| apache-2.0 |
gyfora/flink | flink-table/flink-table-runtime-blink/src/test/java/org/apache/flink/table/dataformat/BaseRowTest.java | 8619 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.dataformat;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.runtime.typeutils.BaseArraySerializer;
import org.apache.flink.table.runtime.typeutils.BaseMapSerializer;
import org.apache.flink.table.runtime.typeutils.BaseRowSerializer;
import org.apache.flink.table.runtime.typeutils.BinaryGenericSerializer;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.junit.Before;
import org.junit.Test;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import static org.apache.flink.table.utils.BinaryGenericAsserter.equivalent;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
 * Test for {@link BaseRow}s.
 *
 * <p>Builds the same 18-field fixture as a binary row, a nested binary row, a
 * {@link GenericRow}, a {@link BoxedWrapperRow} and a {@link JoinedRow}, then runs the
 * shared assertions in {@link #testAll(BaseRow)} against each implementation.
 */
public class BaseRowTest {
	// Fixture values shared by every row variant; initialised in before().
	private BinaryString str;
	private BinaryGeneric generic;
	private Decimal decimal1;
	private Decimal decimal2;
	private BinaryArray array;
	private BinaryMap map;
	private BinaryRow underRow;
	private byte[] bytes;
	private BinaryGenericSerializer<String> genericSerializer;
	private SqlTimestamp sqlTimestamp1;
	private SqlTimestamp sqlTimestamp2;
	@Before
	public void before() {
		str = BinaryString.fromString("haha");
		generic = new BinaryGeneric<>("haha");
		genericSerializer = new BinaryGenericSerializer<>(StringSerializer.INSTANCE);
		// decimal1: precision 5, created from a long; decimal2: precision 20, from BigDecimal.
		decimal1 = Decimal.fromLong(10, 5, 0);
		decimal2 = Decimal.fromBigDecimal(new BigDecimal(11), 20, 0);
		array = new BinaryArray();
		{
			// Two-element int array: [15, 16].
			BinaryArrayWriter arrayWriter = new BinaryArrayWriter(array, 2, 4);
			arrayWriter.writeInt(0, 15);
			arrayWriter.writeInt(1, 16);
			arrayWriter.complete();
		}
		// Map uses the same array for both keys and values.
		map = BinaryMap.valueOf(array, array);
		underRow = new BinaryRow(2);
		{
			// Nested row stored at field 14 of the fixture: (15, 16).
			BinaryRowWriter writer = new BinaryRowWriter(underRow);
			writer.writeInt(0, 15);
			writer.writeInt(1, 16);
			writer.complete();
		}
		bytes = new byte[] {1, 5, 6};
		// sqlTimestamp1 is read back with precision 3, sqlTimestamp2 with precision 9.
		sqlTimestamp1 = SqlTimestamp.fromEpochMillis(123L);
		sqlTimestamp2 = SqlTimestamp.fromLocalDateTime(LocalDateTime.of(1969, 1, 1, 0, 0, 0, 123456789));
	}
	@Test
	public void testBinaryRow() {
		testAll(getBinaryRow());
	}
	@Test
	public void testNestedRow() {
		// Wrap the fixture row inside a one-field binary row, then test the nested view.
		BinaryRow row = new BinaryRow(1);
		BinaryRowWriter writer = new BinaryRowWriter(row);
		writer.writeRow(0, getBinaryRow(), null);
		writer.complete();
		testAll(row.getRow(0, 18));
	}
	// Builds the 18-field binary fixture row. Field 7 is left unwritten here
	// (no char write below); testAll() never reads field 7, so all variants agree.
	private BinaryRow getBinaryRow() {
		BinaryRow row = new BinaryRow(18);
		BinaryRowWriter writer = new BinaryRowWriter(row);
		writer.writeBoolean(0, true);
		writer.writeByte(1, (byte) 1);
		writer.writeShort(2, (short) 2);
		writer.writeInt(3, 3);
		writer.writeLong(4, 4);
		writer.writeFloat(5, 5);
		writer.writeDouble(6, 6);
		writer.writeString(8, str);
		writer.writeGeneric(9, generic, genericSerializer);
		writer.writeDecimal(10, decimal1, 5);
		writer.writeDecimal(11, decimal2, 20);
		writer.writeArray(12, array, new BaseArraySerializer(DataTypes.INT().getLogicalType(), null));
		writer.writeMap(13, map, new BaseMapSerializer(
			DataTypes.INT().getLogicalType(), DataTypes.INT().getLogicalType(), null));
		writer.writeRow(14, underRow, new BaseRowSerializer(null, RowType.of(new IntType(), new IntType())));
		writer.writeBinary(15, bytes);
		writer.writeTimestamp(16, sqlTimestamp1, 3);
		writer.writeTimestamp(17, sqlTimestamp2, 9);
		return row;
	}
	@Test
	public void testGenericRow() {
		// Same fixture expressed as a GenericRow (field 7 holds a char here).
		GenericRow row = new GenericRow(18);
		row.setField(0, true);
		row.setField(1, (byte) 1);
		row.setField(2, (short) 2);
		row.setField(3, 3);
		row.setField(4, (long) 4);
		row.setField(5, (float) 5);
		row.setField(6, (double) 6);
		row.setField(7, (char) 7);
		row.setField(8, str);
		row.setField(9, generic);
		row.setField(10, decimal1);
		row.setField(11, decimal2);
		row.setField(12, array);
		row.setField(13, map);
		row.setField(14, underRow);
		row.setField(15, bytes);
		row.setField(16, sqlTimestamp1);
		row.setField(17, sqlTimestamp2);
		testAll(row);
	}
	@Test
	public void testBoxedWrapperRow() {
		// Same fixture via BoxedWrapperRow: primitives use typed setters,
		// everything else goes through setNonPrimitiveValue. Field 7 is unset.
		BoxedWrapperRow row = new BoxedWrapperRow(18);
		row.setBoolean(0, true);
		row.setByte(1, (byte) 1);
		row.setShort(2, (short) 2);
		row.setInt(3, 3);
		row.setLong(4, (long) 4);
		row.setFloat(5, (float) 5);
		row.setDouble(6, (double) 6);
		row.setNonPrimitiveValue(8, str);
		row.setNonPrimitiveValue(9, generic);
		row.setNonPrimitiveValue(10, decimal1);
		row.setNonPrimitiveValue(11, decimal2);
		row.setNonPrimitiveValue(12, array);
		row.setNonPrimitiveValue(13, map);
		row.setNonPrimitiveValue(14, underRow);
		row.setNonPrimitiveValue(15, bytes);
		row.setNonPrimitiveValue(16, sqlTimestamp1);
		row.setNonPrimitiveValue(17, sqlTimestamp2);
		testAll(row);
	}
	@Test
	public void testJoinedRow() {
		// Split the 18 fields across two GenericRows (5 + 13) joined together;
		// JoinedRow must expose them as one contiguous 18-field row.
		GenericRow row1 = new GenericRow(5);
		row1.setField(0, true);
		row1.setField(1, (byte) 1);
		row1.setField(2, (short) 2);
		row1.setField(3, 3);
		row1.setField(4, (long) 4);
		GenericRow row2 = new GenericRow(13);
		row2.setField(0, (float) 5);
		row2.setField(1, (double) 6);
		row2.setField(2, (char) 7);
		row2.setField(3, str);
		row2.setField(4, generic);
		row2.setField(5, decimal1);
		row2.setField(6, decimal2);
		row2.setField(7, array);
		row2.setField(8, map);
		row2.setField(9, underRow);
		row2.setField(10, bytes);
		row2.setField(11, sqlTimestamp1);
		row2.setField(12, sqlTimestamp2);
		testAll(new JoinedRow(row1, row2));
	}
	// Shared assertion battery: verifies arity, header byte, all typed getters,
	// typed setters for mutable fields, and null-flag handling. Field 7 (char)
	// is intentionally never read so binary rows (which skip it) also pass.
	private void testAll(BaseRow row) {
		assertEquals(18, row.getArity());
		// test header
		assertEquals(0, row.getHeader());
		row.setHeader((byte) 1);
		assertEquals(1, row.getHeader());
		// test get
		assertTrue(row.getBoolean(0));
		assertEquals(1, row.getByte(1));
		assertEquals(2, row.getShort(2));
		assertEquals(3, row.getInt(3));
		assertEquals(4, row.getLong(4));
		assertEquals(5, (int) row.getFloat(5));
		assertEquals(6, (int) row.getDouble(6));
		assertEquals(str, row.getString(8));
		assertThat(row.getGeneric(9), equivalent(generic, genericSerializer));
		assertEquals(decimal1, row.getDecimal(10, 5, 0));
		assertEquals(decimal2, row.getDecimal(11, 20, 0));
		assertEquals(array, row.getArray(12));
		assertEquals(map, row.getMap(13));
		assertEquals(15, row.getRow(14, 2).getInt(0));
		assertEquals(16, row.getRow(14, 2).getInt(1));
		assertArrayEquals(bytes, row.getBinary(15));
		assertEquals(sqlTimestamp1, row.getTimestamp(16, 3));
		assertEquals(sqlTimestamp2, row.getTimestamp(17, 9));
		// test set
		row.setBoolean(0, false);
		assertFalse(row.getBoolean(0));
		row.setByte(1, (byte) 2);
		assertEquals(2, row.getByte(1));
		row.setShort(2, (short) 3);
		assertEquals(3, row.getShort(2));
		row.setInt(3, 4);
		assertEquals(4, row.getInt(3));
		row.setLong(4, 5);
		assertEquals(5, row.getLong(4));
		row.setFloat(5, 6);
		assertEquals(6, (int) row.getFloat(5));
		row.setDouble(6, 7);
		assertEquals(7, (int) row.getDouble(6));
		row.setDecimal(10, Decimal.fromLong(11, 5, 0), 5);
		assertEquals(Decimal.fromLong(11, 5, 0), row.getDecimal(10, 5, 0));
		row.setDecimal(11, Decimal.fromBigDecimal(new BigDecimal(12), 20, 0), 20);
		assertEquals(Decimal.fromBigDecimal(new BigDecimal(12), 20, 0), row.getDecimal(11, 20, 0));
		row.setTimestamp(16, SqlTimestamp.fromEpochMillis(456L), 3);
		assertEquals(SqlTimestamp.fromEpochMillis(456L), row.getTimestamp(16, 3));
		row.setTimestamp(17, SqlTimestamp.fromTimestamp(Timestamp.valueOf("1970-01-01 00:00:00.123456789")), 9);
		assertEquals(SqlTimestamp.fromTimestamp(Timestamp.valueOf("1970-01-01 00:00:00.123456789")), row.getTimestamp(17, 9));
		// test null
		assertFalse(row.isNullAt(0));
		row.setNullAt(0);
		assertTrue(row.isNullAt(0));
	}
}
| apache-2.0 |
jwagenleitner/incubator-groovy | subprojects/groovy-xml/src/main/java/groovy/xml/dom/DOMCategory.java | 20014 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.xml.dom;
import groovy.lang.Closure;
import groovy.lang.GroovyRuntimeException;
import groovy.lang.IntRange;
import groovy.xml.DOMBuilder;
import groovy.xml.QName;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.XmlGroovyMethods;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Category class which adds GPath style operations to Java's DOM classes.
 *
 * <p>All members are static; the class is intended for use with Groovy's
 * {@code use(DOMCategory) { ... }} mechanism, letting callers navigate and
 * mutate {@link org.w3c.dom.Node} trees with GPath-style expressions such as
 * {@code root.'child'}, {@code node.'@attr'}, {@code node.'**'} and index/range
 * subscripts.
 */
public class DOMCategory {
    // JVM-global whitespace flags — see the setter javadocs for the warnings.
    private static boolean trimWhitespace = false;
    private static boolean keepIgnorableWhitespace = false;
    /**
     * @return true if text elements are trimmed before returning; default false
     */
    public static synchronized boolean isGlobalTrimWhitespace() {
        return trimWhitespace;
    }
    /**
     * Whether text content is trimmed (removing leading and trailing whitespace); default false.
     * WARNING: this is a global setting. Altering it will affect all DOMCategory usage within the current Java process.
     * It is not recommended that this is altered; instead call the trim() method on the returned text, but the
     * flag is available to support legacy Groovy behavior.
     *
     * @param trimWhitespace the new value
     */
    public static synchronized void setGlobalTrimWhitespace(boolean trimWhitespace) {
        DOMCategory.trimWhitespace = trimWhitespace;
    }
    /**
     * @return true if ignorable whitespace (e.g. whitespace between elements) is kept; default false
     */
    public static synchronized boolean isGlobalKeepIgnorableWhitespace() {
        return keepIgnorableWhitespace;
    }
    /**
     * Whether ignorable whitespace (e.g. whitespace between elements) is kept (default false).
     * WARNING: this is a global setting. Altering it will affect all DOMCategory usage within the current Java process.
     *
     * @param keepIgnorableWhitespace the new value
     */
    public static synchronized void setGlobalKeepIgnorableWhitespace(boolean keepIgnorableWhitespace) {
        DOMCategory.keepIgnorableWhitespace = keepIgnorableWhitespace;
    }
    /** GPath property access on an element: child name, "@attr", ".." or "**". */
    public static Object get(Element element, String elementName) {
        return xgetAt(element, elementName);
    }
    /** GPath property access on a node list, applied to each Element it contains. */
    public static Object get(NodeList nodeList, String elementName) {
        if (nodeList instanceof Element) {
            // things like com.sun.org.apache.xerces.internal.dom.DeferredElementNSImpl
            // do implement Element, NodeList and Node. But here we prefer element,
            // so we force the usage of Element. Without this DOMCategoryTest may fail
            // in strange ways
            return xgetAt((Element)nodeList, elementName);
        } else {
            return xgetAt(nodeList, elementName);
        }
    }
    /** GPath attribute access on an attribute map: returns the named attribute's value. */
    public static Object get(NamedNodeMap nodeMap, String elementName) {
        return xgetAt(nodeMap, elementName);
    }
    // Dispatches the special GPath names: ".." -> parent, "**" -> depth-first
    // descendants, "@x" -> attribute value; anything else -> matching child elements.
    private static Object xgetAt(Element element, String elementName) {
        if ("..".equals(elementName)) {
            return parent(element);
        }
        if ("**".equals(elementName)) {
            return depthFirst(element);
        }
        if (elementName.startsWith("@")) {
            return element.getAttribute(elementName.substring(1));
        }
        return getChildElements(element, elementName);
    }
    // Applies the element lookup to every Element in the list and flattens the
    // results. Attribute lookups ("@x") return a plain List of values; element
    // lookups return a combined NodeList view.
    private static Object xgetAt(NodeList nodeList, String elementName) {
        List<NodeList> results = new ArrayList<NodeList>();
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (node instanceof Element) {
                addResult(results, get((Element)node, elementName));
            }
        }
        if (elementName.startsWith("@")) {
            return results;
        }
        return new NodeListsHolder(results);
    }
    /** @return the attribute map of the given element */
    public static NamedNodeMap attributes(Element element) {
        return element.getAttributes();
    }
    // NOTE(review): getNamedItem returns null for an absent attribute, so this
    // throws NullPointerException when elementName is not present — confirm
    // whether callers rely on that before changing it.
    private static String xgetAt(NamedNodeMap namedNodeMap, String elementName) {
        Attr a = (Attr) namedNodeMap.getNamedItem(elementName);
        return a.getValue();
    }
    /** @return the number of attributes in the map */
    public static int size(NamedNodeMap namedNodeMap) {
        return namedNodeMap.getLength();
    }
    /** Index subscript on a node ({@code node[i]}); supports negative indices. */
    public static Node getAt(Node o, int i) {
        return nodeGetAt(o, i);
    }
    public static Node getAt(NodeListsHolder o, int i) {
        return nodeGetAt(o, i);
    }
    public static Node getAt(NodesHolder o, int i) {
        return nodeGetAt(o, i);
    }
    /** Range subscript on a node ({@code node[a..b]}); supports negative/reversed ranges. */
    public static NodeList getAt(Node o, IntRange r) {
        return nodesGetAt(o, r);
    }
    public static NodeList getAt(NodeListsHolder o, IntRange r) {
        return nodesGetAt(o, r);
    }
    public static NodeList getAt(NodesHolder o, IntRange r) {
        return nodesGetAt(o, r);
    }
    // Tries Element child-indexing first, then falls back to NodeList indexing
    // (some DOM implementations are both); returns null when neither applies.
    private static Node nodeGetAt(Object o, int i) {
        if (o instanceof Element) {
            Node n = xgetAt((Element)o, i);
            if (n != null) return n;
        }
        if (o instanceof NodeList) {
            return xgetAt((NodeList)o, i);
        }
        return null;
    }
    // Range analogue of nodeGetAt: Element children first, then NodeList view.
    private static NodeList nodesGetAt(Object o, IntRange r) {
        if (o instanceof Element) {
            NodeList n = xgetAt((Element)o, r);
            if (n != null) return n;
        }
        if (o instanceof NodeList) {
            return xgetAt((NodeList)o, r);
        }
        return null;
    }
    // Indexes into the element's child elements, or returns null if it has none.
    private static Node xgetAt(Element element, int i) {
        if (hasChildElements(element, "*")) {
            NodeList nodeList = getChildElements(element, "*");
            return xgetAt(nodeList, i);
        }
        return null;
    }
    // Normalises a negative index relative to the list length; out-of-range -> null.
    private static Node xgetAt(NodeList nodeList, int i) {
        if (i < 0) {
            i += nodeList.getLength();
        }
        if (i >= 0 && i < nodeList.getLength()) {
            return nodeList.item(i);
        }
        return null;
    }
    // Range-indexes into the element's child elements, or returns null if none.
    private static NodeList xgetAt(Element element, IntRange r) {
        if (hasChildElements(element, "*")) {
            NodeList nodeList = getChildElements(element, "*");
            return xgetAt(nodeList, r);
        }
        return null;
    }
    // Copies the requested sub-range of the list into a new NodesHolder,
    // handling negative endpoints and reversed ranges.
    private static NodeList xgetAt(NodeList nodeList, IntRange r) {
        int from = r.getFromInt();
        int to = r.getToInt();
        // If the range is of size 1, then we can use the existing
        // xgetAt() that takes an integer index.
        if (from == to) return new NodesHolder(Collections.singletonList(xgetAt(nodeList, from)));
        // Normalise negative indices.
        if (from < 0) from = from + nodeList.getLength();
        if (to < 0) to = to + nodeList.getLength();
        // After normalisation, 'from' may be greater than 'to'. In that
        // case, we need to reverse them and make sure the range's 'reverse'
        // property is correct.
        // TODO We should probably use DefaultGroovyMethodsSupport.subListBorders(),
        // but that's protected and unavailable to us.
        if (from > to) {
            r = r.isReverse() ? new IntRange(to, from) : new IntRange(from, to);
            from = r.getFromInt();
            to = r.getToInt();
        }
        // Copy the required nodes into a new list.
        List<Node> nodes = new ArrayList<Node>(to - from + 1);
        if (r.isReverse()) {
            for (int i = to; i >= from; i--) nodes.add(nodeList.item(i));
        }
        else {
            for (int i = from; i <= to; i++) nodes.add(nodeList.item(i));
        }
        return new NodesHolder(nodes);
    }
    /** @return the node's name, e.g. the tag name for elements */
    public static String name(Node node) {
        return node.getNodeName();
    }
    /** @return the node's parent, or null if it has none */
    public static Node parent(Node node) {
        return node.getParentNode();
    }
    /**
     * Returns the text content of a node: the node value for text/CDATA nodes,
     * otherwise the concatenated text of all children (recursively), or "".
     */
    public static String text(Node node) {
        if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.CDATA_SECTION_NODE) {
            return node.getNodeValue();
        }
        if (node.hasChildNodes()) {
            return text(node.getChildNodes());
        }
        return "";
    }
    /** Concatenates the text of every node in the list. */
    public static String text(NodeList nodeList) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < nodeList.getLength(); i++) {
            sb.append(text(nodeList.item(i)));
        }
        return sb.toString();
    }
    /** Copies the live NodeList into a plain java.util.List snapshot. */
    public static List<Node> list(NodeList self) {
        List<Node> answer = new ArrayList<Node>();
        Iterator<Node> it = XmlGroovyMethods.iterator(self);
        while (it.hasNext()) {
            answer.add(it.next());
        }
        return answer;
    }
    /**
     * Depth-first traversal ("**" GPath): the element itself followed by all of
     * its descendant elements (via getElementsByTagName("*")).
     */
    public static NodeList depthFirst(Element self) {
        List<NodeList> result = new ArrayList<NodeList>();
        result.add(createNodeList(self));
        result.add(self.getElementsByTagName("*"));
        return new NodeListsHolder(result);
    }
    /**
     * Sets the element's text value, creating a text node as first child if the
     * element is empty, otherwise overwriting the first child's node value.
     */
    public static void setValue(Element self, String value) {
        Node firstChild = self.getFirstChild();
        if (firstChild == null) {
            firstChild = self.getOwnerDocument().createTextNode(value);
            self.appendChild(firstChild);
        }
        firstChild.setNodeValue(value);
    }
    /**
     * GPath property assignment: "@name" sets/replaces an attribute; any other
     * property is delegated to Groovy's normal property-setting machinery.
     */
    public static void putAt(Element self, String property, Object value) {
        if (property.startsWith("@")) {
            String attributeName = property.substring(1);
            Document doc = self.getOwnerDocument();
            Attr newAttr = doc.createAttribute(attributeName);
            newAttr.setValue(value.toString());
            self.setAttributeNode(newAttr);
            return;
        }
        InvokerHelper.setProperty(self, property, value);
    }
    /** Appends a new empty child element with the given name; returns it. */
    public static Element appendNode(Element self, Object name) {
        return appendNode(self, name, (String)null);
    }
    /** Appends a new child element with the given name and attributes; returns it. */
    public static Element appendNode(Element self, Object name, Map attributes) {
        return appendNode(self, name, attributes, null);
    }
    /**
     * Appends a new child element with the given name (plain String or namespaced
     * {@link QName}) and optional text value; returns the new element.
     */
    public static Element appendNode(Element self, Object name, String value) {
        Document doc = self.getOwnerDocument();
        Element newChild;
        if (name instanceof QName) {
            QName qn = (QName) name;
            newChild = doc.createElementNS(qn.getNamespaceURI(), qn.getQualifiedName());
        } else {
            newChild = doc.createElement(name.toString());
        }
        if (value != null) {
            Text text = doc.createTextNode(value);
            newChild.appendChild(text);
        }
        self.appendChild(newChild);
        return newChild;
    }
    /** Appends a new child element with name, attributes and text value; returns it. */
    public static Element appendNode(Element self, Object name, Map attributes, String value) {
        Element result = appendNode(self, name, value);
        for (Object o : attributes.entrySet()) {
            Map.Entry e = (Map.Entry) o;
            putAt(result, "@" + e.getKey().toString(), e.getValue());
        }
        return result;
    }
    /**
     * Replaces a single-element holder's node with the content built by the
     * closure; rejects holders that do not contain exactly one node.
     */
    public static Node replaceNode(NodesHolder self, Closure c) {
        if (self.getLength() <= 0 || self.getLength() > 1) {
            throw new GroovyRuntimeException(
                "replaceNode() can only be used to replace a single element, " +
                    "but was applied to " + self.getLength() + " elements."
            );
        }
        return replaceNode(self.item(0), c);
    }
    /**
     * Replaces this node with the nodes built by the closure (inserted in place,
     * then the original is removed). Replacing the document root is unsupported.
     *
     * @return the removed node
     */
    public static Node replaceNode(Node self, Closure c) {
        if (self.getParentNode() instanceof Document) {
            throw new UnsupportedOperationException("Replacing the root node is not supported");
        }
        appendNodes(self, c);
        self.getParentNode().removeChild(self);
        return self;
    }
    /**
     * Adds the nodes built by the closure as siblings immediately after this
     * element. Adding siblings to the document root is unsupported.
     */
    public static void plus(Element self, Closure c) {
        if (self.getParentNode() instanceof Document) {
            throw new UnsupportedOperationException("Adding sibling nodes to the root node is not supported");
        }
        appendNodes(self, c);
    }
    // Builds new content under a temporary "rootNode" element via DOMBuilder and
    // inserts each produced child before self's next sibling (i.e. after self).
    private static void appendNodes(Node self, Closure c) {
        Node parent = self.getParentNode();
        Node beforeNode = self.getNextSibling();
        DOMBuilder b = new DOMBuilder(self.getOwnerDocument());
        Element newNodes = (Element) b.invokeMethod("rootNode", c);
        Iterator<Node> iter = XmlGroovyMethods.iterator(children(newNodes));
        while (iter.hasNext()) {
            parent.insertBefore(iter.next(), beforeNode);
        }
    }
    /**
     * Returns the list of any direct String nodes of this node.
     *
     * @return the list of String values from this node
     * @since 2.3.0
     */
    public static List<String> localText(Element self) {
        List<String> result = new ArrayList<String>();
        if (self.getNodeType() == Node.TEXT_NODE || self.getNodeType() == Node.CDATA_SECTION_NODE) {
            result.add(self.getNodeValue());
        } else if (self.hasChildNodes()) {
            NodeList nodeList = self.getChildNodes();
            for (int i = 0; i < nodeList.getLength(); i++) {
                Node item = nodeList.item(i);
                if (item.getNodeType() == Node.TEXT_NODE || item.getNodeType() == Node.CDATA_SECTION_NODE) {
                    result.add(item.getNodeValue());
                }
            }
        }
        return result;
    }
    /** Applies {@link #plus(Element, Closure)} to every item in the list. */
    public static void plus(NodeList self, Closure c) {
        for (int i = 0; i < self.getLength(); i++) {
            plus((Element) self.item(i), c);
        }
    }
    // Wraps a single element in a one-item NodeList.
    private static NodeList createNodeList(Element self) {
        List<Node> first = new ArrayList<Node>();
        first.add(self);
        return new NodesHolder(first);
    }
    /**
     * Breadth-first traversal: the element itself, then its children, then their
     * children, level by level, flattened into one NodeList view.
     */
    public static NodeList breadthFirst(Element self) {
        List<NodeList> result = new ArrayList<NodeList>();
        NodeList thisLevel = createNodeList(self);
        while (thisLevel.getLength() > 0) {
            result.add(thisLevel);
            thisLevel = getNextLevel(thisLevel);
        }
        return new NodeListsHolder(result);
    }
    // Collects the child elements of every element on the current level.
    private static NodeList getNextLevel(NodeList thisLevel) {
        List<NodeList> result = new ArrayList<NodeList>();
        for (int i = 0; i < thisLevel.getLength(); i++) {
            Node n = thisLevel.item(i);
            if (n instanceof Element) {
                result.add(getChildElements((Element) n, "*"));
            }
        }
        return new NodeListsHolder(result);
    }
    /** @return all direct child elements of this element */
    public static NodeList children(Element self) {
        return getChildElements(self, "*");
    }
    private static boolean hasChildElements(Element self, String elementName) {
        return getChildElements(self, elementName).getLength() > 0;
    }
    // Collects direct children matching elementName ("*" matches all elements and
    // also includes non-empty text children, subject to the global whitespace flags).
    private static NodeList getChildElements(Element self, String elementName) {
        List<Node> result = new ArrayList<Node>();
        NodeList nodeList = self.getChildNodes();
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                Element child = (Element) node;
                if ("*".equals(elementName) || child.getTagName().equals(elementName)) {
                    result.add(child);
                }
            } else if (node.getNodeType() == Node.TEXT_NODE) {
                String value = node.getNodeValue();
                // Trim when globally requested, or when the text is all-whitespace
                // and ignorable whitespace is not being kept.
                if ((!isGlobalKeepIgnorableWhitespace() && value.trim().length() == 0) || isGlobalTrimWhitespace()) {
                    value = value.trim();
                }
                if ("*".equals(elementName) && value.length() > 0) {
                    // NOTE: mutates the DOM text node in place with the trimmed value.
                    node.setNodeValue(value);
                    result.add(node);
                }
            }
        }
        return new NodesHolder(result);
    }
    /** String rendering: text nodes print their value, NodeLists print as "[a, b]". */
    public static String toString(Object o) {
        if (o instanceof Node) {
            if (((Node) o).getNodeType() == Node.TEXT_NODE) {
                return ((Node) o).getNodeValue();
            }
        }
        if (o instanceof NodeList) {
            return toString((NodeList) o);
        }
        return o.toString();
    }
    /**
     * Evaluates an XPath expression against this node with the requested return
     * type (e.g. NODESET, NUMBER); wraps checked XPath failures as runtime errors.
     */
    public static Object xpath(Node self, String expression, javax.xml.namespace.QName returnType) {
        final XPath xpath = XPathFactory.newInstance().newXPath();
        try {
            return xpath.evaluate(expression, self, returnType);
        } catch (XPathExpressionException e) {
            throw new GroovyRuntimeException(e);
        }
    }
    /** Evaluates an XPath expression against this node, returning its string value. */
    public static String xpath(Node self, String expression) {
        final XPath xpath = XPathFactory.newInstance().newXPath();
        try {
            return xpath.evaluate(expression, self);
        } catch (XPathExpressionException e) {
            throw new GroovyRuntimeException(e);
        }
    }
    private static String toString(NodeList self) {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        Iterator it = XmlGroovyMethods.iterator(self);
        while (it.hasNext()) {
            if (sb.length() > 1) sb.append(", ");
            sb.append(it.next().toString());
        }
        sb.append("]");
        return sb.toString();
    }
    /** @return the number of nodes in the list */
    public static int size(NodeList self) {
        return self.getLength();
    }
    /** @return true if the list contains no nodes */
    public static boolean isEmpty(NodeList self) {
        return size(self) == 0;
    }
    // Flattens: collections are merged into results element-by-element,
    // single non-null values are appended as-is, nulls are dropped.
    @SuppressWarnings("unchecked")
    private static void addResult(List results, Object result) {
        if (result != null) {
            if (result instanceof Collection) {
                results.addAll((Collection) result);
            } else {
                results.add(result);
            }
        }
    }
    // A read-only NodeList view over a list of NodeLists, indexed as if they
    // were concatenated.
    private static final class NodeListsHolder implements NodeList {
        private final List<NodeList> nodeLists;
        private NodeListsHolder(List<NodeList> nodeLists) {
            this.nodeLists = nodeLists;
        }
        public int getLength() {
            int length = 0;
            for (NodeList nl : nodeLists) {
                length += nl.getLength();
            }
            return length;
        }
        public Node item(int index) {
            // Walk the sub-lists, subtracting each length until the index lands.
            int relativeIndex = index;
            for (NodeList nl : nodeLists) {
                if (relativeIndex < nl.getLength()) {
                    return nl.item(relativeIndex);
                }
                relativeIndex -= nl.getLength();
            }
            return null;
        }
        public String toString() {
            return DOMCategory.toString(this);
        }
    }
    // A NodeList view over a plain java.util.List of nodes; out-of-range
    // indices return null per the DOM NodeList contract.
    private static final class NodesHolder implements NodeList {
        private final List<Node> nodes;
        private NodesHolder(List<Node> nodes) {
            this.nodes = nodes;
        }
        public int getLength() {
            return nodes.size();
        }
        public Node item(int index) {
            if (index < 0 || index >= getLength()) {
                return null;
            }
            return nodes.get(index);
        }
    }
}
| apache-2.0 |
andrewvc/elasticsearch | src/main/java/org/elasticsearch/common/io/stream/StreamInput.java | 13283 | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.io.stream;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.joda.time.DateTime;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.SoftReference;
import java.util.*;
/**
*
*/
public abstract class StreamInput extends InputStream {
private static final ThreadLocal<SoftReference<char[]>> charCache = new ThreadLocal<SoftReference<char[]>>();
private static char[] charCache(int size) {
SoftReference<char[]> ref = charCache.get();
char[] arr = (ref == null) ? null : ref.get();
if (arr == null || arr.length < size) {
arr = new char[ArrayUtil.oversize(size, RamUsageEstimator.NUM_BYTES_CHAR)];
charCache.set(new SoftReference<char[]>(arr));
}
return arr;
}
private Version version = Version.CURRENT;
public Version getVersion() {
return this.version;
}
public StreamInput setVersion(Version version) {
this.version = version;
return this;
}
/**
* Reads and returns a single byte.
*/
public abstract byte readByte() throws IOException;
/**
* Reads a specified number of bytes into an array at the specified offset.
*
* @param b the array to read bytes into
* @param offset the offset in the array to start storing bytes
* @param len the number of bytes to read
*/
public abstract void readBytes(byte[] b, int offset, int len) throws IOException;
/**
* Reads a bytes reference from this stream, might hold an actual reference to the underlying
* bytes of the stream.
*/
public BytesReference readBytesReference() throws IOException {
int length = readVInt();
return readBytesReference(length);
}
/**
* Reads a bytes reference from this stream, might hold an actual reference to the underlying
* bytes of the stream.
*/
public BytesReference readBytesReference(int length) throws IOException {
if (length == 0) {
return BytesArray.EMPTY;
}
byte[] bytes = new byte[length];
readBytes(bytes, 0, length);
return new BytesArray(bytes, 0, length);
}
public BytesRef readBytesRef() throws IOException {
int length = readVInt();
return readBytesRef(length);
}
public BytesRef readBytesRef(int length) throws IOException {
if (length == 0) {
return new BytesRef();
}
byte[] bytes = new byte[length];
readBytes(bytes, 0, length);
return new BytesRef(bytes, 0, length);
}
public void readFully(byte[] b) throws IOException {
readBytes(b, 0, b.length);
}
public short readShort() throws IOException {
return (short) (((readByte() & 0xFF) << 8) | (readByte() & 0xFF));
}
/**
* Reads four bytes and returns an int.
*/
public int readInt() throws IOException {
return ((readByte() & 0xFF) << 24) | ((readByte() & 0xFF) << 16)
| ((readByte() & 0xFF) << 8) | (readByte() & 0xFF);
}
/**
* Reads an int stored in variable-length format. Reads between one and
* five bytes. Smaller values take fewer bytes. Negative numbers
* will always use all 5 bytes and are therefore better serialized
* using {@link #readInt}
*/
public int readVInt() throws IOException {
byte b = readByte();
int i = b & 0x7F;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7F) << 7;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7F) << 14;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7F) << 21;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
assert (b & 0x80) == 0;
return i | ((b & 0x7F) << 28);
}
/**
* Reads eight bytes and returns a long.
*/
public long readLong() throws IOException {
return (((long) readInt()) << 32) | (readInt() & 0xFFFFFFFFL);
}
/**
* Reads a long stored in variable-length format. Reads between one and
* nine bytes. Smaller values take fewer bytes. Negative numbers are not
* supported.
*/
public long readVLong() throws IOException {
byte b = readByte();
long i = b & 0x7FL;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 7;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 14;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 21;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 28;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 35;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 42;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
i |= (b & 0x7FL) << 49;
if ((b & 0x80) == 0) {
return i;
}
b = readByte();
assert (b & 0x80) == 0;
return i | ((b & 0x7FL) << 56);
}
@Nullable
public Text readOptionalText() throws IOException {
int length = readInt();
if (length == -1) {
return null;
}
return new StringAndBytesText(readBytesReference(length));
}
public Text readText() throws IOException {
// use StringAndBytes so we can cache the string if its ever converted to it
int length = readInt();
return new StringAndBytesText(readBytesReference(length));
}
public Text[] readTextArray() throws IOException {
int size = readVInt();
if (size == 0) {
return StringText.EMPTY_ARRAY;
}
Text[] ret = new Text[size];
for (int i = 0; i < size; i++) {
ret[i] = readText();
}
return ret;
}
public Text readSharedText() throws IOException {
return readText();
}
@Nullable
public String readOptionalString() throws IOException {
if (readBoolean()) {
return readString();
}
return null;
}
@Nullable
public String readOptionalSharedString() throws IOException {
if (readBoolean()) {
return readSharedString();
}
return null;
}
public String readString() throws IOException {
int charCount = readVInt();
char[] chars = charCache(charCount);
int c, charIndex = 0;
while (charIndex < charCount) {
c = readByte() & 0xff;
switch (c >> 4) {
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 7:
chars[charIndex++] = (char) c;
break;
case 12:
case 13:
chars[charIndex++] = (char) ((c & 0x1F) << 6 | readByte() & 0x3F);
break;
case 14:
chars[charIndex++] = (char) ((c & 0x0F) << 12 | (readByte() & 0x3F) << 6 | (readByte() & 0x3F) << 0);
break;
}
}
return new String(chars, 0, charCount);
}
public String readSharedString() throws IOException {
return readString();
}
public final float readFloat() throws IOException {
return Float.intBitsToFloat(readInt());
}
public final double readDouble() throws IOException {
return Double.longBitsToDouble(readLong());
}
/**
* Reads a boolean.
*/
public final boolean readBoolean() throws IOException {
return readByte() != 0;
}
@Nullable
public final Boolean readOptionalBoolean() throws IOException {
byte val = readByte();
if (val == 2) {
return null;
}
if (val == 1) {
return true;
}
return false;
}
/**
* Resets the stream.
*/
public abstract void reset() throws IOException;
/**
* Closes the stream to further operations.
*/
public abstract void close() throws IOException;
// // IS
//
// @Override public int read() throws IOException {
// return readByte();
// }
//
// // Here, we assume that we always can read the full byte array
//
// @Override public int read(byte[] b, int off, int len) throws IOException {
// readBytes(b, off, len);
// return len;
// }
public String[] readStringArray() throws IOException {
int size = readVInt();
if (size == 0) {
return Strings.EMPTY_ARRAY;
}
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = readString();
}
return ret;
}
/**
 * Reads a generic-value map (written with a map type tag by the writer side).
 *
 * @return the decoded map, or {@code null} if the encoded value was null
 */
@Nullable
public Map<String, Object> readMap() throws IOException {
    // Unchecked cast: the caller trusts the stream to contain a map here.
    return (Map<String, Object>) readGenericValue();
}
/**
 * Reads a value in the tagged generic encoding: a one-byte type tag
 * followed by the type-specific payload.
 *
 * @return the decoded value, or {@code null} for the explicit null tag (-1)
 * @throws IOException if the tag is unknown or the stream cannot be read
 */
@SuppressWarnings({"unchecked"})
@Nullable
public Object readGenericValue() throws IOException {
    byte type = readByte(); // tag written by the corresponding writer
    switch (type) {
        case -1:
            return null; // explicit null marker
        case 0:
            return readString();
        case 1:
            return readInt();
        case 2:
            return readLong();
        case 3:
            return readFloat();
        case 4:
            return readDouble();
        case 5:
            return readBoolean();
        case 6:
            // raw byte array: vInt length followed by the bytes
            int bytesSize = readVInt();
            byte[] value = new byte[bytesSize];
            readBytes(value, 0, bytesSize);
            return value;
        case 7:
            // list of nested generic values
            int size = readVInt();
            List list = new ArrayList(size);
            for (int i = 0; i < size; i++) {
                list.add(readGenericValue());
            }
            return list;
        case 8:
            // Object[] of nested generic values
            int size8 = readVInt();
            Object[] list8 = new Object[size8];
            for (int i = 0; i < size8; i++) {
                list8[i] = readGenericValue();
            }
            return list8;
        case 9:
            // map preserving insertion order
            int size9 = readVInt();
            Map map9 = new LinkedHashMap(size9);
            for (int i = 0; i < size9; i++) {
                map9.put(readSharedString(), readGenericValue());
            }
            return map9;
        case 10:
            // unordered map
            int size10 = readVInt();
            Map map10 = new HashMap(size10);
            for (int i = 0; i < size10; i++) {
                map10.put(readSharedString(), readGenericValue());
            }
            return map10;
        case 11:
            return readByte();
        case 12:
            // date from epoch millis
            return new Date(readLong());
        case 13:
            // DateTime from epoch millis -- presumably Joda-Time; TODO confirm
            return new DateTime(readLong());
        case 14:
            return readBytesReference();
        case 15:
            return readText();
        case 16:
            return readShort();
        default:
            throw new IOException("Can't read unknown type [" + type + "]");
    }
}
/**
 * Reads a potentially absent {@link Streamable}: a leading boolean flag
 * indicates presence. When present, the supplied instance is populated
 * from this stream and returned; otherwise {@code null} is returned.
 *
 * @param streamable the instance to populate when a value is present
 * @return the populated instance, or {@code null} when absent
 */
public <T extends Streamable> T readOptionalStreamable(T streamable) throws IOException {
    if (!readBoolean()) {
        return null;
    }
    streamable.readFrom(this);
    return streamable;
}
}
| apache-2.0 |
WouterBanckenACA/aries | tx-control/tx-control-api/src/main/java/org/osgi/service/transaction/control/TransactionContext.java | 4017 | /*
* Copyright (c) OSGi Alliance (2016). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.osgi.service.transaction.control;
import java.util.function.Consumer;
import javax.transaction.xa.XAResource;
import org.osgi.service.transaction.control.recovery.RecoverableXAResource;
/**
 * A transaction context defines the current transaction, and allows resources
 * to register information and/or synchronisations.
 */
public interface TransactionContext {

    /**
     * Get the key associated with the current transaction.
     *
     * @return the transaction key, or null if there is no transaction
     */
    Object getTransactionKey();

    /**
     * Get a value scoped to this transaction.
     *
     * @param key the lookup key
     * @return The resource, or <code>null</code> if nothing is stored under the key
     */
    Object getScopedValue(Object key);

    /**
     * Associate a value with this transaction; it is retained for the
     * duration of the transaction scope.
     *
     * @param key the lookup key
     * @param value the value to store
     */
    void putScopedValue(Object key, Object value);

    /**
     * Is this transaction marked for rollback only?
     *
     * @return true if this transaction is rollback only
     * @throws IllegalStateException if no transaction is active
     */
    boolean getRollbackOnly() throws IllegalStateException;

    /**
     * Mark this transaction for rollback.
     *
     * @throws IllegalStateException if no transaction is active
     */
    void setRollbackOnly() throws IllegalStateException;

    /**
     * @return The current transaction status
     */
    TransactionStatus getTransactionStatus();

    /**
     * Register a callback that will be made before a call to commit or rollback.
     *
     * @param job the callback to run
     * @throws IllegalStateException if no transaction is active or the
     *             transaction has already passed beyond the
     *             {@link TransactionStatus#MARKED_ROLLBACK} state
     */
    void preCompletion(Runnable job) throws IllegalStateException;

    /**
     * Register a callback that will be made after the decision to commit or
     * rollback; the callback receives the final transaction status.
     *
     * @param job the callback to run
     * @throws IllegalStateException if no transaction is active
     */
    void postCompletion(Consumer<TransactionStatus> job)
            throws IllegalStateException;

    /**
     * @return true if the current transaction supports XA resources
     */
    boolean supportsXA();

    /**
     * @return true if the current transaction supports Local resources
     */
    boolean supportsLocal();

    /**
     * @return true if the TransactionContext supports read-only optimisations
     *         <em>and</em> the transaction was marked read only. In particular it is
     *         legal for this method to return false even if the transaction was marked
     *         read only by the initiating client.
     */
    boolean isReadOnly();

    /**
     * Register an XA resource with the current transaction.
     *
     * @param resource the resource to enlist
     * @param name The resource name used for recovery, may be <code>null</code>
     *            if this resource is not recoverable. If a name is passed then
     *            a corresponding {@link RecoverableXAResource} must be registered
     *            in the service registry
     * @throws IllegalStateException if no transaction is active, or the current
     *             transaction is not XA capable
     */
    void registerXAResource(XAResource resource, String name) throws IllegalStateException;

    /**
     * Register a Local resource with the current transaction.
     *
     * @param resource the resource to enlist
     * @throws IllegalStateException if no transaction is active, or the current
     *             transaction does not support local resources (see
     *             {@link #supportsLocal()})
     */
    void registerLocalResource(LocalResource resource)
            throws IllegalStateException;
}
| apache-2.0 |
Stratio/cassandra-lucene-index | builder/src/main/java/com/stratio/cassandra/lucene/builder/index/schema/mapping/SingleColumnMapper.java | 1465 | /*
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.lucene.builder.index.schema.mapping;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Base class for mappers between a single Cassandra column and a Lucene field.
 *
 * @param <T> the type of the mapper to be built (self type, enabling fluent chaining)
 * @author Andres de la Pena {@literal <adelapena@stratio.com>}
 */
public abstract class SingleColumnMapper<T extends SingleColumnMapper<T>> extends Mapper<T> {

    /** The name of the column to be mapped; serialized under the "column" key. */
    @JsonProperty("column")
    protected String column;

    /**
     * Sets the name of the Cassandra column to be mapped.
     *
     * @param column the name of the column to be mapped
     * @return this with the specified column
     */
    @SuppressWarnings("unchecked") // safe: T is the self type by the class's own contract
    public final T column(String column) {
        this.column = column;
        return (T) this;
    }
}
| apache-2.0 |
longjl/JFinal_Authority | jfinal-authority/src/main/java/com/jayqqaa12/system/model/Chart.java | 483 | package com.jayqqaa12.system.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Simple chart data container: category labels plus named data series,
 * shaped for direct JSON serialization (e.g. for Highcharts-style clients).
 */
public class Chart
{
    /** X-axis category labels, in display order. */
    public List<String> categories = new ArrayList<String>();

    /** Series descriptors; each entry is a map with "name" and "data" keys. */
    public List<Object> series = new ArrayList<Object>();

    /**
     * Adds a named data series to the chart.
     *
     * @param name the series label shown in the chart legend
     * @param data the data points for the series (kept by reference, not copied)
     */
    public void setSeriesDate(String name, List<?> data) // wildcard instead of raw List; erasure-identical, so callers are unaffected
    {
        Map<String, Object> entry = new HashMap<String, Object>();
        entry.put("name", name);
        entry.put("data", data);
        series.add(entry);
    }
}
geekboxzone/mmallow_external_jetty | src/java/org/eclipse/jetty/webapp/Descriptor.java | 2232 | //
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.webapp;
import java.net.URL;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.xml.XmlParser;
/**
 * Base class for parsed XML deployment descriptors: wraps the descriptor
 * resource, parses it lazily, and exposes the resulting DOM-like root node.
 */
public abstract class Descriptor
{
    protected Resource _xml;
    protected XmlParser.Node _root;
    protected XmlParser _parser;
    protected boolean _validating;

    /**
     * @param xml the descriptor resource; parsing is deferred until {@link #parse()}
     */
    public Descriptor (Resource xml)
    {
        _xml = xml;
    }

    /** Creates a parser configured for this descriptor type. */
    public abstract XmlParser newParser()
        throws ClassNotFoundException;

    /** Lazily initialises {@link #_parser} if it has not been created yet. */
    public abstract void ensureParser()
        throws ClassNotFoundException;

    /** Redirects an external entity to a local copy; ignored when no source is given. */
    protected void redirect(XmlParser parser, String resource, URL source)
    {
        if (source == null)
            return;
        parser.redirectEntity(resource, source);
    }

    public void setValidating (boolean validating)
    {
        _validating = validating;
    }

    /**
     * Parses the descriptor once; later calls are no-ops because the root
     * node is cached. The underlying resource is always released afterwards.
     */
    public void parse ()
        throws Exception
    {
        if (_parser == null)
            ensureParser();
        if (_root != null)
            return;
        try
        {
            _root = _parser.parse(_xml.getInputStream());
        }
        finally
        {
            _xml.release();
        }
    }

    public Resource getResource ()
    {
        return _xml;
    }

    public XmlParser.Node getRoot ()
    {
        return _root;
    }

    public String toString()
    {
        return getClass().getSimpleName() + "(" + _xml + ")";
    }
}
| apache-2.0 |
apache/cocoon | blocks/cocoon-forms/cocoon-forms-impl/src/main/java/org/apache/cocoon/forms/formmodel/tree/builder/TreeModelDefinitionBuilder.java | 1231 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.forms.formmodel.tree.builder;
import org.apache.cocoon.forms.formmodel.tree.TreeModelDefinition;
import org.w3c.dom.Element;
/**
 * Builds {@link TreeModelDefinition}s from an XML description.
 *
 * @version $Id$
 */
public interface TreeModelDefinitionBuilder {

    /** Lookup role used to locate implementations of this builder. */
    String ROLE = TreeModelDefinitionBuilder.class.getName();

    /**
     * Builds a tree model definition from the given DOM element.
     *
     * @param treeModelElement the XML element describing the model
     * @return the built definition
     * @throws Exception if the element cannot be interpreted
     */
    TreeModelDefinition build(Element treeModelElement) throws Exception;
}
| apache-2.0 |
codefacts/watertemplate-engine | watertemplate-engine/src/test/java/org/watertemplate/interpreter/parser/NonTerminalIdWithNestedPropertiesTest.java | 1708 | package org.watertemplate.interpreter.parser;
import org.junit.Test;
import org.watertemplate.interpreter.parser.exception.IncorrectLocationForToken;
import static org.junit.Assert.assertNotNull;
import static org.watertemplate.interpreter.parser.TokenFixture.*;
/**
 * Parser tests for id evaluations with nested property access
 * (wave-delimited expressions such as a single key or key.key chains).
 */
public class NonTerminalIdWithNestedPropertiesTest {

    /** A single property key between waves parses as a valid EVALUATION. */
    @Test
    public void singlePropertyKey() {
        TokenStream tokenStream = new TokenStream(
                Wave(), PropertyKey("x"), Wave()
        );
        assertNotNull(NonTerminal.EVALUATION.buildAbstractSyntaxTree(tokenStream));
    }

    /** Two keys joined by a single accessor also parse as a valid EVALUATION. */
    @Test
    public void nestedProperties() {
        TokenStream tokenStream = new TokenStream(
                Wave(),
                PropertyKey("x"),
                Accessor(),
                PropertyKey("y"),
                Wave()
        );
        assertNotNull(NonTerminal.EVALUATION.buildAbstractSyntaxTree(tokenStream));
    }

    /** Two consecutive accessors between keys must be rejected. */
    @Test(expected = IncorrectLocationForToken.class)
    public void doubleAccessor() {
        TokenStream tokenStream = new TokenStream(
                Wave(),
                PropertyKey("x"),
                Accessor(),
                Accessor(),
                PropertyKey("y"),
                Wave()
        );
        NonTerminal.TEMPLATE.buildAbstractSyntaxTree(tokenStream);
    }

    /** A trailing accessor with no following key must be rejected. */
    @Test(expected = IncorrectLocationForToken.class)
    public void extraAccessor() {
        TokenStream tokenStream = new TokenStream(
                Wave(),
                PropertyKey("x"),
                Accessor(),
                PropertyKey("y"),
                Accessor(),
                Wave()
        );
        NonTerminal.TEMPLATE.buildAbstractSyntaxTree(tokenStream);
    }
}
| apache-2.0 |
Scarabei/Scarabei | scarabei-red/src/com/jfixby/scarabei/red/graphs/MultiGraphImpl.java | 6836 |
package com.jfixby.scarabei.red.graphs;
import com.jfixby.scarabei.api.collections.Collections;
import com.jfixby.scarabei.api.collections.List;
import com.jfixby.scarabei.api.collections.Set;
import com.jfixby.scarabei.api.err.Err;
import com.jfixby.scarabei.api.graphs.Edge;
import com.jfixby.scarabei.api.graphs.MultiGraph;
import com.jfixby.scarabei.api.graphs.PathInGraph;
import com.jfixby.scarabei.api.graphs.Vertex;
import com.jfixby.scarabei.api.log.L;
/**
 * Default {@link MultiGraph} implementation: vertices and edges are kept in
 * flat lists, and path finding uses a recursive depth-first search.
 *
 * @param <VertexType> payload type carried by vertices
 * @param <EdgeType> payload type carried by edges
 */
public class MultiGraphImpl<VertexType, EdgeType> implements MultiGraph<VertexType, EdgeType> {

    public MultiGraphImpl () {
        super();
    }

    // Debug helper: logs every vertex together with its incident edges.
    void print (final MultiGraphImpl<VertexType, EdgeType> graph) {
        L.d("---MultiGraph---");
        L.d("Nodes:");
        for (int i = 0; i < graph.size(); i++) {
            final VertexImpl<VertexType, EdgeType> n = graph.getVertex(i);
            L.d(" [" + i + "] " + n);
            final Set<EdgeImpl<VertexType, EdgeType>> links = n.getLinks();
            for (int k = 0; k < links.size(); k++) {
                final EdgeImpl<VertexType, EdgeType> e = links.getElementAt(k);
                L.d(" " + graph.toString(e));
            }
        }
        // printEdges("Edges", graph.edges, graph);
    }

    // Debug helper: logs each edge in the given list with its index.
    void printEdges (final String string, final List<EdgeImpl<VertexType, EdgeType>> list,
        final MultiGraphImpl<VertexType, EdgeType> graph) {
        L.d(string + ":");
        for (int i = 0; i < list.size(); i++) {
            final EdgeImpl<VertexType, EdgeType> n = list.getElementAt(i);
            L.d(" [" + i + "] " + graph.toString(n));
            L.d(" " + n);
        }
    }

    // NOTE(review): unlike the other helpers this one is static and fixed to
    // <Object, Object> type parameters -- presumably a leftover; confirm.
    static void printNodes (final String string, final List<VertexImpl<Object, Object>> list,
        final MultiGraphImpl<Object, Object> graph) {
        L.d(string + ":");
        for (int i = 0; i < list.size(); i++) {
            final VertexImpl<Object, Object> n = list.getElementAt(i);
            L.d(" [" + i + "] " + graph.toString(n));
            L.d(" " + n);
        }
    }

    private String toString (final VertexImpl<Object, Object> n) {
        return n.toString();
    }

    private VertexImpl<VertexType, EdgeType> getVertex (final int i) {
        return this.vertices.getElementAt(i);
    }

    // Flat storage for all vertices and edges of this graph.
    final List<VertexImpl<VertexType, EdgeType>> vertices = Collections.newList();
    final List<EdgeImpl<VertexType, EdgeType>> edges = Collections.newList();

    /** @return the number of vertices in the graph */
    public int size () {
        return this.vertices.size();
    }

    // Registers every edge with the link sets of both of its end vertices.
    public void establishLinks () {
        for (int i = 0; i < this.edges.size(); i++) {
            final EdgeImpl<VertexType, EdgeType> e = this.edges.getElementAt(i);
            e.getLeftNode().addLink(e);
            e.getRightNode().addLink(e);
        }
    }

    public int numberOfEdges () {
        return this.edges.size();
    }

    public EdgeImpl<VertexType, EdgeType> getEdge (final int i) {
        return this.edges.getElementAt(i);
    }

    private String toString (final EdgeImpl<VertexType, EdgeType> current) {
        return "[" + this.indexOf(current) + "] " + current.toString();
    }

    /** @return index of the edge in this graph's edge list, or -1 if absent */
    public int indexOf (final EdgeImpl<VertexType, EdgeType> n) {
        return this.indexOf(n, this.edges);
    }

    // Identity scan (==), deliberately not equals()-based.
    int indexOf (final EdgeImpl<VertexType, EdgeType> n, final List<EdgeImpl<VertexType, EdgeType>> edges) {
        for (int i = 0; i < edges.size(); i++) {
            final EdgeImpl<VertexType, EdgeType> e = edges.getElementAt(i);
            if (e == n) {
                return i;
            }
        }
        return -1;
    }

    // NOTE(review): intentionally disabled -- the body is commented out, so
    // the no-arg print() is a no-op.
    public void print () {
        // print(this);
    }

    /** Creates a new, unconnected vertex and adds it to the graph. */
    @Override
    public Vertex<VertexType> newVertex () {
        final VertexImpl<VertexType, EdgeType> element = new VertexImpl<VertexType, EdgeType>();
        this.vertices.add(element);
        return element;
    }

    /**
     * Finds the vertex carrying the given payload object.
     * Comparison is by identity (==), not equals().
     *
     * @return the matching vertex, or null when none carries the object
     */
    @Override
    public Vertex<VertexType> findVertexByObject (final VertexType vertex_object) {
        if (vertex_object == null) {
            Err.reportError("Null argument exception.");
        }
        for (int i = 0; i < this.vertices.size(); i++) {
            final VertexImpl<VertexType, EdgeType> vertex = this.vertices.getElementAt(i);
            if (vertex_object == vertex.getVertexObject()) {
                // if (vertex_object.equals(vertex.getObject())) {
                return vertex;
            }
        }
        return null;
    }

    /** Creates an edge between the two vertices and re-registers all links. */
    @Override
    public Edge<EdgeType> newEdge (final Vertex<VertexType> vertex_a, final Vertex<VertexType> vertex_b) {
        final EdgeImpl<VertexType, EdgeType> edge = this.createNewEdge((VertexImpl<VertexType, EdgeType>)vertex_a,
            (VertexImpl<VertexType, EdgeType>)vertex_b);
        this.establishLinks();
        return edge;
    }

    // Builds and stores the edge; does NOT update vertex link sets by itself.
    public EdgeImpl<VertexType, EdgeType> createNewEdge (final VertexImpl<VertexType, EdgeType> left_node,
        final VertexImpl<VertexType, EdgeType> right_node) {
        if (left_node == null || right_node == null) {
            Err.reportError("left_node=" + left_node + " , right_node=" + right_node);
        }
        final EdgeImpl<VertexType, EdgeType> edge = new EdgeImpl<VertexType, EdgeType>();
        edge.setLeftNode(left_node);
        edge.setRightNode(right_node);
        this.edges.add(edge);
        return edge;
    }

    /**
     * Finds a path between the two vertices via depth-first search.
     * NOTE(review): the boolean result of try_search is ignored -- when the
     * target is unreachable, a path containing only the start vertex is
     * returned rather than null. Confirm callers expect this.
     */
    @Override
    public PathInGraph<VertexType, EdgeType> findPath (final Vertex<VertexType> from_vertex, final Vertex<VertexType> to_vertex) {
        final List<VertexImpl<VertexType, EdgeType>> visited = Collections.newList();
        final List<VertexImpl<VertexType, EdgeType>> states = Collections.newList();
        final List<EdgeImpl<VertexType, EdgeType>> steps = Collections.newList();
        visited.add((VertexImpl<VertexType, EdgeType>)from_vertex);
        states.add((VertexImpl<VertexType, EdgeType>)from_vertex);
        this.try_search((VertexImpl<VertexType, EdgeType>)from_vertex, (VertexImpl<VertexType, EdgeType>)to_vertex, visited, states,
            steps);
        // L.d("--------------------------------------------------");
        // L.d("from_vertex", from_vertex);
        // L.d(" to_vertex", to_vertex);
        // states.print("states");
        // steps.print("steps");
        final PathImpl<VertexType, EdgeType> path = new PathImpl<VertexType, EdgeType>();
        path.setup(states, steps);
        // path.print("path");
        // RedTriplane.exit();
        return path;
    }

    // Recursive DFS: extends states/steps while descending, and backtracks
    // (removing them again) when a branch dead-ends.
    private boolean try_search (final VertexImpl<VertexType, EdgeType> from_vertex,
        final VertexImpl<VertexType, EdgeType> final_vertex, final List<VertexImpl<VertexType, EdgeType>> visited,
        final List<VertexImpl<VertexType, EdgeType>> states, final List<EdgeImpl<VertexType, EdgeType>> steps) {
        if (from_vertex == final_vertex) {
            return true;
        }
        for (int i = 0; i < from_vertex.getLinks().size(); i++) {
            final EdgeImpl<VertexType, EdgeType> edge = from_vertex.getLinks().getElementAt(i);
            final VertexImpl<VertexType, EdgeType> next = edge.getOtherNode(from_vertex);
            if (!visited.contains(next)) {
                states.add(next);
                steps.add(edge);
                visited.add(next);
                if (this.try_search(next, final_vertex, visited, states, steps)) {
                    return true;
                } else {
                    states.remove(next);
                    steps.remove(edge);
                }
            }
        }
        return false;
    }

    // NOTE(review): the tag argument is ignored; this just dumps the graph.
    @Override
    public void print (final String tag) {
        this.print(this);
    }
}
| apache-2.0 |
electrum/presto | plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetastoreModule.java | 1338 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.iceberg;
import com.google.inject.Binder;
import com.google.inject.Module;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import javax.inject.Inject;
/**
 * Guice module that installs a fail-fast check on the configured Hive
 * metastore for the Iceberg connector.
 */
public class IcebergMetastoreModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        // Eager singleton so the validation runs at injector creation time
        // rather than on first use.
        binder.bind(MetastoreValidator.class).asEagerSingleton();
    }

    /** Rejects caching metastore implementations at startup. */
    public static class MetastoreValidator
    {
        @Inject
        public MetastoreValidator(HiveMetastore metastore)
        {
            // Caching is rejected outright -- presumably because stale cached
            // metadata would break Iceberg's commit handling; TODO confirm.
            if (metastore instanceof CachingHiveMetastore) {
                throw new RuntimeException("Hive metastore caching must not be enabled for Iceberg");
            }
        }
    }
}
| apache-2.0 |
Faravy/UiAutomatorExample | src/com/tobrun/android/test/uiautomator/UiAutomatorUtils.java | 2286 | package com.tobrun.android.test.uiautomator;
import com.android.uiautomator.core.UiObject;
import com.android.uiautomator.core.UiObjectNotFoundException;
import com.android.uiautomator.core.UiScrollable;
import com.android.uiautomator.core.UiSelector;
import com.android.uiautomator.testrunner.UiAutomatorTestCase;
/** Static helpers for driving the device UI in UiAutomator based tests. */
public class UiAutomatorUtils {

    /** Builds an object matching both the given text and widget class name. */
    private final static UiObject findViewByText(final String text, final String className) {
        final UiSelector selector = new UiSelector().text(text).className(className);
        return new UiObject(selector);
    }

    /** Finds a TextView displaying the given text. */
    public static UiObject findTextViewByText(final String text) {
        return findViewByText(text, android.widget.TextView.class.getName());
    }

    /** Finds a Button displaying the given text. */
    public static UiObject findButtonByText(final String text) {
        return findViewByText(text, android.widget.Button.class.getName());
    }

    /** Clicks the TextView displaying the given text. */
    public static void clickOnTextView(final String text) throws UiObjectNotFoundException {
        final UiSelector selector = new UiSelector().className(android.widget.TextView.class.getName()).text(text);
        new UiObject(selector).click();
    }

    /** Scrolls the list until the given text is visible, then clicks that row. */
    public static void clickOnListViewItem(final String text) throws UiObjectNotFoundException {
        final UiScrollable listView = new UiScrollable(new UiSelector());
        listView.setMaxSearchSwipes(100);
        listView.scrollTextIntoView(text);
        listView.waitForExists(5000);
        final UiObject row = listView.getChildByText(
                new UiSelector().className(android.widget.TextView.class.getName()), "" + text + "");
        row.click();
    }

    /** Presses the device home button. */
    public static void clickOnHomeButton(final UiAutomatorTestCase test) {
        test.getUiDevice().pressHome();
    }

    /** Presses the device back button the given number of times (no-op for values below one). */
    public static void clickOnBackButton(final UiAutomatorTestCase test, final int times) {
        for (int remaining = times; remaining > 0; remaining--) {
            test.getUiDevice().pressBack();
        }
    }

    /** Opens the named application from the all-apps drawer. */
    public final static void openApplication(final UiAutomatorTestCase test, final String appName)
            throws UiObjectNotFoundException {
        // start from the home screen so the launcher is in a known state
        clickOnHomeButton(test);
        // open the all-apps drawer
        new UiObject(new UiSelector().description("Apps")).clickAndWaitForNewWindow();
        // the app grid scrolls horizontally
        final UiScrollable appViews = new UiScrollable(new UiSelector().scrollable(true));
        appViews.setAsHorizontalList();
        // click the requested application icon
        appViews.getChildByText(new UiSelector().className("android.widget.TextView"), appName)
                .clickAndWaitForNewWindow();
    }
}
| apache-2.0 |
apache/cocoon | core/cocoon-sitemap/cocoon-sitemap-impl/src/main/java/org/apache/cocoon/components/treeprocessor/sitemap/RedirectToNodeBuilder.java | 3934 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.components.treeprocessor.sitemap;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.cocoon.components.treeprocessor.AbstractProcessingNodeBuilder;
import org.apache.cocoon.components.treeprocessor.CategoryNode;
import org.apache.cocoon.components.treeprocessor.CategoryNodeBuilder;
import org.apache.cocoon.components.treeprocessor.LinkedProcessingNodeBuilder;
import org.apache.cocoon.components.treeprocessor.ProcessingNode;
import org.apache.cocoon.components.treeprocessor.variables.VariableResolverFactory;
import java.util.HashMap;
import java.util.Map;
/**
 * Builds the processing node for the sitemap {@code map:redirect} statement,
 * supporting both redirect-to-URI and the deprecated redirect-to-resource form.
 *
 * @version $Id$
 */
public class RedirectToNodeBuilder extends AbstractProcessingNodeBuilder
    implements LinkedProcessingNodeBuilder {

    // Set only for the deprecated redirect-to-resource form; the target
    // resource is resolved later in linkNode().
    private CallNode callNode;
    private String resourceName;

    /** This builder has no parameters -- return <code>false</code> */
    protected boolean hasParameters() {
        return false;
    }

    public ProcessingNode buildNode(Configuration config) throws Exception {

        // Redirects are not allowed inside map:handle-errors blocks.
        if (((SitemapLanguage)this.treeBuilder).isBuildingErrorHandler()) {
            throw new ConfigurationException("'map:redirect' is forbidden inside a 'map:handle-errors', at "
                + config.getLocation());
        }

        // Is it a redirect to resource ? (deprecated -- mapped onto a map:call)
        this.resourceName = config.getAttribute("resource", null);
        if (this.resourceName != null) {

            getLogger().warn("Redirect to resource is deprecated. Use map:call instead at " +
                config.getLocation());

            this.callNode = new CallNode();
            this.treeBuilder.setupNode(this.callNode, config);

            // Optional "target" attribute is passed through as a call parameter.
            String target = config.getAttribute("target", null);
            if (target != null) {
                Map params = new HashMap(1);
                params.put("target", VariableResolverFactory.getResolver(target, this.manager));
                this.callNode.setParameters(params);
            }
            return this.callNode;
        }

        // Plain redirect-to-URI form; flags control session encoding,
        // global redirects and permanent (301) semantics.
        ProcessingNode URINode = new RedirectToURINode(
            VariableResolverFactory.getResolver(config.getAttribute("uri"), this.manager),
            config.getAttributeAsBoolean("session", false),
            config.getAttributeAsBoolean("global", false),
            config.getAttributeAsBoolean("permanent", false)
        );
        return this.treeBuilder.setupNode(URINode, config);
    }

    /**
     * Resolves the deprecated resource reference once the whole sitemap tree
     * is built, so the "resources" category is available.
     */
    public void linkNode() throws Exception {
        if (this.callNode != null) {
            CategoryNode resources = CategoryNodeBuilder.getCategoryNode(this.treeBuilder, "resources");

            if (resources == null) {
                String msg = "This sitemap contains no resources. Cannot redirect at " +
                    this.callNode.getLocation();
                throw new ConfigurationException(msg);
            }

            this.callNode.setResource(
                resources,
                VariableResolverFactory.getResolver(this.resourceName, this.manager)
            );
        }
    }
}
| apache-2.0 |
cthiebaud/jaxrs-analyzer | src/test/java/com/sebastian_daschner/jaxrs_analyzer/analysis/classes/testclasses/resource/response/TestClass35.java | 1500 | /*
* Copyright (C) 2015 Sebastian Daschner, sebastian-daschner.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sebastian_daschner.jaxrs_analyzer.analysis.classes.testclasses.resource.response;
import com.sebastian_daschner.jaxrs_analyzer.model.elements.HttpResponse;
import javax.ws.rs.core.Response;
import java.util.Collections;
import java.util.Set;
import java.util.function.BiFunction;
public class TestClass35 {

    // The response is produced through curried BiFunctions so that the status
    // and header are only applied inside nested lambdas -- the analyzer must
    // still detect them.
    @javax.ws.rs.GET public Response method() {
        BiFunction<Response.Status, Integer, BiFunction<String, Double, Response>> function = (sta, i) -> (str, d) -> Response.status(sta).header(str, "Test").build();
        return function.apply(Response.Status.OK, 1).apply("X-Header", 1d);
    }

    /**
     * @return the analysis result expected for {@link #method()}:
     *         status 200 plus an X-Header response header
     */
    public static Set<HttpResponse> getResult() {
        final HttpResponse result = new HttpResponse();
        result.getStatuses().add(200);
        result.getHeaders().add("X-Header");
        return Collections.singleton(result);
    }
}
| apache-2.0 |
agwlvssainokuni/springapp | foundation/src/main/java/cherry/foundation/logicalerror/LogicalErrorUtil.java | 2110 | /*
* Copyright 2014,2015 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cherry.foundation.logicalerror;
import org.springframework.context.MessageSourceResolvable;
import org.springframework.context.support.DefaultMessageSourceResolvable;
import org.springframework.validation.BindingResult;
import cherry.foundation.type.Code;
/**
 * Helpers for recording logical (business rule) errors on a Spring
 * {@link BindingResult} and for building message resolvables.
 */
public class LogicalErrorUtil {

    /** Records a global (object level) logical error; the code doubles as the default message. */
    public static void reject(BindingResult binding, ILogicalError logicalError, Object... args) {
        binding.reject(logicalError.code(), args, logicalError.code());
    }

    /** Records a logical error against the named field. */
    public static void rejectValue(BindingResult binding, String name, ILogicalError logicError, Object... args) {
        binding.rejectValue(name, logicError.code(), args, logicError.code());
    }

    /** Builds a resolvable from a logical error. */
    public static MessageSourceResolvable resolve(ILogicalError code, Object... args) {
        return resolve(code.code(), args);
    }

    /** Builds a resolvable from a string-backed code. */
    public static MessageSourceResolvable resolve(Code<String> code, Object... args) {
        return resolve(code.code(), args);
    }

    /** Builds a resolvable from a raw code string. */
    public static MessageSourceResolvable resolve(String code, Object... args) {
        return new DefaultMessageSourceResolvable(new String[] { code }, args);
    }

    /** Shorthand: one-time token mismatch. */
    public static void rejectOnOneTimeTokenError(BindingResult binding) {
        reject(binding, LogicalError.OneTimeTokenError);
    }

    /** Shorthand: optimistic lock conflict. */
    public static void rejectOnOptimisticLockError(BindingResult binding) {
        reject(binding, LogicalError.OptimisticLockError);
    }

    /** Shorthand: search produced no results. */
    public static void rejectOnSearchResultEmpty(BindingResult binding) {
        reject(binding, LogicalError.SearchResultEmpty);
    }
}
| apache-2.0 |
cscorley/solr-only-mirror | solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java | 17801 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj.impl;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.Locale;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentProducer;
import org.apache.http.entity.EntityTemplate;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.RequestWriter;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * ConcurrentUpdateSolrServer buffers all added documents and writes
 * them into open HTTP connections. This class is thread safe.
 *
 * Params from {@link UpdateRequest} are converted to http request
 * parameters. When params change between UpdateRequests a new HTTP
 * request is started.
 *
 * Although any SolrServer request can be made with this implementation, it is
 * only recommended to use ConcurrentUpdateSolrServer with /update
 * requests. The class {@link HttpSolrServer} is better suited for the
 * query interface.
 */
public class ConcurrentUpdateSolrServer extends SolrServer {
  private static final long serialVersionUID = 1L;
  static final Logger log = LoggerFactory
      .getLogger(ConcurrentUpdateSolrServer.class);
  // Delegate that owns the HttpClient; also used directly for all non-update requests.
  private HttpSolrServer server;
  // Buffered update requests; capacity is the queueSize constructor argument, so
  // producers block (or retry) in request() once the buffer is full.
  final BlockingQueue<UpdateRequest> queue;
  final ExecutorService scheduler;
  // Active Runner instances draining the queue. All reads/writes happen inside
  // synchronized (runners); it also serves as the monitor for blockUntilFinished().
  final Queue<Runner> runners;
  volatile CountDownLatch lock = null; // used to block everything
  // Upper bound on concurrent runners (see request() for the grow policy).
  final int threadCount;
  // True when this instance created its own ExecutorService and must shut it down.
  // NOTE(review): the 5-arg constructor taking a caller-supplied ExecutorService
  // leaves this false, so external executors are intentionally not shut down here.
  boolean shutdownExecutor = false;
  // How long (ms) an open connection waits for another queued request before closing.
  int pollQueueTime = 250;
  // When true, delete-by-id requests are also streamed instead of forcing a flush.
  private final boolean streamDeletes;
  /**
   * Uses an internally managed HttpClient instance.
   *
   * @param solrServerUrl
   *          The Solr server URL
   * @param queueSize
   *          The buffer size before the documents are sent to the server
   * @param threadCount
   *          The number of background threads used to empty the queue
   */
  public ConcurrentUpdateSolrServer(String solrServerUrl, int queueSize,
      int threadCount) {
    this(solrServerUrl, null, queueSize, threadCount);
    // Redundant: the delegated constructor already sets this, but kept for clarity.
    shutdownExecutor = true;
  }
  public ConcurrentUpdateSolrServer(String solrServerUrl,
      HttpClient client, int queueSize, int threadCount) {
    this(solrServerUrl, client, queueSize, threadCount, Executors.newCachedThreadPool(
        new SolrjNamedThreadFactory("concurrentUpdateScheduler")));
    // We created the executor above, so we own its lifecycle.
    shutdownExecutor = true;
  }
  /**
   * Uses the supplied HttpClient to send documents to the Solr server.
   */
  public ConcurrentUpdateSolrServer(String solrServerUrl,
      HttpClient client, int queueSize, int threadCount, ExecutorService es) {
    this(solrServerUrl, client, queueSize, threadCount, es, false);
  }
  /**
   * Uses the supplied HttpClient to send documents to the Solr server.
   */
  public ConcurrentUpdateSolrServer(String solrServerUrl,
      HttpClient client, int queueSize, int threadCount, ExecutorService es, boolean streamDeletes) {
    this.server = new HttpSolrServer(solrServerUrl, client);
    // Redirects are disabled because requests are streamed POSTs; the entity
    // could not be replayed against a redirect target.
    this.server.setFollowRedirects(false);
    queue = new LinkedBlockingQueue<>(queueSize);
    this.threadCount = threadCount;
    runners = new LinkedList<>();
    scheduler = es;
    this.streamDeletes = streamDeletes;
  }
  /** @return the set of parameter keys sent via the query string (delegates to the wrapped server). */
  public Set<String> getQueryParams() {
    return this.server.getQueryParams();
  }
  /**
   * Expert Method.
   * @param queryParams set of param keys to only send via the query string
   */
  public void setQueryParams(Set<String> queryParams) {
    this.server.setQueryParams(queryParams);
  }
  /**
   * Opens a connection and sends everything...
   *
   * A Runner drains the queue into a single streaming POST per distinct set of
   * request parameters, then loops while the queue is non-empty. Scheduled from
   * request() and blockUntilFinished(); deregisters itself from {@code runners}
   * when done and notifies waiters.
   */
  class Runner implements Runnable {
    // Guards against the same Runner instance executing concurrently, which can
    // happen because the finally-block may re-submit `this` to the scheduler.
    final Lock runnerLock = new ReentrantLock();
    @Override
    public void run() {
      runnerLock.lock();
      log.debug("starting runner: {}", this);
      HttpPost method = null;
      HttpResponse response = null;
      try {
        while (!queue.isEmpty()) {
          try {
            final UpdateRequest updateRequest =
                queue.poll(pollQueueTime, TimeUnit.MILLISECONDS);
            if (updateRequest == null)
              break;
            String contentType = server.requestWriter.getUpdateContentType();
            final boolean isXml = ClientUtils.TEXT_XML.equals(contentType);
            // Parameters of the first request define this HTTP call; later queued
            // requests may only piggyback if their params are identical.
            final ModifiableSolrParams origParams = new ModifiableSolrParams(updateRequest.getParams());
            // The entity body is produced lazily while the POST is being written:
            // writeTo keeps pulling more requests off the queue and streams them
            // into the same connection until the queue runs dry (pollQueueTime)
            // or a request with different params shows up.
            EntityTemplate template = new EntityTemplate(new ContentProducer() {
              @Override
              public void writeTo(OutputStream out) throws IOException {
                try {
                  if (isXml) {
                    out.write("<stream>".getBytes(StandardCharsets.UTF_8)); // can be anything
                  }
                  UpdateRequest req = updateRequest;
                  while (req != null) {
                    SolrParams currentParams = new ModifiableSolrParams(req.getParams());
                    if (!origParams.toNamedList().equals(currentParams.toNamedList())) {
                      queue.add(req); // params are different, push back to queue
                      break;
                    }
                    server.requestWriter.write(req, out);
                    if (isXml) {
                      // check for commit or optimize
                      // XML-specific: commit/optimize cannot be expressed via the
                      // request writer here, so emit the element inline.
                      SolrParams params = req.getParams();
                      if (params != null) {
                        String fmt = null;
                        if (params.getBool(UpdateParams.OPTIMIZE, false)) {
                          fmt = "<optimize waitSearcher=\"%s\" />";
                        } else if (params.getBool(UpdateParams.COMMIT, false)) {
                          fmt = "<commit waitSearcher=\"%s\" />";
                        }
                        if (fmt != null) {
                          byte[] content = String.format(Locale.ROOT,
                              fmt,
                              params.getBool(UpdateParams.WAIT_SEARCHER, false)
                                  + "").getBytes(StandardCharsets.UTF_8);
                          out.write(content);
                        }
                      }
                    }
                    out.flush();
                    // Keep the connection open briefly in case more work arrives.
                    req = queue.poll(pollQueueTime, TimeUnit.MILLISECONDS);
                  }
                  if (isXml) {
                    out.write("</stream>".getBytes(StandardCharsets.UTF_8));
                  }
                } catch (InterruptedException e) {
                  // Restore the interrupt flag; the stream is simply cut short.
                  Thread.currentThread().interrupt();
                  log.warn("", e);
                }
              }
            });
            // The parser 'wt=' and 'version=' params are used instead of the
            // original params
            ModifiableSolrParams requestParams = new ModifiableSolrParams(origParams);
            requestParams.set(CommonParams.WT, server.parser.getWriterType());
            requestParams.set(CommonParams.VERSION, server.parser.getVersion());
            method = new HttpPost(server.getBaseURL() + "/update"
                + ClientUtils.toQueryString(requestParams, false));
            method.setEntity(template);
            method.addHeader("User-Agent", HttpSolrServer.AGENT);
            method.addHeader("Content-Type", contentType);
            response = server.getHttpClient().execute(method);
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode != HttpStatus.SC_OK) {
              StringBuilder msg = new StringBuilder();
              msg.append(response.getStatusLine().getReasonPhrase());
              msg.append("\n\n\n\n");
              msg.append("request: ").append(method.getURI());
              SolrException solrExc = new SolrException(ErrorCode.getErrorCode(statusCode), msg.toString());
              // parse out the metadata from the SolrException
              try {
                NamedList<Object> resp =
                    server.parser.processResponse(response.getEntity().getContent(),
                        response.getEntity().getContentType().getValue());
                NamedList<Object> error = (NamedList<Object>) resp.get("error");
                if (error != null)
                  solrExc.setMetadata((NamedList<String>) error.get("metadata"));
              } catch (Exception exc) {
                // don't want to fail to report error if parsing the response fails
                log.warn("Failed to parse error response from "+server.getBaseURL()+" due to: "+exc);
              }
              // Errors are routed through the overridable hook instead of thrown:
              // this runs on a background thread with no caller to propagate to.
              handleError(solrExc);
            } else {
              onSuccess(response);
            }
          } finally {
            try {
              // Consume/close the response body so the connection can be reused.
              if (response != null) {
                response.getEntity().getContent().close();
              }
            } catch (Exception ex) {
              log.warn("", ex);
            }
          }
        }
      } catch (Throwable e) {
        // Never swallow OOM; everything else goes to the error hook.
        if (e instanceof OutOfMemoryError) {
          throw (OutOfMemoryError) e;
        }
        handleError(e);
      } finally {
        synchronized (runners) {
          // If we are the last runner and work arrived after the drain loop
          // exited, re-submit ourselves instead of deregistering (avoids a
          // window where the queue is non-empty but no runner exists).
          if (runners.size() == 1 && !queue.isEmpty()) {
            // keep this runner alive
            scheduler.execute(this);
          } else {
            runners.remove(this);
            if (runners.isEmpty())
              // Wake blockUntilFinished() waiters.
              runners.notifyAll();
          }
        }
        log.debug("finished: {}", this);
        runnerLock.unlock();
      }
    }
  }
  /**
   * Enqueues {@link UpdateRequest}s for background streaming and returns a dummy
   * response immediately. Non-update requests, commit/optimize with
   * waitSearcher, and (depending on {@code streamDeletes}) delete-only requests
   * first drain the queue and are then executed synchronously on the wrapped
   * server.
   */
  @Override
  public NamedList<Object> request(final SolrRequest request)
      throws SolrServerException, IOException {
    if (!(request instanceof UpdateRequest)) {
      return server.request(request);
    }
    UpdateRequest req = (UpdateRequest) request;
    // this happens for commit...
    if (streamDeletes) {
      // Only a pure delete-by-query (or an empty request) forces a flush here;
      // docs and delete-by-id are streamed.
      if ((req.getDocuments() == null || req.getDocuments().isEmpty())
          && (req.getDeleteById() == null || req.getDeleteById().isEmpty())
          && (req.getDeleteByIdMap() == null || req.getDeleteByIdMap().isEmpty())) {
        if (req.getDeleteQuery() == null) {
          blockUntilFinished();
          return server.request(request);
        }
      }
    } else {
      // Without streamDeletes, anything that carries no documents (commits,
      // deletes, ...) is executed synchronously after a flush.
      if ((req.getDocuments() == null || req.getDocuments().isEmpty())) {
        blockUntilFinished();
        return server.request(request);
      }
    }
    SolrParams params = req.getParams();
    if (params != null) {
      // check if it is waiting for the searcher
      if (params.getBool(UpdateParams.WAIT_SEARCHER, false)) {
        log.info("blocking for commit/optimize");
        blockUntilFinished(); // empty the queue
        return server.request(request);
      }
    }
    try {
      // Read the volatile once; if blockUntilFinished() is in progress, wait
      // until it completes before adding new work.
      CountDownLatch tmpLock = lock;
      if (tmpLock != null) {
        tmpLock.await();
      }
      boolean success = queue.offer(req);
      for (;;) {
        synchronized (runners) {
          // see if queue is half full and we can add more runners
          // special case: if only using a threadCount of 1 and the queue
          // is filling up, allow 1 add'l runner to help process the queue
          if (runners.isEmpty() || (queue.remainingCapacity() < queue.size() && runners.size() < threadCount))
          {
            // We need more runners, so start a new one.
            Runner r = new Runner();
            runners.add(r);
            scheduler.execute(r);
          } else {
            // break out of the retry loop if we added the element to the queue
            // successfully, *and*
            // while we are still holding the runners lock to prevent race
            // conditions.
            if (success)
              break;
          }
        }
        // Retry to add to the queue w/o the runners lock held (else we risk
        // temporary deadlock)
        // This retry could also fail because
        // 1) existing runners were not able to take off any new elements in the
        // queue
        // 2) the queue was filled back up since our last try
        // If we succeed, the queue may have been completely emptied, and all
        // runners stopped.
        // In all cases, we should loop back to the top to see if we need to
        // start more runners.
        //
        if (!success) {
          success = queue.offer(req, 100, TimeUnit.MILLISECONDS);
        }
      }
    } catch (InterruptedException e) {
      log.error("interrupted", e);
      throw new IOException(e.getLocalizedMessage());
    }
    // RETURN A DUMMY result
    NamedList<Object> dummy = new NamedList<>();
    dummy.add("NOTE", "the request is processed in a background stream");
    return dummy;
  }
  /**
   * Blocks until all queued requests have been streamed and all runners have
   * exited. While this method runs, {@link #request(SolrRequest)} callers park
   * on the {@code lock} latch, so no new work can be enqueued.
   */
  public synchronized void blockUntilFinished() {
    lock = new CountDownLatch(1);
    try {
      synchronized (runners) {
        while (!runners.isEmpty()) {
          try {
            // Released by Runner.run()'s notifyAll when the last runner exits.
            runners.wait();
          } catch (InterruptedException e) {
            // NOTE(review): this clears the interrupt flag without re-setting
            // it (Thread.interrupted() vs. currentThread().interrupt()).
            Thread.interrupted();
          }
          if (scheduler.isTerminated())
            break;
          // if we reach here, then we probably got the notifyAll, but need to check if
          // the queue is empty before really considering this is finished (SOLR-4260)
          int queueSize = queue.size();
          if (queueSize > 0) {
            log.warn("No more runners, but queue still has "+
                queueSize+" adding more runners to process remaining requests on queue");
            Runner r = new Runner();
            runners.add(r);
            scheduler.execute(r);
          }
        }
      }
    } finally {
      // Release any request() callers parked on the latch, then clear it.
      lock.countDown();
      lock = null;
    }
  }
  /**
   * Error hook for failures on the background streaming threads. Default
   * behavior is to log; override to collect or rethrow errors.
   */
  public void handleError(Throwable ex) {
    log.error("error", ex);
  }
  /**
   * Intended to be used as an extension point for doing post processing after a request completes.
   */
  public void onSuccess(HttpResponse resp) {
    // no-op by design, override to add functionality
  }
  /**
   * Shuts down the wrapped server and, if this instance owns the executor,
   * performs a graceful two-phase executor shutdown (60s + 60s).
   */
  @Override
  public void shutdown() {
    server.shutdown();
    if (shutdownExecutor) {
      scheduler.shutdown();
      try {
        if (!scheduler.awaitTermination(60, TimeUnit.SECONDS)) {
          scheduler.shutdownNow();
          if (!scheduler.awaitTermination(60, TimeUnit.SECONDS)) log
              .error("ExecutorService did not terminate");
        }
      } catch (InterruptedException ie) {
        scheduler.shutdownNow();
        Thread.currentThread().interrupt();
      }
    }
  }
  /** Sets the connect timeout (ms) on the underlying HttpClient. */
  public void setConnectionTimeout(int timeout) {
    HttpClientUtil.setConnectionTimeout(server.getHttpClient(), timeout);
  }
  /**
   * set soTimeout (read timeout) on the underlying HttpConnectionManager. This is desirable for queries, but probably
   * not for indexing.
   */
  public void setSoTimeout(int timeout) {
    HttpClientUtil.setSoTimeout(server.getHttpClient(), timeout);
  }
  /**
   * Like {@link #shutdown()} but cancels in-flight tasks immediately
   * (shutdownNow) and waits at most 30 seconds.
   */
  public void shutdownNow() {
    server.shutdown();
    if (shutdownExecutor) {
      scheduler.shutdownNow(); // Cancel currently executing tasks
      try {
        if (!scheduler.awaitTermination(30, TimeUnit.SECONDS))
          log.error("ExecutorService did not terminate");
      } catch (InterruptedException ie) {
        scheduler.shutdownNow();
        Thread.currentThread().interrupt();
      }
    }
  }
  /** Replaces the response parser on the wrapped server. */
  public void setParser(ResponseParser responseParser) {
    server.setParser(responseParser);
  }
  /**
   * @param pollQueueTime time for an open connection to wait for updates when
   * the queue is empty.
   */
  public void setPollQueueTime(int pollQueueTime) {
    this.pollQueueTime = pollQueueTime;
  }
  /** Replaces the request writer on the wrapped server. */
  public void setRequestWriter(RequestWriter requestWriter) {
    server.setRequestWriter(requestWriter);
  }
}
| apache-2.0 |
andrey7mel/android-step-by-step | app/src/main/java/com/andrey7mel/stepbystep/view/fragments/RepoListFragment.java | 4134 | package com.andrey7mel.stepbystep.view.fragments;
import android.app.Activity;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import com.andrey7mel.stepbystep.R;
import com.andrey7mel.stepbystep.other.di.view.DaggerViewComponent;
import com.andrey7mel.stepbystep.other.di.view.ViewComponent;
import com.andrey7mel.stepbystep.other.di.view.ViewDynamicModule;
import com.andrey7mel.stepbystep.presenter.BasePresenter;
import com.andrey7mel.stepbystep.presenter.RepoListPresenter;
import com.andrey7mel.stepbystep.presenter.vo.Repository;
import com.andrey7mel.stepbystep.view.ActivityCallback;
import com.andrey7mel.stepbystep.view.adapters.RepoListAdapter;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Fragment showing the list of repositories for a user name typed into the
 * search box. View-layer counterpart of {@link RepoListPresenter}; all user
 * actions are forwarded to the presenter, which calls back through
 * {@link RepoListView}.
 */
public class RepoListFragment extends BaseFragment implements RepoListView {
    @Bind(R.id.recycler_view)
    protected RecyclerView recyclerView;
    @Bind(R.id.edit_text)
    protected EditText editText;
    @Bind(R.id.button_search)
    protected Button searchButton;
    @Inject
    protected RepoListPresenter presenter;
    private RepoListAdapter adapter;
    private ActivityCallback activityCallback;
    private ViewComponent viewComponent;

    /** Forwards a search-button click to the presenter; ignored before injection. */
    @OnClick(R.id.button_search)
    public void onClickSearch(View v) {
        if (presenter == null) {
            return;
        }
        presenter.onSearchButtonClick();
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            // The hosting activity must provide navigation callbacks.
            activityCallback = (ActivityCallback) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.toString()
                    + " must implement activityCallback");
        }
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        // Build a default component unless a test injected one via setViewComponent().
        if (viewComponent == null) {
            viewComponent = DaggerViewComponent.builder()
                    .viewDynamicModule(new ViewDynamicModule(this))
                    .build();
        }
        viewComponent.inject(this);
        super.onCreate(savedInstanceState);
    }

    /** Allows tests to substitute the Dagger component before onCreate() runs. */
    public void setViewComponent(ViewComponent viewComponent) {
        this.viewComponent = viewComponent;
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_repo_list, container, false);
        ButterKnife.bind(this, rootView);
        adapter = new RepoListAdapter(new ArrayList<>(), presenter);
        recyclerView.setLayoutManager(new LinearLayoutManager(getContext()));
        recyclerView.setAdapter(adapter);
        presenter.onCreateView(savedInstanceState);
        return rootView;
    }

    /** Shows a transient message anchored to the list. */
    private void makeToast(String text) {
        Snackbar.make(recyclerView, text, Snackbar.LENGTH_LONG).show();
    }

    @Override
    protected BasePresenter getPresenter() {
        return presenter;
    }

    @Override
    public void showError(String error) {
        makeToast(error);
    }

    @Override
    public void showRepoList(List<Repository> repoList) {
        adapter.setRepoList(repoList);
    }

    @Override
    public void showEmptyList() {
        makeToast(getActivity().getString(R.string.empty_list));
    }

    @Override
    public String getUserName() {
        return editText.getText().toString();
    }

    @Override
    public void startRepoInfoFragment(Repository repository) {
        activityCallback.startRepoInfoFragment(repository);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        presenter.onSaveInstanceState(outState);
    }
}
| apache-2.0 |
masonmei/apm-agent | profiler/src/main/java/com/baidu/oped/apm/profiler/util/bindvalue/BindValueConverter.java | 4125 | /*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.baidu.oped.apm.profiler.util.bindvalue;
import java.util.HashMap;
import java.util.Map;
import com.baidu.oped.apm.profiler.util.bindvalue.converter.*;
/**
 * Renders the bind values passed to JDBC {@code PreparedStatement} setter
 * methods into loggable strings. A singleton instance maps each setter method
 * name (e.g. {@code "setInt"}) to a {@link Converter} that knows how to format
 * that setter's arguments; unknown setters render as an empty string.
 */
public class BindValueConverter {
    // Eagerly-built singleton; fully populated by register() before first use.
    private static final BindValueConverter converter;
    static {
        converter = new BindValueConverter();
        converter.register();
    }
    // Setter method name -> converter. Populated once during static init and
    // read-only afterwards; kept public for backward compatibility.
    public final Map<String, Converter> convertermap = new HashMap<String, Converter>();

    /** Populates the lookup table with all supported setter conversions. */
    private void register() {
        simpleType();
        classNameType();
        // There also is method with 3 parameters.
        convertermap.put("setNull", new NullTypeConverter());
        BytesConverter bytesConverter = new BytesConverter();
        convertermap.put("setBytes", bytesConverter);
        convertermap.put("setObject", new ObjectConverter());
    }

    /** Registers setters whose values are rendered by class name only. */
    private void classNameType() {
        // replace with class name if we don't want to (or can't) read the value
        ClassNameConverter classNameConverter = new ClassNameConverter();
        // There also is method with 3 parameters.
        convertermap.put("setAsciiStream", classNameConverter);
        convertermap.put("setUnicodeStream", classNameConverter);
        convertermap.put("setBinaryStream", classNameConverter);
        // There also is method with 3 parameters.
        convertermap.put("setBlob", classNameConverter);
        // There also is method with 3 parameters.
        convertermap.put("setClob", classNameConverter);
        convertermap.put("setArray", classNameConverter);
        convertermap.put("setNCharacterStream", classNameConverter);
        // There also is method with 3 parameters.
        convertermap.put("setNClob", classNameConverter);
        convertermap.put("setCharacterStream", classNameConverter);
        convertermap.put("setSQLXML", classNameConverter);
    }

    /** Registers setters whose values can be rendered directly. */
    private void simpleType() {
        SimpleTypeConverter simpleTypeConverter = new SimpleTypeConverter();
        convertermap.put("setByte", simpleTypeConverter);
        convertermap.put("setBoolean", simpleTypeConverter);
        convertermap.put("setShort", simpleTypeConverter);
        convertermap.put("setInt", simpleTypeConverter);
        convertermap.put("setLong", simpleTypeConverter);
        convertermap.put("setFloat", simpleTypeConverter);
        convertermap.put("setDouble", simpleTypeConverter);
        convertermap.put("setBigDecimal", simpleTypeConverter);
        convertermap.put("setString", simpleTypeConverter);
        convertermap.put("setDate", simpleTypeConverter);
        // There also is method with 3 parameters.
        convertermap.put("setTime", simpleTypeConverter);
        // There also is method with 3 parameters.
        convertermap.put("setTimestamp", simpleTypeConverter);
        // could be replaced with string
        convertermap.put("setURL", simpleTypeConverter);
        // could be replaced with string
        convertermap.put("setRef", simpleTypeConverter);
        convertermap.put("setNString", simpleTypeConverter);
    }

    /**
     * Converts the arguments of one setter invocation to a display string.
     *
     * @param methodName JDBC setter name, e.g. {@code "setString"}
     * @param args the arguments the setter was invoked with
     * @return the rendered value, or {@code ""} for an unsupported setter
     */
    public String convert0(String methodName, Object[] args) {
        // Renamed local (was "converter") so it no longer shadows the static
        // singleton field of the same name.
        Converter methodConverter = this.convertermap.get(methodName);
        if (methodConverter == null) {
            return "";
        }
        return methodConverter.convert(args);
    }

    /** Static entry point delegating to the singleton instance. */
    public static String convert(String methodName, Object[] args) {
        return converter.convert0(methodName, args);
    }
}
| apache-2.0 |
apache/incubator-asterixdb | hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/evaluators/ColumnAccessEvalFactory.java | 2207 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.runtime.evaluators;
import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
/**
 * Factory for scalar evaluators that project a single field out of a tuple:
 * the evaluator points {@code result} at the raw bytes of field
 * {@code fieldIndex} without copying.
 */
public class ColumnAccessEvalFactory implements IScalarEvaluatorFactory {

    private static final long serialVersionUID = 1L;

    // Zero-based position of the tuple field to expose.
    private final int fieldIndex;

    public ColumnAccessEvalFactory(int fieldIndex) {
        this.fieldIndex = fieldIndex;
    }

    @Override
    public String toString() {
        return "ColumnAccess(" + fieldIndex + ")";
    }

    @Override
    public IScalarEvaluator createScalarEvaluator(final IEvaluatorContext ctx) throws HyracksDataException {
        return new IScalarEvaluator() {
            @Override
            public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                // Point the result at the field's byte range inside the tuple's frame.
                result.set(tuple.getFieldData(fieldIndex), tuple.getFieldStart(fieldIndex),
                        tuple.getFieldLength(fieldIndex));
            }
        };
    }
}
| apache-2.0 |
cyberdrcarr/optaplanner | drools-planner-core/src/main/java/org/drools/planner/core/heuristic/selector/common/iterator/ListIterable.java | 1373 | /*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.planner.core.heuristic.selector.common.iterator;
import java.util.List;
import java.util.ListIterator;
/**
 * An extension on the {@link Iterable} interface that supports {@link #listIterator()} and {@link #listIterator(int)}.
 * @param <T> the element type
 */
public interface ListIterable<T> extends Iterable<T> {
    /**
     * See {@link List#listIterator()}
     *
     * @return never null, see {@link List#listIterator()}.
     */
    ListIterator<T> listIterator();
    /**
     * See {@link List#listIterator(int)}
     *
     * @param index start position of the iterator; per {@link List#listIterator(int)}
     *        any value from 0 up to and including the size of this {@link ListIterable} is valid.
     * @return never null, see {@link List#listIterator(int)}.
     */
    ListIterator<T> listIterator(int index);
}
| apache-2.0 |
Skarlso/gocd | server/src/test-fast/java/com/thoughtworks/go/server/messaging/elasticagents/CreateAgentMessageTest.java | 3320 | /*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.messaging.elasticagents;
import com.thoughtworks.go.config.elastic.ClusterProfile;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.domain.config.ConfigurationKey;
import com.thoughtworks.go.domain.config.ConfigurationProperty;
import com.thoughtworks.go.domain.config.ConfigurationValue;
import com.thoughtworks.go.domain.packagerepository.ConfigurationPropertyMother;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Verifies that {@link CreateAgentMessage} exposes the plugin id and the
 * configuration maps of the elastic/cluster profiles it was built from.
 */
class CreateAgentMessageTest {
    @Test
    void shouldGetPluginId() {
        List<ConfigurationProperty> props =
                singletonList(new ConfigurationProperty(new ConfigurationKey("key"), new ConfigurationValue("value")));
        ElasticProfile elasticProfile = new ElasticProfile("foo", "prod-cluster", props);
        ClusterProfile clusterProfile = new ClusterProfile("foo", "plugin-id", props);

        CreateAgentMessage message = new CreateAgentMessage("key", "env", elasticProfile, clusterProfile, null);

        // The message must mirror the profiles it was constructed from.
        Map<String, String> expectedClusterConfig = clusterProfile.getConfigurationAsMap(true);
        Map<String, String> expectedElasticConfig = elasticProfile.getConfigurationAsMap(true);
        assertThat(message.pluginId()).isEqualTo(clusterProfile.getPluginId());
        assertThat(message.getClusterProfileConfiguration()).isEqualTo(expectedClusterConfig);
        assertThat(message.configuration()).isEqualTo(expectedElasticConfig);
    }

    @Test
    void shouldReturnResolvedValues() {
        ConfigurationProperty plainProperty = ConfigurationPropertyMother.create("key", "value");
        ConfigurationProperty secretProperty =
                ConfigurationPropertyMother.create("key1", false, "{{SECRET:[config_id][lookup_key]}}");
        // Simulate secret resolution having already happened.
        secretProperty.getSecretParams().get(0).setValue("some-resolved-value");
        ElasticProfile elasticProfile = new ElasticProfile("foo", "prod-cluster", plainProperty, secretProperty);
        ClusterProfile clusterProfile = new ClusterProfile("foo", "plugin-id", plainProperty, secretProperty);

        CreateAgentMessage message = new CreateAgentMessage("key", "env", elasticProfile, clusterProfile, null);

        // With resolveSecrets=true the maps must carry the resolved secret values.
        Map<String, String> expectedClusterConfig = clusterProfile.getConfigurationAsMap(true, true);
        Map<String, String> expectedElasticConfig = elasticProfile.getConfigurationAsMap(true, true);
        assertThat(message.pluginId()).isEqualTo(clusterProfile.getPluginId());
        assertThat(message.getClusterProfileConfiguration()).isEqualTo(expectedClusterConfig);
        assertThat(message.configuration()).isEqualTo(expectedElasticConfig);
    }
}
| apache-2.0 |