repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
kongchen/swagger-maven-plugin | src/main/java/com/github/kongchen/swagger/docgen/mavenplugin/SecurityDefinition.java | 5429 | package com.github.kongchen.swagger.docgen.mavenplugin;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.kongchen.swagger.docgen.GenerateException;
import io.swagger.models.auth.ApiKeyAuthDefinition;
import io.swagger.models.auth.BasicAuthDefinition;
import io.swagger.models.auth.OAuth2Definition;
import io.swagger.models.auth.SecuritySchemeDefinition;
import io.swagger.util.Json;
import org.apache.commons.lang3.reflect.FieldUtils;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
 * Maven plugin configuration bean describing Swagger security scheme definitions.
 * <p>
 * A definition can be configured in two ways:
 * <ul>
 * <li>inline, via the {@code name}/{@code type}/{@code in}/{@code description}
 * fields (this bean is serialized to JSON and parsed back into a scheme), or</li>
 * <li>from a JSON document — either a classpath resource ({@code json}) or a file
 * on disk ({@code jsonPath}) — whose top-level fields each describe one scheme.</li>
 * </ul>
 * Fields are presumably injected directly from the plugin {@code <configuration>}
 * by Maven/Plexus (note that not every field has a setter) — TODO confirm.
 *
 * @author chekong on 15/5/5.
 */
public class SecurityDefinition {

    private String name;
    private String type;
    private String in;
    private String description;
    // Classpath resource containing the security definitions JSON; takes
    // precedence over jsonPath when both are set.
    private String json;
    // Filesystem path to the security definitions JSON.
    private String jsonPath;

    private ObjectMapper mapper = Json.mapper();

    /**
     * Builds the security scheme definitions keyed by definition name.
     *
     * @return map from security definition name to the parsed scheme; definitions
     *         with an unrecognized {@code type} are skipped
     * @throws GenerateException if the JSON source cannot be found, read or parsed
     */
    public Map<String, SecuritySchemeDefinition> generateSecuritySchemeDefinitions() throws GenerateException {
        Map<String, SecuritySchemeDefinition> map = new HashMap<String, SecuritySchemeDefinition>();
        Map<String, JsonNode> securityDefinitions = new HashMap<String, JsonNode>();
        if (json != null || jsonPath != null) {
            securityDefinitions = loadSecurityDefinitionsFromJsonFile();
        } else {
            // Inline configuration: serialize this bean and treat it as one definition.
            JsonNode tree = mapper.valueToTree(this);
            securityDefinitions.put(tree.get("name").asText(), tree);
        }
        for (Map.Entry<String, JsonNode> securityDefinition : securityDefinitions.entrySet()) {
            JsonNode definition = securityDefinition.getValue();
            SecuritySchemeDefinition ssd =
                    getSecuritySchemeDefinitionByType(definition.get("type").asText(), definition);
            // Safe to call before the null check below: tryFillNameField ignores null.
            tryFillNameField(ssd, securityDefinition.getKey());
            if (ssd != null) {
                map.put(securityDefinition.getKey(), ssd);
            }
        }
        return map;
    }

    /**
     * <p>Try to fill the name property of some authentication definition, if no user defined value was set.</p>
     * <p>If the current value of the name property is empty, this will fill it to be the same as the name of the
     * security definition.<br/>
     * If no {@link Field} named "name" is found inside the given SecuritySchemeDefinition, no action will be taken.</p>
     *
     * @param ssd   security scheme (may be {@code null}, in which case nothing happens)
     * @param value value to set the name to
     */
    private void tryFillNameField(SecuritySchemeDefinition ssd, String value) {
        if (ssd == null) {
            return;
        }
        Field nameField = FieldUtils.getField(ssd.getClass(), "name", true);
        try {
            if (nameField != null && nameField.get(ssd) == null) {
                nameField.set(ssd, value);
            }
        } catch (IllegalAccessException e) {
            // Ignored: FieldUtils.getField(..., true) already forced accessibility,
            // so this should not occur; if it does, the name simply stays unset.
        }
    }

    /**
     * Loads the raw security definitions from the configured JSON source
     * (classpath resource {@code json} or file {@code jsonPath}).
     *
     * @return map from definition name to its raw JSON node
     * @throws GenerateException if the resource is missing or cannot be parsed
     */
    private Map<String, JsonNode> loadSecurityDefinitionsFromJsonFile() throws GenerateException {
        Map<String, JsonNode> securityDefinitions = new HashMap<String, JsonNode>();
        InputStream jsonStream = null;
        try {
            if (json != null) {
                jsonStream = this.getClass().getResourceAsStream(json);
                if (jsonStream == null) {
                    // getResourceAsStream() returns null (instead of throwing) for a
                    // missing resource; fail with a clear message rather than an NPE.
                    throw new GenerateException(
                            new IOException("Security definition resource not found on classpath: " + json));
                }
            } else {
                jsonStream = new FileInputStream(jsonPath);
            }
            JsonNode tree = mapper.readTree(jsonStream);
            Iterator<Map.Entry<String, JsonNode>> fields = tree.fields();
            while (fields.hasNext()) {
                Map.Entry<String, JsonNode> field = fields.next();
                securityDefinitions.put(field.getKey(), field.getValue());
            }
        } catch (IOException e) {
            throw new GenerateException(e);
        } finally {
            // The previous implementation leaked this stream.
            if (jsonStream != null) {
                try {
                    jsonStream.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
        return securityDefinitions;
    }

    /**
     * Instantiates the concrete scheme for the given {@code type} string
     * (oauth2 / basic / apiKey), parsing {@code node} into it when provided.
     *
     * @param type the scheme type discriminator
     * @param node raw definition JSON, or {@code null} for an empty scheme
     * @return the parsed definition, or {@code null} for an unknown type
     * @throws GenerateException if the node cannot be deserialized
     */
    private SecuritySchemeDefinition getSecuritySchemeDefinitionByType(String type, JsonNode node) throws GenerateException {
        try {
            SecuritySchemeDefinition def = null;
            if (type.equals(new OAuth2Definition().getType())) {
                def = new OAuth2Definition();
                if (node != null) {
                    def = mapper.readValue(node.traverse(), OAuth2Definition.class);
                }
            } else if (type.equals(new BasicAuthDefinition().getType())) {
                def = new BasicAuthDefinition();
                if (node != null) {
                    def = mapper.readValue(node.traverse(), BasicAuthDefinition.class);
                }
            } else if (type.equals(new ApiKeyAuthDefinition().getType())) {
                def = new ApiKeyAuthDefinition();
                if (node != null) {
                    def = mapper.readValue(node.traverse(), ApiKeyAuthDefinition.class);
                }
            }
            return def;
        } catch (IOException e) {
            throw new GenerateException(e);
        }
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getIn() {
        return in;
    }

    public void setIn(String in) {
        this.in = in;
    }

    public String getJson() {
        return json;
    }

    public void setJson(String json) {
        this.json = json;
    }
}
| apache-2.0 |
leafclick/intellij-community | plugins/properties/properties-resource-bundle-editor/src/com/intellij/lang/properties/editor/GotoResourceBundleLocalizationsProvider.java | 2077 | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.lang.properties.editor;
import com.intellij.lang.properties.ResourceBundle;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.navigation.GotoRelatedItem;
import com.intellij.navigation.GotoRelatedProvider;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Provides "Go to related" navigation from a properties file to the other
 * localizations (sibling properties files) of the same resource bundle.
 *
 * @author Dmitry Batkovich
 */
public class GotoResourceBundleLocalizationsProvider extends GotoRelatedProvider {
  @NotNull
  @Override
  public List<? extends GotoRelatedItem> getItems(@NotNull final DataContext context) {
    // Inside the resource bundle editor all localizations are already visible,
    // so offering navigation there would be redundant.
    final FileEditor fileEditor = PlatformDataKeys.FILE_EDITOR.getData(context);
    if (fileEditor instanceof ResourceBundleEditor) {
      return Collections.emptyList();
    }
    final PsiFile currentFile = CommonDataKeys.PSI_FILE.getData(context);
    if (!(currentFile instanceof PropertiesFile)) {
      return Collections.emptyList();
    }
    final ResourceBundle bundle = ((PropertiesFile)currentFile).getResourceBundle();
    final List<PropertiesFile> allLocalizations = bundle.getPropertiesFiles();
    // A bundle always contains at least the file it was obtained from.
    assert allLocalizations.size() != 0;
    if (allLocalizations.size() == 1) {
      // This file is the bundle's only localization: nothing to navigate to.
      return Collections.emptyList();
    }
    final ArrayList<PropertiesFile> otherLocalizations = new ArrayList<>(allLocalizations);
    otherLocalizations.remove(currentFile);
    return ContainerUtil.map(otherLocalizations,
                             localization -> new GotoRelatedItem((PsiElement)localization, "Other Localizations"));
  }
}
| apache-2.0 |
ouchadam/jogame | core/src/main/java/com/ouchadam/jogame/api/JogameRequestBuilder.java | 3446 | package com.ouchadam.jogame.api;
import com.github.kevinsawicki.http.HttpRequest;
import java.util.HashMap;
import java.util.Map;
public class JogameRequestBuilder {

    /** Starts building a GET request. */
    public static Get get() {
        return new Get();
    }

    /** Starts building a POST request. */
    public static Post post() {
        return new Post();
    }

    /** Builder flavour that issues GET requests. */
    public static class Get extends builder<Get> {

        @Override
        public JogameRequest build() {
            return new JogameRequest(get());
        }

        @Override
        protected Get getThis() {
            return this;
        }
    }

    /** Builder flavour that issues POST requests. */
    public static class Post extends builder<Post> {

        /** Builds the request and attaches the given form fields to its body. */
        public JogameRequest form(Map<String, String> formData) {
            return new JogameRequest(build().asRequest().form(formData));
        }

        @Override
        public JogameRequest build() {
            return new JogameRequest(post());
        }

        @Override
        protected Post getThis() {
            return this;
        }
    }

    /**
     * Common request state shared by {@link Get} and {@link Post}: target URL,
     * query parameters, optional session cookie and referer, plus the
     * browser-impersonation headers applied to every outgoing request.
     */
    public abstract static class builder<T> {

        private static final String USER_AGENT = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36";

        private final Map<String, String> params = new HashMap<String, String>();
        private String targetUrl;
        private Session gameSession;
        private String refererUrl;

        public T url(String baseUrl) {
            targetUrl = baseUrl;
            return getThis();
        }

        public T session(Session session) {
            gameSession = session;
            return getThis();
        }

        public T referer(String referer) {
            refererUrl = referer;
            return getThis();
        }

        public T page(String page) {
            return param("page", page);
        }

        public T planet(String planet) {
            return param("cp", planet);
        }

        public T param(String name, String value) {
            params.put(name, value);
            return getThis();
        }

        protected HttpRequest get() {
            return decorate(HttpRequest.get(targetUrl, params, false));
        }

        protected HttpRequest post() {
            return decorate(HttpRequest.post(targetUrl, params, false));
        }

        // Applies the browser-like headers and the optional session/referer.
        private HttpRequest decorate(HttpRequest request) {
            request.userAgent(USER_AGENT);
            request.followRedirects(false);
            request.header("Accept-Language", "en-GB,en-US;q=0.8,en;q=0.6");
            request.header("Accept-Encoding", "gzip,deflate,sdch");
            request.header("Connection", "keep-alive");
            request.uncompress(true);
            request.header("Host", "s125-en.ogame.gameforge.com");
            request.header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
            if (gameSession != null) {
                request.header("Cookie", gameSession.value());
            }
            if (refererUrl != null) {
                request.referer(refererUrl);
            }
            return request;
        }

        public abstract JogameRequest build();

        protected abstract T getThis();
    }
}
| apache-2.0 |
consulo/consulo | modules/base/core-api/src/main/java/consulo/extensions/LocalizeValueConverter.java | 1208 | /*
* Copyright 2013-2020 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.extensions;
import com.intellij.util.xmlb.Converter;
import consulo.localize.LocalizeManager;
import consulo.localize.LocalizeValue;
import javax.annotation.Nonnull;
/**
 * XML serialization converter that resolves stored localization keys into
 * {@link LocalizeValue}s. Conversion is strictly one-way: a value cannot be
 * written back out as a key.
 *
 * @author VISTALL
 * @since 2020-05-29
 */
public final class LocalizeValueConverter extends Converter<LocalizeValue> {
  @Override
  public LocalizeValue fromString(@Nonnull String value) {
    // The serialized string is a localization key resolved by the manager.
    LocalizeManager localizeManager = LocalizeManager.get();
    return localizeManager.fromStringKey(value);
  }

  @Nonnull
  @Override
  public String toString(@Nonnull LocalizeValue localizeValue) {
    // Deliberately unsupported: a resolved value cannot be mapped back to its key.
    throw new UnsupportedOperationException("We can't write localize value");
  }
}
| apache-2.0 |
NotFound403/WePay | src/main/java/cn/felord/wepay/ali/sdk/api/domain/ValidDateInfo.java | 2322 | package cn.felord.wepay.ali.sdk.api.domain;
import java.util.Date;
import cn.felord.wepay.ali.sdk.api.AlipayObject;
import cn.felord.wepay.ali.sdk.api.internal.mapping.ApiField;
/**
 * Validity period (effective date range) information.
 * Generated Alipay SDK model: fields are mapped to API parameters via
 * {@link ApiField} annotations, so field names must not be changed.
 *
 * @author auto create
 * @version $Id: $Id
 */
public class ValidDateInfo extends AlipayObject {
private static final long serialVersionUID = 3717368843728674357L;
/**
* End time of the validity period.
*/
@ApiField("end_time")
private Date endTime;
/**
* Relative validity period (a duration rather than fixed dates).
*/
@ApiField("relative_time")
private PeriodInfo relativeTime;
/**
* Start time of the validity period.
*/
@ApiField("start_time")
private Date startTime;
/**
* Time mode: RELATIVE = relative time, RELATIVE = absolute mode.
* NOTE(review): the original (Chinese) comment lists "RELATIVE" twice; the
* second occurrence is presumably a typo for ABSOLUTE — confirm against the
* Alipay API documentation.
*/
@ApiField("time_mode")
private String timeMode;
/**
* <p>Getter for the field <code>endTime</code>.</p>
*
* @return a {@link java.util.Date} object.
*/
public Date getEndTime() {
return this.endTime;
}
/**
* <p>Setter for the field <code>endTime</code>.</p>
*
* @param endTime a {@link java.util.Date} object.
*/
public void setEndTime(Date endTime) {
this.endTime = endTime;
}
/**
* <p>Getter for the field <code>relativeTime</code>.</p>
*
* @return a {@link cn.felord.wepay.ali.sdk.api.domain.PeriodInfo} object.
*/
public PeriodInfo getRelativeTime() {
return this.relativeTime;
}
/**
* <p>Setter for the field <code>relativeTime</code>.</p>
*
* @param relativeTime a {@link cn.felord.wepay.ali.sdk.api.domain.PeriodInfo} object.
*/
public void setRelativeTime(PeriodInfo relativeTime) {
this.relativeTime = relativeTime;
}
/**
* <p>Getter for the field <code>startTime</code>.</p>
*
* @return a {@link java.util.Date} object.
*/
public Date getStartTime() {
return this.startTime;
}
/**
* <p>Setter for the field <code>startTime</code>.</p>
*
* @param startTime a {@link java.util.Date} object.
*/
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
/**
* <p>Getter for the field <code>timeMode</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getTimeMode() {
return this.timeMode;
}
/**
* <p>Setter for the field <code>timeMode</code>.</p>
*
* @param timeMode a {@link java.lang.String} object.
*/
public void setTimeMode(String timeMode) {
this.timeMode = timeMode;
}
}
| apache-2.0 |
aminmf/crawljax | examples/src/test/java/com/crawljax/plugins/testilizer/generated/claroline_RAND/GeneratedTestCase3.java | 76184 | package com.crawljax.plugins.testilizer.generated.claroline_RAND;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.junit.*;
import static org.junit.Assert.*;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxProfile;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.NodeList;
import com.crawljax.forms.RandomInputValueGenerator;
import com.crawljax.util.DomUtils;
/*
* Generated @ Fri Apr 18 00:45:05 PDT 2014
*/
public class GeneratedTestCase3 {
private WebDriver driver;
private String url;
private boolean acceptNextAlert = true;
private StringBuffer verificationErrors = new StringBuffer();
private DOMElement element;
private DOMElement parentElement;
private ArrayList<DOMElement> childrenElements = new ArrayList<DOMElement>();
private String DOM = null;
boolean getCoverageReport = false;
/**
 * Opens a Firefox session before each test. When Testilizer's coverage switch
 * is on, the browser is routed through the proxied profile from getProfile()
 * so JavaScript coverage can be collected; otherwise a plain driver is used.
 */
@Before
public void setUp() throws Exception {
// Setting the JavaScript code coverage switch
getCoverageReport = com.crawljax.plugins.testilizer.Testilizer.getCoverageReport();
if (getCoverageReport)
driver = new FirefoxDriver(getProfile());
else
driver = new FirefoxDriver();
// Application under test: a local Claroline instance (logout clears any session).
url = "http://localhost:8888/claroline-1.11.7/index.php?logout=true";
// Generous implicit wait so element lookups tolerate slow page loads.
driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
}
/**
 * Builds a Firefox profile that routes all traffic — including localhost —
 * through an HTTP proxy on localhost:3128, so a JavaScript code coverage
 * proxy can instrument the pages under test.
 */
public static FirefoxProfile getProfile() {
    FirefoxProfile coverageProfile = new FirefoxProfile();
    coverageProfile.setPreference("network.proxy.http", "localhost");
    coverageProfile.setPreference("network.proxy.http_port", 3128);
    // type 1 = manual proxy configuration
    coverageProfile.setPreference("network.proxy.type", 1);
    // Empty exception list: even localhost requests go through the proxy.
    coverageProfile.setPreference("network.proxy.no_proxies_on", "");
    return coverageProfile;
}
/**
 * After each test: dump the JSCover coverage report (if enabled), quit the
 * browser, and fail the test if any soft verification errors were recorded.
 */
@After
public void tearDown() throws Exception {
if (getCoverageReport)
((JavascriptExecutor) driver).executeScript(" if (window.jscoverage_report) {return jscoverage_report('CodeCoverageReport');}");
driver.quit();
// Soft verification errors accumulated during the test fail it here, at the end.
String verificationErrorString = verificationErrors.toString();
if (!"".equals(verificationErrorString)) {
fail(verificationErrorString);
}
}
/*
* Test Cases
*/
/**
 * Testilizer-generated scenario replaying a crawled path through Claroline:
 * state 0 (login page) -> 22 -> 28 -> 29 (sink), clicking "Create user account"
 * links. At each state the DOM is mutated and every assertion category is run.
 * The Eventable comments below are emitted by the generator and document the
 * crawl edge each click corresponds to.
 */
@Test
public void method3(){
driver.get(url);
//From state 0 to state 22
//Eventable{eventType=click, identification=xpath /HTML[1]/BODY[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/P[1]/A[2], element=Element{node=[A: null], tag=A, text=Create user account, attributes={href=/claroline-1.11.7/claroline/auth/inscription.php}}, source=StateVertexImpl{id=0, name=index}, target=StateVertexImpl{id=22, name=state22}}
mutateDOMTree(0);
checkState0_OriginalAssertions();
checkState0_ReusedAssertions();
checkState0_GeneratedAssertions();
checkState0_LearnedAssertions();
checkState0_AllAssertions();
checkState0_RandAssertions1();
checkState0_RandAssertions2();
checkState0_RandAssertions3();
checkState0_RandAssertions4();
checkState0_RandAssertions5();
driver.findElement(By.id("login")).clear();
driver.findElement(By.id("login")).sendKeys("nainy");
driver.findElement(By.id("password")).clear();
driver.findElement(By.id("password")).sendKeys("nainy");
driver.findElement(By.xpath("/HTML[1]/BODY[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/P[1]/A[2]")).click();
//From state 22 to state 28
//Eventable{eventType=click, identification=xpath /HTML[1]/BODY[1]/DIV[1]/DIV[1]/DIV[3]/DIV[1]/UL[1]/LI[2]/A[1], element=Element{node=[A: null], tag=A, text=Create user account, attributes={href=inscription.php, target=_top}}, source=StateVertexImpl{id=22, name=state22}, target=StateVertexImpl{id=28, name=state28}}
mutateDOMTree(22);
checkState22_OriginalAssertions();
checkState22_ReusedAssertions();
checkState22_GeneratedAssertions();
checkState22_LearnedAssertions();
checkState22_AllAssertions();
checkState22_RandAssertions1();
checkState22_RandAssertions2();
checkState22_RandAssertions3();
checkState22_RandAssertions4();
checkState22_RandAssertions5();
driver.findElement(By.id("password")).clear();
driver.findElement(By.id("password")).sendKeys("nainy");
driver.findElement(By.xpath("/HTML[1]/BODY[1]/DIV[1]/DIV[1]/DIV[3]/DIV[1]/UL[1]/LI[2]/A[1]")).click();
//From state 28 to state 29
//Eventable{eventType=click, identification=xpath /HTML[1]/BODY[1]/DIV[1]/DIV[2]/FORM[1]/DL[1]/DT[1]/A[1], element=Element{node=[A: null], tag=A, text=, attributes={href=http://localhost:8888/claroline-1.11.7/claroline/auth/inscription.php}}, source=StateVertexImpl{id=28, name=state28}, target=StateVertexImpl{id=29, name=state29}}
mutateDOMTree(28);
checkState28_OriginalAssertions();
checkState28_ReusedAssertions();
checkState28_GeneratedAssertions();
checkState28_LearnedAssertions();
checkState28_AllAssertions();
checkState28_RandAssertions1();
checkState28_RandAssertions2();
checkState28_RandAssertions3();
checkState28_RandAssertions4();
checkState28_RandAssertions5();
driver.findElement(By.id("password")).clear();
driver.findElement(By.id("password")).sendKeys("nainy");
driver.findElement(By.xpath("/HTML[1]/BODY[1]/DIV[1]/DIV[2]/FORM[1]/DL[1]/DT[1]/A[1]")).click();
//Sink node at state 29
mutateDOMTree(29);
checkState29_OriginalAssertions();
checkState29_ReusedAssertions();
checkState29_GeneratedAssertions();
checkState29_LearnedAssertions();
checkState29_AllAssertions();
checkState29_RandAssertions1();
checkState29_RandAssertions2();
checkState29_RandAssertions3();
checkState29_RandAssertions4();
checkState29_RandAssertions5();
}
// The generator emitted no Original/Reused/Generated/Learned/All assertions for
// state 0, so these category hooks are intentionally empty; only the
// RandAssertions batches below carry checks for this state.
public void checkState0_OriginalAssertions(){
}
public void checkState0_ReusedAssertions(){
}
public void checkState0_GeneratedAssertions(){
}
public void checkState0_LearnedAssertions(){
}
public void checkState0_AllAssertions(){
}
/**
 * Randomly sampled DOM assertions for state 0 (batch 1), generated by Testilizer.
 * Each check rebuilds an expected element / parent / children structure and
 * verifies its presence in the current DOM. Note that a failed check only
 * prints the assertion id to stdout — it does not fail the test.
 */
public void checkState0_RandAssertions1(){
element = new DOMElement("LABEL", "Searchfromkeyword", new ArrayList<String>(Arrays.asList("for=\"keyword\"")));
parentElement = new DOMElement("H3", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Searchfromkeyword", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("166"); // Random element assertion
element = new DOMElement("SCRIPT", "", new ArrayList<String>(Arrays.asList("src=\"/claroline-1.11.7/web/js/jquery.js?1311784942\"","type=\"text/javascript\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("170"); // Random element assertion
element = new DOMElement("SCRIPT", "", new ArrayList<String>(Arrays.asList("type=\"text/javascript\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("175"); // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusFooter\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"claroPage\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList("id=\"campusFooterLeft\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"campusFooterRight\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusFooterCenter\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "AdministratorforClaroline:mmnainy", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "PoweredbyClaroline2001-2013", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("188"); // Random element assertion
element = new DOMElement("DIV", "Claroline", new ArrayList<String>(Arrays.asList("class=\"breadcrumbTrails\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"breadCrumbs\""))));
childrenElements.add(new DOMElement("UL", "Claroline", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("195"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for state 0 (batch 2): login form controls
 * and head elements of the Claroline start page. Failures are only logged
 * via System.out.println, not turned into test failures.
 */
public void checkState0_RandAssertions2(){
element = new DOMElement("BUTTON", "UsernamePasswordEnter", new ArrayList<String>(Arrays.asList("tabindex=\"3\"","type=\"submit\"")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("style=\"border: 0; margin: 10px 0 15px 0; padding: 5px;\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Enter", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("203"); // Random element assertion
element = new DOMElement("TITLE", "Claroline", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Claroline", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("208"); // Random element assertion
element = new DOMElement("LINK", "", new ArrayList<String>(Arrays.asList("href=\"http://www.claroline.net/documentation.htm\"","rel=\"Help\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("212"); // Random element assertion
element = new DOMElement("INPUT", "UsernamePasswordEnter", new ArrayList<String>(Arrays.asList("class=\"inputLogin\"","id=\"login\"","name=\"login\"","size=\"12\"","tabindex=\"1\"","type=\"text\"")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("style=\"border: 0; margin: 10px 0 15px 0; padding: 5px;\"")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("216"); // Random element assertion
element = new DOMElement("TITLE", "Claroline", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Claroline", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("221"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for state 0 (batch 3): breadcrumb trail
 * and course-list regions. Failures are only logged via System.out.println,
 * not turned into test failures.
 */
public void checkState0_RandAssertions3(){
element = new DOMElement("A", "Claroline", new ArrayList<String>(Arrays.asList("href=\"/claroline-1.11.7/index.php\"","target=\"_top\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("class=\"breadCrumbsNode lastBreadCrumbsNode\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("IMG", "", new ArrayList<String>(Arrays.asList("alt=\"home\"","src=\"/claroline-1.11.7/web/img/home.png?1232379976\""))));
childrenElements.add(new DOMElement("#text", "Claroline", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("230"); // Random element assertion
element = new DOMElement("IMG", "Claroline", new ArrayList<String>(Arrays.asList("alt=\"home\"","src=\"/claroline-1.11.7/web/img/home.png?1232379976\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"/claroline-1.11.7/index.php\"","target=\"_top\"")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("234"); // Random element assertion
element = new DOMElement("DIV", "ClarolineLoginClaroline", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"topBanner\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList("class=\"breadcrumbTrails\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"toolViewOption\""))));
childrenElements.add(new DOMElement("DIV", "Claroline", new ArrayList<String>(Arrays.asList("class=\"spacer\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("249"); // Random element assertion
element = new DOMElement("IMG", "AAOC112EngineeringGraphics", new ArrayList<String>(Arrays.asList("alt=\"Access allowed to anybody (even without login)\"","class=\"access qtip \"","src=\"/claroline-1.11.7/web/img/access_open.png?1232379976\"")));
parentElement = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("253"); // Random element assertion
element = new DOMElement("DL", "AAOC112EngineeringGraphicsmmnainy-English", new ArrayList<String>(Arrays.asList("class=\"courseList\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"leftContent\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "Renderthecourseanditschildren", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DT", "AAOC112EngineeringGraphics", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DD", "mmnainy-English", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("266"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for state 0 (batch 4): footer, platform
 * banner, login form and course-list entries. Failures are only logged via
 * System.out.println, not turned into test failures.
 */
public void checkState0_RandAssertions4(){
element = new DOMElement("A", "AdministratorforClaroline:mmnainy", new ArrayList<String>(Arrays.asList("href=\"mailto:m@ga.com?subject=[Claroline]\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"platformManager\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "mmnainy", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("274"); // Random element assertion
element = new DOMElement("DIV", "Claroline", new ArrayList<String>(Arrays.asList("class=\"spacer\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"platformBanner\"")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("278"); // Random element assertion
element = new DOMElement("DIV", "AdministratorforClaroline:mmnainy", new ArrayList<String>(Arrays.asList("id=\"campusFooterRight\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusFooter\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"platformManager\""))));
childrenElements.add(new DOMElement("#comment", "campusFooterRight", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "EndofcampusFooterRight", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "AdministratorforClaroline:mmnainy", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("289"); // Random element assertion
element = new DOMElement("FORM", "UsernamePasswordEnter", new ArrayList<String>(Arrays.asList("action=\"/claroline-1.11.7/claroline/auth/login.php\"","class=\"claroLoginForm\"","method=\"post\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"loginBox\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("style=\"border: 0; margin: 10px 0 15px 0; padding: 5px;\""))));
childrenElements.add(new DOMElement("FIELDSET", "UsernamePasswordEnter", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("296"); // Random element assertion
element = new DOMElement("DD", "AAOC112EngineeringGraphicsmmnainy-English", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("class=\"courseList\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("SPAN", "mmnainy-English", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("303"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 0 (batch 5).
 * Each check rebuilds an expected (parent, element, children) region from generator
 * output and prints the assertion id when the region is missing from the live DOM.
 */
public void checkState0_RandAssertions5(){
element = new DOMElement("LI", "Login", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("class=\"menu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "Login", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("311"); // Random element assertion
element = new DOMElement("HR", "Claroline", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("315"); // Random element assertion
// FIX: generator emitted "LINK[[@href=..." (unbalanced "[["); By.xpath would throw InvalidSelectorException.
if(!(isElementPresent(By.xpath("/HTML/HEAD/LINK[@href=\"/claroline-1.11.7/claroline/../favicon.ico\" and @rel=\"shortcut icon\"]")))) System.out.println("316"); // Random element assertion
element = new DOMElement("LABEL", "UsernamePasswordEnter", new ArrayList<String>(Arrays.asList("for=\"password\"")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("style=\"border: 0; margin: 10px 0 15px 0; padding: 5px;\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Password", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("321"); // Random element assertion
// FIX: generator emitted "[[" and a doubled "@@rel"; normalized to valid XPath 1.0 attribute tests.
if(!(isElementPresent(By.xpath("/HTML/HEAD/LINK[@href=\"/index.php\" and @rel=\"top\" and @title=\"\"]")))) System.out.println("322"); // Random element assertion
}
/** Placeholder generated for crawl state 22: no original assertions were produced. */
public void checkState22_OriginalAssertions(){
}
/** Placeholder generated for crawl state 22: no reused assertions were produced. */
public void checkState22_ReusedAssertions(){
}
/** Placeholder generated for crawl state 22: no generated assertions were produced. */
public void checkState22_GeneratedAssertions(){
}
/** Placeholder generated for crawl state 22: no learned assertions were produced. */
public void checkState22_LearnedAssertions(){
}
/** Placeholder generated for crawl state 22: no combined assertions were produced. */
public void checkState22_AllAssertions(){
}
/**
 * Randomly sampled DOM assertions for crawl state 22 (batch 1).
 * Prints the assertion id when an expected (parent, element, children) region
 * or XPath target is absent from the live DOM.
 */
public void checkState22_RandAssertions1(){
element = new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("id=\"lastname\"","name=\"lastname\"","type=\"text\"","value=\"\"")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("344"); // Random element assertion
element = new DOMElement("LEGEND", "OtherinformationsEmailPhoneSkypeaccount", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Otherinformations", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("349"); // Random element assertion
element = new DOMElement("LINK", "", new ArrayList<String>(Arrays.asList("href=\"/claroline-1.11.7/web/css/print.css?1301592634\"","media=\"print\"","rel=\"stylesheet\"","type=\"text/css\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("353"); // Random element assertion
element = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
// NOTE(review): attribute lists below look shifted one node early relative to the elements
// they describe (generator off-by-one?) — left byte-identical; verify against the generator.
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"applyChange\"","name=\"applyChange\"","type=\"submit\"","value=\"Ok\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/claroline-1.11.7/index.php?logout=true\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("362"); // Random element assertion
// FIX: "HR[]" (empty predicate) is invalid XPath and would make By.xpath throw; the bare step is the intent.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV/DIV[3]/HR")))) System.out.println("363"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 22 (batch 2).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState22_RandAssertions2(){
element = new DOMElement("DIV", "ClarolineLoginClaroline>Createuseraccount", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"topBanner\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList("class=\"breadcrumbTrails\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"toolViewOption\""))));
childrenElements.add(new DOMElement("DIV", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"spacer\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("381"); // Random element assertion
// FIX: "[for=...]" without '@' matches a child ELEMENT named "for", never the attribute; added '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET/DL/DT[3]/LABEL[@for=\"officialCode\"]")))) System.out.println("382"); // Random element assertion
// FIX: doubled "@@name"/"@@type" is invalid XPath; normalized to single '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/INPUT[@id=\"cmd\" and @name=\"cmd\" and @type=\"hidden\" and @value=\"registration\"]")))) System.out.println("383"); // Random element assertion
element = new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("name=\"claroFormId\"","type=\"hidden\"","value=\"5350d32f3ac01\"")));
parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"/claroline-1.11.7/claroline/auth/inscription.php\"","enctype=\"multipart/form-data\"","id=\"userSettings\"","method=\"post\"")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("387"); // Random element assertion
// NOTE(review): "claroFormId" above is a per-session token — this assertion is likely flaky across runs.
element = new DOMElement("SPAN", "Password(Confirmation)*", new ArrayList<String>(Arrays.asList("class=\"required\"")));
parentElement = new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList("for=\"password_conf\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "*", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("392"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 22 (batch 3).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState22_RandAssertions3(){
element = new DOMElement("LABEL", "Skypeaccount", new ArrayList<String>(Arrays.asList("for=\"skype\"")));
parentElement = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Skypeaccount", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("400"); // Random element assertion
element = new DOMElement("DD", "Name*Firstname*Administrativecode", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"lastname\"","name=\"lastname\"","type=\"text\"","value=\"\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("407"); // Random element assertion
// FIX: "[for=...]" without '@' tests a child element, not the attribute; added '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET[2]/DL/DT[4]/LABEL[@for=\"password_conf\"]")))) System.out.println("408"); // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"spacer\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"claroBody\"")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("412"); // Random element assertion
// FIX: "[href=...]" without '@' can never match the href attribute; added '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[3]/DIV[2]/DIV/A[@href=\"mailto:m@ga.com?subject=[Claroline]\"]")))) System.out.println("413"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 22 (batch 4).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState22_RandAssertions4(){
// FIX: doubled "@@media"/"@@rel" is invalid XPath; normalized to single '@'.
if(!(isElementPresent(By.xpath("/HTML/HEAD/LINK[@href=\"/claroline-1.11.7/web/css/classic/main.css?1337672464\" and @media=\"screen, projection, tv\" and @rel=\"stylesheet\" and @type=\"text/css\"]")))) System.out.println("417"); // Random element assertion
element = new DOMElement("DD", "Name*Firstname*Administrativecode", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"officialCode\"","name=\"officialCode\"","type=\"text\"","value=\"\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("424"); // Random element assertion
element = new DOMElement("LABEL", "Password(Confirmation)*", new ArrayList<String>(Arrays.asList("for=\"password_conf\"")));
parentElement = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Password(Confirmation)", new ArrayList<String>(Arrays.asList("class=\"required\""))));
childrenElements.add(new DOMElement("SPAN", "*", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("431"); // Random element assertion
// FIX: doubled "@@name"/"@@type" normalized to single '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET[3]/DL/DD[2]/INPUT[@id=\"phone\" and @name=\"phone\" and @type=\"text\" and @value=\"\"]")))) System.out.println("432"); // Random element assertion
// FIX: "DIV[[class=..." (unbalanced "[[", missing '@') is invalid XPath; rewritten as an attribute test.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV/DIV[3]/DIV[@class=\"spacer\"]")))) System.out.println("433"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 22 (batch 5).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState22_RandAssertions5(){
element = new DOMElement("DD", "EmailPhoneSkypeaccount", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"skype\"","name=\"skype\"","type=\"text\"","value=\"\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("443"); // Random element assertion
element = new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("id=\"username\"","name=\"username\"","type=\"text\"","value=\"\"")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("447"); // Random element assertion
// FIX: "SCRIPT[[type=..." (unbalanced "[[", missing '@') is invalid XPath; rewritten as an attribute test.
if(!(isElementPresent(By.xpath("/HTML/HEAD/SCRIPT[@type=\"text/javascript\"]")))) System.out.println("448"); // Random element assertion
element = new DOMElement("P", "", new ArrayList<String>(Arrays.asList("class=\"notice\"")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Choosenowausernameandapasswordfortheuseraccount.", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("455"); // Random element assertion
element = new DOMElement("TITLE", "Claroline", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Claroline", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("460"); // Random element assertion
}
/** Placeholder generated for crawl state 28: no original assertions were produced. */
public void checkState28_OriginalAssertions(){
}
/** Placeholder generated for crawl state 28: no reused assertions were produced. */
public void checkState28_ReusedAssertions(){
}
/** Placeholder generated for crawl state 28: no generated assertions were produced. */
public void checkState28_GeneratedAssertions(){
}
/** Placeholder generated for crawl state 28: no learned assertions were produced. */
public void checkState28_LearnedAssertions(){
}
/** Placeholder generated for crawl state 28: no combined assertions were produced. */
public void checkState28_AllAssertions(){
}
/**
 * Randomly sampled DOM assertions for crawl state 28 (batch 1).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState28_RandAssertions1(){
element = new DOMElement("DIV", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"spacer\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("482"); // Random element assertion
element = new DOMElement("LINK", "", new ArrayList<String>(Arrays.asList("href=\"http://www.claroline.net\"","rel=\"Copyright\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("486"); // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusFooterLeft\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusFooter\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "campusFooterLeft", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "EndofcampusFooterLeft", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("495"); // Random element assertion
// FIX: "[for=...]" without '@' tests a child element, not the attribute; added '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET[3]/DL/DT[3]/LABEL[@for=\"skype\"]")))) System.out.println("496"); // Random element assertion
element = new DOMElement("H1", "Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"toolTitle mainTitle\"")));
parentElement = new DOMElement("TD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("501"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 28 (batch 2).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState28_RandAssertions2(){
element = new DOMElement("SCRIPT", "", new ArrayList<String>(Arrays.asList("src=\"/claroline-1.11.7/web/js/tooltitle.js?1311696294\"","type=\"text/javascript\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("508"); // Random element assertion
// FIX: "[class=...]" without '@' matches a child ELEMENT named "class", never the attribute; added '@'.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/DIV[@class=\"toolTitleBlock\"]")))) System.out.println("509"); // Random element assertion
element = new DOMElement("DIV", "AdministratorforClaroline:mmnainy", new ArrayList<String>(Arrays.asList("id=\"platformManager\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusFooterRight\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "AdministratorforClaroline:", new ArrayList<String>(Arrays.asList("href=\"mailto:m@ga.com?subject=[Claroline]\""))));
childrenElements.add(new DOMElement("A", "mmnainy", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("515"); // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"claroPage\"")));
parentElement = new DOMElement("BODY", "", new ArrayList<String>(Arrays.asList("dir=\"ltr\"")));
childrenElements.clear();
// NOTE(review): attribute strings below appear shifted relative to the nodes they describe
// (e.g. element attrs attached to preceding #text/#comment nodes) — left byte-identical.
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"topBanner\""))));
childrenElements.add(new DOMElement("#comment", "Banner", new ArrayList<String>(Arrays.asList("id=\"claroBody\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"campusFooter\""))));
childrenElements.add(new DOMElement("DIV", "ClarolineLoginClaroline>Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "EndoftopBanner", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "$Id:body.tpl.php143322012-11-2310:08:10Zzefredz$", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "----------ClarolineBody---------", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "----------EndofClarolineBody----------", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "endofclaroPage", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("540"); // Random element assertion
element = new DOMElement("DD", "Followcourses(student)Createcourses(teacher)", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("checked=\"checked\"","id=\"student\"","name=\"platformRole\"","type=\"radio\"","value=\"student\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("for=\"student\""))));
childrenElements.add(new DOMElement("LABEL", "Followcourses(student)", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList("id=\"courseManager\"","name=\"platformRole\"","type=\"radio\"","value=\"courseManager\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("for=\"courseManager\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("LABEL", "Createcourses(teacher)", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("553"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 28 (batch 3).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState28_RandAssertions3(){
element = new DOMElement("DT", "Password*", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("for=\"password\""))));
childrenElements.add(new DOMElement("LABEL", "Password*", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("563"); // Random element assertion
element = new DOMElement("TABLE", "Createuseraccount", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"toolTitleBlock\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("TBODY", "Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("568"); // Random element assertion
// FIX: "DT[]" (empty predicate) is invalid XPath and would make By.xpath throw; the bare step is the intent.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/DL/DT")))) System.out.println("569"); // Random element assertion
element = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"/claroline-1.11.7/claroline/auth/inscription.php\"","enctype=\"multipart/form-data\"","id=\"userSettings\"","method=\"post\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("LEGEND", "Permissions", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DL", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("578"); // Random element assertion
element = new DOMElement("SCRIPT", "", new ArrayList<String>(Arrays.asList("src=\"/claroline-1.11.7/web/js/jquery.qtip.js?1329310394\"","type=\"text/javascript\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("582"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 28 (batch 4).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState28_RandAssertions4(){
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/claroline-1.11.7/claroline/auth/inscription.php\"")));
parentElement = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("onclick=\"document.location='http://localhost:8888/claroline-1.11.7/claroline/auth/inscription.php';return false\"","type=\"button\"","value=\"Cancel\""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("590"); // Random element assertion
element = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("autocomplete=\"off\"","id=\"password\"","name=\"password\"","type=\"password\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("597"); // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"claroPage\"")));
parentElement = new DOMElement("BODY", "", new ArrayList<String>(Arrays.asList("dir=\"ltr\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"topBanner\""))));
childrenElements.add(new DOMElement("#comment", "Banner", new ArrayList<String>(Arrays.asList("id=\"claroBody\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"campusFooter\""))));
childrenElements.add(new DOMElement("DIV", "ClarolineLoginClaroline>Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "EndoftopBanner", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "$Id:body.tpl.php143322012-11-2310:08:10Zzefredz$", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "----------ClarolineBody---------", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "----------EndofClarolineBody----------", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "endofclaroPage", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("622"); // Random element assertion
// FIX: "DIV[[id=..." (unbalanced "[[", missing '@') is invalid XPath; rewritten as an attribute test.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[@id=\"campusFooter\"]")))) System.out.println("623"); // Random element assertion
element = new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("onclick=\"document.location='http://localhost:8888/claroline-1.11.7/claroline/auth/inscription.php';return false\"","type=\"button\"","value=\"Cancel\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/claroline-1.11.7/claroline/auth/inscription.php\"")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("627"); // Random element assertion
}
/**
 * Randomly sampled DOM assertions for crawl state 28 (batch 5).
 * Prints the assertion id when an expected DOM region or XPath target is absent.
 */
public void checkState28_RandAssertions5(){
// FIX: "DIV[[id=..." (unbalanced "[[", missing '@') is invalid XPath; rewritten as an attribute test.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV/DIV[@id=\"userBanner\"]")))) System.out.println("631"); // Random element assertion
// FIX: "LABEL[[for=..." normalized to a single '[' with an '@' attribute test.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET[4]/DL/DD/LABEL[@for=\"courseManager\"]")))) System.out.println("632"); // Random element assertion
element = new DOMElement("UL", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"breadCrumbs\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"breadcrumbTrails\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"breadCrumbsNode firstBreadCrumbsNode\""))));
childrenElements.add(new DOMElement("LI", "Claroline>", new ArrayList<String>(Arrays.asList("class=\"breadCrumbsNode lastBreadCrumbsNode\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("LI", "Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("641"); // Random element assertion
element = new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("id=\"institution\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"campusBannerRight\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("646"); // Random element assertion
element = new DOMElement("DIV", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"breadcrumbTrails\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"breadCrumbs\""))));
childrenElements.add(new DOMElement("UL", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("653"); // Random element assertion
}
/** Placeholder generated for crawl state 29: no original assertions were produced. */
public void checkState29_OriginalAssertions(){
}
/** Placeholder generated for crawl state 29: no reused assertions were produced. */
public void checkState29_ReusedAssertions(){
}
/** Placeholder generated for crawl state 29: no generated assertions were produced. */
public void checkState29_GeneratedAssertions(){
}
/** Placeholder generated for crawl state 29: no learned assertions were produced. */
public void checkState29_LearnedAssertions(){
}
/** Placeholder generated for crawl state 29: no combined assertions were produced. */
public void checkState29_AllAssertions(){
}
// Random-element assertions for state 29, group 1. Each stanza builds an
// expected DOM region (element + parent + list of direct children) in the
// shared fields element/parentElement/childrenElements and prints the
// generated assertion id when the region is not found in the live page.
public void checkState29_RandAssertions1(){
element = new DOMElement("LINK", "", new ArrayList<String>(Arrays.asList("href=\"/claroline-1.11.7/claroline/../favicon.ico\"","rel=\"shortcut icon\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("675"); // Random element assertion
element = new DOMElement("H1", "Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"toolTitle mainTitle\"")));
parentElement = new DOMElement("TD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Createuseraccount", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("680"); // Random element assertion
// NOTE(review): this XPath is malformed ("DIV[[class=..." — doubled '[' and
// unbalanced brackets). Selenium raises an invalid-selector exception for it,
// which isElementPresent's catch (NoSuchElementException only) does not
// swallow — confirm intended predicate with the generator.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV/DIV[3]/DIV[[class=\"spacer\"]")))) System.out.println("681"); // Random element assertion
element = new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("id=\"applyChange\"","name=\"applyChange\"","type=\"submit\"","value=\"Ok\"")));
parentElement = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("685"); // Random element assertion
element = new DOMElement("LABEL", "Followcourses(student)Createcourses(teacher)", new ArrayList<String>(Arrays.asList("for=\"student\"")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Followcourses(student)", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("690"); // Random element assertion
}
// Random-element assertions for state 29, group 2 (same pattern as group 1).
public void checkState29_RandAssertions2(){
element = new DOMElement("LEGEND", "OtherinformationsEmailPhoneSkypeaccount", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Otherinformations", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("698"); // Random element assertion
element = new DOMElement("HR", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("702"); // Random element assertion
// NOTE(review): malformed XPath ("DT[[]" — doubled '[' and unbalanced
// brackets); will raise an invalid-selector exception rather than return
// false from isElementPresent — confirm intended predicate.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET[3]/DL/DT[[]")))) System.out.println("703"); // Random element assertion
element = new DOMElement("DT", "EmailPhoneSkypeaccount", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
// NOTE(review): the generator appears to attach an element's attribute list
// to the preceding "#text" entry (here for=\"skype\" sits on #text, not on
// the LABEL that follows) — looks like a generator artifact; verify.
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("for=\"skype\""))));
childrenElements.add(new DOMElement("LABEL", "Skypeaccount", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("710"); // Random element assertion
element = new DOMElement("DIV", "ClarolineLoginClaroline>Createuseraccount", new ArrayList<String>(Arrays.asList("id=\"breadcrumbLine\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"topBanner\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList("class=\"breadcrumbTrails\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"toolViewOption\""))));
childrenElements.add(new DOMElement("DIV", "Claroline>Createuseraccount", new ArrayList<String>(Arrays.asList("class=\"spacer\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("HR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("725"); // Random element assertion
}
// Random-element assertions for state 29, group 3 (same pattern as group 1):
// region checks against the registration form's fieldsets and password fields.
public void checkState29_RandAssertions3(){
element = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"/claroline-1.11.7/claroline/auth/inscription.php\"","enctype=\"multipart/form-data\"","id=\"userSettings\"","method=\"post\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("LEGEND", "Personalinformations", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DL", "Name*Firstname*Administrativecode", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("737"); // Random element assertion
element = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("autocomplete=\"off\"","id=\"password_conf\"","name=\"password_conf\"","type=\"password\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("744"); // Random element assertion
element = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("autocomplete=\"off\"","id=\"password\"","name=\"password\"","type=\"password\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("751"); // Random element assertion
element = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"username\"","name=\"username\"","type=\"text\"","value=\"\""))));
childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("758"); // Random element assertion
element = new DOMElement("LEGEND", "Permissions", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Permissions", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("763"); // Random element assertion
}
// Random-element assertions for state 29, group 4 (same pattern as group 1):
// platform-role radio buttons and the site-name banner link.
public void checkState29_RandAssertions4(){
element = new DOMElement("INPUT", "Followcourses(student)Createcourses(teacher)", new ArrayList<String>(Arrays.asList("checked=\"checked\"","id=\"student\"","name=\"platformRole\"","type=\"radio\"","value=\"student\"")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("770"); // Random element assertion
// NOTE(review): "LI[class=...]" is not an attribute predicate in XPath (it
// is missing '@'); as written it tests a child element named "class" and
// will almost certainly never match — confirm intended selector.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV/DIV[3]/DIV/UL/LI[class=\"breadCrumbsNode firstBreadCrumbsNode\"]")))) System.out.println("771"); // Random element assertion
element = new DOMElement("DL", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"/claroline-1.11.7/claroline/auth/inscription.php\"","enctype=\"multipart/form-data\"","id=\"userSettings\"","method=\"post\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DT", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DD", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("780"); // Random element assertion
element = new DOMElement("INPUT", "Followcourses(student)Createcourses(teacher)", new ArrayList<String>(Arrays.asList("id=\"courseManager\"","name=\"platformRole\"","type=\"radio\"","value=\"courseManager\"")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("784"); // Random element assertion
element = new DOMElement("A", "Claroline", new ArrayList<String>(Arrays.asList("href=\"/claroline-1.11.7/index.php\"","target=\"_top\"")));
parentElement = new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("id=\"siteName\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Claroline", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("789"); // Random element assertion
}
// Random-element assertions for state 29, group 5 (same pattern as group 1):
// password label, script include, and the personal-information definition list.
public void checkState29_RandAssertions5(){
element = new DOMElement("BR", "Followcourses(student)Createcourses(teacher)", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))) System.out.println("796"); // Random element assertion
element = new DOMElement("LABEL", "Password*", new ArrayList<String>(Arrays.asList("for=\"password\"")));
parentElement = new DOMElement("DT", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Password", new ArrayList<String>(Arrays.asList("class=\"required\""))));
childrenElements.add(new DOMElement("SPAN", "*", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("803"); // Random element assertion
// NOTE(review): "LABEL[for=...]" is missing the '@' of an XPath attribute
// predicate, so it matches a child element named "for", not the attribute —
// confirm intended selector.
if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/DIV[2]/FORM/FIELDSET[3]/DL/DT/LABEL[for=\"email\"]")))) System.out.println("804"); // Random element assertion
element = new DOMElement("SCRIPT", "", new ArrayList<String>(Arrays.asList("src=\"/claroline-1.11.7/web/js/claroline.ui.js?1358760626\"","type=\"text/javascript\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
if(!(isElementRegionFullPresent(parentElement , element, childrenElements))) System.out.println("808"); // Random element assertion
element = new DOMElement("DL", "Name*Firstname*Administrativecode", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("FIELDSET", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DT", "Name*", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DD", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DT", "Firstname*", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DD", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DT", "Administrativecode", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DD", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))) System.out.println("825"); // Random element assertion
}
/*
* Auxiliary methods
*/
/**
 * Returns true when at least one element matching the given locator exists on
 * the current page. findElement's NoSuchElementException is interpreted as
 * "absent" rather than a failure.
 */
private boolean isElementPresent(By locator) {
    try {
        driver.findElement(locator);
        return true;
    } catch (NoSuchElementException absent) {
        return false;
    }
}
/**
 * Checks whether the current page contains a DOM region matching the expected
 * one by tag names only: some element with {@code element}'s tag name whose
 * parent has {@code parent}'s tag name and whose direct children have exactly
 * the expected set of node names. Attributes and text content are ignored.
 * An IOException while parsing the page source is logged and treated as
 * "not present".
 */
private boolean isElementRegionTagPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
    // Collect the expected child node names once, up front.
    HashSet<String> expectedChildTags = new HashSet<String>();
    for (DOMElement child : children) {
        expectedChildTags.add(child.getTagName());
    }
    try {
        Document dom = DomUtils.asDocument(driver.getPageSource());
        NodeList candidates = dom.getElementsByTagName(element.getTagName());
        for (int i = 0; i < candidates.getLength(); i++) {
            org.w3c.dom.Element candidate = (org.w3c.dom.Element) candidates.item(i);
            // The candidate's parent must carry the expected tag name.
            if (!candidate.getParentNode().getNodeName().equals(parent.getTagName())) {
                continue;
            }
            // Direct children must match the expected node-name set exactly.
            HashSet<String> actualChildTags = new HashSet<String>();
            NodeList childNodes = candidate.getChildNodes();
            for (int j = 0; j < childNodes.getLength(); j++) {
                actualChildTags.add(childNodes.item(j).getNodeName());
            }
            if (actualChildTags.equals(expectedChildTags)) {
                return true;
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return false;
}
/**
 * Checks whether the current page contains a DOM region matching the expected
 * one by tag names AND attributes: the candidate element's attribute set, its
 * parent's tag name and attribute set, its children's node-name set, and the
 * set of per-child attribute sets must all match exactly. Text content is
 * ignored (that stricter check lives in isElementRegionFullPresent).
 * An IOException while parsing the page source is logged and treated as
 * "not present".
 */
private boolean isElementRegionTagAttPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
try {
String source = driver.getPageSource();
Document dom = DomUtils.asDocument(source);
// Candidates are all elements in the page with the expected tag name;
// each failed check below skips to the next candidate via 'continue'.
NodeList nodeList = dom.getElementsByTagName(element.getTagName());
org.w3c.dom.Element sourceElement = null;
for (int i = 0; i < nodeList.getLength(); i++){
// check node's attributes
sourceElement = (org.w3c.dom.Element) nodeList.item(i);
NamedNodeMap elementAttList = sourceElement.getAttributes();
// Attributes are normalized to name="value" strings so they can be
// compared as unordered sets against DOMElement.getAttributes().
HashSet<String> elemetAtts = new HashSet<String>();
for (int j = 0; j < elementAttList.getLength(); j++)
elemetAtts.add(elementAttList.item(j).getNodeName() + "=\"" + elementAttList.item(j).getNodeValue() + "\"");
if (!element.getAttributes().equals(elemetAtts))
continue;
// check parent node's tag and attributes
String parentTagName = sourceElement.getParentNode().getNodeName();
if (!parentTagName.equals(parent.getTagName()))
continue;
NamedNodeMap parentAttList = sourceElement.getParentNode().getAttributes();
HashSet<String> parentAtts = new HashSet<String>();
for (int j = 0; j < parentAttList.getLength(); j++)
parentAtts.add(parentAttList.item(j).getNodeName() + "=\"" + parentAttList.item(j).getNodeValue() + "\"");
if (!parent.getAttributes().equals(parentAtts))
continue;
// check children nodes' tags
HashSet<String> childrenTagNameFromDOM = new HashSet<String>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++)
childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName());
HashSet<String> childrenTagNameToTest = new HashSet<String>();
for (int k=0; k<children.size();k++)
childrenTagNameToTest.add(children.get(k).getTagName());
if (!childrenTagNameToTest.equals(childrenTagNameFromDOM))
continue;
// check children nodes' attributes
// Compared as a set of per-child attribute sets, so the pairing of
// attributes to specific children is NOT checked, only the multiplicity-
// free collection of attribute sets.
HashSet<HashSet<String>> childrenAttsFromDOM = new HashSet<HashSet<String>>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++){
NamedNodeMap childAttListFromDOM = sourceElement.getChildNodes().item(j).getAttributes();
HashSet<String> childAtts = new HashSet<String>();
// Text/comment nodes have no attribute map (null) and contribute an
// empty set.
if (childAttListFromDOM!=null)
for (int k = 0; k < childAttListFromDOM.getLength(); k++)
childAtts.add(childAttListFromDOM.item(k).getNodeName() + "=\"" + childAttListFromDOM.item(k).getNodeValue() + "\"");
childrenAttsFromDOM.add(childAtts);
}
HashSet<HashSet<String>> childrenAttsToTest = new HashSet<HashSet<String>>();
for (int k=0; k<children.size();k++)
childrenAttsToTest.add(children.get(k).getAttributes());
if (!childrenAttsToTest.equals(childrenAttsFromDOM))
continue;
return true;
}
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
/**
 * Strictest region check: in addition to everything isElementRegionTagAttPresent
 * verifies, the candidate's text content, the parent's text content, and the
 * set of children's text contents must also match. Text from the live DOM is
 * normalized before comparison: newlines, carriage returns, spaces, tabs, and
 * all non-ASCII characters are stripped (the expected DOMElement values are
 * assumed to be pre-normalized by the generator). An IOException while parsing
 * the page source is logged and treated as "not present".
 */
private boolean isElementRegionFullPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
try {
String source = driver.getPageSource();
Document dom = DomUtils.asDocument(source);
NodeList nodeList = dom.getElementsByTagName(element.getTagName());
org.w3c.dom.Element sourceElement = null;
for (int i = 0; i < nodeList.getLength(); i++){
// check node's text and attributes
sourceElement = (org.w3c.dom.Element) nodeList.item(i);
if (!element.getTextContent().equals(sourceElement.getTextContent().replace("\n", "").replace("\r", "").replace(" ", "").replace("\t", "").replaceAll("[^\\x00-\\x7F]", "")))
continue;
NamedNodeMap elementAttList = sourceElement.getAttributes();
HashSet<String> elemetAtts = new HashSet<String>();
for (int j = 0; j < elementAttList.getLength(); j++)
elemetAtts.add(elementAttList.item(j).getNodeName() + "=\"" + elementAttList.item(j).getNodeValue() + "\"");
if (!element.getAttributes().equals(elemetAtts))
continue;
// check parent node's text, tag and attributes
String parentTagName = sourceElement.getParentNode().getNodeName();
if (!parent.getTextContent().equals(sourceElement.getParentNode().getTextContent().replace("\n", "").replace("\r", "").replace(" ", "").replace("\t", "").replaceAll("[^\\x00-\\x7F]", "")))
continue;
if (!parentTagName.equals(parent.getTagName()))
continue;
NamedNodeMap parentAttList = sourceElement.getParentNode().getAttributes();
HashSet<String> parentAtts = new HashSet<String>();
for (int j = 0; j < parentAttList.getLength(); j++)
parentAtts.add(parentAttList.item(j).getNodeName() + "=\"" + parentAttList.item(j).getNodeValue() + "\"");
if (!parent.getAttributes().equals(parentAtts))
continue;
// check children nodes' text
// Compared as unordered sets, so child order and duplicate texts are not
// distinguished.
HashSet<String> childrenTextFromDOM = new HashSet<String>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++)
childrenTextFromDOM.add(sourceElement.getChildNodes().item(j).getTextContent().replace("\n", "").replace("\r", "").replace(" ", "").replace("\t", "").replaceAll("[^\\x00-\\x7F]", ""));
HashSet<String> childrenTextToTest = new HashSet<String>();
for (int k=0; k<children.size();k++)
childrenTextToTest.add(children.get(k).getTextContent());
if (!childrenTextToTest.equals(childrenTextFromDOM))
continue;
// check children nodes' tags
HashSet<String> childrenTagNameFromDOM = new HashSet<String>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++)
childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName());
HashSet<String> childrenTagNameToTest = new HashSet<String>();
for (int k=0; k<children.size();k++)
childrenTagNameToTest.add(children.get(k).getTagName());
if (!childrenTagNameToTest.equals(childrenTagNameFromDOM))
continue;
// check children nodes' attributes
HashSet<HashSet<String>> childrenAttsFromDOM = new HashSet<HashSet<String>>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++){
NamedNodeMap childAttListFromDOM = sourceElement.getChildNodes().item(j).getAttributes();
HashSet<String> childAtts = new HashSet<String>();
// Text/comment nodes have no attribute map (null) and contribute an
// empty set.
if (childAttListFromDOM!=null)
for (int k = 0; k < childAttListFromDOM.getLength(); k++)
childAtts.add(childAttListFromDOM.item(k).getNodeName() + "=\"" + childAttListFromDOM.item(k).getNodeValue() + "\"");
childrenAttsFromDOM.add(childAtts);
}
HashSet<HashSet<String>> childrenAttsToTest = new HashSet<HashSet<String>>();
for (int k=0; k<children.size();k++)
childrenAttsToTest.add(children.get(k).getAttributes());
if (!childrenAttsToTest.equals(childrenAttsFromDOM))
continue;
return true;
}
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
/**
 * Probes for an open JavaScript alert by attempting to switch to it;
 * NoAlertPresentException signals that no alert is currently showing.
 */
private boolean isAlertPresent() {
    try {
        driver.switchTo().alert();
        return true;
    } catch (NoAlertPresentException noAlert) {
        return false;
    }
}
/**
 * Reads the text of the currently open alert, then accepts or dismisses it
 * depending on the acceptNextAlert flag. The flag is always reset to true
 * afterwards (even on failure) so subsequent alerts default to being accepted.
 */
private String closeAlertAndGetItsText() {
    try {
        Alert openAlert = driver.switchTo().alert();
        String alertMessage = openAlert.getText();
        if (acceptNextAlert) {
            openAlert.accept();
        } else {
            openAlert.dismiss();
        }
        return alertMessage;
    } finally {
        acceptNextAlert = true;
    }
}
/**
 * Value object describing an expected DOM node: a tag name, its normalized
 * text content, and a set of attribute strings of the form name="value".
 * An attribute list whose first entry is the empty string is the generator's
 * encoding for "no attributes" and yields an empty set.
 */
public class DOMElement {
    private String tagName;
    private String textContent;
    private HashSet<String> attributes = new HashSet<String>();
    public DOMElement(String tagName, String textContent, ArrayList<String> attributes) {
        this.tagName = tagName;
        this.textContent = textContent;
        // Bug fix: the original guard was 'attributes.get(0) != ""', a reference
        // comparison that only behaved correctly for interned string literals and
        // threw IndexOutOfBoundsException on an empty list. Compare content
        // instead and tolerate an empty list.
        if (!attributes.isEmpty() && !attributes.get(0).isEmpty()) {
            this.attributes.addAll(attributes);
        }
    }
    public String getTagName() {
        return tagName;
    }
    public String getTextContent() {
        return textContent;
    }
    public HashSet<String> getAttributes() {
        return attributes;
    }
}
/**
 * Applies one DOM mutation for the given state by executing the JavaScript
 * snippet produced by Testilizer in the browser, and records the id of the
 * DOM element the mutation randomly targeted, keyed by mutation operator and
 * mutated state.
 */
private void mutateDOMTree(int stateID) {
    // execute JavaScript code to mutate DOM
    String script = com.crawljax.plugins.testilizer.Testilizer.mutateDOMTreeCode(stateID);
    if (script == null) {
        // No mutation snippet available for this state; nothing to do.
        return;
    }
    long selectedElementID = (long) ((JavascriptExecutor) driver).executeScript(script);
    int operatorCode = com.crawljax.plugins.testilizer.Testilizer.MutationOperatorCode;
    int mutatedState = com.crawljax.plugins.testilizer.Testilizer.StateToBeMutated;
    com.crawljax.plugins.testilizer.Testilizer.SelectedRandomElementInDOM[operatorCode][mutatedState]
            = (int) selectedElementID;
}
}
| apache-2.0 |
shaoxuan-wang/flink | flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/input/KeyedStateInputFormat.java | 9513 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.state.api.input;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.functions.util.FunctionUtils;
import org.apache.flink.api.common.io.DefaultInputSplitAssigner;
import org.apache.flink.api.common.io.RichInputFormat;
import org.apache.flink.api.common.io.statistics.BaseStatistics;
import org.apache.flink.api.common.state.StateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.CloseableRegistry;
import org.apache.flink.core.io.InputSplitAssigner;
import org.apache.flink.runtime.checkpoint.OperatorState;
import org.apache.flink.runtime.checkpoint.StateAssignmentOperation;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.state.AbstractKeyedStateBackend;
import org.apache.flink.runtime.state.DefaultKeyedStateStore;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.KeyedStateHandle;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.state.api.functions.KeyedStateReaderFunction;
import org.apache.flink.state.api.input.splits.KeyGroupRangeInputSplit;
import org.apache.flink.state.api.runtime.NeverFireProcessingTimeService;
import org.apache.flink.state.api.runtime.SavepointEnvironment;
import org.apache.flink.state.api.runtime.SavepointRuntimeContext;
import org.apache.flink.streaming.api.operators.KeyContext;
import org.apache.flink.streaming.api.operators.StreamOperatorStateContext;
import org.apache.flink.streaming.api.operators.StreamTaskStateInitializer;
import org.apache.flink.streaming.api.operators.StreamTaskStateInitializerImpl;
import org.apache.flink.util.CollectionUtil;
import org.apache.flink.util.Preconditions;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
* Input format for reading partitioned state.
*
* @param <K> The type of the key.
* @param <OUT> The type of the output of the {@link KeyedStateReaderFunction}.
*/
@Internal
public class KeyedStateInputFormat<K, OUT> extends RichInputFormat<OUT, KeyGroupRangeInputSplit> implements KeyContext {
private static final long serialVersionUID = 8230460226049597182L;
private final OperatorState operatorState;
private final StateBackend stateBackend;
private final TypeInformation<K> keyType;
private final KeyedStateReaderFunction<K, OUT> userFunction;
private transient TypeSerializer<K> keySerializer;
private transient CloseableRegistry registry;
private transient BufferingCollector<OUT> out;
private transient Iterator<K> keys;
private transient AbstractKeyedStateBackend<K> keyedStateBackend;
private transient Context ctx;
/**
* Creates an input format for reading partitioned state from an operator in a savepoint.
*
* @param operatorState The state to be queried.
* @param stateBackend The state backed used to snapshot the operator.
* @param keyType The type information describing the key type.
* @param userFunction The {@link KeyedStateReaderFunction} called for each key in the operator.
*/
public KeyedStateInputFormat(
OperatorState operatorState,
StateBackend stateBackend,
TypeInformation<K> keyType,
KeyedStateReaderFunction<K, OUT> userFunction) {
Preconditions.checkNotNull(operatorState, "The operator state cannot be null");
Preconditions.checkNotNull(stateBackend, "The state backend cannot be null");
Preconditions.checkNotNull(keyType, "The key type information cannot be null");
Preconditions.checkNotNull(userFunction, "The userfunction cannot be null");
this.operatorState = operatorState;
this.stateBackend = stateBackend;
this.keyType = keyType;
this.userFunction = userFunction;
}
@Override
public void configure(Configuration parameters) {
}
@Override
public InputSplitAssigner getInputSplitAssigner(KeyGroupRangeInputSplit[] inputSplits) {
return new DefaultInputSplitAssigner(inputSplits);
}
@Override
public BaseStatistics getStatistics(BaseStatistics cachedStatistics) {
return cachedStatistics;
}
@Override
public KeyGroupRangeInputSplit[] createInputSplits(int minNumSplits) throws IOException {
final int maxParallelism = operatorState.getMaxParallelism();
final List<KeyGroupRange> keyGroups = sortedKeyGroupRanges(minNumSplits, maxParallelism);
return CollectionUtil.mapWithIndex(
keyGroups,
(keyGroupRange, index) -> createKeyGroupRangeInputSplit(
operatorState,
maxParallelism,
keyGroupRange,
index)
).toArray(KeyGroupRangeInputSplit[]::new);
}
@Override
public void openInputFormat() {
out = new BufferingCollector<>();
keySerializer = keyType.createSerializer(getRuntimeContext().getExecutionConfig());
}
@Override
@SuppressWarnings("unchecked")
public void open(KeyGroupRangeInputSplit split) throws IOException {
registry = new CloseableRegistry();
final Environment environment = new SavepointEnvironment
.Builder(getRuntimeContext(), split.getNumKeyGroups())
.setSubtaskIndex(split.getSplitNumber())
.setPrioritizedOperatorSubtaskState(split.getPrioritizedOperatorSubtaskState())
.build();
final StreamOperatorStateContext context = getStreamOperatorStateContext(environment);
keyedStateBackend = (AbstractKeyedStateBackend<K>) context.keyedStateBackend();
final DefaultKeyedStateStore keyedStateStore = new DefaultKeyedStateStore(keyedStateBackend, getRuntimeContext().getExecutionConfig());
SavepointRuntimeContext ctx = new SavepointRuntimeContext(getRuntimeContext(), keyedStateStore);
FunctionUtils.setFunctionRuntimeContext(userFunction, ctx);
keys = getKeyIterator(ctx);
this.ctx = new Context();
}
@SuppressWarnings("unchecked")
private Iterator<K> getKeyIterator(SavepointRuntimeContext ctx) throws IOException {
final List<StateDescriptor<?, ?>> stateDescriptors;
try {
FunctionUtils.openFunction(userFunction, new Configuration());
ctx.disableStateRegistration();
stateDescriptors = ctx.getStateDescriptors();
} catch (Exception e) {
throw new IOException("Failed to open user defined function", e);
}
return new MultiStateKeyIterator<>(stateDescriptors, keyedStateBackend);
}
private StreamOperatorStateContext getStreamOperatorStateContext(Environment environment) throws IOException {
StreamTaskStateInitializer initializer = new StreamTaskStateInitializerImpl(
environment,
stateBackend,
new NeverFireProcessingTimeService());
try {
return initializer.streamOperatorStateContext(
operatorState.getOperatorID(),
operatorState.getOperatorID().toString(),
this,
keySerializer,
registry,
getRuntimeContext().getMetricGroup());
} catch (Exception e) {
throw new IOException("Failed to restore state backend", e);
}
}
@Override
public void close() throws IOException {
registry.close();
}
@Override
public boolean reachedEnd() {
return !out.hasNext() && !keys.hasNext();
}
@Override
public OUT nextRecord(OUT reuse) throws IOException {
if (out.hasNext()) {
return out.next();
}
final K key = keys.next();
setCurrentKey(key);
try {
userFunction.readKey(key, ctx, out);
} catch (Exception e) {
throw new IOException("User defined function KeyedStateReaderFunction#readKey threw an exception", e);
}
keys.remove();
return out.next();
}
@Override
@SuppressWarnings("unchecked")
public void setCurrentKey(Object key) {
keyedStateBackend.setCurrentKey((K) key);
}
@Override
public Object getCurrentKey() {
return keyedStateBackend.getCurrentKey();
}
/**
 * Builds one input split covering the given key-group range, pairing the
 * managed and raw keyed state handles that intersect that range.
 *
 * @param operatorState  operator state to take the handles from
 * @param maxParallelism maximum parallelism of the restored operator
 * @param keyGroupRange  range of key groups this split is responsible for
 * @param index          split number
 */
private static KeyGroupRangeInputSplit createKeyGroupRangeInputSplit(
    OperatorState operatorState,
    int maxParallelism,
    KeyGroupRange keyGroupRange,
    Integer index) {
    final List<KeyedStateHandle> managedKeyedState = StateAssignmentOperation.getManagedKeyedStateHandles(operatorState, keyGroupRange);
    final List<KeyedStateHandle> rawKeyedState = StateAssignmentOperation.getRawKeyedStateHandles(operatorState, keyGroupRange);
    return new KeyGroupRangeInputSplit(managedKeyedState, rawKeyedState, maxParallelism, index);
}
/**
 * Partitions the key-group space into at most {@code minNumSplits} ranges
 * (never more than {@code maxParallelism}) and returns them sorted by their
 * start key group.
 */
@Nonnull
private static List<KeyGroupRange> sortedKeyGroupRanges(int minNumSplits, int maxParallelism) {
    // Never request more partitions than the max parallelism allows.
    final int numPartitions = Math.min(minNumSplits, maxParallelism);
    final List<KeyGroupRange> ranges =
        StateAssignmentOperation.createKeyGroupPartitions(maxParallelism, numPartitions);
    ranges.sort(Comparator.comparing(KeyGroupRange::getStartKeyGroup));
    return ranges;
}
/** Empty context handed to the user function; exposes no additional services. */
private static class Context implements KeyedStateReaderFunction.Context {}
}
| apache-2.0 |
Netflix/blitz4j | src/main/java/com/netflix/logging/log4jAdapter/NFPatternParser.java | 3278 | /*
* Copyright 2012 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.logging.log4jAdapter;
import java.util.Arrays;
import java.util.List;
import org.apache.log4j.helpers.FormattingInfo;
import org.apache.log4j.helpers.PatternConverter;
import org.apache.log4j.helpers.PatternParser;
import org.apache.log4j.spi.LocationInfo;
import org.apache.log4j.spi.LoggingEvent;
import com.netflix.blitz4j.LoggingContext;
/**
* A custom parser class that provides a better performing implementation than the one in log4j for finding location information such
* as class, line number etc.
*
* @author Karthik Ranganathan
*/
public class NFPatternParser extends PatternParser {
private static List<Character> contextCharList = Arrays.asList(Character.valueOf('c'),
Character.valueOf('l'),
Character.valueOf('M'),
Character.valueOf('C'),
Character.valueOf('L'),
Character.valueOf('F'));
public NFPatternParser(String pattern) {
super(pattern);
}
protected void finalizeConverter(char c) {
if (contextCharList.contains(Character.valueOf(c))) {
PatternConverter pc = new NFPatternConverter(formattingInfo, c);
addConverter(pc);
currentLiteral.setLength(0);
} else {
super.finalizeConverter(c);
}
}
private static class NFPatternConverter extends PatternConverter {
private char type;
NFPatternConverter(FormattingInfo formattingInfo, char type) {
super(formattingInfo);
this.type = type;
}
@Override
public String convert(LoggingEvent event) {
LoggingContext.getInstance().shouldGenerateLocationInfo(event.getLogger());
LocationInfo locationInfo = LoggingContext.getInstance().getLocationInfo(event);
if (locationInfo == null) {
return "";
}
switch (type) {
case 'M':
return locationInfo.getMethodName();
case 'c':
return event.getLoggerName();
case 'C':
return locationInfo.getClassName();
case 'L':
return locationInfo.getLineNumber();
case 'l':
return (locationInfo.getFileName() + ":"
+ locationInfo.getClassName() + " "
+ locationInfo.getLineNumber() + " " + locationInfo
.getMethodName());
case 'F':
return locationInfo.getFileName();
}
return "";
}
}
} | apache-2.0 |
cmjjsnt/spms | spms-server/src/main/java/com/huihuan/jerrys/shiro/server/dao/AppDao.java | 494 | package com.huihuan.jerrys.shiro.server.dao;
import java.util.List;
import com.huihuan.jerrys.shiro.entity.App;
/**
*
* 应用管理
*
* @author jerrys
* @Date 2015-09-25
* @version 1.0.1
* @since
*/
public interface AppDao {

    /** Persists a new application and returns the stored instance. */
    App createApp(App app);

    /** Updates an existing application and returns the stored instance. */
    App updateApp(App app);

    /** Deletes the application with the given id. */
    void deleteApp(Long appId);

    /** Loads a single application by id. */
    App findOne(Long appId);

    /** Loads all applications. */
    List<App> findAll();

    /** Looks up only the application id for the given app key. */
    Long findAppIdByAppKey(String appKey);

    /** Loads the full application entity for the given app key. */
    App getAppByAppKey(String appkey);
}
| apache-2.0 |
bootique/bootique | bootique/src/main/java/io/bootique/run/DefaultRunner.java | 2613 | /*
* Licensed to ObjectStyle LLC under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ObjectStyle LLC licenses
* this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.bootique.run;
import io.bootique.cli.Cli;
import io.bootique.command.Command;
import io.bootique.command.CommandManager;
import io.bootique.command.CommandOutcome;
import io.bootique.command.ExecutionPlanBuilder;
import io.bootique.command.ManagedCommand;
/**
 * Default {@link Runner}: resolves the command selected on the command line
 * (or a fallback), wraps it in its execution plan, and runs it.
 */
public class DefaultRunner implements Runner {

    private final Cli cli;
    private final CommandManager commandManager;
    private final ExecutionPlanBuilder executionPlanBuilder;

    public DefaultRunner(Cli cli, CommandManager commandManager, ExecutionPlanBuilder executionPlanBuilder) {
        this.cli = cli;
        this.commandManager = commandManager;
        this.executionPlanBuilder = executionPlanBuilder;
    }

    @Override
    public CommandOutcome run() {
        Command bare = resolveBareCommand();
        return executionPlanBuilder.prepareForExecution(bare).run(cli);
    }

    /**
     * Picks the command named on the CLI if one was given, rejecting hidden
     * and default commands. With no explicit command, falls back to the
     * runtime default command, then the help command, then a no-op command
     * that simply succeeds.
     */
    private Command resolveBareCommand() {
        String commandName = cli.commandName();

        if (commandName == null) {
            return commandManager.getPublicDefaultCommand()
                    .orElse(commandManager.getPublicHelpCommand()
                            .orElse(c -> CommandOutcome.succeeded()));
        }

        ManagedCommand managed = commandManager.getAllCommands().get(commandName);
        if (managed == null || managed.isHidden() || managed.isDefault()) {
            throw new IllegalStateException("Not a valid command: " + commandName);
        }
        return managed.getCommand();
    }
}
| apache-2.0 |
williamyyj/mjo2 | src/main/java/hyweb/jo/fun/db/mq_fill.java | 1340 | package hyweb.jo.fun.db;
import hyweb.jo.IJOFunction;
import hyweb.jo.IJOType;
import hyweb.jo.JOConst;
import hyweb.jo.db.IDB;
import hyweb.jo.log.JOLogger;
import hyweb.jo.org.json.JSONArray;
import hyweb.jo.org.json.JSONObject;
import hyweb.jo.util.JOTools;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
*
* @author william
*/
public class mq_fill implements IJOFunction<Boolean, JSONObject> {
@Override
public Boolean exec(JSONObject mq) throws Exception {
PreparedStatement ps = JOTools.obj(PreparedStatement.class, mq, JOConst.param_ctrl);
IDB db = JOTools.obj(IDB.class, mq, JOConst.param_dp);
try {
proc_fill_all(db, ps, mq.optJSONArray(JOConst.param_fields));
return true;
} catch (Exception e) {
JOLogger.error("mq_fill fail : "+mq);
return false;
}
}
private void proc_fill_all(IDB db, PreparedStatement ps, JSONArray fields) throws SQLException {
for (int i = 0; i < fields.length(); i++) {
JSONObject fld = fields.optJSONObject(i);
Object dt = fld.opt("dt");
Object value = fld.opt("value");
IJOType<?> type = db.types().type(dt);
type.setPS(ps, i + 1, value);
}
}
}
| apache-2.0 |
ralscha/wampspring-demos | wampspring-demo-security/src/main/java/ch/rasc/wampspring/demo/security/config/SecurityWampConfigurer.java | 644 | package ch.rasc.wampspring.demo.security.config;
import org.springframework.context.annotation.Configuration;
import ch.rasc.wampspring.security.AbstractSecurityWampConfigurer;
import ch.rasc.wampspring.security.WampMessageSecurityMetadataSourceRegistry;
@Configuration
public class SecurityWampConfigurer extends AbstractSecurityWampConfigurer {

    /**
     * Declares inbound WAMP message security rules. Rules are matched in
     * declaration order, so the order below matters:
     * 1) clients may never publish directly to /queue/** or /topic/**,
     * 2) clients may never subscribe to per-user destinations
     *    (*-user* — presumably delivered by the server itself; verify),
     * 3) every other message requires an authenticated session.
     */
    @Override
    protected void configureInbound(WampMessageSecurityMetadataSourceRegistry messages) {
        messages.wampPublishDestMatchers("/queue/**", "/topic/**").denyAll()
                .wampSubscribeDestMatchers("/queue/**/*-user*", "/topic/**/*-user*")
                .denyAll().anyMessage().authenticated();
    }
}
| apache-2.0 |
small-dream/AndroidDemos | app/src/main/java/com/jx/androiddemos/viewPager/ThreePagerActivity.java | 1899 | package com.jx.androiddemos.viewPager;
import android.os.Bundle;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import com.jx.androiddemos.R;
import com.jx.androiddemos.viewPager.transformer.CubeOutTransformer;
import com.jx.androiddemos.viewPager.transformer.MyPageTransformer;
/**
 * Demo activity showing a ViewPager configured so that neighbouring pages are
 * partially visible alongside the current page.
 */
public class ThreePagerActivity extends AppCompatActivity {

    private ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_viewpager_three);
        mViewPager= (ViewPager) findViewById(R.id.mViewPager);
        // 16dp * 2 converted to pixels; applied as a NEGATIVE page margin so
        // adjacent pages overlap into the viewport and stay visible.
        int margin = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 16 * 2, getResources().getDisplayMetrics());
        mViewPager.setPageMargin(-margin);
        mViewPager.setAdapter(new MyPagerAdapter());
        // Keep both neighbours alive so the overlapping pages render.
        mViewPager.setOffscreenPageLimit(2);
        mViewPager.setPageTransformer(false, new MyPageTransformer(mViewPager));
        // Start in the middle of the 200-item range so the user can swipe
        // both directions immediately.
        mViewPager.setCurrentItem(100, true);
    }

    /** Adapter that inflates the same demo layout for each of its 200 pages. */
    private class MyPagerAdapter extends PagerAdapter {

        @Override
        public Object instantiateItem(ViewGroup container, int position) {
            View view=getLayoutInflater().inflate(R.layout.item_viewpager_three_layout,null);
            container.addView(view);
            return view;
        }

        @Override
        public void destroyItem(ViewGroup container, int position, Object object) {
            container.removeView((View)object);
        }

        @Override
        public int getCount() {
            return 200;
        }

        @Override
        public boolean isViewFromObject(View view, Object object) {
            // instantiateItem returns the page view itself, so identity holds.
            return (view == object);
        }
    }
}
| apache-2.0 |
mehmetakiftutuncu/MyKentKart | app/src/main/java/com/mehmetakiftutuncu/mykentkart/models/KentKartInformation.java | 4012 | /*
* Copyright (C) 2015 Mehmet Akif Tütüncü
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mehmetakiftutuncu.mykentkart.models;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.Gson;
import com.mehmetakiftutuncu.mykentkart.utilities.Log;
/**
* A model of information to show about a {@link com.mehmetakiftutuncu.mykentkart.models.KentKart},
* it implements {@link android.os.Parcelable} so it can be moved around in a {@link android.os.Bundle}
*
* @author mehmetakiftutuncu
*/
public class KentKartInformation implements Parcelable {
/** Balance amount of the KentKart */
public double balance;
/** Amount used when the KentKart is last used */
public double lastUseAmount;
/** Timestamp value of the time when the KentKart is last used */
public long lastUseTime;
/** Amount used when the KentKart is last loaded */
public double lastLoadAmount;
/** Timestamp value of the time when the KentKart is last loaded */
public long lastLoadTime;
/**
* Constructor initializing all values
*
* @param balance Value to set as {@link com.mehmetakiftutuncu.mykentkart.models.KentKartInformation#balance}
* @param lastUseAmount Value to set as {@link com.mehmetakiftutuncu.mykentkart.models.KentKartInformation#lastUseAmount}
* @param lastUseTime Value to set as {@link com.mehmetakiftutuncu.mykentkart.models.KentKartInformation#lastUseTime}
* @param lastLoadAmount Value to set as {@link com.mehmetakiftutuncu.mykentkart.models.KentKartInformation#lastLoadAmount}
* @param lastLoadTime Value to set as {@link com.mehmetakiftutuncu.mykentkart.models.KentKartInformation#lastLoadTime}
*/
public KentKartInformation(double balance, double lastUseAmount, long lastUseTime, double lastLoadAmount, long lastLoadTime) {
this.balance = balance;
this.lastUseAmount = lastUseAmount;
this.lastUseTime = lastUseTime;
this.lastLoadAmount = lastLoadAmount;
this.lastLoadTime = lastLoadTime;
}
/**
* Converts this KentKartInformation to a String representation as JSON using {@link com.google.gson.Gson}
*
* @return A String representation of this KentKartInformation as JSON
*/
public String toJson() {
try {
Gson gson = new Gson();
return gson.toJson(this, KentKartInformation.class);
} catch (Exception e) {
Log.error(this, "Failed to convert KentKartInformation to json! balance: " + balance + ", lastUseAmount: " + lastUseAmount + ", lastUseTime: " + lastUseTime + ", lastLoadAmount: " + lastLoadAmount + ", lastLoadTime: " + lastLoadTime, e);
return null;
}
}
/* Represents this KentKart as a String, internally calls toJson() */
@Override
public String toString() {
return toJson();
}
/* Needed for making KentKartInformation a {@link android.os.Parcelable} */
@Override
public int describeContents() {
return 0;
}
/* Needed for making KentKartInformation a {@link android.os.Parcelable}
*
* Writes each field of this KentKartInformation to a {@link android.os.Parcel}
*/
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeDouble(balance);
dest.writeDouble(lastUseAmount);
dest.writeLong(lastUseTime);
dest.writeDouble(lastLoadAmount);
dest.writeLong(lastLoadTime);
}
}
| apache-2.0 |
chenakam/scala-lang | src/main/java/hobby/wei/c/tool/throttle/Range.java | 1364 | /*
* Copyright (C) 2016-present, Wei Chou(weichou2010@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hobby.wei.c.tool.throttle;
import static scala.Predef.require;
/**
* @author Wei Chou(weichou2010@gmail.com)
* @version 1.0, 17/08/2016
*/
/**
 * Inclusive range of discrete values, ordered so that {@code from} never lies
 * after {@code to}.
 *
 * @author Wei Chou(weichou2010@gmail.com)
 * @version 1.0, 17/08/2016
 */
public final class Range<D extends Discrete<D>> extends AbsSorter.AbsR<D> {
    /** Inclusive lower and upper bounds. */
    public final D from, to;

    /**
     * @throws IllegalArgumentException (via scala Predef.require) when
     *         {@code from} lies after {@code to}
     */
    public Range(D from, D to) {
        require(from.delta(to) <= 0);
        this.from = from;
        this.to = to;
    }

    @Override
    final D from() {
        return from;
    }

    @Override
    final D to() {
        return to;
    }

    /** A single-element range is keyed by its lower bound alone. */
    @Override
    public final String unique() {
        if (single()) {
            return from.unique();
        }
        return from.unique() + "~" + to.unique();
    }

    @Override
    public final String toString() {
        return "[" + from + ", " + to + "]";
    }
}
| apache-2.0 |
AsuraTeam/monitor | server/src/main/java/com/asura/monitor/configure/entity/PushServerEntity.java | 680 | package com.asura.monitor.configure.entity;
/**
* <p></p>
*
* <PRE>
* <BR> 修改记录
* <BR>-----------------------------------------------
* <BR> 修改日期 修改人 修改内容
* </PRE>
*
* @author zhaozq
* @version 1.0
* @since 1.0
*/
/**
 * Mutable bean describing a push server: its IP address and the time of its
 * most recent update.
 *
 * @author zhaozq
 * @version 1.0
 * @since 1.0
 */
public class PushServerEntity {

    // IP address of the push server
    private String ip;

    // last update time (original comment: 最近更新时间; unit not specified here)
    private Long update;

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public Long getUpdate() {
        return update;
    }

    public void setUpdate(Long update) {
        this.update = update;
    }
}
| apache-2.0 |
jhelmer-unicon/uPortal | uportal-war/src/main/java/org/apereo/portal/url/xml/FailSafePortletUrlBuilder.java | 5454 | /**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.url.xml;
import java.util.List;
import java.util.Map;
import javax.portlet.PortletMode;
import javax.portlet.WindowState;
import org.apereo.portal.portlet.om.IPortletWindowId;
import org.apereo.portal.url.IPortalUrlBuilder;
import org.apereo.portal.url.IPortletUrlBuilder;
import org.apereo.portal.url.ParameterMap;
/**
 * Fail-safe, no-op {@link IPortletUrlBuilder}: every mutator silently discards
 * its input and every getter returns an empty or neutral value, so URL
 * generation code can run against it without failing. Only the window id and
 * the owning portal URL builder passed at construction are retained.
 *
 * @author Eric Dalquist
 * @version $Revision$
 */
class FailSafePortletUrlBuilder implements IPortletUrlBuilder {
    // Always-empty maps handed back by the getters; never populated because
    // all mutators are no-ops.
    private final Map<String, String[]> parameters = new ParameterMap();
    private final Map<String, String[]> publicParameters = new ParameterMap();
    private final IPortletWindowId portletWindowId;
    private final IPortalUrlBuilder portalUrlBuilder;

    public FailSafePortletUrlBuilder(
            IPortletWindowId portletWindowId, IPortalUrlBuilder portalUrlBuilder) {
        this.portletWindowId = portletWindowId;
        this.portalUrlBuilder = portalUrlBuilder;
    }

    /** No-op: parameter changes are ignored by this fail-safe builder. */
    @Override
    public void setParameter(String name, String... values) {
        //NOOP
    }

    /** No-op: parameter changes are ignored by this fail-safe builder. */
    @Override
    public void setParameter(String name, List<String> values) {
        //NOOP
    }

    /** No-op: parameter changes are ignored by this fail-safe builder. */
    @Override
    public void addParameter(String name, String... values) {
        //NOOP
    }

    /** No-op: parameter changes are ignored by this fail-safe builder. */
    @Override
    public void setParameters(Map<String, List<String>> parameters) {
        //NOOP
    }

    /** No-op: the flag is ignored. */
    @Override
    public void setCopyCurrentRenderParameters(boolean copyCurrentRenderParameters) {
        //NOOP
    }

    /** Always false: render parameters are never copied. */
    @Override
    public boolean getCopyCurrentRenderParameters() {
        return false;
    }

    /** Always an empty map; see the field comment. */
    @Override
    public Map<String, String[]> getParameters() {
        return parameters;
    }

    /** The window id supplied at construction. */
    @Override
    public IPortletWindowId getPortletWindowId() {
        return this.portletWindowId;
    }

    /** The portal URL builder supplied at construction. */
    @Override
    public IPortalUrlBuilder getPortalUrlBuilder() {
        return this.portalUrlBuilder;
    }

    /** No-op: window state changes are ignored. */
    @Override
    public void setWindowState(WindowState windowState) {
        //NOOP
    }

    /** Always null: no window state is tracked. */
    @Override
    public WindowState getWindowState() {
        //NOOP
        return null;
    }

    /** No-op: portlet mode changes are ignored. */
    @Override
    public void setPortletMode(PortletMode portletMode) {
        //NOOP
    }

    /** Always null: no portlet mode is tracked. */
    @Override
    public PortletMode getPortletMode() {
        //NOOP
        return null;
    }

    /** No-op: resource id changes are ignored. */
    @Override
    public void setResourceId(String resourceId) {
        //NOOP
    }

    /** Always null: no resource id is tracked. */
    @Override
    public String getResourceId() {
        //NOOP
        return null;
    }

    /** No-op: cacheability changes are ignored. */
    @Override
    public void setCacheability(String cacheability) {
        //NOOP
    }

    /** Always null: no cacheability is tracked. */
    @Override
    public String getCacheability() {
        //NOOP
        return null;
    }

    /** Always an empty map; see the field comment. */
    @Override
    public Map<String, String[]> getPublicRenderParameters() {
        //NOOP
        return publicParameters;
    }
}
| apache-2.0 |
mehmetakiftutuncu/EshotroidPlusAndroid | app/src/main/java/com/mehmetakiftutuncu/eshotroid/activities/WithToolbar.java | 116 | package com.mehmetakiftutuncu.eshotroid.activities;
/**
 * Implemented by activities that host a toolbar and must set it up after
 * their layout has been inflated.
 */
public interface WithToolbar {
    /** Binds and configures the activity's toolbar. */
    void initializeToolbar();
}
| apache-2.0 |
aracrown/ara-commons | ara-commons/src/test/java/org/aracrown/commons/rest/exception/ResourceDeleteExceptionTest.java | 1884 | /*
* Copyright 2013 ARA Crown
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.aracrown.commons.rest.exception;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for ResourceDeleteException's message parsing. The exception message
 * encodes errors as entries separated by "!~!", with key and value inside an
 * entry separated by "!:!". A message with neither token becomes a single
 * error whose key and message are both the raw string.
 */
public class ResourceDeleteExceptionTest {

    @Test
    public void test_1() {
        // A plain message maps to both the exception message and a single
        // error whose key and message equal that string.
        Assert.assertEquals("test", new ResourceDeleteException("test", null).getMessage());
        Assert.assertEquals("test", new ResourceDeleteException("test").getMessage());
        Assert.assertEquals("test", new ResourceDeleteException("test").getFirstError().getKey());
        Assert.assertEquals("test", new ResourceDeleteException("test").getFirstError().getMessage());
    }

    @Test
    public void test_2() {
        // A null message yields no errors at all.
        Assert.assertNull(new ResourceDeleteException(null).getFirstError());
    }

    @Test
    public void testParseErrorObject() {
        // "!:!" splits a single entry into key and message.
        ErrorMessage m = new ResourceDeleteException("key!:!value", null).getErrors().get(0);
        Assert.assertEquals("value", m.getMessage());
        Assert.assertEquals("key", m.getKey());
    }

    @Test
    public void testParseTwoErrorObjects() {
        // "!~!" separates multiple key!:!value entries; order is preserved.
        List<ErrorMessage> items = new ResourceDeleteException("key!:!value!~!key1!:!value1", null).getErrors();
        ErrorMessage m = items.get(0);
        Assert.assertEquals("value", m.getMessage());
        Assert.assertEquals("key", m.getKey());
        m = items.get(1);
        Assert.assertEquals("value1", m.getMessage());
        Assert.assertEquals("key1", m.getKey());
    }
}
| apache-2.0 |
usc-isi-i2/fril-service | src/main/java/cdc/gui/wizards/specific/actions/LinkageConfigureConditionsAction.java | 16986 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is the FRIL Framework.
*
* The Initial Developers of the Original Code are
* The Department of Math and Computer Science, Emory University and
* The Centers for Disease Control and Prevention.
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package cdc.gui.wizards.specific.actions;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import cdc.components.AbstractDataSource;
import cdc.components.AbstractJoin;
import cdc.components.AbstractJoinCondition;
import cdc.datamodel.DataColumnDefinition;
import cdc.gui.GUIVisibleComponent;
import cdc.gui.OptionDialog;
import cdc.gui.components.table.TablePanel;
import cdc.gui.components.uicomponents.AvaialbleColumnsPanel;
import cdc.gui.external.JXErrorDialog;
import cdc.gui.wizards.AbstractWizard;
import cdc.gui.wizards.WizardAction;
import cdc.impl.join.strata.DataStratum;
import cdc.impl.join.strata.StrataJoinWrapper;
import cdc.utils.GuiUtils;
import cdc.utils.RJException;
public class LinkageConfigureConditionsAction extends WizardAction {
/**
 * Switches the stratum tab identified by {@code id} from the condition-combo
 * view to the "paste condition from another stratum" view.
 */
public class PasteActionListener implements ActionListener {
    // index of the stratum tab this listener belongs to
    private int id;

    public PasteActionListener(int id) {
        this.id = id;
    }

    public void actionPerformed(ActionEvent arg0) {
        // Reset the paste source selection to the placeholder entry (index 0)
        // before showing the paste panel in place of the combo panel.
        pasteCombo[id].setSelectedIndex(0);
        comboPanel[id].setVisible(false);
        pastePanel[id].setVisible(true);
    }
}
/**
 * Cancels the paste operation on the stratum tab identified by {@code id},
 * restoring the condition-combo view.
 */
public class CancelPasteActionListener implements ActionListener {
    // index of the stratum tab this listener belongs to
    private int id;

    public CancelPasteActionListener(int id) {
        this.id = id;
    }

    public void actionPerformed(ActionEvent arg0) {
        comboPanel[id].setVisible(true);
        pastePanel[id].setVisible(false);
    }
}
/**
 * Handles a selection in the paste-source combo of stratum tab {@code id}:
 * clones the join condition configured on the chosen source stratum into this
 * stratum's tab, then switches back to the combo view.
 */
public class ComboSelectListener implements ActionListener {
    // index of the stratum tab this listener belongs to
    private int id;

    public ComboSelectListener(int id) {
        this.id = id;
    }

    public void actionPerformed(ActionEvent arg0) {
        // Index 0 is the placeholder entry; only real selections are handled.
        if (pasteCombo[id].getSelectedIndex() != 0) {
            try {
                // Find the stratum whose name matches the selected entry.
                // NOTE(review): if no name matches, n stays -1 and the array
                // access below throws, which is only caught by the generic
                // handler — consider guarding explicitly.
                int n = -1;
                String selectedStr = (String) pasteCombo[id].getSelectedItem();
                for (int i = 0; i < strataChooser.getStrata().length; i++) {
                    if (selectedStr.equals(strataChooser.getStrata()[i].getName())) {
                        n = i;
                    }
                }
                // Generate the source stratum's condition and store a clone
                // for this stratum.
                AbstractJoinCondition that = (AbstractJoinCondition) ((GUIVisibleComponent)activeCombo[n].getSelectedItem()).generateSystemComponent();
                if (joinCondition == null) {
                    joinCondition = new AbstractJoinCondition[pasteCombo.length];
                }
                joinCondition[id] = (AbstractJoinCondition) that.clone();
                // Locate the GUI component type matching the pasted condition
                // so the combo can be preselected accordingly.
                GUIVisibleComponent[] joinTypes = GuiUtils.getJoinConditions();
                int selected = -1;
                for (int i = 0; i < joinTypes.length; i++) {
                    if (joinCondition[id].getClass().equals(joinTypes[i].getProducedComponentClass())) {
                        selected = i;
                    }
                }
                // Rebuild this stratum's tab to reflect the pasted condition.
                tabs.setComponentAt(id, getStratumJoinConditionPanel(parent, id));
                activeCombo[id].setSelectedIndex(selected);
            } catch (Exception e) {
                // NOTE(review): printStackTrace duplicates the error dialog;
                // prefer routing solely through the dialog/logging.
                e.printStackTrace();
                JXErrorDialog.showDialog(parent, "Error when pasting condition", e);
            }
            comboPanel[id].setVisible(true);
            pastePanel[id].setVisible(false);
        }
    }
}
// Wizard currently hosting this action; set in beginStep().
private AbstractWizard parent;
// Source-configuration actions; used when the data sources themselves were
// not supplied directly (see the two constructors).
private DSConfigureTypeAction leftSourceAction;
private DSConfigureTypeAction rightSourceAction;
// Provides the stratification (may report null strata = single condition).
private LinkageConfigureStrataAction strataChooser;
// Directly supplied data sources; null when the action-based constructor is used.
private AbstractDataSource sourceA;
private AbstractDataSource sourceB;
// One join condition per stratum (or a single entry when unstratified).
private AbstractJoinCondition joinCondition[];
// Per-stratum UI widgets, all indexed by stratum position.
private JComboBox[] activeCombo;
// Table of selected output columns shared across strata.
private TablePanel tablePanel;
// Tab container used only in the stratified case.
private JTabbedPane tabs;
private JButton[] paste;
private JButton[] cancel;
private JPanel[] comboPanel;
private JPanel[] pastePanel;
private JComboBox[] pasteCombo;
// Previously configured join to restore conditions from, if any.
private AbstractJoin restoredJoin = null;
/**
 * Creates the action with the data sources provided lazily via the two
 * source-configuration wizard actions (sourceA/sourceB stay null).
 */
public LinkageConfigureConditionsAction(DSConfigureTypeAction left, DSConfigureTypeAction right, LinkageConfigureStrataAction joinStratificationConfiguration) {
    leftSourceAction = left;
    rightSourceAction = right;
    this.strataChooser = joinStratificationConfiguration;
    createTable();
    fillInDefaultValues();
}
/**
 * Creates the action with both data sources supplied directly.
 */
public LinkageConfigureConditionsAction(AbstractDataSource sourceA, AbstractDataSource sourceB, LinkageConfigureStrataAction joinStratificationConfiguration) {
    this.sourceA = sourceA;
    this.sourceB = sourceB;
    this.strataChooser = joinStratificationConfiguration;
    createTable();
    fillInDefaultValues();
}
/**
 * Pre-populates the output-columns table with every column of both sources.
 * Falls back to the source-configuration actions when the data sources were
 * not supplied directly (same pattern used elsewhere in this class); the
 * previous version dereferenced sourceA/sourceB unconditionally and threw a
 * NullPointerException when the action-based constructor was used.
 */
private void fillInDefaultValues() {
    AbstractDataSource left = sourceA != null ? sourceA : leftSourceAction.getDataSource();
    AbstractDataSource right = sourceB != null ? sourceB : rightSourceAction.getDataSource();
    addCols(left.getDataModel().getOutputFormat());
    addCols(right.getDataModel().getOutputFormat());
}
/** Appends each column as a one-column row of the output-columns table. */
private void addCols(DataColumnDefinition[] outputFormat) {
    for (int i = 0; i < outputFormat.length; i++) {
        tablePanel.addRow(new DataColumnDefinition[] {outputFormat[i]});
    }
}
/**
 * Builds the "Selected columns" table together with its Add button, which
 * opens a column-picker dialog and appends every column the user selects.
 */
private void createTable() {
    tablePanel = new TablePanel(new String[] {"Selected columns"}, false);
    tablePanel.addAddButtonListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            OptionDialog dialog = new OptionDialog(parent, "Choose column");
            AvaialbleColumnsPanel panel = new AvaialbleColumnsPanel(getAllAvailableColumns());
            dialog.setMainPanel(panel);
            dialog.setLocationRelativeTo((JButton)e.getSource());
            if (dialog.getResult() == OptionDialog.RESULT_OK) {
                DataColumnDefinition[] cols = panel.getSelectedColumns();
                for (int i = 0; i < cols.length; i++) {
                    tablePanel.addRow(new DataColumnDefinition[] {cols[i]});
                }
            }
        }
    });
    tablePanel.setSize(500, 130);
    tablePanel.setPreferredSize(new Dimension(500, 130));
}
/**
 * Builds the wizard page: either a single join-condition panel (no strata)
 * or one tab per stratum, plus the output-columns table below. Restores a
 * previously configured join condition when available and preselects the
 * matching condition type in the combo boxes.
 */
public JPanel beginStep(AbstractWizard wizard) {
    wizard.getMainPanel().setLayout(new BoxLayout(wizard.getMainPanel(), BoxLayout.LINE_AXIS));
    JPanel panelCond;
    parent = wizard;
    restoreJoinIfPossible();
    if (strataChooser.getStrata() == null) {
        // Unstratified: a single set of widgets at index 0.
        activeCombo = new JComboBox[1];
        comboPanel = new JPanel[1];
        pastePanel = new JPanel[1];
        paste = new JButton[1];
        cancel = new JButton[1];
        pasteCombo = new JComboBox[1];
        panelCond = getStratumJoinConditionPanel(wizard, 0);
    } else {
        // Stratified: one tab (and widget set) per stratum.
        panelCond = new JPanel(new BorderLayout());
        tabs = new JTabbedPane();
        panelCond.add(tabs, BorderLayout.CENTER);
        DataStratum strata[] = strataChooser.getStrata();
        activeCombo = new JComboBox[strata.length];
        comboPanel = new JPanel[strata.length];
        pastePanel = new JPanel[strata.length];
        paste = new JButton[strata.length];
        cancel = new JButton[strata.length];
        pasteCombo = new JComboBox[strata.length];
        for (int i = 0; i < strata.length; i++) {
            tabs.addTab(strata[i].getName(), getStratumJoinConditionPanel(wizard, i));
        }
    }
    GUIVisibleComponent[] joinTypes = GuiUtils.getJoinConditions();
    int selected = 0;
    // NOTE(review): this loop conflates two differently-sized arrays —
    // joinCondition is indexed per stratum while joinTypes is indexed per
    // available condition type. The bounds guard below fixes the
    // ArrayIndexOutOfBoundsException the old code threw whenever there were
    // fewer strata than condition types; the matching logic itself is kept.
    for (int i = 0; i < joinTypes.length; i++) {
        if (joinCondition == null || i >= joinCondition.length || joinCondition[i] == null) {
            continue;
        }
        if (joinCondition[i].getClass().equals(joinTypes[i].getProducedComponentClass())) {
            selected = i;
        }
    }
    for (int i = 0; i < activeCombo.length; i++) {
        activeCombo[i].setSelectedIndex(selected);
    }
    panelCond.setBorder(BorderFactory.createTitledBorder("Join condition"));
    JPanel panelOut = new JPanel();
    panelOut.setPreferredSize(new Dimension(600, 160));
    panelOut.setMinimumSize(new Dimension(600, 160));
    panelOut.setMaximumSize(new Dimension(6000, 160));
    panelOut.setBorder(BorderFactory.createTitledBorder("Output columns"));
    panelOut.add(tablePanel);
    JPanel buffer = new JPanel();
    buffer.setLayout(new BoxLayout(buffer, BoxLayout.PAGE_AXIS));
    buffer.add(panelCond);
    buffer.add(panelOut);
    return buffer;
}
/**
 * Ensures joinCondition matches the current stratification:
 * - if conditions already exist, resizes/remaps them to the current strata
 *   (reusing a single condition for all strata when there was only one);
 * - otherwise, if a previously configured join is available, extracts its
 *   per-stratum conditions (matching by stratum name) or its single
 *   condition.
 * Leaves joinCondition null when nothing can be restored.
 */
private void restoreJoinIfPossible() {
    if (joinCondition != null) {
        // Conditions already present: make sure their count matches the
        // (possibly changed) stratification.
        if (strataChooser.getStrata() == null || joinCondition.length == strataChooser.getStrata().length) {
            return;
        } else {
            AbstractJoinCondition[] oldJoinConditions = joinCondition;
            joinCondition = new AbstractJoinCondition[strataChooser.getStrata().length];
            for (int i = 0; i < joinCondition.length; i++) {
                if (i < oldJoinConditions.length) {
                    // Keep the previously configured condition for this slot.
                    joinCondition[i] = oldJoinConditions[i];
                } else if (oldJoinConditions.length == 1) {
                    // A single old condition is shared by every new stratum.
                    // NOTE(review): this shares one instance (no clone) —
                    // confirm the condition objects tolerate shared use.
                    joinCondition[i] = oldJoinConditions[0];
                } else {
                    joinCondition[i] = null;
                }
            }
        }
        return;
    }
    if (restoredJoin == null) {
        return;
    }
    if (restoredJoin instanceof StrataJoinWrapper && strataChooser != null && strataChooser.getStrata() != null) {
        // Stratified join: match restored conditions to current strata by name.
        AbstractJoinCondition[] conds = ((StrataJoinWrapper)restoredJoin).getJoinConditions();
        String[] strataNames = ((StrataJoinWrapper)restoredJoin).getStrataNames();
        joinCondition = new AbstractJoinCondition[strataChooser.getStrata().length];
        for (int i = 0; i < strataNames.length; i++) {
            DataStratum[] strata = strataChooser.getStrata();
            for (int j = 0; j < strata.length; j++) {
                if (strata[j].getName().equals(strataNames[i])) {
                    joinCondition[j] = conds[i];
                }
            }
        }
    } else {
        // Plain join: only usable when there is at most one stratum.
        if (strataChooser.getStrata() == null || strataChooser.getStrata().length == 1) {
            joinCondition = new AbstractJoinCondition[1];
            joinCondition[0] = restoredJoin.getJoinCondition();
        }
    }
}
/**
 * Builds the condition editor panel for one stratum: a combo box selecting the
 * join condition type, an optional "Copy from" control (stratified mode only),
 * and the type-specific configuration area underneath.
 *
 * @param wizard the hosting wizard
 * @param id index of the stratum this panel edits (0 when unstratified)
 * @return the assembled panel
 */
private JPanel getStratumJoinConditionPanel(AbstractWizard wizard, int id) {
    JPanel panelCond = new JPanel();
    panelCond.setLayout(new BorderLayout());
    JPanel insideComboPanel = new JPanel();
    activeCombo[id] = new JComboBox();
    GUIVisibleComponent[] joinTypes = GuiUtils.getJoinConditions();
    for (int i = 0; i < joinTypes.length; i++) {
        activeCombo[id].addItem(joinTypes[i]);
    }
    AbstractJoinCondition conditionToUse = joinCondition == null ? null : joinCondition[id];
    // The combo listener swaps the type-specific editor into insideComboPanel,
    // feeding it the two data sources and any previously configured condition.
    ComboListener comboListener = new ComboListener(wizard, insideComboPanel, new Object[] {
        this.sourceA != null ? this.sourceA : leftSourceAction.getDataSource(),
        this.sourceB != null ? this.sourceB : rightSourceAction.getDataSource(),
        wizard, conditionToUse
    });
    insideComboPanel.addComponentListener(comboListener);
    activeCombo[id].addActionListener(comboListener);
    // NOTE(review): the layout was already set to BorderLayout above; this
    // second call is redundant.
    panelCond.setLayout(new BorderLayout());
    JLabel activeLabel = new JLabel("Join condition type:");
    comboPanel[id] = new JPanel(new FlowLayout(FlowLayout.LEFT));
    comboPanel[id].add(activeLabel);
    comboPanel[id].add(activeCombo[id]);
    pastePanel[id] = new JPanel(new FlowLayout(FlowLayout.LEFT));
    pastePanel[id].setVisible(false);
    cancel[id] = new JButton("Cancel");
    cancel[id].setPreferredSize(new Dimension(120, 20));
    cancel[id].addActionListener(new CancelPasteActionListener(id));
    paste[id] = new JButton("Copy from");
    paste[id].setPreferredSize(new Dimension(120, 20));
    paste[id].addActionListener(new PasteActionListener(id));
    if (strataChooser.getStrata() != null) {
        // Build the "copy condition from other stratum" chooser: first entry
        // is a prompt, followed by every stratum except this one.
        String[] strata = new String[strataChooser.getStrata().length];
        strata[0] = "Select...";
        int skip = 0;
        for (int i = 0; i < strata.length; i++) {
            if (i != id) {
                strata[i + 1 + skip] = strataChooser.getStrata()[i].getName();
            } else {
                skip = -1;  // after this stratum the entries shift back by one slot
            }
        }
        pasteCombo[id] = new JComboBox(strata);
        pasteCombo[id].setPreferredSize(new Dimension(200, pasteCombo[id].getPreferredSize().height));
        pasteCombo[id].addActionListener(new ComboSelectListener(id));
        comboPanel[id].add(paste[id]);
        pastePanel[id].add(new JLabel("Copy condition from: "));
        pastePanel[id].add(pasteCombo[id]);
        pastePanel[id].add(cancel[id]);
    }
    activeCombo[id].setPreferredSize(new Dimension(200, activeCombo[id].getPreferredSize().height));
    JPanel pan = new JPanel(new FlowLayout(FlowLayout.LEFT));
    pan.add(comboPanel[id]);
    if (strataChooser.getStrata() != null) {
        pan.add(pastePanel[id]);
    }
    panelCond.add(pan, BorderLayout.NORTH);
    panelCond.add(insideComboPanel, BorderLayout.CENTER);
    return panelCond;
}
/**
 * Validates the page when the user leaves this wizard step: every stratum's
 * condition editor must validate, the join conditions must be generated
 * without error, and at least one output column must be selected.
 *
 * @param wizard the hosting wizard (also used as dialog parent)
 * @return true when the step is complete and the wizard may advance
 */
public boolean endStep(AbstractWizard wizard) {
    for (int i = 0; i < activeCombo.length; i++) {
        if (!((GUIVisibleComponent)this.activeCombo[i].getSelectedItem()).validate(wizard)) {
            // Bring the offending stratum's tab to the front before refusing.
            if (tabs != null) {
                tabs.setSelectedIndex(i);
            }
            return false;
        }
    }
    if (strataChooser.getStrata() == null) {
        joinCondition = new AbstractJoinCondition[1];
    } else {
        joinCondition = new AbstractJoinCondition[strataChooser.getStrata().length];
    }
    try {
        for (int j = 0; j < joinCondition.length; j++) {
            joinCondition[j] = (AbstractJoinCondition) ((GUIVisibleComponent)activeCombo[j].getSelectedItem()).generateSystemComponent();
        }
    } catch (RJException e) {
        JXErrorDialog.showDialog(wizard, "Error creating join condition", e);
        // Fix: previously the method fell through and returned true here,
        // letting the wizard advance with a partially built joinCondition.
        return false;
    } catch (IOException e) {
        JXErrorDialog.showDialog(wizard, "Error creating join condition", e);
        return false;
    }
    if (tablePanel.getRows().length == 0) {
        JOptionPane.showMessageDialog(wizard, "At least one output column is required.");
        return false;
    }
    return true;
}
/**
 * Returns every column available for output: the left source's columns
 * followed by the right source's columns.
 */
private DataColumnDefinition[] getAllAvailableColumns() {
    DataColumnDefinition[] left = getLeftColumns();
    DataColumnDefinition[] right = getRightColumns();
    DataColumnDefinition[] merged = new DataColumnDefinition[left.length + right.length];
    System.arraycopy(left, 0, merged, 0, left.length);
    System.arraycopy(right, 0, merged, left.length, right.length);
    return merged;
}
/**
 * Columns of the left-hand source; prefers the explicitly configured source,
 * otherwise falls back to the one held by the left source action.
 */
private DataColumnDefinition[] getLeftColumns() {
    return (sourceA != null
            ? sourceA.getDataModel()
            : leftSourceAction.getDataSource().getDataModel()).getSortedOutputColumns();
}
/**
 * Columns of the right-hand source; prefers the explicitly configured source,
 * otherwise falls back to the one held by the right source action.
 */
private DataColumnDefinition[] getRightColumns() {
    return (sourceB != null
            ? sourceB.getDataModel()
            : rightSourceAction.getDataSource().getDataModel()).getSortedOutputColumns();
}
/**
 * Returns the per-stratum join conditions built in endStep(...).
 * NOTE(review): the method name keeps its historical misspelling
 * ("Conditons") because it is part of the public interface used by callers.
 */
public AbstractJoinCondition[] getJoinConditons() {
    return joinCondition;
}
/**
 * Returns the output columns currently selected in the table panel, in row
 * order. Each table row stores the column definition in its first cell.
 */
public DataColumnDefinition[] getOutColumns() {
    Object[] rows = tablePanel.getRows();
    DataColumnDefinition[] selected = new DataColumnDefinition[rows.length];
    for (int row = 0; row < rows.length; row++) {
        Object[] cells = (Object[]) rows[row];
        selected[row] = (DataColumnDefinition) cells[0];
    }
    return selected;
}
/**
 * Remembers the given join so restoreJoinIfPossible() can rebuild the
 * per-stratum conditions when the page is shown, and populates the output
 * columns table with the join's output columns.
 * (A stale commented-out draft of a different implementation was removed;
 * see version control history if it is needed.)
 *
 * @param join the join whose configuration should seed this wizard page
 */
public void setJoin(AbstractJoin join) {
    this.restoredJoin = join;
    DataColumnDefinition[] out = join.getOutColumns();
    tablePanel.removeAllRows();
    for (int j = 0; j < out.length; j++) {
        tablePanel.addRow(new DataColumnDefinition[] {out[j]});
    }
}
public void setSize(int width, int height) {
    // Intentional no-op: the page's components size themselves via their
    // preferred/minimum/maximum sizes set in beginStep.
}
/**
 * Drops all references held by this wizard page so the Swing components and
 * data sources can be garbage collected once the page is discarded.
 */
public void dispose() {
    this.activeCombo = null;
    this.cancel = null;
    this.comboPanel = null;
    this.joinCondition = null;
    this.leftSourceAction = null;
    this.parent = null;
    this.paste = null;
    this.pasteCombo = null;
    this.pastePanel = null;
    this.restoredJoin = null;
    this.rightSourceAction = null;
    this.sourceA = null;
    this.sourceB = null;
    this.strataChooser = null;
    this.tablePanel = null;
    this.tabs = null;
}
}
| apache-2.0 |
laiweiwei/eweb4j-2 | eweb4j-webapp/src/main/java/model/AdminPermission.java | 10876 | package model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
 * Administrator permission model mapped to the {@code qhee_admin_permission}
 * table.
 *
 * <p>The previous implementation (fields {@code permission_id},
 * {@code permission_name}, {@code permission_group}, {@code permission_desc},
 * {@code uri}, their accessors, validation, and query helpers) had been left
 * in the file as one large commented-out region; that dead code has been
 * removed — it remains available in version control history.
 *
 * <p>TODO(review): as it stands this entity declares no columns and no
 * {@code @Id}; confirm whether it is still in use and, if so, restore its
 * fields as live code with a proper identifier mapping.
 *
 * @author vivi
 * @date 2013-7-2 上午11:27:50
 */
@Entity
@Table(name="qhee_admin_permission")
public class AdminPermission {
}
| apache-2.0 |
confluentinc/camus | camus-etl-kafka/src/main/java/com/linkedin/camus/etl/kafka/partitioner/DailyPartitioner.java | 1641 | package com.linkedin.camus.etl.kafka.partitioner;
import org.apache.hadoop.conf.Configuration;
import org.joda.time.DateTimeZone;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import static com.linkedin.camus.etl.kafka.mapred.EtlMultiOutputFormat.ETL_DEFAULT_TIMEZONE;
import static com.linkedin.camus.etl.kafka.mapred.EtlMultiOutputFormat.ETL_DESTINATION_PATH_TOPIC_SUBDIRECTORY;
/**
 * Partitions incoming data into daily buckets and produces paths of the form
 * {@code etl.destination.path/topic-name/<sub-dir>/YYYY/MM/dd}.
 *
 * <p>Supported configuration keys:
 * <ul>
 * <li>{@code etl.destination.path} - top-level output directory (required)</li>
 * <li>{@code etl.destination.path.topic.sub.dir} - sub-dir under the topic dir,
 *     defaults to {@code daily}</li>
 * <li>{@code etl.default.timezone} - event timezone, defaults to
 *     {@code America/Los_Angeles}</li>
 * </ul>
 */
public class DailyPartitioner extends BaseTimeBasedPartitioner {

  private static final String DEFAULT_TOPIC_SUB_DIR = "daily";

  @Override
  public void setConf(Configuration conf) {
    if (conf != null) {
      // Resolve the output timezone and per-topic sub-directory, then
      // initialise the base partitioner with a 24-hour bucket size.
      DateTimeZone zone = DateTimeZone.forID(conf.get(ETL_DEFAULT_TIMEZONE, DEFAULT_TIME_ZONE));
      String subDir = conf.get(ETL_DESTINATION_PATH_TOPIC_SUBDIRECTORY, DEFAULT_TOPIC_SUB_DIR);
      String pathFormat = "'" + subDir + "'/YYYY/MM/dd";
      init(TimeUnit.HOURS.toMillis(24), pathFormat, Locale.US, zone);
    }
    super.setConf(conf);
  }
}
| apache-2.0 |
krenfro/eztexting-java | src/main/java/com/github/krenfro/eztexting/EzTextingCredentials.java | 428 | package com.github.krenfro.eztexting;
/**
 * Immutable holder for the username/password pair used to authenticate
 * against the EzTexting API.
 */
public class EzTextingCredentials {

    private final String username;
    private final String password;

    /**
     * @param username the EzTexting account user name
     * @param password the EzTexting account password
     */
    public EzTextingCredentials(String username, String password) {
        this.username = username;
        this.password = password;
    }

    /** @return the account user name */
    public String getUsername() {
        return this.username;
    }

    /** @return the account password */
    public String getPassword() {
        return this.password;
    }
}
| apache-2.0 |
jembi/openxds | openxds-api/src/main/java/org/openhealthtools/openxds/repository/api/XdsRepository.java | 1379 | /**
* Copyright (c) 2009-2010 Misys Open Source Solutions (MOSS) and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* Contributors:
* Misys Open Source Solutions - initial API and implementation
* -
*/
package org.openhealthtools.openxds.repository.api;
import org.openhealthtools.common.ihe.IheActor;
import org.openhealthtools.openexchange.actorconfig.net.IConnectionDescription;
/**
 * This interface defines the operations of an XDS Repository actor.
 * Implementations act as an IHE actor (see {@link IheActor}) that additionally
 * exposes its client-side connection to the XDS Registry.
 *
 * @author <a href="mailto:wenzhi.li@misys.com">Wenzhi Li</a>
 *
 */
public interface XdsRepository extends IheActor {
    /**
     * Gets the client side Registry <code>IConnectionDescription</code> of this actor.
     *
     * @return the client side Registry connection
     */
    public IConnectionDescription getRegistryClientConnection();
}
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/PatchSuperseded.java | 1863 |
package com.vmware.vim25;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB binding for the {@code PatchSuperseded} complex type.
 *
 * <p>Schema shape:
 * <pre>
 * &lt;complexType name="PatchSuperseded"&gt;
 *   &lt;complexContent&gt;
 *     &lt;extension base="{urn:vim25}PatchNotApplicable"&gt;
 *       &lt;sequence&gt;
 *         &lt;element name="supersede" type="{http://www.w3.org/2001/XMLSchema}string"
 *                  maxOccurs="unbounded" minOccurs="0"/&gt;
 *       &lt;/sequence&gt;
 *     &lt;/extension&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "PatchSuperseded", propOrder = {
    "supersede"
})
public class PatchSuperseded
    extends PatchNotApplicable
{

    protected List<String> supersede;

    /**
     * Returns the live list backing the {@code supersede} property.
     *
     * <p>JAXB convention: the list is created lazily on first access and is a
     * direct reference (not a snapshot), so mutations such as
     * {@code getSupersede().add(item)} are reflected in this object. That is
     * also why no {@code set} method exists for this property. Elements are
     * plain {@link String} values.
     */
    public List<String> getSupersede() {
        if (this.supersede == null) {
            this.supersede = new ArrayList<String>();
        }
        return this.supersede;
    }
}
| apache-2.0 |
jlannoy/ninja | ninja-core/src/main/java/ninja/Results.java | 5224 | /**
* Copyright (C) 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja;
import java.util.Optional;
import ninja.utils.NoHttpBody;
/**
 * Convenience methods for the generation of Results.
 *
 * {@link Results#forbidden()} generates a result with status forbidden, for
 * instance.
 *
 * A range of shortcuts are available from here.
 *
 * @author rbauer
 *
 */
public class Results {

    /** Creates a result with the given HTTP status code. */
    public static Result status(int statusCode) {
        Result result = new Result(statusCode);
        return result;
    }

    /** 200 OK. */
    public static Result ok() {
        return status(Result.SC_200_OK);
    }

    /**
     * 201 Created; when a url is present it is set as the Location header.
     */
    public static Result created(Optional<String> url) {
        Result result = status(Result.SC_201_CREATED);
        if (url.isPresent()) {
            result.addHeader(Result.LOCATION, url.get());
        }
        return result;
    }

    /** 404 Not Found. */
    public static Result notFound() {
        return status(Result.SC_404_NOT_FOUND);
    }

    /** 403 Forbidden. */
    public static Result forbidden() {
        return status(Result.SC_403_FORBIDDEN);
    }

    /** 401 Unauthorized. */
    public static Result unauthorized() {
        return status(Result.SC_401_UNAUTHORIZED);
    }

    /** 400 Bad Request. */
    public static Result badRequest() {
        return status(Result.SC_400_BAD_REQUEST);
    }

    /** 204 No Content; renders no HTTP body. */
    public static Result noContent() {
        return status(Result.SC_204_NO_CONTENT)
                .render(Result.NO_HTTP_BODY);
    }

    /** 500 Internal Server Error. */
    public static Result internalServerError() {
        return status(Result.SC_500_INTERNAL_SERVER_ERROR);
    }

    /**
     * A redirect that uses 303 see other.
     *
     * The redirect does NOT need a template and does NOT
     * render a text in the Http body by default.
     *
     * If you wish to do so please
     * remove the {@link NoHttpBody} that is set as renderable of
     * the Result.
     *
     * @param url
     *            The url used as redirect target.
     * @return A nicely configured result with status code 303 and the url set
     *         as Location header. Renders no Http body by default.
     */
    public static Result redirect(String url) {
        Result result = status(Result.SC_303_SEE_OTHER);
        result.addHeader(Result.LOCATION, url);
        result.render(Result.NO_HTTP_BODY);
        return result;
    }

    /**
     * A redirect that uses 307 temporary redirect.
     *
     * The redirect does NOT need a template and does NOT
     * render a text in the Http body by default.
     *
     * If you wish to do so please
     * remove the {@link NoHttpBody} that is set as renderable of
     * the Result.
     *
     * @param url
     *            The url used as redirect target.
     * @return A nicely configured result with status code 307 and the url set
     *         as Location header. Renders no Http body by default.
     */
    public static Result redirectTemporary(String url) {
        Result result = status(Result.SC_307_TEMPORARY_REDIRECT);
        result.addHeader(Result.LOCATION, url);
        result.render(Result.NO_HTTP_BODY);
        return result;
    }

    /** 200 OK with the given content type. */
    public static Result contentType(String contentType) {
        Result result = status(Result.SC_200_OK);
        result.contentType(contentType);
        return result;
    }

    /** 200 OK with content type text/html. */
    public static Result html() {
        Result result = status(Result.SC_200_OK);
        result.contentType(Result.TEXT_HTML);
        return result;
    }

    /** 200 OK with content type text/plain. */
    public static Result text() {
        Result result = status(Result.SC_200_OK);
        result.contentType(Result.TEXT_PLAIN);
        return result;
    }

    /** 200 OK rendered as JSON. */
    public static Result json() {
        Result result = status(Result.SC_200_OK).json();
        return result;
    }

    /** 200 OK rendered as JSONP. */
    public static Result jsonp() {
        Result result = status(Result.SC_200_OK).jsonp();
        return result;
    }

    /** 200 OK rendered as XML. */
    public static Result xml() {
        Result result = status(Result.SC_200_OK).xml();
        return result;
    }

    /** 501 Not Implemented (placeholder for unfinished endpoints). */
    public static Result TODO() {
        Result result = status(Result.SC_501_NOT_IMPLEMENTED);
        result.contentType(Result.APPLICATION_JSON);
        return result;
    }

    /**
     * Instructs ninja (and the http container) that it is okay to continue with
     * the websocket handshake. This sets the HTTP status code to
     * <code>101 Switching Protocols</code>. Do not include any result via
     * a render() as writing it out will not be possible if you intend on
     * switching protocols (e.g. upgrade to a websocket).
     */
    public static Result webSocketContinue() {
        return status(Result.SC_101_SWITCHING_PROTOCOLS);
    }

    /** Creates an asynchronous result (body produced later). */
    public static AsyncResult async() {
        return new AsyncResult();
    }
}
| apache-2.0 |
johnpr01/STAF-Aeron | src/main/java/com/kaazing/staf_aeron/tests/Test0105.java | 3381 | /*
* Copyright 2015 Kaazing Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kaazing.staf_aeron.tests;
import com.kaazing.staf_aeron.AeronSTAFProcess;
import com.kaazing.staf_aeron.STAFHost;
import com.kaazing.staf_aeron.YAMLTestCase;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
// While a publisher is actively sending to a subscriber, a second (late)
// publisher is started and sends to the same subscriber.
// The late publisher uses a different stream id.
public class Test0105 extends Test
{
    /**
     * Starts one subscriber and two publishers (the second delayed by ten
     * seconds) on the same channel, then waits for all three STAF processes
     * to finish.
     *
     * @param testCase configuration naming the three STAF hosts to use
     */
    public Test0105(YAMLTestCase testCase)
    {
        STAFHost host1 = testCase.getStafHosts().get(0);
        STAFHost host2 = testCase.getStafHosts().get(1);
        STAFHost host3 = testCase.getStafHosts().get(2);

        processes = new HashMap<String, AeronSTAFProcess>();
        latch = new CountDownLatch(3);

        final String aeronDir = "-Daeron.dir=" + host1.getTmpDir() + host1.getPathSeperator() + testCase.getName();
        int port = getPort(host1);

        startProcess(host1,
                host1.getJavaPath() + host1.getPathSeperator() + "java " + aeronDir + host1.getPathSeperator() + "sub" + host1.getProperties() +
                " -cp " + host1.getClasspath() +
                " uk.co.real_logic.aeron.tools.SubscriberTool" +
                " -c=udp://localhost:" + port + " " + host1.getOptions(),
                "Test0105-sub", 10);

        startProcess(host2,
                host2.getJavaPath() + host2.getPathSeperator() + "java " + aeronDir + host2.getPathSeperator() + "pub" + host2.getProperties() +
                " -cp " + host2.getClasspath() +
                " uk.co.real_logic.aeron.tools.PublisherTool" +
                " -c=udp://localhost:" + port + " " + host2.getOptions(),
                // Fix: label was "Test00105-pub1" (extra zero), inconsistent
                // with the other process labels of this test.
                "Test0105-pub1", 10);

        // Let publisher 1 send to the subscriber for a few seconds before
        // starting publisher 2 (which uses a different stream id).
        try
        {
            Thread.sleep(10000);
        }
        catch (InterruptedException e)
        {
            e.printStackTrace();
            // Fix: restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        }

        startProcess(host3,
                // Fix: use the host's path separator instead of a hard-coded
                // "/" (consistent with the host2 command above).
                host3.getJavaPath() + host3.getPathSeperator() + "java " + aeronDir + host3.getPathSeperator() + "pub" + host3.getProperties() +
                " -cp " + host3.getClasspath() +
                " uk.co.real_logic.aeron.tools.PublisherTool" +
                " -c=udp://localhost:" + port + " " + host3.getOptions(),
                "Test0105-pub2", 10);

        try
        {
            latch.await();
        }
        catch (InterruptedException e)
        {
            e.printStackTrace();
            Thread.currentThread().interrupt();
        }
    }

    public void run()
    {
        // Intentional no-op: all work happens in the constructor.
    }

    // Verification: The subscriber will only receive messages from publisher 1.
    public void validate()
    {
        // Intentional no-op: verification is performed manually per the note above.
    }
}
| apache-2.0 |
hcoles/pitest | pitest/src/main/java/org/pitest/testng/FailureTracker.java | 91 | package org.pitest.testng;
/**
 * Records whether any failure has been observed during a TestNG run.
 */
public interface FailureTracker {
  /** @return {@code true} if at least one failure has been seen so far. */
  boolean hasHadFailure();
}
| apache-2.0 |
signed/intellij-community | xml/dom-openapi/src/com/intellij/util/xml/ui/UndoHelper.java | 3244 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xml.ui;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandListener;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiDocumentManager;
import java.util.HashSet;
import java.util.Set;
/**
 * Watches a set of documents and, when a command or undo-transparent action
 * modifies any of them while the owning UI is showing, commits all documents
 * and resets the given {@link Committable} so the UI re-reads the model.
 *
 * @author peter
 */
public class UndoHelper {
  // True while the owning UI is visible; edits are only tracked when showing.
  private boolean myShowing;
  private final Set<Document> myCurrentDocuments = new HashSet<>();
  // Set when a watched document changed during the current command/action.
  private boolean myDirty;
  private final DocumentAdapter myDocumentAdapter = new DocumentAdapter() {
    @Override
    public void documentChanged(DocumentEvent e) {
      if (myShowing) {
        myDirty = true;
      }
    }
  };

  public UndoHelper(final Project project, final Committable committable) {
    final PsiDocumentManager psiDocumentManager = PsiDocumentManager.getInstance(project);
    // `committable` also serves as the parent disposable: the command listener
    // is removed automatically when it is disposed.
    CommandProcessor.getInstance().addCommandListener(new CommandListener() {
      @Override
      public void commandStarted(CommandEvent event) {
        // Regular commands reuse the undo-transparent begin/end handling.
        undoTransparentActionStarted();
      }

      @Override
      public void undoTransparentActionStarted() {
        myDirty = false;
      }

      @Override
      public void undoTransparentActionFinished() {
        // If a watched document changed during the action, flush pending PSI
        // changes and reset the UI so it reflects the new state.
        if (myDirty) {
          psiDocumentManager.commitAllDocuments();
          committable.reset();
        }
      }

      @Override
      public void commandFinished(CommandEvent event) {
        undoTransparentActionFinished();
      }
    }, committable);
  }

  /** Attaches the change listener to every watched document. */
  public final void startListeningDocuments() {
    for (final Document document : myCurrentDocuments) {
      document.addDocumentListener(myDocumentAdapter);
    }
  }

  /** Detaches the change listener from every watched document. */
  public final void stopListeningDocuments() {
    for (final Document document : myCurrentDocuments) {
      document.removeDocumentListener(myDocumentAdapter);
    }
  }

  public final void setShowing(final boolean showing) {
    myShowing = showing;
  }

  public boolean isShowing() {
    return myShowing;
  }

  // Listeners are detached from all documents and re-attached after the set
  // changes, keeping exactly one listener per watched document.
  public final void addWatchedDocument(final Document document) {
    stopListeningDocuments();
    myCurrentDocuments.add(document);
    startListeningDocuments();
  }

  public final void removeWatchedDocument(final Document document) {
    stopListeningDocuments();
    myCurrentDocuments.remove(document);
    startListeningDocuments();
  }

  /** @return a snapshot array of the currently watched documents. */
  public final Document[] getDocuments() {
    return myCurrentDocuments.toArray(new Document[myCurrentDocuments.size()]);
  }
}
| apache-2.0 |
lyloou/Android | app/src/main/java/com/lyloou/android/view/CircleView.java | 1768 | package com.lyloou.android.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.util.AttributeSet;
import android.view.View;
import com.lyloou.android.R;
import com.lyloou.android.util.ScreenUtil;
import com.lyloou.android.util.ViewUtil;
/**
 * A view that renders a drawable resource through a PorterDuff transfer mode
 * (SRC_IN), producing a shaped (e.g. circular) image. The source picture is
 * taken from the {@code CircleView_pic} styled attribute.
 */
public class CircleView extends View {
    private Context viewContext;
    private int pictureResId;
    private PorterDuff.Mode transferMode;
    private Bitmap renderedBitmap;

    public CircleView(Context context) {
        this(context, null);
    }

    public CircleView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CircleView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        viewContext = context;
        transferMode = PorterDuff.Mode.SRC_IN;
        // Read the picture resource from XML attributes, falling back to a default image.
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.CircleView);
        pictureResId = typedArray.getResourceId(R.styleable.CircleView_pic, R.mipmap.lou);
        typedArray.recycle();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Default to 100dp per side when the measure spec leaves the size unconstrained.
        int measuredWidth = ViewUtil.getSizeFromMeasureSpec(widthMeasureSpec, ScreenUtil.dp2Px(viewContext, 100));
        int measuredHeight = ViewUtil.getSizeFromMeasureSpec(heightMeasureSpec, ScreenUtil.dp2Px(viewContext, 100));
        // Build the masked bitmap at the measured size, honoring the view's padding.
        renderedBitmap = ViewUtil.getBitmapByXfermode(viewContext, pictureResId, Color.BLACK,
                measuredWidth, measuredHeight,
                getPaddingLeft(), getPaddingTop(), getPaddingRight(), getPaddingBottom(),
                transferMode);
        setMeasuredDimension(measuredWidth, measuredHeight);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        canvas.drawBitmap(renderedBitmap, 0, 0, null);
    }
}
| apache-2.0 |
HuaweiBigData/carbondata | core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java | 17828 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.scan.expression;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.scan.expression.conditional.GreaterThanEqualToExpression;
import org.apache.carbondata.core.scan.expression.conditional.GreaterThanExpression;
import org.apache.carbondata.core.scan.expression.conditional.LessThanEqualToExpression;
import org.apache.carbondata.core.scan.expression.conditional.LessThanExpression;
import org.apache.carbondata.core.scan.expression.logical.AndExpression;
import org.apache.carbondata.core.scan.expression.logical.OrExpression;
import org.apache.carbondata.core.scan.expression.logical.RangeExpression;
import org.apache.carbondata.core.scan.expression.logical.TrueExpression;
import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.FALSE;
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.GREATERTHAN;
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.GREATERTHAN_EQUALTO;
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.LESSTHAN;
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.LESSTHAN_EQUALTO;
/**
 * Rewrites a filter {@link Expression} tree so that complementary GreaterThan[EqualTo] and
 * LessThan[EqualTo] conditions on the same dimension column, joined under an AND, are
 * collapsed into a single {@link RangeExpression}. Comparison nodes made redundant by the
 * range formation are replaced with {@link TrueExpression} placeholders so the tree shape
 * stays valid.
 */
public class RangeExpressionEvaluator {
  private static final LogService LOG =
      LogServiceFactory.getLogService(RangeExpressionEvaluator.class.getName());
  /** Root of the filter expression tree being evaluated/rewritten. */
  private Expression expr;
  // Bookkeeping used by traverseTree(): the source comparison node and its parent, and the
  // matching target node/parent once found.
  // NOTE(review): srcNode/srcParentNode are never assigned anywhere in this class, so
  // traverseTree() would throw a NullPointerException at getSrcNode().equals(...) — confirm
  // whether that code path is still reachable or dead.
  private Expression srcNode;
  private Expression srcParentNode;
  private Expression tarNode;
  private Expression tarParentNode;

  /**
   * @param expr root of the filter expression tree to evaluate
   */
  public RangeExpressionEvaluator(Expression expr) {
    this.expr = expr;
  }

  /** @return the root of the (possibly rewritten) expression tree */
  public Expression getExpr() {
    return expr;
  }

  public void setExpr(Expression expr) {
    this.expr = expr;
  }

  /** @return the source comparison node used during tree traversal (may be null) */
  public Expression getSrcNode() {
    return srcNode;
  }

  public void setTarNode(Expression expr) {
    this.tarNode = expr;
  }

  public void setTarParentNode(Expression expr) {
    this.tarParentNode = expr;
  }

  /**
   * This method evaluates is any greaterthan or less than expression can be transformed
   * into a single RANGE filter.
   */
  public void rangeExpressionEvaluatorMapBased() {
    // The algorithm :
    // Get all the nodes of the Expression Tree and fill it into a MAP.
    // The Map structure will be currentNode, ColumnName, LessThanOrgreaterThan, Value, ParentNode
    // Group the rows in MAP according to the columns and then evaluate if it can be transformed
    // into a RANGE or not.
    //
    //            AND                                           AND
    //             |                                             |
    //            / \                                           / \
    //           /   \                                         /   \
    //        Less   Greater         =>                     TRUE   Range
    //        /  \    /  \                                         /  \
    //       /    \  /    \                                       /    \
    //      a     10 a     5                                    Less   greater
    //                                                          /\      /\
    //                                                         /  \    /  \
    //                                                        a   10  a    5
    //
    Map<String, List<FilterModificationNode>> filterExpressionMap;
    filterExpressionMap = convertFilterTreeToMap();
    replaceWithRangeExpression(filterExpressionMap);
    filterExpressionMap.clear();
  }

  /**
   * For each column that has more than one comparison node collected in the map, finds the
   * tightest GreaterThan[EqualTo] (startMin) and LessThan[EqualTo] (endMax) pair and, when both
   * exist, replaces startMin with a {@link RangeExpression} and endMax (plus any comparisons
   * superseded along the way) with {@link TrueExpression}.
   *
   * @param filterExpressionMap column name -> comparison nodes found under AND parents
   */
  public void replaceWithRangeExpression(
      Map<String, List<FilterModificationNode>> filterExpressionMap) {
    // NOTE(review): deleteExp is shared across all column entries of the map and is only
    // flushed when the *current* column forms a range; entries left over from a column that
    // never formed a range are then also replaced with TRUE here — confirm this is intended.
    List<FilterModificationNode> deleteExp = new ArrayList<>();
    Iterator<Map.Entry<String, List<FilterModificationNode>>> iterator =
        filterExpressionMap.entrySet().iterator();
    Map.Entry<String, List<FilterModificationNode>> nextEntry = null;
    while (iterator.hasNext()) {
      nextEntry = iterator.next();
      List<FilterModificationNode> filterExp = nextEntry.getValue();
      if (filterExp.size() > 1) {
        // There are multiple Expression for the same column traverse and check if they can
        // form a range.
        FilterModificationNode startMin = null;
        FilterModificationNode endMax = null;
        for (FilterModificationNode exp : filterExp) {
          if ((exp.getExpType() == GREATERTHAN) || (exp.getExpType() == GREATERTHAN_EQUALTO)) {
            // Only consider this lower bound if it is still compatible with the current
            // upper bound (i.e. the pair could still form a non-empty range).
            if ((null == endMax) || ((checkLiteralValue(exp.getCurrentExp(),
                endMax.getCurrentExp())))) {
              if (null == startMin) {
                startMin = exp;
              } else {
                // There is already some value in startMin so check which one is greater.
                LiteralExpression srcLiteral = getChildLiteralExpression(startMin.getCurrentExp());
                LiteralExpression tarLiteral = getChildLiteralExpression(exp.getCurrentExp());
                ExpressionResult srcExpResult = srcLiteral.evaluate(null);
                ExpressionResult tarExpResult = tarLiteral.evaluate(null);
                if (srcExpResult.compareTo(tarExpResult) < 0) {
                  // Before replacing the startMin add the current StartMin into deleteExp List
                  // as they will be replaced with TRUE expression after RANGE formation.
                  deleteExp.add(startMin);
                  startMin = exp;
                }
              }
            }
          }
          if ((exp.getExpType() == LESSTHAN) || (exp.getExpType() == LESSTHAN_EQUALTO)) {
            // Symmetric handling for the upper bound.
            if ((null == startMin) || ((checkLiteralValue(exp.getCurrentExp(),
                startMin.getCurrentExp())))) {
              if (null == endMax) {
                endMax = exp;
              } else {
                // There is already some value in endMax so check which one is less.
                LiteralExpression srcLiteral = getChildLiteralExpression(endMax.getCurrentExp());
                LiteralExpression tarLiteral = getChildLiteralExpression(exp.getCurrentExp());
                ExpressionResult srcExpResult = srcLiteral.evaluate(null);
                ExpressionResult tarExpResult = tarLiteral.evaluate(null);
                if (srcExpResult.compareTo(tarExpResult) > 0) {
                  // Before replacing the endMax add the current endMax into deleteExp List
                  // as they will be replaced with TRUE expression after RANGE formation.
                  deleteExp.add(endMax);
                  endMax = exp;
                }
              }
            }
          }
        }
        if ((null != startMin) && (null != endMax)) {
          LOG.info(
              "GreaterThan and LessThan Filter Expression changed to Range Expression for column "
                  + nextEntry.getKey());
          // the node can be converted to RANGE.
          Expression n1 = startMin.getCurrentExp();
          Expression n2 = endMax.getCurrentExp();
          RangeExpression rangeTree = new RangeExpression(n1, n2);
          Expression srcParentNode = startMin.getParentExp();
          Expression tarParentNode = endMax.getParentExp();
          // The range takes startMin's place; endMax's slot becomes a no-op TRUE.
          srcParentNode.findAndSetChild(startMin.getCurrentExp(), rangeTree);
          tarParentNode.findAndSetChild(endMax.getCurrentExp(), new TrueExpression(null));
          if (deleteExp.size() > 0) {
            // There are some expression to Delete as they are Redundant after Range Formation.
            for (FilterModificationNode trueExp : deleteExp) {
              trueExp.getParentExp()
                  .findAndSetChild(trueExp.getCurrentExp(), new TrueExpression(null));
            }
          }
        }
      }
    }
  }

  /**
   * Builds the per-column map of candidate comparison nodes by walking the filter tree.
   *
   * @return column name -> list of LessThan/GreaterThan(-EqualTo) nodes with AND parents
   */
  private Map<String, List<FilterModificationNode>> convertFilterTreeToMap() {
    // Traverse the Filter Tree and add the nodes in filterExpressionMap.
    // Only those nodes will be added which has got LessThan, LessThanEqualTo
    // GreaterThan, GreaterThanEqualTo Expression Only.
    Map<String, List<FilterModificationNode>> filterExpressionMap =
        new HashMap<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // Traverse the Tree.
    fillExpressionMap(filterExpressionMap, null, null);
    return filterExpressionMap;
  }

  /**
   * Depth-first walk that records every eligible comparison node whose direct parent is an AND.
   * When an OR node is encountered, the map collected so far is flushed (ranges formed) and
   * cleared, because candidates must not be combined across an OR boundary.
   *
   * @param filterExpressionMap map being filled (column name -> candidate nodes)
   * @param currentNode         node being visited; null means start from the tree root
   * @param parentNode          parent of currentNode (root is treated as its own parent)
   */
  private void fillExpressionMap(Map<String, List<FilterModificationNode>> filterExpressionMap,
      Expression currentNode, Expression parentNode) {
    if (null == currentNode) {
      currentNode = this.getExpr();
      parentNode = currentNode;
    }
    // if the parentNode is a ANDExpression and the current node is LessThan, GreaterThan
    // then add the node into filterExpressionMap.
    if ((parentNode instanceof AndExpression) && (isLessThanGreaterThanExp(currentNode)
        && eligibleForRangeExpConv(currentNode))) {
      addFilterExpressionMap(filterExpressionMap, currentNode, parentNode);
    }
    for (Expression exp : currentNode.getChildren()) {
      if (null != exp) {
        fillExpressionMap(filterExpressionMap, exp, currentNode);
        if (exp instanceof OrExpression) {
          replaceWithRangeExpression(filterExpressionMap);
          filterExpressionMap.clear();
        }
      }
    }
  }

  /**
   * Wraps the current comparison node in a {@link FilterModificationNode} and appends it to the
   * per-column list in the map, creating the list on first use.
   *
   * @param filterExpressionMap map being filled
   * @param currentNode         the comparison expression node
   * @param parentNode          its (AND) parent
   */
  private void addFilterExpressionMap(Map<String, List<FilterModificationNode>> filterExpressionMap,
      Expression currentNode, Expression parentNode) {
    String colName = getColumnName(currentNode);
    DataType dataType = getLiteralDataType(currentNode);
    Object literalVal = getLiteralValue(currentNode);
    ExpressionType expType = getExpressionType(currentNode);
    FilterModificationNode filterExpression =
        new FilterModificationNode(currentNode, parentNode, expType, dataType, literalVal, colName);
    if (null == filterExpressionMap.get(colName)) {
      filterExpressionMap.put(colName, new ArrayList<FilterModificationNode>());
    }
    filterExpressionMap.get(colName).add(filterExpression);
  }

  /**
   * This method checks if the Expression is among LessThan, LessThanEqualTo,
   * GreaterThan or GreaterThanEqualTo
   *
   * @param expr expression node to test
   * @return true when expr is one of the four comparison expression types
   */
  private boolean isLessThanGreaterThanExp(Expression expr) {
    return (expr instanceof LessThanEqualToExpression) || (expr instanceof LessThanExpression)
        || (expr instanceof GreaterThanEqualToExpression)
        || (expr instanceof GreaterThanExpression);
  }

  /**
   * This method verifies if the Expression is qualified for Range Expression conversion:
   * the column operand must be a dimension with a non-complex data type.
   *
   * @param expChild comparison expression whose children are inspected
   * @return true when the column child is a non-complex dimension; false otherwise
   */
  private boolean eligibleForRangeExpConv(Expression expChild) {
    for (Expression exp : expChild.getChildren()) {
      if (exp instanceof ColumnExpression) {
        return ((ColumnExpression) exp).isDimension() &&
            ! (((ColumnExpression) exp).getDimension().getDataType().isComplexType());
      }
    }
    return false;
  }

  /**
   * This method returns the Column name from the ColumnExpression ExpressionType.
   *
   * @param andNode expression whose ColumnExpression child is searched
   * @return the column name, or null when no ColumnExpression child exists
   */
  private String getColumnName(Expression andNode) {
    // returns the Column Name from Column Expression.
    for (Expression exp : andNode.getChildren()) {
      if (exp instanceof ColumnExpression) {
        return ((ColumnExpression) exp).getColumnName();
      }
    }
    return null;
  }

  /**
   * This method returns the Value from the Literal ExpressionType.
   *
   * @param exp expression whose LiteralExpression child is searched
   * @return the literal value, or null when no LiteralExpression child exists
   */
  private Object getLiteralValue(Expression exp) {
    for (Expression expr : exp.getChildren()) {
      if (expr instanceof LiteralExpression) {
        return (((LiteralExpression) expr).getLiteralExpValue());
      }
    }
    return null;
  }

  /**
   * This method returns the DataType of the Literal Expression.
   *
   * @param exp expression whose LiteralExpression child is searched
   * @return the literal's data type, or null when no LiteralExpression child exists
   */
  private DataType getLiteralDataType(Expression exp) {
    for (Expression expr : exp.getChildren()) {
      if (expr instanceof LiteralExpression) {
        return (((LiteralExpression) expr).getLiteralExpDataType());
      }
    }
    return null;
  }

  /**
   * This method returns the LiteralExpression child of the given expression.
   *
   * @param exp expression whose LiteralExpression child is searched
   * @return the literal child expression, or null when none exists
   */
  private LiteralExpression getChildLiteralExpression(Expression exp) {
    for (Expression expr : exp.getChildren()) {
      if (expr instanceof LiteralExpression) {
        return ((LiteralExpression) expr);
      }
    }
    return null;
  }

  /**
   * This method returns the ExpressionType based on the Expression.
   *
   * @param exp expression node to classify
   * @return the matching comparison ExpressionType, or FALSE when exp is not a comparison
   */
  private ExpressionType getExpressionType(Expression exp) {
    // return the expressionType. Note among the children of the
    // andNode one should be columnExpression others should be
    // LessThan, LessThanEqualTo, GreaterThan, GreaterThanEqualTo.
    //
    if (exp instanceof LessThanExpression) {
      return LESSTHAN;
    } else if (exp instanceof LessThanEqualToExpression) {
      return LESSTHAN_EQUALTO;
    } else if (exp instanceof GreaterThanExpression) {
      return GREATERTHAN;
    } else if (exp instanceof GreaterThanEqualToExpression) {
      return GREATERTHAN_EQUALTO;
    } else {
      return FALSE;
    }
  }

  /**
   * This method checks if the Source Expression matches with Target Expression,
   * i.e. one side is a lower bound (GREATERTHAN[_EQUALTO]) and the other an upper
   * bound (LESSTHAN[_EQUALTO]).
   *
   * @param src source expression type
   * @param tar target expression type
   * @return true when the two types are complementary bounds
   */
  private boolean matchExpType(ExpressionType src, ExpressionType tar) {
    return (((src == LESSTHAN) || (src == LESSTHAN_EQUALTO)) && ((tar == GREATERTHAN) || (tar
        == GREATERTHAN_EQUALTO))) || (((src == GREATERTHAN) || (src == GREATERTHAN_EQUALTO)) && (
        (tar == LESSTHAN) || (tar == LESSTHAN_EQUALTO)));
  }

  /**
   * This Method Traverses the Expression Tree to find the corresponding node of the Range
   * Expression. If one node of Range Expression is LessThan then a corresponding GreaterThan
   * will be choosen or vice versa.
   *
   * @param currentNode node being visited; null parent means start from the tree root
   * @param parentNode  parent of currentNode
   * @return the parent of the matching target node, or null when no match is found
   */
  private Expression traverseTree(Expression currentNode, Expression parentNode) {
    Expression result = null;
    if (null == parentNode) {
      currentNode = this.getExpr();
      parentNode = currentNode;
    }
    if (!this.getSrcNode().equals(currentNode) && isLessThanGreaterThanExp(currentNode)) {
      String srcColumnName = getColumnName(this.getSrcNode());
      String tarColumnName = getColumnName(currentNode);
      ExpressionType srcExpType = getExpressionType(this.getSrcNode());
      ExpressionType tarExpType = getExpressionType(currentNode);
      // Match requires: same column, both real comparisons, complementary bound types,
      // and literal values that form a non-empty range.
      if ((null != srcColumnName) && (null != tarColumnName) && (srcColumnName
          .equals(tarColumnName)) && (srcExpType != ExpressionType.FALSE) && (tarExpType
          != ExpressionType.FALSE) && ((matchExpType(srcExpType, tarExpType)) && checkLiteralValue(
          this.getSrcNode(), currentNode))) {
        this.setTarNode(currentNode);
        this.setTarParentNode(parentNode);
        return parentNode;
      }
    }
    for (Expression exp : currentNode.getChildren()) {
      if (null != exp && !(exp instanceof RangeExpression)) {
        result = traverseTree(exp, currentNode);
        if (null != result) {
          return result;
        }
      }
    }
    return null;
  }

  /**
   * This method checks whether the literal values of the two complementary comparisons form a
   * valid (non-empty) range: the GreaterThan(EqualTo) literal must be less than (or equal to,
   * when both bounds are inclusive) the LessThan(EqualTo) literal.
   *
   * @param src source comparison expression
   * @param tar target comparison expression
   * @return true when the pair of literals delimits a valid range
   */
  private boolean checkLiteralValue(Expression src, Expression tar) {
    ExpressionType srcExpressionType = getExpressionType(src);
    ExpressionType tarExpressionType = getExpressionType(tar);
    LiteralExpression srcLiteral = getChildLiteralExpression(src);
    LiteralExpression tarLiteral = getChildLiteralExpression(tar);
    ExpressionResult srcExpResult = srcLiteral.evaluate(null);
    ExpressionResult tarExpResult = tarLiteral.evaluate(null);
    switch (srcExpressionType) {
      case LESSTHAN:
      case LESSTHAN_EQUALTO:
        switch (tarExpressionType) {
          case GREATERTHAN:
            if (srcExpResult.compareTo(tarExpResult) > 0) {
              return true;
            }
            break;
          case GREATERTHAN_EQUALTO:
            if (srcExpResult.compareTo(tarExpResult) >= 0) {
              return true;
            }
            break;
        }
        break;
      case GREATERTHAN:
      case GREATERTHAN_EQUALTO:
        switch (tarExpressionType) {
          case LESSTHAN:
            if (srcExpResult.compareTo(tarExpResult) < 0) {
              return true;
            }
            break;
          case LESSTHAN_EQUALTO:
            if (srcExpResult.compareTo(tarExpResult) <= 0) {
              return true;
            }
            break;
        }
        break;
    }
    return false;
  }
}
| apache-2.0 |
yp-creative/yop-java-sdk-old | src/main/java/com/yeepay/g3/sdk/yop/encrypt/RSA.java | 4964 | package com.yeepay.g3.sdk.yop.encrypt;
import com.google.common.base.Charsets;
import com.yeepay.g3.sdk.yop.exception.DecryptFailedException;
import com.yeepay.g3.sdk.yop.exception.EncryptFailedException;
import com.yeepay.g3.sdk.yop.exception.SignFailedException;
import com.yeepay.g3.sdk.yop.exception.VerifySignFailedException;
import javax.crypto.Cipher;
import java.security.Key;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.Signature;
import java.util.HashMap;
import java.util.Map;
/**
 * RSA utility: signing/verification and encryption/decryption helpers.
 * <p>
 * Signatures use SHA256withRSA or SHA512withRSA depending on the supplied
 * {@link DigestAlgEnum}; encryption/decryption uses RSA/ECB/PKCS1Padding.
 * String payloads are treated as UTF-8; encoded output is URL-safe Base64.
 * <p>
 * Copyright: Copyright (c)2014<br>
 * Company: YeePay<br>
 *
 * @author wenkang.zhang
 * @version 1.0.0
 * @since 16/11/27
 */
public class RSA {
    private static final String RSA_ECB_PKCS1PADDING = "RSA/ECB/PKCS1Padding";

    /** Maps the digest algorithm choice to the JCA signature algorithm name. */
    private static final Map<DigestAlgEnum, String> SIGN_ALG_MAP = new HashMap<DigestAlgEnum, String>();

    static {
        SIGN_ALG_MAP.put(DigestAlgEnum.SHA256, "SHA256withRSA");
        SIGN_ALG_MAP.put(DigestAlgEnum.SHA512, "SHA512withRSA");
    }

    /**
     * Verifies an RSA signature over raw bytes.
     *
     * @param data      the signed data
     * @param sign      the signature bytes
     * @param publicKey the public key to verify with
     * @param digestAlg the digest algorithm (SHA256 or SHA512)
     * @return true if the signature is valid, false otherwise
     * @throws VerifySignFailedException if verification cannot be performed
     */
    public static boolean verifySign(byte[] data, byte[] sign, PublicKey publicKey, DigestAlgEnum digestAlg) {
        try {
            Signature signature = Signature.getInstance(SIGN_ALG_MAP.get(digestAlg));
            signature.initVerify(publicKey);
            signature.update(data);
            // Inlined the redundant local "result" variable.
            return signature.verify(sign);
        } catch (Exception e) {
            throw new VerifySignFailedException("verifySign fail!", e);
        }
    }

    /**
     * Verifies an RSA signature over a UTF-8 string; the signature must be Base64-encoded.
     *
     * @param data      the signed data (UTF-8 text)
     * @param sign      the Base64-encoded signature
     * @param publicKey the public key to verify with
     * @param digestAlg the digest algorithm (SHA256 or SHA512)
     * @return true if the signature is valid, false otherwise
     * @throws VerifySignFailedException if verification cannot be performed
     */
    public static boolean verifySign(String data, String sign, PublicKey publicKey, DigestAlgEnum digestAlg) {
        byte[] dataByte = data.getBytes(Charsets.UTF_8);
        byte[] signByte = Encodes.decodeBase64(sign);
        return verifySign(dataByte, signByte, publicKey, digestAlg);
    }

    /**
     * Signs raw bytes with the given private key.
     *
     * @param data      the data to sign
     * @param key       the private key
     * @param digestAlg the digest algorithm (SHA256 or SHA512)
     * @return the signature bytes
     * @throws SignFailedException if signing fails
     */
    public static byte[] sign(byte[] data, PrivateKey key, DigestAlgEnum digestAlg) {
        try {
            Signature signature = Signature.getInstance(SIGN_ALG_MAP.get(digestAlg));
            signature.initSign(key);
            signature.update(data);
            return signature.sign();
        } catch (Exception e) {
            throw new SignFailedException("sign fail!", e);
        }
    }

    /**
     * Signs a UTF-8 string and returns the signature as URL-safe Base64.
     *
     * @param data      the data to sign (UTF-8 text)
     * @param key       the private key
     * @param digestAlg the digest algorithm (SHA256 or SHA512)
     * @return the URL-safe Base64-encoded signature
     * @throws SignFailedException if signing fails
     */
    public static String sign(String data, PrivateKey key, DigestAlgEnum digestAlg) {
        byte[] dataByte = data.getBytes(Charsets.UTF_8);
        return Encodes.encodeUrlSafeBase64(sign(dataByte, key, digestAlg));
    }

    /**
     * Encrypts raw bytes with RSA/ECB/PKCS1Padding.
     *
     * @param data the plaintext bytes
     * @param key  the key to encrypt with
     * @return the ciphertext bytes
     * @throws EncryptFailedException if encryption fails
     */
    public static byte[] encrypt(byte[] data, Key key) {
        Cipher cipher;
        try {
            cipher = Cipher.getInstance(RSA_ECB_PKCS1PADDING);
            cipher.init(Cipher.ENCRYPT_MODE, key);
            return cipher.doFinal(data);
        } catch (Exception e) {
            throw new EncryptFailedException("rsa encrypt failed!", e);
        }
    }

    /**
     * Encrypts a UTF-8 string and returns the ciphertext as URL-safe Base64.
     *
     * @param data the plaintext (UTF-8 text)
     * @param key  the key to encrypt with
     * @return the URL-safe Base64-encoded ciphertext
     * @throws EncryptFailedException if encryption fails
     */
    public static String encryptToBase64(String data, Key key) {
        try {
            return Encodes.encodeUrlSafeBase64(encrypt(data.getBytes(Charsets.UTF_8), key));
        } catch (Exception e) {
            throw new EncryptFailedException("rsa encrypt fail!", e);
        }
    }

    /**
     * Decrypts raw bytes with RSA/ECB/PKCS1Padding.
     *
     * @param data the ciphertext bytes
     * @param key  the key to decrypt with
     * @return the plaintext bytes
     * @throws DecryptFailedException if decryption fails
     */
    public static byte[] decrypt(byte[] data, Key key) {
        try {
            Cipher cipher = Cipher.getInstance(RSA_ECB_PKCS1PADDING);
            cipher.init(Cipher.DECRYPT_MODE, key);
            return cipher.doFinal(data);
        } catch (Exception e) {
            throw new DecryptFailedException("rsa decrypt fail!", e);
        }
    }

    /**
     * Decrypts Base64-encoded ciphertext and returns the plaintext as a UTF-8 string.
     *
     * @param data the Base64-encoded ciphertext
     * @param key  the key to decrypt with
     * @return the decrypted plaintext (UTF-8 text)
     * @throws DecryptFailedException if decryption fails
     */
    public static String decryptFromBase64(String data, Key key) {
        try {
            return new String(decrypt(Encodes.decodeBase64(data), key), Charsets.UTF_8);
        } catch (Exception e) {
            throw new DecryptFailedException("rsa decrypt fail!", e);
        }
    }
}
| apache-2.0 |
j-coll/opencga | opencga-core/src/main/java/org/opencb/opencga/core/response/VariantQueryResult.java | 6010 | /*
* Copyright 2015-2020 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.core.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.opencb.commons.datastore.core.DataResult;
import org.opencb.commons.datastore.core.Event;
import org.opencb.commons.datastore.core.ObjectMap;
import java.util.List;
import java.util.Map;
/**
* Created on 07/02/17.
*
* @author Jacobo Coll <jacobo167@gmail.com>
*/
@JsonIgnoreProperties({"samples", "numTotalSamples", "numSamples", "source", "approximateCount", "approximateCountSamplingSize"})
public class VariantQueryResult<T> extends OpenCGAResult<T> {

    // Keys under which the variant-specific metadata is stored in the result's
    // attributes ObjectMap; the matching pseudo-properties are excluded from
    // Jackson (de)serialization via @JsonIgnoreProperties above.
    private static final String SAMPLES = "samples";
    private static final String NUM_TOTAL_SAMPLES = "numTotalSamples";
    private static final String NUM_SAMPLES = "numSamples";
    private static final String SOURCE = "source";
    private static final String APPROXIMATE_COUNT = "approximateCount";
    private static final String APPROXIMATE_COUNT_SAMPLING_SIZE = "approximateCountSamplingSize";

    public VariantQueryResult() {
    }

    /**
     * Convenience constructor without approximate-count metadata.
     */
    public VariantQueryResult(int dbTime, int numResults, long numMatches, List<Event> events, List<T> result,
                              Map<String, List<String>> samples, String source) {
        this(dbTime, numResults, numMatches, events, result, samples, source, null, null, null);
    }

    /**
     * Full constructor. When {@code samples} is provided, numSamples is derived as the
     * total number of sample names across all studies/keys in the map.
     */
    public VariantQueryResult(int dbTime, int numResults, long numMatches, List<Event> events, List<T> result,
                              Map<String, List<String>> samples, String source, Boolean approximateCount,
                              Integer approximateCountSamplingSize, Integer numTotalSamples) {
        super(dbTime, events, numResults, result, numMatches);
        setSamples(samples);
        setSource(source);
        setApproximateCount(approximateCount);
        setApproximateCountSamplingSize(approximateCountSamplingSize);
        if (samples != null) {
            // numSamples = total count of sample names across all map entries.
            setNumSamples(samples.values().stream().mapToInt(List::size).sum());
        }
        setNumTotalSamples(numTotalSamples);
    }

    /**
     * Copies a plain DataResult into a VariantQueryResult without any sample metadata.
     */
    public VariantQueryResult(DataResult<T> dataResult) {
        super(dataResult.getTime(),
                dataResult.getEvents(),
                dataResult.getNumMatches(),
                dataResult.getNumInserted(),
                dataResult.getNumUpdated(),
                dataResult.getNumDeleted(),
                dataResult.getAttributes());
        setResults(dataResult.getResults());
        setNumResults(dataResult.getNumResults());
    }

    public VariantQueryResult(DataResult<T> queryResult, Map<String, List<String>> samples) {
        this(queryResult, samples, null);
    }

    /**
     * Copies a DataResult and attaches sample metadata. numTotalSamples is initialized
     * to the same value as numSamples, and approximateCount is marked false when an
     * exact numMatches is available.
     */
    public VariantQueryResult(DataResult<T> dataResult, Map<String, List<String>> samples, String source) {
        this(dataResult);
        setSamples(samples);
        if (getNumMatches() >= 0) {
            // A non-negative numMatches means the count is exact, not approximate.
            setApproximateCount(false);
        }
        if (samples != null) {
            this.setNumSamples(samples.values().stream().mapToInt(List::size).sum());
        }
        this.setNumTotalSamples(getNumSamples());
        this.setSource(source);
    }

    /**
     * Returns the per-key sample name lists stored in the attributes, or null when
     * absent or not a Map.
     */
    public Map<String, List<String>> getSamples() {
        Object o = getAttributes().get(SAMPLES);
        if (!(o instanceof Map)) {
            return null;
        } else {
            // Unchecked cast: the value is only ever written via setSamples, which takes
            // a Map<String, List<String>>.
            return ((Map<String, List<String>>) o);
        }
    }

    public VariantQueryResult<T> setSamples(Map<String, List<String>> samples) {
        getAttributes().put(SAMPLES, samples);
        return this;
    }

    // NOTE(review): for the Integer/Boolean getters below, containsKey() is true even if
    // the stored value is null; ObjectMap.getInt/getBoolean would then return a default
    // (e.g. 0/false) rather than null — confirm this is the intended behavior.
    public Integer getNumTotalSamples() {
        return getAttributes().containsKey(NUM_TOTAL_SAMPLES) ? getAttributes().getInt(NUM_TOTAL_SAMPLES) : null;
    }

    public VariantQueryResult<T> setNumTotalSamples(Integer numTotalSamples) {
        getAttributes().put(NUM_TOTAL_SAMPLES, numTotalSamples);
        return this;
    }

    public Integer getNumSamples() {
        return getAttributes().containsKey(NUM_SAMPLES) ? getAttributes().getInt(NUM_SAMPLES) : null;
    }

    public VariantQueryResult<T> setNumSamples(Integer numSamples) {
        getAttributes().put(NUM_SAMPLES, numSamples);
        return this;
    }

    public Boolean getApproximateCount() {
        return getAttributes().containsKey(APPROXIMATE_COUNT) ? getAttributes().getBoolean(APPROXIMATE_COUNT) : null;
    }

    public VariantQueryResult<T> setApproximateCount(Boolean approximateCount) {
        getAttributes().put(APPROXIMATE_COUNT, approximateCount);
        return this;
    }

    public Integer getApproximateCountSamplingSize() {
        return getAttributes().containsKey(APPROXIMATE_COUNT_SAMPLING_SIZE) ? getAttributes().getInt(APPROXIMATE_COUNT_SAMPLING_SIZE) : null;
    }

    public VariantQueryResult<T> setApproximateCountSamplingSize(Integer approximateCountSamplingSize) {
        getAttributes().put(APPROXIMATE_COUNT_SAMPLING_SIZE, approximateCountSamplingSize);
        return this;
    }

    public String getSource() {
        return getAttributes().getString(SOURCE);
    }

    public VariantQueryResult<T> setSource(String source) {
        getAttributes().put(SOURCE, source);
        return this;
    }

    /**
     * Returns the attributes map, lazily creating (and storing) an empty ObjectMap when
     * the parent result has none, so the accessors above never see null.
     */
    @Override
    public ObjectMap getAttributes() {
        ObjectMap attributes = super.getAttributes();
        if (attributes == null) {
            attributes = new ObjectMap();
            setAttributes(attributes);
        }
        return attributes;
    }
}
| apache-2.0 |
jpaw/jdp | jdp-core/src/main/java/de/jpaw/dp/StartupOnly.java | 692 | package de.jpaw.dp;
/** Interface to be implemented by classes which run code upon startup, but not shutdown.
* The startup priority is determined by the numeric parameter to the @Startup annotation,
* the shutdown sequence is the reverse of the startup sequence.
*
* This offers an alternative to invoking the static onStartup method.
* The benefit of using the interface is, that reflection code to find the class and invoke the method can
* be separated from the actual business logic, allowing for clearer stack traces in case of exceptions,
* and separate exception codes. */
public interface StartupOnly {
    /**
     * Code to be executed once at startup time. Invocation order is determined by the
     * numeric priority of the implementing class's {@code @Startup} annotation (see the
     * interface javadoc); there is no corresponding shutdown callback on this interface.
     */
    void onStartup();
}
| apache-2.0 |
alanfgates/hive | ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java | 230352 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer.physical;
import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNIFORM;
import java.io.IOException;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.Stack;
import java.util.TreeSet;
import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
import org.apache.hadoop.hive.ql.exec.vector.reducesink.*;
import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.exec.*;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey;
import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
import org.apache.hadoop.hive.ql.exec.tez.TezTask;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.filesink.VectorFileSinkArrowOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyLongOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyMultiKeyOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyStringOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerLongOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerMultiKeyOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerStringOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiLongOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiMultiKeyOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiStringOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterLongOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterMultiKeyOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterStringOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterLongOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterMultiKeyOperator;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterStringOperator;
import org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFOperator;
import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOutputMapping;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping;
import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.HiveVectorAdaptorUsageMode;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.InConstantType;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.ZeroRowsInputFormat;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.TaskGraphWalker;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.AppMasterEventDesc;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc.ExprNodeDescEqualityWrapper;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.MergeJoinWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.plan.TopNKeyDesc;
import org.apache.hadoop.hive.ql.plan.VectorAppMasterEventDesc;
import org.apache.hadoop.hive.ql.plan.VectorDesc;
import org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc;
import org.apache.hadoop.hive.ql.plan.VectorFilterDesc;
import org.apache.hadoop.hive.ql.plan.VectorPTFDesc;
import org.apache.hadoop.hive.ql.plan.VectorPTFInfo;
import org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType;
import org.apache.hadoop.hive.ql.plan.VectorTableScanDesc;
import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc.ProcessingMode;
import org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc;
import org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc;
import org.apache.hadoop.hive.ql.plan.VectorTopNKeyDesc;
import org.apache.hadoop.hive.ql.plan.VectorLimitDesc;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo;
import org.apache.hadoop.hive.ql.plan.VectorSMBJoinDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
import org.apache.hadoop.hive.ql.plan.SparkHashTableSinkDesc;
import org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc;
import org.apache.hadoop.hive.ql.plan.SparkWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.TezWork;
import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKind;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation;
import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc.VectorDeserializeType;
import org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo;
import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc;
import org.apache.hadoop.hive.ql.plan.VectorSelectDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper;
import org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
import org.apache.hadoop.hive.ql.udf.UDFAcos;
import org.apache.hadoop.hive.ql.udf.UDFAsin;
import org.apache.hadoop.hive.ql.udf.UDFAtan;
import org.apache.hadoop.hive.ql.udf.UDFBin;
import org.apache.hadoop.hive.ql.udf.UDFConv;
import org.apache.hadoop.hive.ql.udf.UDFCos;
import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
import org.apache.hadoop.hive.ql.udf.UDFDayOfWeek;
import org.apache.hadoop.hive.ql.udf.UDFDegrees;
import org.apache.hadoop.hive.ql.udf.UDFExp;
import org.apache.hadoop.hive.ql.udf.UDFFromUnixTime;
import org.apache.hadoop.hive.ql.udf.UDFHex;
import org.apache.hadoop.hive.ql.udf.UDFHour;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.hive.ql.udf.UDFLn;
import org.apache.hadoop.hive.ql.udf.UDFLog;
import org.apache.hadoop.hive.ql.udf.UDFLog10;
import org.apache.hadoop.hive.ql.udf.UDFLog2;
import org.apache.hadoop.hive.ql.udf.UDFMinute;
import org.apache.hadoop.hive.ql.udf.UDFMonth;
import org.apache.hadoop.hive.ql.udf.UDFRadians;
import org.apache.hadoop.hive.ql.udf.UDFRand;
import org.apache.hadoop.hive.ql.udf.UDFRegExpExtract;
import org.apache.hadoop.hive.ql.udf.UDFRegExpReplace;
import org.apache.hadoop.hive.ql.udf.UDFSecond;
import org.apache.hadoop.hive.ql.udf.UDFSign;
import org.apache.hadoop.hive.ql.udf.UDFSin;
import org.apache.hadoop.hive.ql.udf.UDFSqrt;
import org.apache.hadoop.hive.ql.udf.UDFSubstr;
import org.apache.hadoop.hive.ql.udf.UDFTan;
import org.apache.hadoop.hive.ql.udf.UDFToBoolean;
import org.apache.hadoop.hive.ql.udf.UDFToByte;
import org.apache.hadoop.hive.ql.udf.UDFToDouble;
import org.apache.hadoop.hive.ql.udf.UDFToFloat;
import org.apache.hadoop.hive.ql.udf.UDFToInteger;
import org.apache.hadoop.hive.ql.udf.UDFToLong;
import org.apache.hadoop.hive.ql.udf.UDFToShort;
import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
import org.apache.hadoop.hive.ql.udf.UDFYear;
import org.apache.hadoop.hive.ql.udf.generic.*;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.NullStructSerDe;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hive.common.util.AnnotationUtils;
import org.apache.hadoop.util.ReflectionUtils;
import com.google.common.collect.ImmutableSet;
import com.google.common.base.Preconditions;
public class Vectorizer implements PhysicalPlanResolver {
protected static transient final Logger LOG = LoggerFactory.getLogger(Vectorizer.class);
private static final Pattern supportedDataTypesPattern;
private static final TypeInfo[] EMPTY_TYPEINFO_ARRAY = new TypeInfo[0];
static {
StringBuilder patternBuilder = new StringBuilder();
patternBuilder.append("int");
patternBuilder.append("|smallint");
patternBuilder.append("|tinyint");
patternBuilder.append("|bigint");
patternBuilder.append("|integer");
patternBuilder.append("|long");
patternBuilder.append("|short");
patternBuilder.append("|timestamp");
patternBuilder.append("|" + serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
patternBuilder.append("|" + serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
patternBuilder.append("|boolean");
patternBuilder.append("|binary");
patternBuilder.append("|string");
patternBuilder.append("|byte");
patternBuilder.append("|float");
patternBuilder.append("|double");
patternBuilder.append("|date");
patternBuilder.append("|void");
// Decimal types can be specified with different precision and scales e.g. decimal(10,5),
// as opposed to other data types which can be represented by constant strings.
// The regex matches only the "decimal" prefix of the type.
patternBuilder.append("|decimal.*");
// CHAR and VARCHAR types can be specified with maximum length.
patternBuilder.append("|char.*");
patternBuilder.append("|varchar.*");
supportedDataTypesPattern = Pattern.compile(patternBuilder.toString());
}
private Set<Class<?>> supportedGenericUDFs = new HashSet<Class<?>>();
private Set<String> supportedAggregationUdfs = new HashSet<String>();
// The set of virtual columns that vectorized readers *MAY* support.
public static final ImmutableSet<VirtualColumn> vectorizableVirtualColumns =
ImmutableSet.of(VirtualColumn.ROWID);
private HiveConf hiveConf;
/**
 * Tri-state vectorization override coming from configuration:
 * NONE (no override), DISABLE, or ENABLE.
 */
public enum EnabledOverride {
  NONE,
  DISABLE,
  ENABLE;

  /**
   * Lookup table keyed by the lower-cased constant name ("none", "disable", "enable").
   * NOTE(review): toLowerCase() uses the default locale — under a Turkish locale
   * "DISABLE" would not lower-case to "disable"; confirm whether Locale.ROOT is wanted.
   */
  public static final Map<String, EnabledOverride> nameMap =
      new HashMap<String, EnabledOverride>();
  static {
    for (EnabledOverride vectorizationEnabledOverride : values()) {
      nameMap.put(
          vectorizationEnabledOverride.name().toLowerCase(), vectorizationEnabledOverride);
    }
  }
  // FIX: removed the stray empty statement (";") that followed the static initializer.
}
boolean isVectorizationEnabled;
private EnabledOverride vectorizationEnabledOverride;
boolean isTestForcedVectorizationEnable;
private boolean useVectorizedInputFileFormat;
private boolean useVectorDeserialize;
private boolean useRowDeserialize;
private boolean isReduceVectorizationEnabled;
private boolean isPtfVectorizationEnabled;
private boolean isVectorizationComplexTypesEnabled;
// Now deprecated.
private boolean isVectorizationGroupByComplexTypesEnabled;
private boolean isVectorizedRowIdentifierEnabled;
private String vectorizedInputFormatSupportEnabled;
private boolean isLlapIoEnabled;
private Set<Support> vectorizedInputFormatSupportEnabledSet;
private Collection<Class<?>> rowDeserializeInputFormatExcludes;
private int vectorizedPTFMaxMemoryBufferingBatchCount;
private int vectorizedTestingReducerBatchSize;
private boolean isTestVectorizerSuppressFatalExceptions;
private boolean isSchemaEvolution;
private HiveVectorAdaptorUsageMode hiveVectorAdaptorUsageMode;
private static final Set<Support> vectorDeserializeTextSupportSet = new TreeSet<Support>();
static {
vectorDeserializeTextSupportSet.addAll(Arrays.asList(Support.values()));
}
private static final Set<String> supportedAcidInputFormats = new TreeSet<String>();
static {
supportedAcidInputFormats.add(OrcInputFormat.class.getName());
// For metadataonly or empty rows optimizations, null/onerow input format can be selected.
supportedAcidInputFormats.add(NullRowsInputFormat.class.getName());
supportedAcidInputFormats.add(OneNullRowInputFormat.class.getName());
supportedAcidInputFormats.add(ZeroRowsInputFormat.class.getName());
}
private boolean isTestVectorizationSuppressExplainExecutionMode;

// Work / operator currently being validated; the set*Issue helpers below attach
// not-vectorized reasons to these.
private BaseWork currentBaseWork;
private Operator<? extends OperatorDesc> currentOperator;
private Collection<Class<?>> vectorizedInputFormatExcludes;

// Parent-pointer fix-ups deferred until the whole operator tree is vectorized.
// Identity map on purpose: operators must be compared by reference here.
// FIX: the IdentityHashMap's value type argument previously used
// ImmutablePair<Operator<?>, Operator<?>>, which does not match the declared map type
// (generics are invariant) and fails to compile; the type arguments now agree.
private Map<Operator<? extends OperatorDesc>, Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>> delayedFixups =
    new IdentityHashMap<Operator<? extends OperatorDesc>, Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>>();
public void testSetCurrentBaseWork(BaseWork testBaseWork) {
currentBaseWork = testBaseWork;
}
private void setNodeIssue(String issue) {
currentBaseWork.setNotVectorizedReason(
VectorizerReason.createNodeIssue(issue));
}
private void setOperatorIssue(String issue) {
currentBaseWork.setNotVectorizedReason(
VectorizerReason.createOperatorIssue(currentOperator, issue));
}
private void setExpressionIssue(String expressionTitle, String issue) {
currentBaseWork.setNotVectorizedReason(
VectorizerReason.createExpressionIssue(currentOperator, expressionTitle, issue));
}
private void clearNotVectorizedReason() {
currentBaseWork.setNotVectorizedReason(null);
}
private long vectorizedVertexNum = -1;
private Set<VirtualColumn> availableVectorizedVirtualColumnSet = null;
private Set<VirtualColumn> neededVirtualColumnSet = null;
private PlanMapper planMapper;
/**
 * Internal signal that the current task cannot be vectorized; thrown during
 * validation/vectorization and caught by the vectorizer's driver code.
 */
public class VectorizerCannotVectorizeException extends Exception {
  // Exception is Serializable; declare an explicit version id instead of relying
  // on the compiler-generated one.
  private static final long serialVersionUID = 1L;
}
/**
 * Creates a Vectorizer and registers the UDF classes and aggregation function names
 * that have vectorized implementations.
 *
 * UDFs are checked against supportedGenericUDFs when
 * hive.vectorized.adaptor.usage.mode=chosen or none; with mode=all every UDF is allowed.
 */
public Vectorizer() {

  supportedGenericUDFs.addAll(Arrays.<Class<?>>asList(
      // Arithmetic operators.
      GenericUDFOPPlus.class,
      GenericUDFOPMinus.class,
      GenericUDFOPMultiply.class,
      GenericUDFOPDivide.class,
      GenericUDFOPMod.class,
      GenericUDFOPNegative.class,
      GenericUDFOPPositive.class,
      // Comparison and logical operators.
      GenericUDFOPEqualOrLessThan.class,
      GenericUDFOPEqualOrGreaterThan.class,
      GenericUDFOPGreaterThan.class,
      GenericUDFOPLessThan.class,
      GenericUDFOPNot.class,
      GenericUDFOPNotEqual.class,
      GenericUDFOPNotNull.class,
      GenericUDFOPNull.class,
      GenericUDFOPOr.class,
      GenericUDFOPAnd.class,
      GenericUDFOPEqual.class,
      // String length functions.
      GenericUDFLength.class,
      GenericUDFCharacterLength.class,
      GenericUDFOctetLength.class,
      // Date/time extraction and arithmetic.
      UDFYear.class,
      UDFMonth.class,
      UDFDayOfMonth.class,
      UDFDayOfWeek.class,
      UDFHour.class,
      UDFMinute.class,
      UDFSecond.class,
      UDFWeekOfYear.class,
      GenericUDFToUnixTimeStamp.class,
      UDFFromUnixTime.class,
      GenericUDFDateAdd.class,
      GenericUDFDateSub.class,
      GenericUDFDate.class,
      GenericUDFDateDiff.class,
      // Pattern matching and string manipulation.
      UDFLike.class,
      GenericUDFRegExp.class,
      UDFRegExpExtract.class,
      UDFRegExpReplace.class,
      UDFSubstr.class,
      GenericUDFLTrim.class,
      GenericUDFRTrim.class,
      GenericUDFTrim.class,
      // Math functions.
      UDFSin.class,
      UDFCos.class,
      UDFTan.class,
      UDFAsin.class,
      UDFAcos.class,
      UDFAtan.class,
      UDFDegrees.class,
      UDFRadians.class,
      GenericUDFFloor.class,
      GenericUDFCeil.class,
      UDFExp.class,
      UDFLn.class,
      UDFLog2.class,
      UDFLog10.class,
      UDFLog.class,
      GenericUDFPower.class,
      GenericUDFRound.class,
      GenericUDFBRound.class,
      GenericUDFPosMod.class,
      UDFSqrt.class,
      UDFSign.class,
      UDFRand.class,
      UDFBin.class,
      UDFHex.class,
      UDFConv.class,
      // String casing / concatenation / misc.
      GenericUDFLower.class,
      GenericUDFUpper.class,
      GenericUDFConcat.class,
      GenericUDFAbs.class,
      GenericUDFBetween.class,
      GenericUDFIn.class,
      GenericUDFCase.class,
      GenericUDFWhen.class,
      GenericUDFCoalesce.class,
      GenericUDFElt.class,
      GenericUDFInitCap.class,
      GenericUDFInBloomFilter.class,
      // For type casts
      UDFToLong.class,
      UDFToInteger.class,
      UDFToShort.class,
      UDFToByte.class,
      UDFToBoolean.class,
      UDFToFloat.class,
      UDFToDouble.class,
      GenericUDFToString.class,
      GenericUDFTimestamp.class,
      GenericUDFToDecimal.class,
      GenericUDFToDate.class,
      GenericUDFToChar.class,
      GenericUDFToVarchar.class,
      GenericUDFToIntervalYearMonth.class,
      GenericUDFToIntervalDayTime.class,
      // For conditional expressions
      GenericUDFIf.class));

  // Aggregation functions with native vectorized implementations.
  supportedAggregationUdfs.addAll(Arrays.asList(
      "min", "max", "count", "sum", "avg",
      "variance", "var_pop", "var_samp",
      "std", "stddev", "stddev_pop", "stddev_samp",
      "bloom_filter"));
}
private class VectorTaskColumnInfo {
List<String> allColumnNames;
List<TypeInfo> allTypeInfos;
List<Integer> dataColumnNums;
int partitionColumnCount;
List<VirtualColumn> availableVirtualColumnList;
List<VirtualColumn> neededVirtualColumnList;
//not to be confused with useVectorizedInputFileFormat at Vectorizer level
//which represents the value of configuration hive.vectorized.use.vectorized.input.format
private boolean useVectorizedInputFileFormat;
Set<Support> inputFormatSupportSet;
Set<Support> supportSetInUse;
List<String> supportRemovedReasons;
List<DataTypePhysicalVariation> allDataTypePhysicalVariations;
boolean allNative;
boolean usesVectorUDFAdaptor;
String[] scratchTypeNameArray;
DataTypePhysicalVariation[] scratchdataTypePhysicalVariations;
String reduceColumnSortOrder;
String reduceColumnNullOrder;
VectorTaskColumnInfo() {
partitionColumnCount = 0;
}
public void assume() {
allNative = true;
usesVectorUDFAdaptor = false;
}
public void setAllColumnNames(List<String> allColumnNames) {
this.allColumnNames = allColumnNames;
}
public void setAllTypeInfos(List<TypeInfo> allTypeInfos) {
this.allTypeInfos = allTypeInfos;
}
public void setDataColumnNums(List<Integer> dataColumnNums) {
this.dataColumnNums = dataColumnNums;
}
public void setPartitionColumnCount(int partitionColumnCount) {
this.partitionColumnCount = partitionColumnCount;
}
public void setAvailableVirtualColumnList(List<VirtualColumn> availableVirtualColumnList) {
this.availableVirtualColumnList = availableVirtualColumnList;
}
public void setNeededVirtualColumnList(List<VirtualColumn> neededVirtualColumnList) {
this.neededVirtualColumnList = neededVirtualColumnList;
}
public void setSupportSetInUse(Set<Support> supportSetInUse) {
this.supportSetInUse = supportSetInUse;
}
public void setSupportRemovedReasons(List<String> supportRemovedReasons) {
this.supportRemovedReasons = supportRemovedReasons;
}
public void setAlldataTypePhysicalVariations(List<DataTypePhysicalVariation> allDataTypePhysicalVariations) {
this.allDataTypePhysicalVariations = allDataTypePhysicalVariations;
}
public void setScratchTypeNameArray(String[] scratchTypeNameArray) {
this.scratchTypeNameArray = scratchTypeNameArray;
}
public void setScratchdataTypePhysicalVariationsArray(DataTypePhysicalVariation[] scratchdataTypePhysicalVariations) {
this.scratchdataTypePhysicalVariations = scratchdataTypePhysicalVariations;
}
public void setAllNative(boolean allNative) {
this.allNative = allNative;
}
public void setUsesVectorUDFAdaptor(boolean usesVectorUDFAdaptor) {
this.usesVectorUDFAdaptor = usesVectorUDFAdaptor;
}
public void setUseVectorizedInputFileFormat(boolean useVectorizedInputFileFormat) {
this.useVectorizedInputFileFormat = useVectorizedInputFileFormat;
}
public void setInputFormatSupportSet(Set<Support> inputFormatSupportSet) {
this.inputFormatSupportSet = inputFormatSupportSet;
}
public void setReduceColumnSortOrder(String reduceColumnSortOrder) {
this.reduceColumnSortOrder = reduceColumnSortOrder;
}
public void setReduceColumnNullOrder(String reduceColumnNullOrder) {
this.reduceColumnNullOrder = reduceColumnNullOrder;
}
public void transferToBaseWork(BaseWork baseWork) {
final int virtualColumnCount =
(availableVirtualColumnList == null ? 0 : availableVirtualColumnList.size());
VirtualColumn[] neededVirtualColumns;
if (neededVirtualColumnList != null && neededVirtualColumnList.size() > 0) {
neededVirtualColumns = neededVirtualColumnList.toArray(new VirtualColumn[0]);
} else {
neededVirtualColumns = new VirtualColumn[0];
}
String[] allColumnNameArray = allColumnNames.toArray(new String[0]);
TypeInfo[] allTypeInfoArray = allTypeInfos.toArray(new TypeInfo[0]);
int[] dataColumnNumsArray;
if (dataColumnNums != null) {
dataColumnNumsArray = ArrayUtils.toPrimitive(dataColumnNums.toArray(new Integer[0]));
} else {
dataColumnNumsArray = null;
}
DataTypePhysicalVariation[] allDataTypePhysicalVariationArray;
if (allDataTypePhysicalVariations == null) {
allDataTypePhysicalVariationArray = new DataTypePhysicalVariation[allTypeInfoArray.length];
Arrays.fill(allDataTypePhysicalVariationArray, DataTypePhysicalVariation.NONE);
} else {
allDataTypePhysicalVariationArray =
allDataTypePhysicalVariations.toArray(new DataTypePhysicalVariation[0]);
}
VectorizedRowBatchCtx vectorizedRowBatchCtx =
new VectorizedRowBatchCtx(
allColumnNameArray,
allTypeInfoArray,
allDataTypePhysicalVariationArray,
dataColumnNumsArray,
partitionColumnCount,
virtualColumnCount,
neededVirtualColumns,
scratchTypeNameArray,
scratchdataTypePhysicalVariations);
baseWork.setVectorizedRowBatchCtx(vectorizedRowBatchCtx);
if (baseWork instanceof MapWork) {
MapWork mapWork = (MapWork) baseWork;
mapWork.setUseVectorizedInputFileFormat(useVectorizedInputFileFormat);
mapWork.setInputFormatSupportSet(inputFormatSupportSet);
mapWork.setSupportSetInUse(supportSetInUse);
mapWork.setSupportRemovedReasons(supportRemovedReasons);
}
if (baseWork instanceof ReduceWork) {
ReduceWork reduceWork = (ReduceWork) baseWork;
reduceWork.setVectorReduceColumnSortOrder(reduceColumnSortOrder);
reduceWork.setVectorReduceColumnNullOrder(reduceColumnNullOrder);
}
baseWork.setAllNative(allNative);
baseWork.setUsesVectorUDFAdaptor(usesVectorUDFAdaptor);
baseWork.setIsTestForcedVectorizationEnable(isTestForcedVectorizationEnable);
baseWork.setIsTestVectorizationSuppressExplainExecutionMode(
isTestVectorizationSuppressExplainExecutionMode);
}
}
/*
* Used as a dummy root operator to attach vectorized operators that will be built in parallel
* to the current non-vectorized operator tree.
*/
/**
 * Descriptor for the dummy root operator that anchors the parallel vectorized
 * operator tree while it is being built.
 */
private static class DummyRootVectorDesc extends AbstractOperatorDesc {

  // Operator descriptors are Serializable; declare an explicit version id.
  private static final long serialVersionUID = 1L;

  public DummyRootVectorDesc() {
    super();
  }
}
/**
 * Inert placeholder operator; it only anchors a tree and must never process rows.
 */
private static class DummyOperator extends Operator<DummyRootVectorDesc> {

  // Operators are Serializable; declare an explicit version id.
  private static final long serialVersionUID = 1L;

  public DummyOperator() {
    super(new CompilationOpContext());
  }

  @Override
  public void process(Object row, int tag) throws HiveException {
    // Deliberate hard failure: row processing on the dummy root is a programming error.
    throw new RuntimeException("Not used");
  }

  @Override
  public String getName() {
    return "DUMMY";
  }

  @Override
  public OperatorType getType() {
    return null;
  }
}
/**
 * Dummy root of the parallel vectorized operator tree; carries the task-level
 * VectorizationContext that child vectorization starts from.
 */
private static class DummyVectorOperator extends DummyOperator
    implements VectorizationOperator {

  // Operators are Serializable; declare an explicit version id.
  private static final long serialVersionUID = 1L;

  // Assigned once in the constructor; made final for immutability.
  private final VectorizationContext vContext;

  public DummyVectorOperator(VectorizationContext vContext) {
    super();
    this.conf = new DummyRootVectorDesc();
    this.vContext = vContext;
  }

  @Override
  public VectorizationContext getInputVectorizationContext() {
    return vContext;
  }

  @Override
  public VectorDesc getVectorDesc() {
    // The dummy root has no vector descriptor of its own.
    return null;
  }
}
private static List<Operator<? extends OperatorDesc>> newOperatorList() {
return new ArrayList<Operator<? extends OperatorDesc>>();
}
/**
 * Debug helper: prints the operator tree around a join operator to System.out —
 * first the ancestors level by level (depth 0, -1, -2, ...), then the descendants
 * (depth 1, 2, 3, ...). Each output line starts with the caller-supplied prefix.
 */
public static void debugDisplayJoinOperatorTree(Operator<? extends OperatorDesc> joinOperator,
    String prefix) {

  // Breadth-first walk upward through the parents; depth counts down from 0.
  List<Operator<? extends OperatorDesc>> currentParentList = newOperatorList();
  currentParentList.add(joinOperator);
  int depth = 0;
  do {
    List<Operator<? extends OperatorDesc>> nextParentList = newOperatorList();
    final int count = currentParentList.size();
    for (int i = 0; i < count; i++) {
      Operator<? extends OperatorDesc> parent = currentParentList.get(i);
      System.out.println(prefix + " parent depth " + depth + " " +
          parent.getClass().getSimpleName() + " " + parent.toString());
      List<Operator<? extends OperatorDesc>> parentList = parent.getParentOperators();
      if (parentList == null || parentList.size() == 0) {
        continue;
      }
      nextParentList.addAll(parentList);
    }
    currentParentList = nextParentList;
    depth--;
  } while (currentParentList.size() > 0);

  // Breadth-first walk downward through the children; depth counts up from 1.
  List<Operator<? extends OperatorDesc>> currentChildList = newOperatorList();
  currentChildList.addAll(joinOperator.getChildOperators());
  depth = 1;
  do {
    List<Operator<? extends OperatorDesc>> nextChildList = newOperatorList();
    final int count = currentChildList.size();
    for (int i = 0; i < count; i++) {
      Operator<? extends OperatorDesc> child = currentChildList.get(i);
      System.out.println(prefix + " child depth " + depth + " " +
          child.getClass().getSimpleName() + " " + child.toString());
      List<Operator<? extends OperatorDesc>> childList = child.getChildOperators();
      if (childList == null || childList.size() == 0) {
        continue;
      }
      nextChildList.addAll(childList);
    }
    currentChildList = nextChildList;
    // FIX: was depth--, which made child levels print 1, 0, -1, ... and collide with
    // the parent depth numbering; descendant levels count upward.
    depth++;
  } while (currentChildList.size() > 0);
}
private Operator<? extends OperatorDesc> validateAndVectorizeOperatorTree(
Operator<? extends OperatorDesc> nonVecRootOperator,
boolean isReduce, boolean isTezOrSpark,
VectorTaskColumnInfo vectorTaskColumnInfo)
throws VectorizerCannotVectorizeException {
VectorizationContext taskVContext =
new VectorizationContext(
"Task",
vectorTaskColumnInfo.allColumnNames,
vectorTaskColumnInfo.allTypeInfos,
vectorTaskColumnInfo.allDataTypePhysicalVariations,
hiveConf);
List<Operator<? extends OperatorDesc>> currentParentList = newOperatorList();
currentParentList.add(nonVecRootOperator);
// Start with dummy vector operator as the parent of the parallel vector operator tree we are
// creating
Operator<? extends OperatorDesc> dummyVectorOperator = new DummyVectorOperator(taskVContext);
List<Operator<? extends OperatorDesc>> currentVectorParentList = newOperatorList();
currentVectorParentList.add(dummyVectorOperator);
delayedFixups.clear();
do {
List<Operator<? extends OperatorDesc>> nextParentList = newOperatorList();
List<Operator<? extends OperatorDesc>> nextVectorParentList= newOperatorList();
final int count = currentParentList.size();
for (int i = 0; i < count; i++) {
Operator<? extends OperatorDesc> parent = currentParentList.get(i);
List<Operator<? extends OperatorDesc>> childrenList = parent.getChildOperators();
if (childrenList == null || childrenList.size() == 0) {
continue;
}
Operator<? extends OperatorDesc> vectorParent = currentVectorParentList.get(i);
/*
* Vectorize this parent's children. Plug them into vectorParent's children list.
*
* Add those children / vector children to nextParentList / nextVectorParentList.
*/
doProcessChildren(
parent, vectorParent, nextParentList, nextVectorParentList,
isReduce, isTezOrSpark, vectorTaskColumnInfo);
}
currentParentList = nextParentList;
currentVectorParentList = nextVectorParentList;
} while (currentParentList.size() > 0);
runDelayedFixups();
return dummyVectorOperator;
}
/**
 * Vectorizes each child of parent and hooks the resulting vector children under
 * vectorParent. The original children and their vector counterparts are appended at
 * matching positions to nextParentList / nextVectorParentList for the next BFS level.
 *
 * Throws VectorizerCannotVectorizeException when a child cannot be vectorized.
 */
private void doProcessChildren(
    Operator<? extends OperatorDesc> parent,
    Operator<? extends OperatorDesc> vectorParent,
    List<Operator<? extends OperatorDesc>> nextParentList,
    List<Operator<? extends OperatorDesc>> nextVectorParentList,
    boolean isReduce, boolean isTezOrSpark,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws VectorizerCannotVectorizeException {

  // FIX: removed two unused locals (vectorChildren, listOfChildMultipleParents) that
  // were allocated but never read.
  List<Operator<? extends OperatorDesc>> children = parent.getChildOperators();
  final int childrenCount = children.size();
  for (int i = 0; i < childrenCount; i++) {
    Operator<? extends OperatorDesc> child = children.get(i);
    Operator<? extends OperatorDesc> vectorChild =
        doProcessChild(
            child, vectorParent, isReduce, isTezOrSpark, vectorTaskColumnInfo);

    // Wire the new vector child into the vector tree and record any cross-links.
    fixupNewVectorChild(
        parent,
        vectorParent,
        child,
        vectorChild);

    nextParentList.add(child);
    nextVectorParentList.add(vectorChild);
  }
}
/*
* Fixup the children and parents of a new vector child.
*
* 1) Add new vector child to the vector parent's children list.
*
* 2) Copy and fixup the parent list of the original child instead of just assuming a 1:1
* relationship.
*
* a) When the child is MapJoinOperator, it will have an extra parent HashTableDummyOperator
* for the MapJoinOperator's small table. It needs to be fixed up, too.
*/
/**
 * Wires a freshly created vector child into the vector tree:
 *
 * 1) Appends vectorChild to vectorParent's children.
 *
 * 2) Clones the original child's parent list, substituting vectorParent for parent.
 *    Any other parent (e.g. a MapJoinOperator's small-table HashTableDummyOperator)
 *    cannot be patched yet, so it is queued for a delayed fixup.
 */
private void fixupNewVectorChild(
    Operator<? extends OperatorDesc> parent,
    Operator<? extends OperatorDesc> vectorParent,
    Operator<? extends OperatorDesc> child,
    Operator<? extends OperatorDesc> vectorChild) {

  // Step 1: attach the vector child under its vector parent.
  vectorParent.getChildOperators().add(vectorChild);

  // Step 2: copy the original parent list and swap in the vector parent.
  List<Operator<? extends OperatorDesc>> copiedParents = newOperatorList();
  copiedParents.addAll(child.getParentOperators());
  for (int idx = 0; idx < copiedParents.size(); idx++) {
    Operator<? extends OperatorDesc> originalParent = copiedParents.get(idx);
    if (originalParent == parent) {
      copiedParents.set(idx, vectorParent);
    } else {
      // Not the parent we just vectorized — patch it after the whole tree is done.
      queueDelayedFixup(originalParent, child, vectorChild);
    }
  }
  vectorChild.setParentOperators(copiedParents);
}
/*
 * Record a (child, vectorChild) swap for a parent operator, to be applied later
 * so that parent operators are not modified until the entire operator tree has
 * been vectorized.
 *
 * Fix: the previous code performed three map lookups (get/put/get); this does
 * at most one get plus one put.
 */
private void queueDelayedFixup(Operator<? extends OperatorDesc> parent,
    Operator<? extends OperatorDesc> child, Operator<? extends OperatorDesc> vectorChild) {
  Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>> fixups =
      delayedFixups.get(parent);
  if (fixups == null) {
    fixups =
        new HashSet<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>(1);
    delayedFixups.put(parent, fixups);
  }
  fixups.add(
      new ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>(
          child, vectorChild));
}
/*
 * Apply every queued parent-side swap (original child -> vector child), then
 * discard the queue.
 */
private void runDelayedFixups() {
  for (Entry<Operator<? extends OperatorDesc>, Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>> entry
      : delayedFixups.entrySet()) {
    Operator<? extends OperatorDesc> parentOp = entry.getKey();
    for (ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>> swapPair
        : entry.getValue()) {
      fixupOtherParent(parentOp, swapPair.getLeft(), swapPair.getRight());
    }
  }
  delayedFixups.clear();
}
/*
 * In the given parent's children list, replace every occurrence of the original
 * child with its vectorized replacement (identity comparison, in place).
 */
private void fixupOtherParent(
    Operator<? extends OperatorDesc> childMultipleParent,
    Operator<? extends OperatorDesc> child,
    Operator<? extends OperatorDesc> vectorChild) {
  List<Operator<? extends OperatorDesc>> siblings = childMultipleParent.getChildOperators();
  for (int slot = 0; slot < siblings.size(); slot++) {
    if (siblings.get(slot) == child) {
      siblings.set(slot, vectorChild);
    }
  }
}
/*
 * Validate and vectorize a single child operator.
 *
 * The vectorization context comes from the already-vectorized parent: its
 * output context when it defines a new context region, otherwise its input
 * context.
 *
 * @throws VectorizerCannotVectorizeException when validation/vectorization
 *         fails; the underlying HiveException is recorded via setNodeIssue.
 *
 * Fix: removed unused local "desc = child.getConf()" (a plain getter whose
 * result was never read).
 */
private Operator<? extends OperatorDesc> doProcessChild(
    Operator<? extends OperatorDesc> child,
    Operator<? extends OperatorDesc> vectorParent,
    boolean isReduce, boolean isTezOrSpark,
    VectorTaskColumnInfo vectorTaskColumnInfo)
    throws VectorizerCannotVectorizeException {
  // Use vector parent to get VectorizationContext.
  final VectorizationContext vContext =
      (vectorParent instanceof VectorizationContextRegion)
          ? ((VectorizationContextRegion) vectorParent).getOutputVectorizationContext()
          : ((VectorizationOperator) vectorParent).getInputVectorizationContext();
  try {
    return validateAndVectorizeOperator(
        child, vContext, isReduce, isTezOrSpark, vectorTaskColumnInfo);
  } catch (HiveException e) {
    // Record why this node could not be vectorized, then signal the caller.
    String issue = "exception: " + VectorizationContext.getStackTraceAsSingleLine(e);
    setNodeIssue(issue);
    throw new VectorizerCannotVectorizeException();
  }
}
class VectorizationDispatcher implements Dispatcher {
/**
 * Dispatch vectorization over all works of a task.
 *
 * MapRedTask: the MapWork is examined/converted; MR Reduce is never vectorized
 * (only its EXPLAIN conditions are recorded). TezTask/SparkTask: each MapWork
 * and ReduceWork is examined; ReduceWork is converted only when reduce-side
 * vectorization is enabled. Tez MergeJoinWork only gets its EXPLAIN conditions
 * recorded. FetchTask and any other task types are not vectorized.
 *
 * Fix: the identical set-conditions / convert / log sequences that were
 * duplicated across the MR, Tez, and Spark branches are factored into two
 * private helpers; the MR branch also now consistently reuses its extracted
 * MapWork instead of calling mapredWork.getMapWork() a second time.
 */
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
    throws SemanticException {
  Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
  if (currTask instanceof MapRedTask) {
    MapredWork mapredWork = ((MapRedTask) currTask).getWork();
    examineAndConvertMapWork(mapredWork.getMapWork(), /* isTezOrSpark */ false);
    ReduceWork reduceWork = mapredWork.getReduceWork();
    if (reduceWork != null) {
      // We do not vectorize MR Reduce; only record the EXPLAIN conditions.
      examineAndConvertReduceWork(reduceWork, /* tryConvert */ false);
    }
  } else if (currTask instanceof TezTask) {
    TezWork work = ((TezTask) currTask).getWork();
    for (BaseWork baseWork : work.getAllWork()) {
      if (baseWork instanceof MapWork) {
        examineAndConvertMapWork((MapWork) baseWork, /* isTezOrSpark */ true);
      } else if (baseWork instanceof ReduceWork) {
        // We are only vectorizing Reduce under Tez/Spark.
        examineAndConvertReduceWork((ReduceWork) baseWork, isReduceVectorizationEnabled);
      } else if (baseWork instanceof MergeJoinWork) {
        MergeJoinWork mergeJoinWork = (MergeJoinWork) baseWork;
        // Always set the EXPLAIN conditions.
        setMergeJoinWorkExplainConditions(mergeJoinWork);
        logMergeJoinWorkExplainVectorization(mergeJoinWork);
      }
    }
  } else if (currTask instanceof SparkTask) {
    SparkWork sparkWork = (SparkWork) currTask.getWork();
    for (BaseWork baseWork : sparkWork.getAllWork()) {
      if (baseWork instanceof MapWork) {
        examineAndConvertMapWork((MapWork) baseWork, /* isTezOrSpark */ true);
      } else if (baseWork instanceof ReduceWork) {
        examineAndConvertReduceWork((ReduceWork) baseWork, isReduceVectorizationEnabled);
      }
    }
  } else if (currTask instanceof FetchTask) {
    LOG.info("Vectorizing Fetch not supported");
  } else {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Ignoring vectorization of " + currTask.getClass().getSimpleName());
    }
  }
  return null;
}
// Shared examine/convert/log sequence for a MapWork (MR, Tez, and Spark paths).
private void examineAndConvertMapWork(MapWork mapWork, boolean isTezOrSpark)
    throws SemanticException {
  // Always set the EXPLAIN conditions.
  setMapWorkExplainConditions(mapWork);
  convertMapWork(mapWork, isTezOrSpark);
  logMapWorkExplainVectorization(mapWork);
}
// Shared examine/convert/log sequence for a ReduceWork; conversion only when requested.
private void examineAndConvertReduceWork(ReduceWork reduceWork, boolean tryConvert)
    throws SemanticException {
  // Always set the EXPLAIN conditions.
  setReduceWorkExplainConditions(reduceWork);
  if (tryConvert) {
    convertReduceWork(reduceWork);
  }
  logReduceWorkExplainVectorization(reduceWork);
}
// Common EXPLAIN bookkeeping for any BaseWork that vectorization examines:
// remember it globally (for issue reporting), number it, and mark it examined.
private void setExplainConditions(BaseWork baseWork) {
  // Global used when setting errors, etc.
  currentBaseWork = baseWork;
  // Assign the next vectorized-vertex number and mark this vertex as examined
  // so EXPLAIN VECTORIZATION reports on it.
  baseWork.setVectorizedVertexNum(++vectorizedVertexNum);
  baseWork.setVectorizationExamined(true);
}
// MapWork records no extra EXPLAIN conditions beyond the common ones.
private void setMapWorkExplainConditions(MapWork mapWork) {
  setExplainConditions(mapWork);
}
// ReduceWork additionally records whether reduce-side vectorization is enabled
// and which execution engine is configured, for EXPLAIN reporting.
private void setReduceWorkExplainConditions(ReduceWork reduceWork) {
  setExplainConditions(reduceWork);
  reduceWork.setReduceVectorizationEnabled(isReduceVectorizationEnabled);
  reduceWork.setVectorReduceEngine(
      HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
}
// MergeJoinWork records no extra EXPLAIN conditions beyond the common ones.
private void setMergeJoinWorkExplainConditions(MergeJoinWork mergeJoinWork) {
  setExplainConditions(mergeJoinWork);
}
/*
 * Log the common vectorization summary for a work vertex.
 *
 * Returns false (and logs nothing) when the vertex was never examined for
 * vectorization; callers use the return value to decide whether to log their
 * own work-specific details. Detailed batch-context columns are only logged at
 * DEBUG level and only for vertices that actually vectorized.
 */
private boolean logExplainVectorization(BaseWork baseWork, String name) {
  if (!baseWork.getVectorizationExamined()) {
    return false;
  }
  final boolean vectorized = baseWork.getVectorMode();
  LOG.info(name + " vectorization enabled: " + baseWork.getVectorizationEnabled());
  LOG.info(name + " vectorized: " + vectorized);
  if (!vectorized) {
    VectorizerReason reason = baseWork.getNotVectorizedReason();
    if (reason != null) {
      LOG.info(name + " notVectorizedReason: " + reason.toString());
    }
  }
  LOG.info(name + " vectorizedVertexNum: " + baseWork.getVectorizedVertexNum());
  if (vectorized && LOG.isDebugEnabled()) {
    VectorizedRowBatchCtx batchCtx = baseWork.getVectorizedRowBatchCtx();
    LOG.debug(name + " dataColumnCount: " + batchCtx.getDataColumnCount());
    int[] includeColumns = batchCtx.getDataColumnNums();
    if (includeColumns != null) {
      LOG.debug(name + " includeColumns: " + Arrays.toString(includeColumns));
    }
    LOG.debug(name + " partitionColumnCount: " + batchCtx.getPartitionColumnCount());
    LOG.debug(name + " dataColumns: " +
        BaseWork.BaseExplainVectorization.getColumns(
            batchCtx, 0, batchCtx.getDataColumnCount()));
    LOG.debug(name + " scratchColumnTypeNames: " +
        BaseWork.BaseExplainVectorization.getScratchColumns(batchCtx));
    VirtualColumn[] virtualCols = batchCtx.getNeededVirtualColumns();
    if (virtualCols != null && virtualCols.length != 0) {
      LOG.debug(name + " neededVirtualColumns: " + Arrays.toString(virtualCols));
    }
  }
  return true;
}
/*
 * Log the Map-specific vectorization details (enabled conditions met / not met
 * and the input file formats seen) after the common summary.
 */
private void logMapWorkExplainVectorization(MapWork mapWork) {
  if (!logExplainVectorization(mapWork, "Map")) {
    return;
  }
  // Conditions.
  List<String> conditionsMet = mapWork.getVectorizationEnabledConditionsMet();
  if (conditionsMet != null && !conditionsMet.isEmpty()) {
    LOG.info("Map enabledConditionsMet: " + conditionsMet.toString());
  }
  List<String> conditionsNotMet = mapWork.getVectorizationEnabledConditionsNotMet();
  if (conditionsNotMet != null && !conditionsNotMet.isEmpty()) {
    LOG.info("Map enabledConditionsNotMet: " + conditionsNotMet.toString());
  }
  Set<String> inputFormats = mapWork.getVectorizationInputFileFormatClassNameSet();
  if (inputFormats != null && !inputFormats.isEmpty()) {
    LOG.info("Map inputFileFormatClassNameSet: " + inputFormats.toString());
  }
}
// Log the Reduce-specific vectorization details (reduce-enabled flag and
// execution engine) after the common summary.
private void logReduceWorkExplainVectorization(ReduceWork reduceWork) {
  if (!logExplainVectorization(reduceWork, "Reduce")) {
    return;
  }
  // Conditions.
  LOG.info("Reducer " + HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED.varname +
      ": " + reduceWork.getReduceVectorizationEnabled());
  LOG.info("Reducer engine: " + reduceWork.getVectorReduceEngine());
}
// MergeJoin has no extra details beyond the common vectorization summary.
// Fix: removed the former "if (!...) { return; }" wrapper, which was dead
// code — both branches did nothing beyond the common logging call itself.
private void logMergeJoinWorkExplainVectorization(MergeJoinWork mergeJoinWork) {
  logExplainVectorization(mergeJoinWork, "MergeJoin");
}
// Entry point for Map-side vectorization: build the per-task column info and
// run validation + vectorization over the MapWork operator tree.
private void convertMapWork(MapWork mapWork, boolean isTezOrSpark) throws SemanticException {
  // We have to evaluate the input format to see if vectorization is enabled, so
  // we do not set it right here.
  VectorTaskColumnInfo vectorTaskColumnInfo = new VectorTaskColumnInfo();
  vectorTaskColumnInfo.assume();
  validateAndVectorizeMapWork(mapWork, vectorTaskColumnInfo, isTezOrSpark);
}
// Register the node processor for the TableScan -> ... -> FileSink and
// TableScan -> ... -> ReduceSink operator paths.
private void addMapWorkRules(Map<Rule, NodeProcessor> opRules, NodeProcessor np) {
  String tableScanPrefix = TableScanOperator.getOperatorName() + ".*";
  opRules.put(new RuleRegExp("R1", tableScanPrefix + FileSinkOperator.getOperatorName()), np);
  opRules.put(new RuleRegExp("R2", tableScanPrefix + ReduceSinkOperator.getOperatorName()), np);
}
/*
 * Determine if there is only one TableScanOperator. Currently in Map vectorization, we do not
 * try to vectorize multiple input trees.
 *
 * Returns the (alias, TableScanOperator) pair, or null when the map work has no
 * aliases, has an invalid (null) alias entry, or has more than one
 * TableScanOperator.
 *
 * NOTE(review): when no TableScanOperator is present at all this returns a pair
 * of ("", null) rather than null — callers appear to rely on that shape; confirm.
 *
 * Fix: the return previously used the raw type "new ImmutablePair(...)",
 * producing an unchecked-conversion warning; it is now properly parameterized.
 */
private ImmutablePair<String, TableScanOperator> verifyOnlyOneTableScanOperator(MapWork mapWork) {
  // Eliminate MR plans with more than one TableScanOperator.
  LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork = mapWork.getAliasToWork();
  if ((aliasToWork == null) || (aliasToWork.size() == 0)) {
    setNodeIssue("Vectorized map work requires work");
    return null;
  }
  int tableScanCount = 0;
  String alias = "";
  TableScanOperator tableScanOperator = null;
  for (Entry<String, Operator<? extends OperatorDesc>> entry : aliasToWork.entrySet()) {
    Operator<?> op = entry.getValue();
    if (op == null) {
      setNodeIssue("Vectorized map work requires a valid alias");
      return null;
    }
    if (op instanceof TableScanOperator) {
      tableScanCount++;
      alias = entry.getKey();
      tableScanOperator = (TableScanOperator) op;
    }
  }
  if (tableScanCount > 1) {
    setNodeIssue("Vectorized map work only works with 1 TableScanOperator");
    return null;
  }
  return new ImmutablePair<String, TableScanOperator>(alias, tableScanOperator);
}
/*
 * Collect the TableScan operator's full schema.
 *
 * Output parameters (appended to, in row-schema order):
 *   logicalColumnNameList      - every column name: data, partition, and virtual.
 *   logicalTypeInfoList        - the matching TypeInfo for each name.
 *   availableVirtualColumnList - only those virtual columns that vectorization
 *                                can handle (and, for ROWID, only when enabled).
 */
private void getTableScanOperatorSchemaInfo(TableScanOperator tableScanOperator,
    List<String> logicalColumnNameList, List<TypeInfo> logicalTypeInfoList,
    List<VirtualColumn> availableVirtualColumnList) {
  // Add all columns to make a vectorization context for
  // the TableScan operator.
  RowSchema rowSchema = tableScanOperator.getSchema();
  for (ColumnInfo c : rowSchema.getSignature()) {
    // Validation will later exclude vectorization of virtual columns usage if necessary.
    String columnName = c.getInternalName();
    // Turns out partition columns get marked as virtual in ColumnInfo, so we need to
    // check the VirtualColumn directly.
    VirtualColumn virtualColumn = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get(columnName);
    if (virtualColumn != null) {
      // The planner gives us a subset virtual columns available for this table scan.
      // AND
      // We only support some virtual columns in vectorization.
      //
      // So, create the intersection. Note these are available vectorizable virtual columns.
      // Later we remember which virtual columns were *actually used* in the query so
      // just those will be included in the Map VectorizedRowBatchCtx that has the
      // information for creating the Map VectorizedRowBatch.
      //
      if (!vectorizableVirtualColumns.contains(virtualColumn)) {
        continue;
      }
      // ROWID is only made available when explicitly enabled by configuration.
      if (virtualColumn == VirtualColumn.ROWID && !isVectorizedRowIdentifierEnabled) {
        continue;
      }
      availableVirtualColumnList.add(virtualColumn);
    }
    // All columns: data, partition, and virtual are added.
    logicalColumnNameList.add(columnName);
    logicalTypeInfoList.add(TypeInfoUtils.getTypeInfoFromTypeString(c.getTypeName()));
  }
}
/*
 * Record the positions (within the full column name list) of the data columns
 * that the TableScanOperator actually needs.
 */
private void determineDataColumnNums(TableScanOperator tableScanOperator,
    List<String> allColumnNameList, int dataColumnCount, List<Integer> dataColumnNums) {
  // The TableScanOperator's needed columns are just the data columns.
  Set<String> neededColumnNames = new HashSet<String>(tableScanOperator.getNeededColumns());
  for (int columnNum = 0; columnNum < dataColumnCount; columnNum++) {
    if (neededColumnNames.contains(allColumnNameList.get(columnNum))) {
      dataColumnNums.add(columnNum);
    }
  }
}
/*
 * Ask a vectorized input file format which vectorization features it supports.
 *
 * Returns null when the format does not implement
 * VectorizedInputFormatInterface or when it cannot be instantiated.
 *
 * Fix: the previous LOG.error call's "{}" placeholder consumed the exception
 * object itself, so the class name was never logged and the stack trace was
 * lost; the class name and the throwable are now passed separately.
 */
private Support[] getVectorizedInputFormatSupports(
    Class<? extends InputFormat> inputFileFormatClass) {
  try {
    InputFormat inputFormat = FetchOperator.getInputFormatFromCache(inputFileFormatClass, hiveConf);
    if (inputFormat instanceof VectorizedInputFormatInterface) {
      return ((VectorizedInputFormatInterface) inputFormat).getSupportedFeatures();
    }
  } catch (IOException e) {
    LOG.error("Unable to instantiate {} input format class. Cannot determine vectorization support.",
        inputFileFormatClass.getName(), e);
  }
  // FUTURE: Decide how to ask an input file format what vectorization features it supports.
  return null;
}
/*
 * Add the support of the VectorizedInputFileFormatInterface.
 *
 * When the input format is vectorized and reports supported features, merge
 * them into newSupportSet; otherwise the set is left untouched.
 *
 * Fix: restructured the former empty "if (supports == null) { // No support. }"
 * branch into guard clauses.
 */
private void addVectorizedInputFileFormatSupport(
    Set<Support> newSupportSet,
    boolean isInputFileFormatVectorized, Class<? extends InputFormat>inputFileFormatClass) {
  if (!isInputFileFormatVectorized) {
    return;
  }
  Support[] supports = getVectorizedInputFormatSupports(inputFileFormatClass);
  if (supports == null) {
    // No support reported.
    return;
  }
  for (Support support : supports) {
    newSupportSet.add(support);
  }
}
/*
 * Fold one partition's support set into the running intersection: the first
 * partition seeds the set; every later partition narrows it (via retainAll) so
 * only features supported by ALL partitions remain.
 */
private void handleSupport(
    boolean isFirstPartition, Set<Support> inputFormatSupportSet, Set<Support> newSupportSet) {
  if (!isFirstPartition) {
    if (!inputFormatSupportSet.equals(newSupportSet)) {
      // Do the intersection so only support in both is kept.
      inputFormatSupportSet.retainAll(newSupportSet);
    }
  } else {
    inputFormatSupportSet.addAll(newSupportSet);
  }
}
/*
 * Attach a vector partition descriptor to a partition descriptor, sharing a
 * single canonical object among descriptors that compare equal (removes
 * duplicate objects across partitions).
 */
private void addVectorPartitionDesc(PartitionDesc pd, VectorPartitionDesc vpd,
    Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap) {
  VectorPartitionDesc canonical = vectorPartitionDescMap.get(vpd);
  if (canonical == null) {
    // First occurrence: this instance becomes the canonical one.
    vectorPartitionDescMap.put(vpd, vpd);
    canonical = vpd;
  }
  pd.setVectorPartitionDesc(canonical);
}
/*
 * There are 3 modes of reading for vectorization:
 *
 * 1) One for the Vectorized Input File Format which returns VectorizedRowBatch as the row.
 *
 * 2) One for using VectorDeserializeRow to deserialize each row into the VectorizedRowBatch.
 * Currently, these Input File Formats:
 * TEXTFILE
 * SEQUENCEFILE
 *
 * 3) And one using the regular partition deserializer to get the row object and assigning
 * the row object into the VectorizedRowBatch with VectorAssignRow.
 * This picks up Input File Format not supported by the other two.
 *
 * Returns true when a VectorPartitionDesc was attached to pd (the partition can
 * be read in one of the modes above); false otherwise, with the reasons added to
 * enabledConditionsNotMetList. inputFileFormatClassNameSet, the enabled-conditions
 * collections, and newSupportSet are out-parameters accumulated for EXPLAIN.
 * NOTE(review): the branch ordering below is load-bearing (each mode falls
 * through to the next) — do not reorder.
 */
private boolean verifyAndSetVectorPartDesc(
    PartitionDesc pd, boolean isFullAcidTable,
    List<TypeInfo> allTypeInfoList,
    Set<String> inputFileFormatClassNameSet,
    Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap,
    Set<String> enabledConditionsMetSet, ArrayList<String> enabledConditionsNotMetList,
    Set<Support> newSupportSet, List<TypeInfo> dataTypeInfoList) {
  Class<? extends InputFormat> inputFileFormatClass = pd.getInputFileFormatClass();
  String inputFileFormatClassName = inputFileFormatClass.getName();
  final TypeInfo[] dataTypeInfos;
  if (dataTypeInfoList == null) {
    dataTypeInfos = EMPTY_TYPEINFO_ARRAY;
  } else {
    dataTypeInfos = dataTypeInfoList.toArray(new TypeInfo[dataTypeInfoList.size()]);
  }
  // Always collect input file formats.
  inputFileFormatClassNameSet.add(inputFileFormatClassName);
  boolean isInputFileFormatVectorized = Utilities.isInputFileFormatVectorized(pd);
  if (isFullAcidTable) {
    // Today, ACID tables are only ORC and that format is vectorizable. Verify these
    // assumptions.
    Preconditions.checkState(isInputFileFormatVectorized);
    Preconditions.checkState(supportedAcidInputFormats.contains(inputFileFormatClassName));
    if (!useVectorizedInputFileFormat) {
      enabledConditionsNotMetList.add("Vectorizing ACID tables requires "
          + HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname);
      return false;
    }
    addVectorizedInputFileFormatSupport(
        newSupportSet, isInputFileFormatVectorized, inputFileFormatClass);
    addVectorPartitionDesc(
        pd,
        VectorPartitionDesc.createVectorizedInputFileFormat(
            inputFileFormatClassName,
            Utilities.isInputFileFormatSelfDescribing(pd),
            dataTypeInfos),
        vectorPartitionDescMap);
    enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname);
    return true;
  }
  // Look for Pass-Thru case where InputFileFormat has VectorizedInputFormatInterface
  // and reads VectorizedRowBatch as a "row".
  if (useVectorizedInputFileFormat) {
    if (isInputFileFormatVectorized &&
        !isInputFormatExcluded(
            inputFileFormatClassName,
            vectorizedInputFormatExcludes) &&
        !hasUnsupportedVectorizedParquetDataType(
            inputFileFormatClass,
            allTypeInfoList)) {
      addVectorizedInputFileFormatSupport(
          newSupportSet, isInputFileFormatVectorized, inputFileFormatClass);
      addVectorPartitionDesc(
          pd,
          VectorPartitionDesc.createVectorizedInputFileFormat(
              inputFileFormatClassName,
              Utilities.isInputFileFormatSelfDescribing(pd),
              dataTypeInfos),
          vectorPartitionDescMap);
      enabledConditionsMetSet.add(
          HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname);
      return true;
    }
    // Fall through and look for other options...
  }
  // Modes 2 and 3 require schema evolution support.
  if (!isSchemaEvolution) {
    enabledConditionsNotMetList.add(
        "Vectorizing tables without Schema Evolution requires " + HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname);
    return false;
  }
  String deserializerClassName = pd.getDeserializerClassName();
  // Look for InputFileFormat / Serde combinations we can deserialize more efficiently
  // using VectorDeserializeRow and a deserialize class with the DeserializeRead interface.
  //
  // Do the "vectorized" row-by-row deserialization into a VectorizedRowBatch in the
  // VectorMapOperator.
  boolean isTextFormat = inputFileFormatClassName.equals(TextInputFormat.class.getName()) &&
      deserializerClassName.equals(LazySimpleSerDe.class.getName());
  boolean isSequenceFormat =
      inputFileFormatClassName.equals(SequenceFileInputFormat.class.getName()) &&
      deserializerClassName.equals(LazyBinarySerDe.class.getName());
  boolean isVectorDeserializeEligable = isTextFormat || isSequenceFormat;
  if (useVectorDeserialize) {
    // Currently, we support LazySimple deserialization:
    //
    //    org.apache.hadoop.mapred.TextInputFormat
    //    org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    //
    // AND
    //
    //    org.apache.hadoop.mapred.SequenceFileInputFormat
    //    org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
    if (isTextFormat) {
      Properties properties = pd.getTableDesc().getProperties();
      String lastColumnTakesRestString =
          properties.getProperty(serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST);
      boolean lastColumnTakesRest =
          (lastColumnTakesRestString != null &&
          lastColumnTakesRestString.equalsIgnoreCase("true"));
      if (lastColumnTakesRest) {
        // If row mode will not catch this input file format, then not enabled.
        if (useRowDeserialize && !isInputFormatExcluded(inputFileFormatClassName,
            rowDeserializeInputFormatExcludes)) {
          enabledConditionsNotMetList.add(
              inputFileFormatClassName + " " +
              serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST + " must be disabled ");
          return false;
        }
      } else {
        // Add the support for read variations in Vectorized Text.
        newSupportSet.addAll(vectorDeserializeTextSupportSet);
        addVectorPartitionDesc(
            pd,
            VectorPartitionDesc.createVectorDeserialize(
                inputFileFormatClassName, VectorDeserializeType.LAZY_SIMPLE, dataTypeInfos),
            vectorPartitionDescMap);
        enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname);
        return true;
      }
    } else if (isSequenceFormat) {
      addVectorPartitionDesc(
          pd,
          VectorPartitionDesc.createVectorDeserialize(
              inputFileFormatClassName, VectorDeserializeType.LAZY_BINARY, dataTypeInfos),
          vectorPartitionDescMap);
      enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname);
      return true;
    }
    // Fall through and look for other options...
  }
  // Otherwise, if enabled, deserialize rows using regular Serde and add the object
  // inspect-able Object[] row to a VectorizedRowBatch in the VectorMapOperator.
  if (useRowDeserialize) {
    boolean isRowDeserializeExcluded =
        isInputFormatExcluded(inputFileFormatClassName, rowDeserializeInputFormatExcludes);
    if (!isRowDeserializeExcluded && !isInputFileFormatVectorized) {
      addVectorPartitionDesc(
          pd,
          VectorPartitionDesc.createRowDeserialize(
              inputFileFormatClassName,
              Utilities.isInputFileFormatSelfDescribing(pd),
              deserializerClassName,
              dataTypeInfos),
          vectorPartitionDescMap);
      enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname);
      return true;
    } else if (isInputFileFormatVectorized) {
      /*
       * Vectorizer does not vectorize in row deserialize mode if the input format has
       * VectorizedInputFormat so input formats will be clear if the isVectorized flag
       * is on, they are doing VRB work.
       */
      enabledConditionsNotMetList.add("Row deserialization of vectorized input format not supported");
    } else {
      enabledConditionsNotMetList.add(ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname
          + " IS true AND " + ConfVars.HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES.varname
          + " NOT CONTAINS " + inputFileFormatClassName);
    }
  }
  // No mode applied; record the most useful "what to enable" hint for EXPLAIN.
  if (isInputFileFormatVectorized) {
    if(useVectorizedInputFileFormat) {
      enabledConditionsNotMetList.add(
          ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname + " IS true AND "
              + ConfVars.HIVE_VECTORIZATION_VECTORIZED_INPUT_FILE_FORMAT_EXCLUDES.varname
              + " NOT CONTAINS " + inputFileFormatClassName);
    } else {
      enabledConditionsNotMetList
          .add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname);
    }
  } else {
    // Only offer these when the input file format is not the fast vectorized formats.
    if (isVectorDeserializeEligable) {
      Preconditions.checkState(!useVectorDeserialize);
      enabledConditionsNotMetList.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname);
    } else {
      // Since row mode takes everyone.
      enabledConditionsNotMetList.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname);
    }
  }
  return false;
}
/*
 * Decide whether the given set of input file formats should all be read via
 * the vectorized input file format path: the global flag must be on, and no
 * format may appear in the exclusion list. An absent/empty set defers to the
 * global flag alone.
 */
private boolean shouldUseVectorizedInputFormat(Set<String> inputFileFormatClassNames) {
  if (!useVectorizedInputFileFormat) {
    // Globally disabled; nothing else matters.
    return false;
  }
  if (inputFileFormatClassNames == null || inputFileFormatClassNames.isEmpty()) {
    return true;
  }
  // Global config of vectorized input format is enabled; check if these
  // input formats are excluded.
  for (String inputFormat : inputFileFormatClassNames) {
    if (isInputFormatExcluded(inputFormat, vectorizedInputFormatExcludes)) {
      return false;
    }
  }
  return true;
}
/*
 * Check whether the named input file format is assignable to any exclusion
 * class. A class that cannot be loaded is conservatively treated as excluded
 * (returns true) — note this happens even with an empty exclusion list, so the
 * Class.forName call must stay first.
 */
private boolean isInputFormatExcluded(String inputFileFormatClassName, Collection<Class<?>> excludes) {
  final Class<?> candidateClass;
  try {
    candidateClass = Class.forName(inputFileFormatClassName);
  } catch (ClassNotFoundException e) {
    LOG.warn("Cannot verify class for " + inputFileFormatClassName, e);
    return true;
  }
  if (excludes == null || excludes.isEmpty()) {
    return false;
  }
  for (Class<?> excludedClass : excludes) {
    if (excludedClass.isAssignableFrom(candidateClass)) {
      return true;
    }
  }
  return false;
}
/*
 * Detect complex types nested inside complex types, which
 * VectorizedParquetRecordReader cannot handle. Only applies when the input
 * format is MapredParquetInputFormat; all other formats return false.
 *
 * Returns true when a LIST element, MAP key/value, or STRUCT field is itself a
 * non-primitive type.
 */
private boolean hasUnsupportedVectorizedParquetDataType(
    Class<? extends InputFormat> inputFileFormatClass, List<TypeInfo> allTypeInfoList) {
  if (!inputFileFormatClass.equals(org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat.class)) {
    return false;
  }
  /*
   * Currently, VectorizedParquetRecordReader cannot handle nested complex types.
   */
  for (TypeInfo typeInfo : allTypeInfoList) {
    if (!(typeInfo instanceof PrimitiveTypeInfo)) {
      switch (typeInfo.getCategory()) {
      case LIST:
        if (!(((ListTypeInfo) typeInfo).getListElementTypeInfo() instanceof PrimitiveTypeInfo)) {
          return true;
        }
        break;
      case MAP:
        {
          MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
          if (!(mapTypeInfo.getMapKeyTypeInfo() instanceof PrimitiveTypeInfo)) {
            return true;
          }
          if (!(mapTypeInfo.getMapValueTypeInfo() instanceof PrimitiveTypeInfo)) {
            return true;
          }
        }
        break;
      case STRUCT:
        {
          StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
          List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
          for (TypeInfo fieldTypeInfo : fieldTypeInfos) {
            if (!(fieldTypeInfo instanceof PrimitiveTypeInfo)) {
              return true;
            }
          }
        }
        break;
      case UNION:
        // Not supported at all.
        // NOTE(review): the comment says UNION is "not supported at all", yet we
        // return false here ("no unsupported type found"). Presumably UNION is
        // rejected by separate validation before this matters — confirm this is
        // intentional and not an inverted return.
        return false;
      default:
        throw new RuntimeException(
            "Unsupported complex type category " + typeInfo.getCategory());
      }
    }
  }
  return false;
}
/*
 * Record on the MapWork everything EXPLAIN VECTORIZATION needs about the input
 * format / schema evolution validation (called whether validation passed or
 * failed, so EXPLAIN always has the data).
 *
 * Fix: the former raw "new ArrayList(enabledConditionsMetSet)" is now properly
 * parameterized (it produced an unchecked-conversion warning).
 */
private void setValidateInputFormatAndSchemaEvolutionExplain(MapWork mapWork,
    Set<String> inputFileFormatClassNameSet,
    Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap,
    Set<String> enabledConditionsMetSet, ArrayList<String> enabledConditionsNotMetList) {
  mapWork.setVectorizationInputFileFormatClassNameSet(inputFileFormatClassNameSet);
  ArrayList<VectorPartitionDesc> vectorPartitionDescList = new ArrayList<VectorPartitionDesc>();
  vectorPartitionDescList.addAll(vectorPartitionDescMap.keySet());
  mapWork.setVectorPartitionDescList(vectorPartitionDescList);
  mapWork.setVectorizationEnabledConditionsMet(new ArrayList<String>(enabledConditionsMetSet));
  mapWork.setVectorizationEnabledConditionsNotMet(enabledConditionsNotMetList);
}
private ImmutablePair<Boolean, Boolean> validateInputFormatAndSchemaEvolution(MapWork mapWork, String alias,
TableScanOperator tableScanOperator, VectorTaskColumnInfo vectorTaskColumnInfo)
throws SemanticException {
boolean isFullAcidTable = tableScanOperator.getConf().isFullAcidTable();
// These names/types are the data columns plus partition columns.
final List<String> allColumnNameList = new ArrayList<String>();
final List<TypeInfo> allTypeInfoList = new ArrayList<TypeInfo>();
final List<VirtualColumn> availableVirtualColumnList = new ArrayList<VirtualColumn>();
getTableScanOperatorSchemaInfo(
tableScanOperator,
allColumnNameList, allTypeInfoList,
availableVirtualColumnList);
final int virtualColumnCount = availableVirtualColumnList.size();
final List<Integer> dataColumnNums = new ArrayList<Integer>();
final int dataAndPartColumnCount = allColumnNameList.size() - virtualColumnCount;
/*
* Validate input formats of all the partitions can be vectorized.
*/
boolean isFirst = true;
int dataColumnCount = 0;
int partitionColumnCount = 0;
List<String> tableDataColumnList = null;
List<TypeInfo> tableDataTypeInfoList = null;
LinkedHashMap<Path, ArrayList<String>> pathToAliases = mapWork.getPathToAliases();
LinkedHashMap<Path, PartitionDesc> pathToPartitionInfo = mapWork.getPathToPartitionInfo();
// Remember the input file formats we validated and why.
Set<String> inputFileFormatClassNameSet = new HashSet<String>();
Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap =
new LinkedHashMap<VectorPartitionDesc, VectorPartitionDesc>();
Set<String> enabledConditionsMetSet = new HashSet<String>();
ArrayList<String> enabledConditionsNotMetList = new ArrayList<String>();
Set<Support> inputFormatSupportSet = new TreeSet<Support>();
boolean outsideLoopIsFirstPartition = true;
for (Entry<Path, ArrayList<String>> entry: pathToAliases.entrySet()) {
final boolean isFirstPartition = outsideLoopIsFirstPartition;
outsideLoopIsFirstPartition = false;
Path path = entry.getKey();
List<String> aliases = entry.getValue();
boolean isPresent = (aliases != null && aliases.indexOf(alias) != -1);
if (!isPresent) {
setOperatorIssue("Alias " + alias + " not present in aliases " + aliases);
return new ImmutablePair<Boolean,Boolean>(false, false);
}
// TODO: should this use getPartitionDescFromPathRecursively? That's what other code uses.
PartitionDesc partDesc = pathToPartitionInfo.get(path);
if (partDesc.getVectorPartitionDesc() != null) {
// We've seen this already.
continue;
}
Set<Support> newSupportSet = new TreeSet<Support>();
final List<TypeInfo> nextDataTypeInfoList;
final Deserializer deserializer;
final StructObjectInspector partObjectInspector;
try {
deserializer = partDesc.getDeserializer(hiveConf);
partObjectInspector = (StructObjectInspector) deserializer.getObjectInspector();
} catch (Exception e) {
throw new SemanticException(e);
}
if (isFirst) {
/*
* Determine the data and partition columns using the first partition descriptor's
* partition count. In other words, how to split the schema columns -- the
* allColumnNameList and allTypeInfoList variables -- into the data and partition columns.
*/
LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
if (partSpec != null && partSpec.size() > 0) {
partitionColumnCount = partSpec.size();
dataColumnCount = dataAndPartColumnCount - partitionColumnCount;
} else {
partitionColumnCount = 0;
dataColumnCount = dataAndPartColumnCount;
}
determineDataColumnNums(tableScanOperator, allColumnNameList, dataColumnCount,
dataColumnNums);
tableDataColumnList = allColumnNameList.subList(0, dataColumnCount);
tableDataTypeInfoList = allTypeInfoList.subList(0, dataColumnCount);
isFirst = false;
}
if (Utilities.isInputFileFormatSelfDescribing(partDesc)) {
/*
* Self-Describing Input Format will convert its data to the table schema. So, there
* will be no VectorMapOperator conversion needed.
*/
nextDataTypeInfoList = tableDataTypeInfoList;
} else {
String nextDataTypesString = ObjectInspectorUtils.getFieldTypes(partObjectInspector);
/*
* We convert to an array of TypeInfo using a library routine since it parses the
* information and can handle use of different separators, etc. We cannot use the
* raw type string for comparison in the map because of the different separators used.
*/
nextDataTypeInfoList =
TypeInfoUtils.getTypeInfosFromTypeString(nextDataTypesString);
}
// HIVE-20419: Vectorization: Prevent mutation of VectorPartitionDesc after being used in a
// hashmap key
final boolean isVerifiedVectorPartDesc =
verifyAndSetVectorPartDesc(
partDesc, isFullAcidTable,
allTypeInfoList,
inputFileFormatClassNameSet,
vectorPartitionDescMap,
enabledConditionsMetSet, enabledConditionsNotMetList,
newSupportSet,
nextDataTypeInfoList);
final VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();
if (!isVerifiedVectorPartDesc) {
// Always set these so EXPLAIN can see.
setValidateInputFormatAndSchemaEvolutionExplain(
mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
enabledConditionsMetSet, enabledConditionsNotMetList);
// We consider this an enable issue, not a not vectorized issue.
return new ImmutablePair<Boolean,Boolean>(false, true);
}
handleSupport(isFirstPartition, inputFormatSupportSet, newSupportSet);
// We need to get the partition's column names from the partition serde.
// (e.g. Avro provides the table schema and ignores the partition schema..).
//
String nextDataColumnsString = ObjectInspectorUtils.getFieldNames(partObjectInspector);
String[] nextDataColumns = nextDataColumnsString.split(",");
List<String> nextDataColumnList = Arrays.asList(nextDataColumns);
/*
* Validate the column names that are present are the same. Missing columns will be
* implicitly defaulted to null.
*/
if (nextDataColumnList.size() > tableDataColumnList.size()) {
enabledConditionsNotMetList.add(
String.format(
"Could not enable vectorization due to " +
"partition column names size %d is greater than the number of table column names size %d",
nextDataColumnList.size(), tableDataColumnList.size()));
// Always set these so EXPLAIN can see.
setValidateInputFormatAndSchemaEvolutionExplain(
mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
enabledConditionsMetSet, enabledConditionsNotMetList);
return new ImmutablePair<Boolean,Boolean>(false, true);
}
if (!(deserializer instanceof NullStructSerDe)) {
// (Don't insist NullStructSerDe produce correct column names).
for (int i = 0; i < nextDataColumnList.size(); i++) {
String nextColumnName = nextDataColumnList.get(i);
String tableColumnName = tableDataColumnList.get(i);
if (!nextColumnName.equals(tableColumnName)) {
enabledConditionsNotMetList.add(
String.format(
"Could not enable vectorization due to " +
"partition column name %s does not match table column name %s",
nextColumnName, tableColumnName));
// Always set these so EXPLAIN can see.
setValidateInputFormatAndSchemaEvolutionExplain(
mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
enabledConditionsMetSet, enabledConditionsNotMetList);
return new ImmutablePair<Boolean,Boolean>(false, true);
}
}
}
boolean isPartitionRowConversion = false;
if (!vectorPartDesc.getIsInputFileFormatSelfDescribing()) {
final int nextDataTypeInfoSize = nextDataTypeInfoList.size();
if (nextDataTypeInfoSize > tableDataTypeInfoList.size()) {
enabledConditionsNotMetList.add(
String.format(
"Could not enable vectorization due to " +
"partition column types size %d is greater than the number of table column types size %d",
nextDataTypeInfoSize, tableDataTypeInfoList.size()));
// Always set these so EXPLAIN can see.
setValidateInputFormatAndSchemaEvolutionExplain(
mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
enabledConditionsMetSet, enabledConditionsNotMetList);
return new ImmutablePair<Boolean,Boolean>(false, true);
}
for (int i = 0; i < nextDataTypeInfoSize; i++) {
TypeInfo tableDataTypeInfo = tableDataTypeInfoList.get(i);
TypeInfo nextDataTypeInfo = nextDataTypeInfoList.get(i);
// FUTURE: We be more sophisticated in our conversion check.
if (!tableDataTypeInfo.equals(nextDataTypeInfo)) {
isPartitionRowConversion = true;
break;
}
}
}
if (isPartitionRowConversion && isLlapIoEnabled) {
enabledConditionsNotMetList.add(
"Could not enable vectorization. " +
"LLAP I/O is enabled wbich automatically deserializes into " +
"VECTORIZED_INPUT_FILE_FORMAT. " +
"A partition requires data type conversion and that is not supported");
// Always set these so EXPLAIN can see.
setValidateInputFormatAndSchemaEvolutionExplain(
mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
enabledConditionsMetSet, enabledConditionsNotMetList);
return new ImmutablePair<Boolean,Boolean>(false, true);
}
}
// For now, we don't know which virtual columns are going to be included. We'll add them
// later...
vectorTaskColumnInfo.setAllColumnNames(allColumnNameList);
vectorTaskColumnInfo.setAllTypeInfos(allTypeInfoList);
vectorTaskColumnInfo.setDataColumnNums(dataColumnNums);
vectorTaskColumnInfo.setPartitionColumnCount(partitionColumnCount);
vectorTaskColumnInfo.setAvailableVirtualColumnList(availableVirtualColumnList);
vectorTaskColumnInfo.setUseVectorizedInputFileFormat(
shouldUseVectorizedInputFormat(inputFileFormatClassNameSet));
vectorTaskColumnInfo.setInputFormatSupportSet(inputFormatSupportSet);
// Always set these so EXPLAIN can see.
mapWork.setVectorizationInputFileFormatClassNameSet(inputFileFormatClassNameSet);
ArrayList<VectorPartitionDesc> vectorPartitionDescList = new ArrayList<VectorPartitionDesc>();
vectorPartitionDescList.addAll(vectorPartitionDescMap.keySet());
mapWork.setVectorPartitionDescList(vectorPartitionDescList);
mapWork.setVectorizationEnabledConditionsMet(new ArrayList(enabledConditionsMetSet));
mapWork.setVectorizationEnabledConditionsNotMet(enabledConditionsNotMetList);
return new ImmutablePair<Boolean,Boolean>(true, false);
}
/**
 * Top-level driver that validates a MapWork vertex for vectorization and, when all
 * checks pass, vectorizes its operator tree in place.
 *
 * Side effects on failure: the "enabled conditions not met" list is recorded on the
 * MapWork so EXPLAIN can display why vectorization was declined. Side effects on the
 * enclosing Vectorizer instance: sets currentOperator, availableVectorizedVirtualColumnSet,
 * and neededVirtualColumnSet (global members used by later per-operator validation).
 *
 * @param mapWork the map-side vertex being examined
 * @param vectorTaskColumnInfo accumulator for column names/types/partition info; filled
 *        in by validateInputFormatAndSchemaEvolution and augmented here
 * @param isTezOrSpark whether the execution engine is Tez or Spark
 * @throws SemanticException on analysis errors from downstream validation
 */
private void validateAndVectorizeMapWork(MapWork mapWork, VectorTaskColumnInfo vectorTaskColumnInfo,
boolean isTezOrSpark) throws SemanticException {
//--------------------------------------------------------------------------------------------
LOG.info("Examining input format to see if vectorization is enabled.");
// Vectorized map work supports exactly one TableScanOperator; null means the check failed
// and the not-vectorized reason was already stored in currentBaseWork.
ImmutablePair<String,TableScanOperator> onlyOneTableScanPair = verifyOnlyOneTableScanOperator(mapWork);
if (onlyOneTableScanPair == null) {
VectorizerReason notVectorizedReason = currentBaseWork.getNotVectorizedReason();
Preconditions.checkState(notVectorizedReason != null);
mapWork.setVectorizationEnabledConditionsNotMet(Arrays.asList(new String[] {notVectorizedReason.toString()}));
return;
}
String alias = onlyOneTableScanPair.left;
TableScanOperator tableScanOperator = onlyOneTableScanPair.right;
// This call fills in the column names, types, and partition column count in
// vectorTaskColumnInfo.
currentOperator = tableScanOperator;
ImmutablePair<Boolean, Boolean> validateInputFormatAndSchemaEvolutionPair =
validateInputFormatAndSchemaEvolution(
mapWork, alias, tableScanOperator, vectorTaskColumnInfo);
// Pair semantics: left = validation passed; right = the enabled-conditions-not-met
// explanation was already recorded on the MapWork.
if (!validateInputFormatAndSchemaEvolutionPair.left) {
// Have we already set the enabled conditions not met?
if (!validateInputFormatAndSchemaEvolutionPair.right) {
VectorizerReason notVectorizedReason = currentBaseWork.getNotVectorizedReason();
Preconditions.checkState(notVectorizedReason != null);
mapWork.setVectorizationEnabledConditionsNotMet(Arrays.asList(new String[] {notVectorizedReason.toString()}));
}
return;
}
// Data columns are all columns minus the trailing partition columns.
final int dataColumnCount =
vectorTaskColumnInfo.allColumnNames.size() - vectorTaskColumnInfo.partitionColumnCount;
/*
 * Take what all input formats support and eliminate any of them not enabled by
 * the Hive variable.
 */
List<String> supportRemovedReasons = new ArrayList<String>();
Set<Support> supportSet = new TreeSet<Support>();
if (vectorTaskColumnInfo.inputFormatSupportSet != null) {
supportSet.addAll(vectorTaskColumnInfo.inputFormatSupportSet);
}
// The retainAll method does set intersection.
supportSet.retainAll(vectorizedInputFormatSupportEnabledSet);
if (!supportSet.equals(vectorTaskColumnInfo.inputFormatSupportSet)) {
Set<Support> removedSet = new TreeSet<Support>();
removedSet.addAll(vectorizedInputFormatSupportEnabledSet);
removedSet.removeAll(supportSet);
String removeString =
removedSet.toString() + " is disabled because it is not in " +
HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED.varname +
" " + vectorizedInputFormatSupportEnabledSet.toString();
supportRemovedReasons.add(removeString);
}
// Now rememember what is supported for this query and any support that was
// removed.
vectorTaskColumnInfo.setSupportSetInUse(supportSet);
vectorTaskColumnInfo.setSupportRemovedReasons(supportRemovedReasons);
// DECIMAL_64 fast path applies only to decimal columns whose precision fits in 64 bits.
final boolean isSupportDecimal64 = supportSet.contains(Support.DECIMAL_64);
List<DataTypePhysicalVariation> dataTypePhysicalVariations = new ArrayList<DataTypePhysicalVariation>();
for (int i = 0; i < dataColumnCount; i++) {
DataTypePhysicalVariation dataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
if (isSupportDecimal64) {
TypeInfo typeInfo = vectorTaskColumnInfo.allTypeInfos.get(i);
if (typeInfo instanceof DecimalTypeInfo) {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
if (HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.precision())) {
dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
}
}
}
dataTypePhysicalVariations.add(dataTypePhysicalVariation);
}
// It simplifies things to just add default ones for partitions.
for (int i = 0; i < vectorTaskColumnInfo.partitionColumnCount; i++) {
dataTypePhysicalVariations.add(DataTypePhysicalVariation.NONE);
}
vectorTaskColumnInfo.setAlldataTypePhysicalVariations(dataTypePhysicalVariations);
// Set global member indicating which virtual columns are possible to be used by
// the Map vertex.
availableVectorizedVirtualColumnSet = new HashSet<VirtualColumn>();
availableVectorizedVirtualColumnSet.addAll(vectorTaskColumnInfo.availableVirtualColumnList);
// And, use set to remember which virtual columns were actually referenced.
neededVirtualColumnSet = new HashSet<VirtualColumn>();
mapWork.setVectorizationEnabled(true);
LOG.info("Vectorization is enabled for input format(s) " + mapWork.getVectorizationInputFileFormatClassNameSet().toString());
//--------------------------------------------------------------------------------------------
/*
 * Validate and vectorize the Map operator tree.
 */
if (!validateAndVectorizeMapOperators(mapWork, tableScanOperator, isTezOrSpark, vectorTaskColumnInfo)) {
return;
}
//--------------------------------------------------------------------------------------------
// Commit: copy the accumulated column info onto the vertex and flip it to vector mode.
vectorTaskColumnInfo.transferToBaseWork(mapWork);
mapWork.setVectorMode(true);
return;
}
/**
 * Validates and vectorizes the Map operator tree rooted at the given TableScanOperator.
 *
 * On failure the "not vectorized" reason has already been stored in the MapWork vertex
 * (via currentBaseWork / setNodeIssue). On success, the scratch column type information
 * collected by the task-level VectorizationContext is copied into vectorTaskColumnInfo.
 *
 * @param mapWork the vertex being vectorized; becomes the target for not-vectorized notes
 * @param tableScanOperator the single table scan rooting the operator tree
 * @param isTezOrSpark whether the execution engine is Tez or Spark
 * @param vectorTaskColumnInfo receives the neededVirtualColumnList and scratch column info
 * @return true if the operator tree was successfully vectorized
 * @throws SemanticException never thrown directly here, but declared for callers
 */
private boolean validateAndVectorizeMapOperators(MapWork mapWork, TableScanOperator tableScanOperator,
    boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException {
  LOG.info("Validating and vectorizing MapWork... (vectorizedVertexNum " + vectorizedVertexNum + ")");
  // Set "global" member indicating where to store "not vectorized" information if necessary.
  currentBaseWork = mapWork;
  if (!validateTableScanOperator(tableScanOperator, mapWork)) {
    // The "not vectorized" information has been stored in the MapWork vertex.
    return false;
  }
  try {
    validateAndVectorizeMapOperators(tableScanOperator, isTezOrSpark, vectorTaskColumnInfo);
  } catch (VectorizerCannotVectorizeException e) {
    // The "not vectorized" information has been stored in the MapWork vertex.
    return false;
  } catch (RuntimeException e) {
    // NullPointerException and ClassCastException are subclasses of RuntimeException, so
    // this single handler replaces three formerly-duplicated, identical catch blocks.
    if (!isTestVectorizerSuppressFatalExceptions) {
      // Re-throw without losing original stack trace.
      throw e;
    }
    // Test mode: record the failure for EXPLAIN instead of failing the query.
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  }
  vectorTaskColumnInfo.setNeededVirtualColumnList(
      new ArrayList<VirtualColumn>(neededVirtualColumnSet));
  /*
   * The scratch column information was collected by the task VectorizationContext. Go get it.
   */
  VectorizationContext vContext =
      ((VectorizationContextRegion) tableScanOperator).getOutputVectorizationContext();
  vectorTaskColumnInfo.setScratchTypeNameArray(
      vContext.getScratchColumnTypeNames());
  vectorTaskColumnInfo.setScratchdataTypePhysicalVariationsArray(
      vContext.getScratchDataTypePhysicalVariations());
  return true;
}
/**
 * Vectorizes the operator tree beneath the TableScanOperator and splices the resulting
 * vector operators back in as the table scan's direct children.
 *
 * The tree walker returns a dummy root whose children are the vectorized operators;
 * this method re-wires parent/child links so the dummy disappears, preserves the
 * input VectorizationContext on the table scan, and finally converts the table scan
 * itself to vectorized mode in place.
 *
 * @throws VectorizerCannotVectorizeException if any operator in the tree cannot be vectorized
 */
private void validateAndVectorizeMapOperators(TableScanOperator tableScanOperator,
    boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo)
        throws VectorizerCannotVectorizeException {
  Operator<? extends OperatorDesc> dummyRoot =
      validateAndVectorizeOperatorTree(tableScanOperator, false, isTezOrSpark, vectorTaskColumnInfo);
  // Fixup parent and child relations: the dummy root's children become the table scan's.
  List<Operator<? extends OperatorDesc>> newChildren = dummyRoot.getChildOperators();
  tableScanOperator.setChildOperators(newChildren);
  for (Operator<? extends OperatorDesc> child : newChildren) {
    // Any parent reference to the dummy root is redirected to the real TableScanOperator.
    List<Operator<? extends OperatorDesc>> parents = child.getParentOperators();
    for (int p = 0; p < parents.size(); p++) {
      if (parents.get(p) == dummyRoot) {
        parents.set(p, tableScanOperator);
      }
    }
  }
  // And, finally, save the VectorizationContext.
  tableScanOperator.setTaskVectorizationContext(
      ((VectorizationOperator) dummyRoot).getInputVectorizationContext());
  // Modify TableScanOperator in-place so it knows to operate vectorized.
  vectorizeTableScanOperatorInPlace(tableScanOperator, vectorTaskColumnInfo);
}
/*
 * We are "committing" this vertex to be vectorized.
 */
private void vectorizeTableScanOperatorInPlace(TableScanOperator tableScanOperator,
    VectorTaskColumnInfo vectorTaskColumnInfo) {
  // Attach a fresh VectorTableScanDesc to the table scan's descriptor.
  TableScanDesc scanDesc = tableScanOperator.getConf();
  VectorTableScanDesc vectorScanDesc = new VectorTableScanDesc();
  scanDesc.setVectorDesc(vectorScanDesc);
  VectorizationContext outputContext =
      ((VectorizationContextRegion) tableScanOperator).getOutputVectorizationContext();
  List<Integer> projected = outputContext.getProjectedColumns();
  vectorScanDesc.setProjectedColumns(
      ArrayUtils.toPrimitive(projected.toArray(new Integer[0])));
  // Resolve each projected column number to its name, type, and physical variation.
  final int projectedCount = projected.size();
  String[] projectedNames = new String[projectedCount];
  TypeInfo[] projectedTypeInfos = new TypeInfo[projectedCount];
  DataTypePhysicalVariation[] projectedVariations =
      new DataTypePhysicalVariation[projectedCount];
  List<String> allNames = vectorTaskColumnInfo.allColumnNames;
  List<TypeInfo> allTypes = vectorTaskColumnInfo.allTypeInfos;
  List<DataTypePhysicalVariation> allVariations = vectorTaskColumnInfo.allDataTypePhysicalVariations;
  for (int i = 0; i < projectedCount; i++) {
    final int columnNum = projected.get(i);
    projectedNames[i] = allNames.get(columnNum);
    projectedTypeInfos[i] = allTypes.get(columnNum);
    projectedVariations[i] = allVariations.get(columnNum);
  }
  vectorScanDesc.setProjectedColumnNames(projectedNames);
  vectorScanDesc.setProjectedColumnTypeInfos(projectedTypeInfos);
  vectorScanDesc.setProjectedColumnDataTypePhysicalVariations(projectedVariations);
  scanDesc.setVectorized(true);
  // Walk the tree level by level, stamping each operator's VectorDesc for EXPLAIN.
  List<Operator<? extends OperatorDesc>> level = tableScanOperator.getChildOperators();
  while (!level.isEmpty()) {
    level = dosetVectorDesc(level);
  }
}
/**
 * For every operator in the given level, copies its VectorDesc onto its OperatorDesc
 * (so EXPLAIN can display it) and returns the combined list of next-level children.
 *
 * @param children one level of vectorized operators
 * @return all children of the given operators (the next level to process)
 */
private List<Operator<? extends OperatorDesc>> dosetVectorDesc(
    List<Operator<? extends OperatorDesc>> children) {
  List<Operator<? extends OperatorDesc>> nextLevel =
      new ArrayList<Operator<? extends OperatorDesc>>();
  for (Operator<? extends OperatorDesc> op : children) {
    // Get the vector description from the operator and save it for the EXPLAIN.
    VectorDesc vectorDesc = ((VectorizationOperator) op).getVectorDesc();
    ((AbstractOperatorDesc) op.getConf()).setVectorDesc(vectorDesc);
    List<Operator<? extends OperatorDesc>> grandChildren = op.getChildOperators();
    if (grandChildren != null) {
      nextLevel.addAll(grandChildren);
    }
  }
  return nextLevel;
}
/**
 * Entry point for converting a ReduceWork vertex: marks it vectorization-enabled,
 * prepares a fresh column-info accumulator, and delegates to validation/vectorization.
 */
private void convertReduceWork(ReduceWork reduceWork) throws SemanticException {
  reduceWork.setVectorizationEnabled(true);
  VectorTaskColumnInfo columnInfo = new VectorTaskColumnInfo();
  columnInfo.assume();
  // Propagate the testing-only reducer batch size override onto the vertex.
  reduceWork.setVectorizedTestingReducerBatchSize(vectorizedTestingReducerBatchSize);
  validateAndVectorizeReduceWork(reduceWork, columnInfo);
}
/**
 * Validates a ReduceWork vertex for vectorization and, when all checks pass,
 * vectorizes its reduce operator tree and flips the vertex to vector mode.
 *
 * Fix: removed the unused local variable {@code reducer} (the reducer is fetched
 * again inside validateAndVectorizeReduceOperators, so the local was dead code).
 *
 * @param reduceWork the reduce-side vertex being examined
 * @param vectorTaskColumnInfo accumulator for reduce column names/types and sort orders
 * @throws SemanticException on analysis errors from downstream validation
 */
private void validateAndVectorizeReduceWork(ReduceWork reduceWork,
    VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException {
  // Validate input to ReduceWork: key/value deserializers must expose struct
  // object inspectors we can map to columns.
  if (!getOnlyStructObjectInspectors(reduceWork, vectorTaskColumnInfo)) {
    return;
  }
  //--------------------------------------------------------------------------------------------
  /*
   * Validate and vectorize the Reduce operator tree.
   */
  if (!validateAndVectorizeReduceOperators(reduceWork, vectorTaskColumnInfo)) {
    return;
  }
  //--------------------------------------------------------------------------------------------
  // Commit: copy the accumulated column info onto the vertex and enable vector mode.
  vectorTaskColumnInfo.transferToBaseWork(reduceWork);
  reduceWork.setVectorMode(true);
}
/**
 * Validates and vectorizes the ReduceWork operator tree rooted at the reducer.
 *
 * On success, the scratch column information collected by the task-level
 * VectorizationContext is copied into vectorTaskColumnInfo and the vertex's reducer
 * is replaced with the fully vectorized operator tree.
 *
 * @param reduceWork the vertex whose reducer is validated and (on success) replaced
 * @param vectorTaskColumnInfo receives the scratch column type information
 * @return true if the reduce operator tree was successfully vectorized
 * @throws SemanticException never thrown directly here, but declared for callers
 */
private boolean validateAndVectorizeReduceOperators(ReduceWork reduceWork,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws SemanticException {
  LOG.info("Validating and vectorizing ReduceWork... (vectorizedVertexNum " + vectorizedVertexNum + ")");
  Operator<? extends OperatorDesc> newVectorReducer;
  try {
    newVectorReducer =
        validateAndVectorizeReduceOperators(reduceWork.getReducer(), vectorTaskColumnInfo);
  } catch (VectorizerCannotVectorizeException e) {
    // The "not vectorized" information has been stored in the MapWork vertex.
    return false;
  } catch (RuntimeException e) {
    // NullPointerException and ClassCastException are subclasses of RuntimeException, so
    // this single handler replaces three formerly-duplicated, identical catch blocks.
    if (!isTestVectorizerSuppressFatalExceptions) {
      // Re-throw without losing original stack trace.
      throw e;
    }
    // Test mode: record the failure for EXPLAIN instead of failing the query.
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  }
  /*
   * The scratch column information was collected by the task VectorizationContext. Go get it.
   */
  VectorizationContext vContext =
      ((VectorizationOperator) newVectorReducer).getInputVectorizationContext();
  vectorTaskColumnInfo.setScratchTypeNameArray(
      vContext.getScratchColumnTypeNames());
  vectorTaskColumnInfo.setScratchdataTypePhysicalVariationsArray(
      vContext.getScratchDataTypePhysicalVariations());
  // Replace the reducer with our fully vectorized reduce operator tree.
  reduceWork.setReducer(newVectorReducer);
  return true;
}
/**
 * Vectorizes the reduce operator tree by hanging the reducer off a temporary dummy
 * parent (so the generic tree walker has a root), then returns the vectorized reducer.
 *
 * After vectorization, every operator in the new tree gets its VectorDesc stamped
 * onto its OperatorDesc (level by level) so EXPLAIN can display it.
 *
 * @throws VectorizerCannotVectorizeException if any operator cannot be vectorized
 */
private Operator<? extends OperatorDesc> validateAndVectorizeReduceOperators(
    Operator<? extends OperatorDesc> reducerOperator,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws VectorizerCannotVectorizeException {
  Operator<? extends OperatorDesc> dummyParent = new DummyOperator();
  dummyParent.getChildOperators().add(reducerOperator);
  Operator<? extends OperatorDesc> vectorizedDummy =
      validateAndVectorizeOperatorTree(dummyParent, true, true, vectorTaskColumnInfo);
  Operator<? extends OperatorDesc> vectorReducer =
      vectorizedDummy.getChildOperators().get(0);
  // Stamp VectorDesc on every operator in the new tree, one level at a time.
  List<Operator<? extends OperatorDesc>> level =
      new ArrayList<Operator<? extends OperatorDesc>>();
  level.add(vectorReducer);
  while (!level.isEmpty()) {
    level = dosetVectorDesc(level);
  }
  return vectorReducer;
}
/**
 * Extracts the reduce-side column names and types from the ReduceWork's key and value
 * deserializers and records them (plus the key sort/null orders) in vectorTaskColumnInfo.
 *
 * Both the key and value object inspectors must be StructObjectInspectors; key columns
 * are prefixed "KEY." and value columns "VALUE.". Returns false (after recording a node
 * issue) when the vertex cannot be vectorized; any deserializer failure is wrapped in a
 * SemanticException.
 *
 * @param reduceWork the vertex whose key/value table descriptors are inspected
 * @param vectorTaskColumnInfo receives column names, types, and key sort/null orders
 * @return true if struct inspectors were obtained and the column info was recorded
 * @throws SemanticException if deserializer instantiation or initialization fails
 */
private boolean getOnlyStructObjectInspectors(ReduceWork reduceWork,
VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException {
ArrayList<String> reduceColumnNames = new ArrayList<String>();
ArrayList<TypeInfo> reduceTypeInfos = new ArrayList<TypeInfo>();
// Multi-parent (tagged) reduce input is not supported by the vectorized reader.
if (reduceWork.getNeedsTagging()) {
setNodeIssue("Tagging not supported");
return false;
}
String columnSortOrder;
String columnNullOrder;
try {
TableDesc keyTableDesc = reduceWork.getKeyDesc();
if (LOG.isDebugEnabled()) {
LOG.debug("Using reduce tag " + reduceWork.getTag());
}
TableDesc valueTableDesc = reduceWork.getTagToValueDesc().get(reduceWork.getTag());
Properties keyTableProperties = keyTableDesc.getProperties();
// Instantiate and initialize the key deserializer so we can read its schema.
Deserializer keyDeserializer =
ReflectionUtils.newInstance(
keyTableDesc.getDeserializerClass(), null);
SerDeUtils.initializeSerDe(keyDeserializer, null, keyTableProperties, null);
ObjectInspector keyObjectInspector = keyDeserializer.getObjectInspector();
if (keyObjectInspector == null) {
setNodeIssue("Key object inspector null");
return false;
}
if (!(keyObjectInspector instanceof StructObjectInspector)) {
setNodeIssue("Key object inspector not StructObjectInspector");
return false;
}
StructObjectInspector keyStructObjectInspector = (StructObjectInspector) keyObjectInspector;
List<? extends StructField> keyFields = keyStructObjectInspector.getAllStructFieldRefs();
// Key columns come first, prefixed with "KEY.".
for (StructField field: keyFields) {
reduceColumnNames.add(Utilities.ReduceField.KEY.toString() + "." + field.getFieldName());
reduceTypeInfos.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getFieldObjectInspector().getTypeName()));
}
// The key serde properties carry the sort order (+/-) and null-sort order per key column.
columnSortOrder = keyTableProperties.getProperty(serdeConstants.SERIALIZATION_SORT_ORDER);
columnNullOrder = keyTableProperties.getProperty(serdeConstants.SERIALIZATION_NULL_SORT_ORDER);
Deserializer valueDeserializer =
ReflectionUtils.newInstance(
valueTableDesc.getDeserializerClass(), null);
SerDeUtils.initializeSerDe(valueDeserializer, null, valueTableDesc.getProperties(), null);
ObjectInspector valueObjectInspector = valueDeserializer.getObjectInspector();
// A null value inspector is tolerated (value-less reduce); a non-struct one is not.
if (valueObjectInspector != null) {
if (!(valueObjectInspector instanceof StructObjectInspector)) {
setNodeIssue("Value object inspector not StructObjectInspector");
return false;
}
StructObjectInspector valueStructObjectInspector = (StructObjectInspector) valueObjectInspector;
List<? extends StructField> valueFields = valueStructObjectInspector.getAllStructFieldRefs();
// Value columns follow the keys, prefixed with "VALUE.".
for (StructField field: valueFields) {
reduceColumnNames.add(Utilities.ReduceField.VALUE.toString() + "." + field.getFieldName());
reduceTypeInfos.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getFieldObjectInspector().getTypeName()));
}
}
} catch (Exception e) {
throw new SemanticException(e);
}
vectorTaskColumnInfo.setAllColumnNames(reduceColumnNames);
vectorTaskColumnInfo.setAllTypeInfos(reduceTypeInfos);
vectorTaskColumnInfo.setReduceColumnSortOrder(columnSortOrder);
vectorTaskColumnInfo.setReduceColumnNullOrder(columnNullOrder);
return true;
}
}
/**
 * PhysicalPlanResolver entry point: reads all vectorization-related configuration from
 * the session HiveConf, decides whether vectorization may be attempted (honoring the
 * test-only override, which can force-enable or force-disable it), and then walks the
 * task graph with a VectorizationDispatcher to convert eligible vertices.
 *
 * Side effects: initializes many instance fields (flags, exclude-class lists, support
 * sets) used by the rest of the Vectorizer; in the forced-ENABLE case it also writes
 * HIVE_VECTORIZATION_ENABLED=true back into the conf because other code reads it.
 *
 * @param physicalContext the physical plan context supplying conf and root tasks
 * @return the same physicalContext (possibly with vectorized plans)
 * @throws SemanticException if the task-graph walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException {
hiveConf = physicalContext.getConf();
planMapper = physicalContext.getContext().getPlanMapper();
// Test-only override: NONE (respect the normal flag), ENABLE, or DISABLE.
String vectorizationEnabledOverrideString =
HiveConf.getVar(hiveConf,
HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_ENABLED_OVERRIDE);
vectorizationEnabledOverride =
EnabledOverride.nameMap.get(vectorizationEnabledOverrideString);
isVectorizationEnabled = HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED);
final boolean weCanAttemptVectorization;
isTestForcedVectorizationEnable = false;
switch (vectorizationEnabledOverride) {
case NONE:
weCanAttemptVectorization = isVectorizationEnabled;
break;
case DISABLE:
weCanAttemptVectorization = false;
break;
case ENABLE:
weCanAttemptVectorization = true;
isTestForcedVectorizationEnable = !isVectorizationEnabled;
// Different parts of the code rely on this being set...
HiveConf.setBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
isVectorizationEnabled = true;
break;
default:
throw new RuntimeException("Unexpected vectorization enabled override " +
vectorizationEnabledOverride);
}
if (!weCanAttemptVectorization) {
LOG.info("Vectorization is disabled");
return physicalContext;
}
// From here on, harvest every vectorization-related knob into instance fields.
useVectorizedInputFileFormat =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT);
if(useVectorizedInputFileFormat) {
initVectorizedInputFormatExcludeClasses();
}
useVectorDeserialize =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE);
useRowDeserialize =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE);
if (useRowDeserialize) {
initRowDeserializeExcludeClasses();
}
// TODO: we could also vectorize some formats based on hive.llap.io.encode.formats if LLAP IO
// is enabled and we are going to run in LLAP. However, we don't know if we end up in
// LLAP or not at this stage, so don't do this now. We may need to add a 'force' option.
isReduceVectorizationEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED);
isPtfVectorizationEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_PTF_ENABLED);
isVectorizationComplexTypesEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_COMPLEX_TYPES_ENABLED);
isVectorizationGroupByComplexTypesEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_COMPLEX_TYPES_ENABLED);
isVectorizedRowIdentifierEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_ROW_IDENTIFIER_ENABLED);
vectorizedPTFMaxMemoryBufferingBatchCount =
HiveConf.getIntVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_PTF_MAX_MEMORY_BUFFERING_BATCH_COUNT);
vectorizedTestingReducerBatchSize =
HiveConf.getIntVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_TESTING_REDUCER_BATCH_SIZE);
isTestVectorizerSuppressFatalExceptions =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_TEST_VECTORIZER_SUPPRESS_FATAL_EXCEPTIONS);
vectorizedInputFormatSupportEnabled =
HiveConf.getVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED);
// Parse the comma-separated support list; unknown entries are silently skipped.
String[] supportEnabledStrings = vectorizedInputFormatSupportEnabled.toLowerCase().split(",");
vectorizedInputFormatSupportEnabledSet = new TreeSet<Support>();
for (String supportEnabledString : supportEnabledStrings) {
Support support = Support.nameToSupportMap.get(supportEnabledString);
// Known?
if (support != null) {
vectorizedInputFormatSupportEnabledSet.add(support);
}
}
/*
 * Notice the default value for LLAP_IO_ENABLED is overridden to be whether we are
 * executing under LLAP.
 */
isLlapIoEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.LLAP_IO_ENABLED,
LlapProxy.isDaemon());
isSchemaEvolution =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_SCHEMA_EVOLUTION);
hiveVectorAdaptorUsageMode = HiveVectorAdaptorUsageMode.getHiveConfValue(hiveConf);
isTestVectorizationSuppressExplainExecutionMode =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_SUPPRESS_EXPLAIN_EXECUTION_MODE);
// create dispatcher and graph walker
Dispatcher disp = new VectorizationDispatcher();
TaskGraphWalker ogw = new TaskGraphWalker(disp);
// get all the tasks nodes from root task
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(physicalContext.getRootTasks());
// begin to walk through the task tree.
ogw.startWalking(topNodes, null);
return physicalContext;
}
/** Loads the configured list of input format classes excluded from VectorizedInputFileFormat. */
private void initVectorizedInputFormatExcludeClasses() {
  vectorizedInputFormatExcludes =
      Utilities.getClassNamesFromConfig(
          hiveConf, ConfVars.HIVE_VECTORIZATION_VECTORIZED_INPUT_FILE_FORMAT_EXCLUDES);
}
/** Loads the configured list of input format classes excluded from row-deserialize mode. */
private void initRowDeserializeExcludeClasses() {
  rowDeserializeInputFormatExcludes =
      Utilities.getClassNamesFromConfig(
          hiveConf, ConfVars.HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES);
}
/**
 * Records a "not supported" node issue for the given operator, preferring the
 * operator's @Explain display name when that annotation is present on its descriptor.
 */
private void setOperatorNotSupported(Operator<? extends OperatorDesc> op) {
  Annotation note = AnnotationUtils.getAnnotation(op.getConf().getClass(), Explain.class);
  if (note == null) {
    setNodeIssue("Operator " + op.getType() + " not supported");
  } else {
    Explain explainNote = (Explain) note;
    setNodeIssue(explainNote.displayName() + " (" + op.getType() + ") not supported");
  }
}
/**
 * Validates an SMB map join for vectorization.
 * Validation is the same as for map join, since the 'small' tables are not vectorized.
 */
private boolean validateSMBMapJoinOperator(SMBMapJoinOperator op) {
  return validateMapJoinDesc(op.getConf());
}
/**
 * Validates a table scan for vectorization: scans that gather statistics
 * cannot be vectorized.
 */
private boolean validateTableScanOperator(TableScanOperator op, MapWork mWork) {
  if (op.getConf().isGatherStats()) {
    setOperatorIssue("gather stats not supported");
    return false;
  }
  return true;
}
/** Validates a map join operator by validating its descriptor's expressions. */
private boolean validateMapJoinOperator(MapJoinOperator op) {
  return validateMapJoinDesc(op.getConf());
}
/**
 * Validates a map join descriptor: the big table's filter, key, and value expressions
 * plus the small table's value expressions must all be vectorizable, and residual
 * (non-equi) filters are not supported.
 */
private boolean validateMapJoinDesc(MapJoinDesc desc) {
  final byte posBigTable = (byte) desc.getPosBigTable();
  // Big-table filters run in FILTER mode and may use complex types.
  if (!validateExprNodeDesc(
      desc.getFilters().get(posBigTable), "Filter",
      VectorExpressionDescriptor.Mode.FILTER, /* allowComplex */ true)) {
    return false;
  }
  if (!validateExprNodeDesc(desc.getKeys().get(posBigTable), "Key")) {
    return false;
  }
  if (!validateExprNodeDesc(desc.getExprs().get(posBigTable), "Value")) {
    return false;
  }
  // The single small table is whichever tag is not the big table's.
  Byte[] order = desc.getTagOrder();
  Byte smallTablePos = (order[0] == posBigTable ? order[1] : order[0]);
  if (!validateExprNodeDesc(desc.getExprs().get(smallTablePos), "Small Table")) {
    return false;
  }
  if (desc.getResidualFilterExprs() != null && !desc.getResidualFilterExprs().isEmpty()) {
    setOperatorIssue("Non-equi joins not supported");
    return false;
  }
  return true;
}
/**
 * Validates a Spark hash table sink; its descriptor is essentially a MapJoinDesc,
 * so the tag's filter, key, and value expressions are validated the same way.
 */
private boolean validateSparkHashTableSinkOperator(SparkHashTableSinkOperator op) {
  SparkHashTableSinkDesc desc = op.getConf();
  byte tag = desc.getTag();
  if (!validateExprNodeDesc(
      desc.getFilters().get(tag), "Filter",
      VectorExpressionDescriptor.Mode.FILTER, /* allowComplex */ true)) {
    return false;
  }
  return validateExprNodeDesc(desc.getKeys().get(tag), "Key")
      && validateExprNodeDesc(desc.getExprs().get(tag), "Value");
}
/**
 * Validates a reduce sink: its key, partition, and value column expressions
 * must all be vectorizable.
 */
private boolean validateReduceSinkOperator(ReduceSinkOperator op) {
  return validateExprNodeDesc(op.getConf().getKeyCols(), "Key")
      && validateExprNodeDesc(op.getConf().getPartitionCols(), "Partition")
      && validateExprNodeDesc(op.getConf().getValueCols(), "Value");
}
/**
 * Validates a select operator: every projected expression must be vectorizable
 * in PROJECTION mode; complex types and void projections are permitted.
 */
private boolean validateSelectOperator(SelectOperator op) {
  for (ExprNodeDesc colExpr : op.getConf().getColList()) {
    if (!validateExprNodeDesc(
        colExpr, "Select",
        VectorExpressionDescriptor.Mode.PROJECTION,
        /* allowComplex */ true, /* allowVoidProjection */ true)) {
      return false;
    }
  }
  return true;
}
/**
 * Validates a filter operator: its predicate must be vectorizable in FILTER mode;
 * complex types are permitted.
 */
private boolean validateFilterOperator(FilterOperator op) {
  ExprNodeDesc predicate = op.getConf().getPredicate();
  return validateExprNodeDesc(
      predicate, "Predicate", VectorExpressionDescriptor.Mode.FILTER, /* allowComplex */ true);
}
/** Validates a top-n-key operator: only its key column expressions need checking. */
private boolean validateTopNKeyOperator(TopNKeyOperator op) {
  return validateExprNodeDesc(op.getConf().getKeyColumns(), "Key columns");
}
/**
 * Validates whether a GROUP BY operator can be vectorized and, on success,
 * records the chosen processing mode and complex-type flags in
 * vectorGroupByDesc.
 *
 * Rejection reasons (each recorded via setOperatorIssue/setExpressionIssue):
 * DISTINCT outside HASH mode, non-vectorizable or complex-typed key
 * expressions, GROUPING SETS under a processing mode other than HASH or
 * STREAMING, and unsupported aggregation functions.
 */
private boolean validateGroupByOperator(GroupByOperator op, boolean isReduce,
    boolean isTezOrSpark, VectorGroupByDesc vectorGroupByDesc) {
  GroupByDesc desc = op.getConf();
  if (desc.getMode() != GroupByDesc.Mode.HASH && desc.isDistinct()) {
    setOperatorIssue("DISTINCT not supported");
    return false;
  }
  // Complex types are never allowed as GROUP BY keys.
  boolean ret = validateExprNodeDescNoComplex(desc.getKeys(), "Key");
  if (!ret) {
    return false;
  }
  /**
   *
   * GROUP BY DEFINITIONS:
   *
   * GroupByDesc.Mode enumeration:
   *
   *    The different modes of a GROUP BY operator.
   *
   *    These descriptions are hopefully less cryptic than the comments for GroupByDesc.Mode.
   *
   *        COMPLETE       Aggregates original rows into full aggregation row(s).
   *
   *                       If the key length is 0, this is also called Global aggregation and
   *                       1 output row is produced.
   *
   *                       When the key length is > 0, the original rows come in ALREADY GROUPED.
   *
   *                       An example for key length > 0 is a GROUP BY being applied to the
   *                       ALREADY GROUPED rows coming from an upstream JOIN operator.  Or,
   *                       ALREADY GROUPED rows coming from upstream MERGEPARTIAL GROUP BY
   *                       operator.
   *
   *        PARTIAL1       The first of 2 (or more) phases that aggregates ALREADY GROUPED
   *                       original rows into partial aggregations.
   *
   *                       Subsequent phases PARTIAL2 (optional) and MERGEPARTIAL will merge
   *                       the partial aggregations and output full aggregations.
   *
   *        PARTIAL2       Accept ALREADY GROUPED partial aggregations and merge them into another
   *                       partial aggregation.  Output the merged partial aggregations.
   *
   *                       (Haven't seen this one used)
   *
   *        PARTIALS       (Behaves for non-distinct the same as PARTIAL2; and behaves for
   *                       distinct the same as PARTIAL1.)
   *
   *        FINAL          Accept ALREADY GROUPED original rows and aggregate them into
   *                       full aggregations.
   *
   *                       Example is a GROUP BY being applied to rows from a sorted table, where
   *                       the group key is the table sort key (or a prefix).
   *
   *        HASH           Accept UNORDERED original rows and aggregate them into a memory table.
   *                       Output the partial aggregations on closeOp (or low memory).
   *
   *                       Similar to PARTIAL1 except original rows are UNORDERED.
   *
   *                       Commonly used in both Mapper and Reducer nodes.  Always followed by
   *                       a Reducer with MERGEPARTIAL GROUP BY.
   *
   *        MERGEPARTIAL   Always first operator of a Reducer.  Data is grouped by reduce-shuffle.
   *
   *                       (Behaves for non-distinct aggregations the same as FINAL; and behaves
   *                       for distinct aggregations the same as COMPLETE.)
   *
   *                       The output is full aggregation(s).
   *
   *                       Used in Reducers after a stage with a HASH GROUP BY operator.
   *
   *
   * VectorGroupByDesc.ProcessingMode for VectorGroupByOperator:
   *
   *     GLOBAL         No key.  All rows --> 1 full aggregation on end of input
   *
   *     HASH           Rows aggregated in to hash table on group key -->
   *                        1 partial aggregation per key (normally, unless there is spilling)
   *
   *     MERGE_PARTIAL  As first operator in a REDUCER, partial aggregations come grouped from
   *                    reduce-shuffle -->
   *                        aggregate the partial aggregations and emit full aggregation on
   *                        endGroup / closeOp
   *
   *     STREAMING      Rows come from PARENT operator ALREADY GROUPED -->
   *                        aggregate the rows and emit full aggregation on key change / closeOp
   *
   *     NOTE: Hash can spill partial result rows prematurely if it runs low on memory.
   *     NOTE: Streaming has to compare keys where MergePartial gets an endGroup call.
   *
   *
   * DECIDER: Which VectorGroupByDesc.ProcessingMode for VectorGroupByOperator?
   *
   *     Decides using GroupByDesc.Mode and whether there are keys with the
   *     VectorGroupByDesc.groupByDescModeToVectorProcessingMode method.
   *
   *         Mode.COMPLETE      --> (numKeys == 0 ? ProcessingMode.GLOBAL : ProcessingMode.STREAMING)
   *
   *         Mode.HASH          --> ProcessingMode.HASH
   *
   *         Mode.MERGEPARTIAL  --> (numKeys == 0 ? ProcessingMode.GLOBAL : ProcessingMode.MERGE_PARTIAL)
   *
   *         Mode.PARTIAL1,
   *         Mode.PARTIAL2,
   *         Mode.PARTIALS,
   *         Mode.FINAL         --> ProcessingMode.STREAMING
   *
   */
  boolean hasKeys = (desc.getKeys().size() > 0);
  ProcessingMode processingMode =
      VectorGroupByDesc.groupByDescModeToVectorProcessingMode(desc.getMode(), hasKeys);
  if (desc.isGroupingSetsPresent() &&
      (processingMode != ProcessingMode.HASH && processingMode != ProcessingMode.STREAMING)) {
    setOperatorIssue("Vectorized GROUPING SETS only expected for HASH and STREAMING processing modes");
    return false;
  }
  if (!validateAggregationDescs(desc.getAggregators(), desc.getMode(), hasKeys)) {
    return false;
  }
  // Record the decisions so the vectorized operator is built consistently.
  vectorGroupByDesc.setProcessingMode(processingMode);
  vectorGroupByDesc.setIsVectorizationComplexTypesEnabled(isVectorizationComplexTypesEnabled);
  vectorGroupByDesc.setIsVectorizationGroupByComplexTypesEnabled(isVectorizationGroupByComplexTypesEnabled);
  LOG.info("Vector GROUP BY operator will use processing mode " + processingMode.name());
  return true;
}
/**
 * FileSink has no expressions of its own to validate here, so it is always
 * considered vectorizable.
 */
private boolean validateFileSinkOperator(FileSinkOperator op) {
  return true;
}
/**
 * Recursively determines whether the PTF LEAD or LAG function appears anywhere
 * inside the given expression tree.
 */
private boolean containsLeadLag(ExprNodeDesc exprNodeDesc) {
  if (!(exprNodeDesc instanceof ExprNodeGenericFuncDesc)) {
    // Leaf nodes (ExprNodeColumnDesc, ExprNodeConstantDesc, ExprNodeDynamicValueDesc, etc.)
    // cannot contain LEAD/LAG.
    return false;
  }
  ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) exprNodeDesc;
  GenericUDF udf = funcDesc.getGenericUDF();
  if (udf instanceof GenericUDFLag || udf instanceof GenericUDFLead) {
    return true;
  }
  return containsLeadLag(funcDesc.getChildren());
}
/**
 * Returns true if any expression in the list contains a LEAD or LAG call.
 */
private boolean containsLeadLag(List<ExprNodeDesc> exprNodeDescList) {
  return exprNodeDescList.stream().anyMatch(this::containsLeadLag);
}
/**
 * Validates whether a PTF (windowing) operator can be vectorized and fills in
 * vectorPTFDesc with the information gathered along the way.
 *
 * Only reduce-side windowing PTFs sitting directly below a reduce-shuffle
 * (optionally through an intervening SELECT) are supported, with a restricted
 * set of window functions, frame definitions, output types, and argument
 * types. Every rejection records a reason via setNodeIssue/setOperatorIssue/
 * setExpressionIssue for EXPLAIN output.
 */
private boolean validatePTFOperator(PTFOperator op, VectorizationContext vContext,
    VectorPTFDesc vectorPTFDesc)
    throws HiveException {
  if (!isPtfVectorizationEnabled) {
    setNodeIssue("Vectorization of PTF is not enabled (" +
        HiveConf.ConfVars.HIVE_VECTORIZATION_PTF_ENABLED.varname + " IS false)");
    return false;
  }
  PTFDesc ptfDesc = op.getConf();
  boolean isMapSide = ptfDesc.isMapSide();
  if (isMapSide) {
    setOperatorIssue("PTF Mapper not supported");
    return false;
  }
  // The PTF must be fed by a reduce-shuffle, either directly or through a
  // single SELECT operator.
  List<Operator<? extends OperatorDesc>> ptfParents = op.getParentOperators();
  if (ptfParents != null && ptfParents.size() > 0) {
    Operator<? extends OperatorDesc> ptfParent = op.getParentOperators().get(0);
    if (!(ptfParent instanceof ReduceSinkOperator)) {
      boolean isReduceShufflePtf = false;
      if (ptfParent instanceof SelectOperator) {
        ptfParents = ptfParent.getParentOperators();
        if (ptfParents == null || ptfParents.size() == 0) {
          isReduceShufflePtf = true;
        } else {
          ptfParent = ptfParent.getParentOperators().get(0);
          isReduceShufflePtf = (ptfParent instanceof ReduceSinkOperator);
        }
      }
      if (!isReduceShufflePtf) {
        setOperatorIssue("Only PTF directly under reduce-shuffle is supported");
        return false;
      }
    }
  }
  boolean forNoop = ptfDesc.forNoop();
  if (forNoop) {
    setOperatorIssue("NOOP not supported");
    return false;
  }
  boolean forWindowing = ptfDesc.forWindowing();
  if (!forWindowing) {
    setOperatorIssue("Windowing required");
    return false;
  }
  PartitionedTableFunctionDef funcDef = ptfDesc.getFuncDef();
  boolean isWindowTableFunctionDef = (funcDef instanceof WindowTableFunctionDef);
  if (!isWindowTableFunctionDef) {
    setOperatorIssue("Must be a WindowTableFunctionDef");
    return false;
  }
  // We collect information in VectorPTFDesc that doesn't need the VectorizationContext.
  // We use this information for validation.  Later when creating the vector operator
  // we create an additional object VectorPTFInfo.
  try {
    createVectorPTFDesc(
        op, ptfDesc, vContext, vectorPTFDesc, vectorizedPTFMaxMemoryBufferingBatchCount);
  } catch (HiveException e) {
    setOperatorIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  }
  // Output columns ok?  Complex output types are not allowed.
  String[] outputColumnNames = vectorPTFDesc.getOutputColumnNames();
  TypeInfo[] outputTypeInfos = vectorPTFDesc.getOutputTypeInfos();
  final int outputCount = outputColumnNames.length;
  for (int i = 0; i < outputCount; i++) {
    String typeName = outputTypeInfos[i].getTypeName();
    boolean ret = validateDataType(typeName, VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ false);
    if (!ret) {
      setExpressionIssue("PTF Output Columns", "Data type " + typeName + " of column " + outputColumnNames[i] + " not supported");
      return false;
    }
  }
  // NOTE(review): isPartitionOrderBy is read but not used below — confirm intended.
  boolean isPartitionOrderBy = vectorPTFDesc.getIsPartitionOrderBy();
  String[] evaluatorFunctionNames = vectorPTFDesc.getEvaluatorFunctionNames();
  final int count = evaluatorFunctionNames.length;
  WindowFrameDef[] evaluatorWindowFrameDefs = vectorPTFDesc.getEvaluatorWindowFrameDefs();
  List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists = vectorPTFDesc.getEvaluatorInputExprNodeDescLists();
  // Check each window-function evaluator: supported function, supported frame,
  // and supported argument expression/type.
  for (int i = 0; i < count; i++) {
    String functionName = evaluatorFunctionNames[i];
    SupportedFunctionType supportedFunctionType = VectorPTFDesc.supportedFunctionsMap.get(functionName);
    if (supportedFunctionType == null) {
      setOperatorIssue(functionName + " not in supported functions " + VectorPTFDesc.supportedFunctionNames);
      return false;
    }
    WindowFrameDef windowFrameDef = evaluatorWindowFrameDefs[i];
    if (!windowFrameDef.isStartUnbounded()) {
      setOperatorIssue(functionName + " only UNBOUNDED start frame is supported");
      return false;
    }
    List<ExprNodeDesc> exprNodeDescList = evaluatorInputExprNodeDescLists[i];
    final boolean isSingleParameter =
        (exprNodeDescList != null &&
        exprNodeDescList.size() == 1);
    final ExprNodeDesc singleExprNodeDesc =
        (isSingleParameter ? exprNodeDescList.get(0) : null);
    final TypeInfo singleTypeInfo =
        (isSingleParameter ? singleExprNodeDesc.getTypeInfo() : null);
    final PrimitiveCategory singlePrimitiveCategory =
        (singleTypeInfo instanceof PrimitiveTypeInfo ?
            ((PrimitiveTypeInfo) singleTypeInfo).getPrimitiveCategory() : null);
    switch (windowFrameDef.getWindowType()) {
    case RANGE:
      if (!windowFrameDef.getEnd().isCurrentRow()) {
        setOperatorIssue(functionName + " only CURRENT ROW end frame is supported for RANGE");
        return false;
      }
      break;
    case ROWS:
      {
        // ROWS ... CURRENT ROW is only allowed for AVG/MAX/MIN/SUM with a
        // single primitive-typed argument; otherwise the end must be UNBOUNDED.
        boolean isRowEndCurrent =
            (windowFrameDef.getEnd().isCurrentRow() &&
            (supportedFunctionType == SupportedFunctionType.AVG ||
            supportedFunctionType == SupportedFunctionType.MAX ||
            supportedFunctionType == SupportedFunctionType.MIN ||
            supportedFunctionType == SupportedFunctionType.SUM) &&
            isSingleParameter &&
            singlePrimitiveCategory != null);
        if (!isRowEndCurrent && !windowFrameDef.isEndUnbounded()) {
          setOperatorIssue(
              functionName + " UNBOUNDED end frame is required for ROWS window type");
          return false;
        }
      }
      break;
    default:
      throw new RuntimeException("Unexpected window type " + windowFrameDef.getWindowType());
    }
    // RANK/DENSE_RANK don't care about columns.
    if (supportedFunctionType != SupportedFunctionType.RANK &&
        supportedFunctionType != SupportedFunctionType.DENSE_RANK) {
      if (exprNodeDescList != null) {
        if (exprNodeDescList.size() > 1) {
          setOperatorIssue("More than 1 argument expression of aggregation function " + functionName);
          return false;
        }
        ExprNodeDesc exprNodeDesc = exprNodeDescList.get(0);
        if (containsLeadLag(exprNodeDesc)) {
          setOperatorIssue("lead and lag function not supported in argument expression of aggregation function " + functionName);
          return false;
        }
        if (supportedFunctionType != SupportedFunctionType.COUNT) {
          // COUNT does not care about column types.  The rest do.
          TypeInfo typeInfo = exprNodeDesc.getTypeInfo();
          Category category = typeInfo.getCategory();
          boolean isSupportedType;
          if (category != Category.PRIMITIVE) {
            isSupportedType = false;
          } else {
            // Argument must map to a LONG, DOUBLE, or DECIMAL column vector.
            ColumnVector.Type colVecType =
                VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
            switch (colVecType) {
            case LONG:
            case DOUBLE:
            case DECIMAL:
              isSupportedType = true;
              break;
            default:
              isSupportedType = false;
              break;
            }
          }
          if (!isSupportedType) {
            setOperatorIssue(typeInfo.getTypeName() + " data type not supported in argument expression of aggregation function " + functionName);
            return false;
          }
        }
      }
    }
  }
  return true;
}
/**
 * Validates a list of expressions for PROJECTION with complex types allowed.
 */
private boolean validateExprNodeDesc(List<ExprNodeDesc> descs, String expressionTitle) {
  return validateExprNodeDesc(descs, expressionTitle,
      VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ true);
}
/**
 * Validates a list of expressions for PROJECTION with complex types
 * disallowed (used for GROUP BY key expressions).
 */
private boolean validateExprNodeDescNoComplex(List<ExprNodeDesc> descs, String expressionTitle) {
  return validateExprNodeDesc(descs, expressionTitle,
      VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ false);
}
/**
 * Validates every expression in the list, failing fast on the first one that
 * does not vectorize.
 */
private boolean validateExprNodeDesc(List<ExprNodeDesc> descs,
    String expressionTitle,
    VectorExpressionDescriptor.Mode mode,
    boolean allowComplex) {
  for (ExprNodeDesc expr : descs) {
    if (!validateExprNodeDesc(expr, expressionTitle, mode, allowComplex)) {
      return false;
    }
  }
  return true;
}
/**
 * Validates every aggregation descriptor, failing fast on the first
 * unsupported one.
 */
private boolean validateAggregationDescs(List<AggregationDesc> descs,
    GroupByDesc.Mode groupByMode, boolean hasKeys) {
  for (AggregationDesc aggDesc : descs) {
    if (!validateAggregationDesc(aggDesc, groupByMode, hasKeys)) {
      return false;
    }
  }
  return true;
}
/**
 * Recursive expression validation with void projections disallowed.
 */
private boolean validateExprNodeDescRecursive(ExprNodeDesc desc, String expressionTitle,
    VectorExpressionDescriptor.Mode mode, boolean allowComplex) {
  return validateExprNodeDescRecursive(desc, expressionTitle, mode, allowComplex,
      /* allowVoidProjection */ false);
}
/**
 * Recursively validates an expression tree for vectorization.
 *
 * Checks, in order: virtual-column availability for column references, the
 * node's data type (void projection and complex types gated by the flags),
 * generic-UDF support, and then all children. Child expressions are always
 * validated in the looser FILTER mode with complex types allowed; an IN()
 * over a STRUCT gets the special validateStructInExpression path.
 */
private boolean validateExprNodeDescRecursive(ExprNodeDesc desc, String expressionTitle,
    VectorExpressionDescriptor.Mode mode, boolean allowComplex, boolean allowVoidProjection) {
  if (desc instanceof ExprNodeColumnDesc) {
    ExprNodeColumnDesc c = (ExprNodeColumnDesc) desc;
    String columnName = c.getColumn();
    if (availableVectorizedVirtualColumnSet != null) {
      // For Map, check for virtual columns.
      VirtualColumn virtualColumn = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get(columnName);
      if (virtualColumn != null) {
        // We support some virtual columns in vectorization for this table scan.
        if (!availableVectorizedVirtualColumnSet.contains(virtualColumn)) {
          setExpressionIssue(expressionTitle, "Virtual column " + columnName + " is not supported");
          return false;
        }
        // Remember we used this one in the query.
        neededVirtualColumnSet.add(virtualColumn);
      }
    }
  }
  String typeName = desc.getTypeInfo().getTypeName();
  // Complex types additionally require the global enable flag.
  boolean ret = validateDataType(
      typeName, mode, allowComplex && isVectorizationComplexTypesEnabled, allowVoidProjection);
  if (!ret) {
    setExpressionIssue(expressionTitle,
        getValidateDataTypeErrorMsg(
            typeName, mode, allowComplex, isVectorizationComplexTypesEnabled));
    return false;
  }
  boolean isInExpression = false;
  if (desc instanceof ExprNodeGenericFuncDesc) {
    ExprNodeGenericFuncDesc d = (ExprNodeGenericFuncDesc) desc;
    boolean r = validateGenericUdf(d);
    if (!r) {
      setExpressionIssue(expressionTitle, "UDF " + d + " not supported");
      return false;
    }
    GenericUDF genericUDF = d.getGenericUDF();
    isInExpression = (genericUDF instanceof GenericUDFIn);
  }
  if (desc.getChildren() != null) {
    if (isInExpression
        && desc.getChildren().get(0).getTypeInfo().getCategory() == Category.STRUCT) {
      // Don't restrict child expressions for projection.
      // Always use loose FILTER mode.
      if (!validateStructInExpression(
          desc, expressionTitle, VectorExpressionDescriptor.Mode.FILTER)) {
        return false;
      }
    } else {
      for (ExprNodeDesc d : desc.getChildren()) {
        // Don't restrict child expressions for projection.
        // Always use loose FILTER mode.
        if (!validateExprNodeDescRecursive(
            d, expressionTitle, VectorExpressionDescriptor.Mode.FILTER,
            /* allowComplex */ true, allowVoidProjection)) {
          return false;
        }
      }
    }
  }
  return true;
}
/**
 * Validates a STRUCT IN() expression: every child must be a STRUCT whose
 * fields are all primitives in the INT, FLOAT, or STRING constant families
 * (the only families Vectorized Struct IN() currently supports).
 */
private boolean validateStructInExpression(ExprNodeDesc desc,
    String expressionTitle, VectorExpressionDescriptor.Mode mode) {
  for (ExprNodeDesc d : desc.getChildren()) {
    TypeInfo typeInfo = d.getTypeInfo();
    if (typeInfo.getCategory() != Category.STRUCT) {
      // NOTE(review): fails without recording an issue message via
      // setExpressionIssue, unlike the branches below — confirm intended.
      return false;
    }
    StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
    ArrayList<TypeInfo> fieldTypeInfos = structTypeInfo
        .getAllStructFieldTypeInfos();
    ArrayList<String> fieldNames = structTypeInfo.getAllStructFieldNames();
    final int fieldCount = fieldTypeInfos.size();
    for (int f = 0; f < fieldCount; f++) {
      TypeInfo fieldTypeInfo = fieldTypeInfos.get(f);
      Category category = fieldTypeInfo.getCategory();
      if (category != Category.PRIMITIVE) {
        setExpressionIssue(expressionTitle,
            "Cannot vectorize struct field " + fieldNames.get(f)
            + " of type " + fieldTypeInfo.getTypeName());
        return false;
      }
      PrimitiveTypeInfo fieldPrimitiveTypeInfo = (PrimitiveTypeInfo) fieldTypeInfo;
      InConstantType inConstantType = VectorizationContext
          .getInConstantTypeFromPrimitiveCategory(fieldPrimitiveTypeInfo
              .getPrimitiveCategory());
      // For now, limit the data types we support for Vectorized Struct IN().
      if (inConstantType != InConstantType.INT_FAMILY
          && inConstantType != InConstantType.FLOAT_FAMILY
          && inConstantType != InConstantType.STRING_FAMILY) {
        setExpressionIssue(expressionTitle,
            "Cannot vectorize struct field " + fieldNames.get(f)
            + " of type " + fieldTypeInfo.getTypeName());
        return false;
      }
    }
  }
  return true;
}
/**
 * Single-expression validation for PROJECTION: complex types allowed, void
 * projections disallowed.
 */
private boolean validateExprNodeDesc(ExprNodeDesc desc, String expressionTitle) {
  return validateExprNodeDesc(desc, expressionTitle,
      VectorExpressionDescriptor.Mode.PROJECTION,
      /* allowComplex */ true, /* allowVoidProjection */ false);
}
/**
 * Single-expression validation with void projections disallowed.
 */
boolean validateExprNodeDesc(ExprNodeDesc desc, String expressionTitle,
    VectorExpressionDescriptor.Mode mode, boolean allowComplex) {
  return validateExprNodeDescRecursive(desc, expressionTitle, mode, allowComplex,
      /* allowVoidProjection */ false);
}
/**
 * Single-expression validation with full control over mode, complex-type,
 * and void-projection allowances.
 */
boolean validateExprNodeDesc(ExprNodeDesc desc, String expressionTitle,
    VectorExpressionDescriptor.Mode mode, boolean allowComplex,
    boolean allowVoidProjection) {
  return validateExprNodeDescRecursive(desc, expressionTitle, mode, allowComplex,
      allowVoidProjection);
}
/**
 * Decides whether a generic UDF is usable in a vectorized plan.
 *
 * Custom UDFs are always accepted here (handled elsewhere by the adaptor).
 * When the vector adaptor usage mode is NONE or CHOSEN, the UDF class (or the
 * wrapped class for a GenericUDFBridge) must be in supportedGenericUDFs;
 * any other usage mode accepts the UDF unconditionally.
 */
private boolean validateGenericUdf(ExprNodeGenericFuncDesc genericUDFExpr) {
  if (VectorizationContext.isCustomUDF(genericUDFExpr)) {
    return true;
  }
  if (hiveVectorAdaptorUsageMode != HiveVectorAdaptorUsageMode.NONE
      && hiveVectorAdaptorUsageMode != HiveVectorAdaptorUsageMode.CHOSEN) {
    return true;
  }
  GenericUDF genericUDF = genericUDFExpr.getGenericUDF();
  Class<?> udfClass = (genericUDF instanceof GenericUDFBridge)
      ? ((GenericUDFBridge) genericUDF).getUdfClass()
      : genericUDF.getClass();
  return supportedGenericUDFs.contains(udfClass);
}
/**
 * Validates a single aggregation: the UDAF name must be in
 * supportedAggregationUdfs, DISTINCT is not supported, and every parameter
 * expression must itself vectorize.
 */
private boolean validateAggregationDesc(AggregationDesc aggDesc, GroupByDesc.Mode groupByMode,
    boolean hasKeys) {
  final String udfName = aggDesc.getGenericUDAFName().toLowerCase();
  if (!supportedAggregationUdfs.contains(udfName)) {
    setExpressionIssue("Aggregation Function", "UDF " + udfName + " not supported");
    return false;
  }
  if (aggDesc.getDistinct()) {
    // The planner seems to pull this one out.
    setExpressionIssue("Aggregation Function", "DISTINCT not supported");
    return false;
  }
  ArrayList<ExprNodeDesc> parameters = aggDesc.getParameters();
  return parameters == null
      || validateExprNodeDesc(parameters, "Aggregation Function UDF " + udfName + " parameter");
}
/**
 * Data-type validation with void projections disallowed.
 */
public static boolean validateDataType(String type, VectorExpressionDescriptor.Mode mode,
    boolean allowComplex) {
  return validateDataType(type, mode, allowComplex, /* allowVoidProjection */ false);
}
/**
 * Returns true when the (case-insensitive) type name is vectorizable in the
 * given mode. "void" is rejected for PROJECTION unless explicitly allowed.
 * A type outside supportedDataTypesPattern is accepted only when it parses to
 * a non-primitive (complex) category and allowComplex is set.
 */
public static boolean validateDataType(String type, VectorExpressionDescriptor.Mode mode,
    boolean allowComplex, boolean allowVoidProjection) {
  final String lowerType = type.toLowerCase();
  final boolean matchesSupported = supportedDataTypesPattern.matcher(lowerType).matches();
  if (matchesSupported) {
    // "void" may only be projected when the caller explicitly allows it.
    return allowVoidProjection
        || mode != VectorExpressionDescriptor.Mode.PROJECTION
        || !lowerType.equals("void");
  }
  // Not in the supported-primitive pattern: complex categories may still pass.
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(lowerType);
  return allowComplex && typeInfo.getCategory() != Category.PRIMITIVE;
}
/**
 * Error-message variant with void projections disallowed.
 */
public static String getValidateDataTypeErrorMsg(String type, VectorExpressionDescriptor.Mode mode,
    boolean allowComplex, boolean isVectorizationComplexTypesEnabled) {
  return getValidateDataTypeErrorMsg(type, mode, allowComplex,
      isVectorizationComplexTypesEnabled, /* allowVoidProjection */ false);
}
/**
 * Mirrors validateDataType but returns a human-readable reason string when
 * the type is not vectorizable, or null when it is.
 */
public static String getValidateDataTypeErrorMsg(String type, VectorExpressionDescriptor.Mode mode,
    boolean allowComplex, boolean isVectorizationComplexTypesEnabled,
    boolean allowVoidProjection) {
  final String lowerType = type.toLowerCase();
  if (supportedDataTypesPattern.matcher(lowerType).matches()) {
    if (!allowVoidProjection
        && mode == VectorExpressionDescriptor.Mode.PROJECTION
        && lowerType.equals("void")) {
      return "Vectorizing data type void not supported when mode = PROJECTION";
    }
    return null;
  }
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(lowerType);
  if (typeInfo.getCategory() == Category.PRIMITIVE) {
    // A primitive that simply is not in the supported pattern.
    return "Vectorizing data type " + lowerType + " not supported";
  }
  if (!allowComplex) {
    return "Vectorizing complex type " + typeInfo.getCategory() + " not supported";
  }
  if (isVectorizationComplexTypesEnabled) {
    return null;
  }
  return "Vectorizing complex type " + typeInfo.getCategory() + " not enabled (" +
      lowerType + ") since " +
      GroupByDesc.getComplexTypeEnabledCondition(
          isVectorizationComplexTypesEnabled);
}
/**
 * Builds a VectorizationContext for a task from the column names, type infos,
 * and physical data-type variations gathered in vectorTaskColumnInfo.
 */
private VectorizationContext getVectorizationContext(String contextName,
    VectorTaskColumnInfo vectorTaskColumnInfo) {
  return new VectorizationContext(
      contextName,
      vectorTaskColumnInfo.allColumnNames,
      vectorTaskColumnInfo.allTypeInfos,
      vectorTaskColumnInfo.allDataTypePhysicalVariations,
      hiveConf);
}
/**
 * Splices vectorOp into the operator DAG in place of op: vectorOp inherits
 * op's parent and child lists, and each parent/child is re-pointed at
 * vectorOp.
 */
private void fixupParentChildOperators(Operator<? extends OperatorDesc> op,
    Operator<? extends OperatorDesc> vectorOp) {
  List<Operator<? extends OperatorDesc>> parents = op.getParentOperators();
  if (parents != null) {
    vectorOp.setParentOperators(parents);
    for (Operator<? extends OperatorDesc> parent : parents) {
      parent.replaceChild(op, vectorOp);
    }
  }
  List<Operator<? extends OperatorDesc>> children = op.getChildOperators();
  if (children != null) {
    vectorOp.setChildOperators(children);
    for (Operator<? extends OperatorDesc> child : children) {
      child.replaceParent(op, vectorOp);
    }
  }
}
/**
 * Returns true when the map join's output references only big-table columns,
 * i.e. no small-table value needs to be retained or deserialized. This is the
 * precondition for the INNER_BIG_ONLY (multi-set based) specialization.
 */
private boolean isBigTableOnlyResults(MapJoinDesc desc) {
  Byte[] order = desc.getTagOrder();
  byte posBigTable = (byte) desc.getPosBigTable();
  // With a single small table, its position is whichever tag isn't the big table's.
  Byte posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? order[1] : order[0]);

  int[] smallTableIndices = null;
  int smallTableIndicesSize = 0;
  if (desc.getValueIndices() != null
      && desc.getValueIndices().get(posSingleVectorMapJoinSmallTable) != null) {
    smallTableIndices = desc.getValueIndices().get(posSingleVectorMapJoinSmallTable);
    LOG.info("Vectorizer isBigTableOnlyResults smallTableIndices " + Arrays.toString(smallTableIndices));
    smallTableIndicesSize = smallTableIndices.length;
  } else {
    LOG.info("Vectorizer isBigTableOnlyResults smallTableIndices EMPTY");
  }

  List<Integer> smallTableRetainList = desc.getRetainList().get(posSingleVectorMapJoinSmallTable);
  LOG.info("Vectorizer isBigTableOnlyResults smallTableRetainList " + smallTableRetainList);
  int smallTableRetainSize = smallTableRetainList.size();

  if (smallTableIndicesSize > 0) {
    // Small table indices has priority over retain.
    for (int i = 0; i < smallTableIndicesSize; i++) {
      if (smallTableIndices[i] < 0) {
        // Negative numbers indicate a column to be (deserialize) read from the small table's
        // LazyBinary value row.
        setOperatorIssue("Vectorizer isBigTableOnlyResults smallTableIndices[i] < 0 returning false");
        return false;
      }
    }
  } else if (smallTableRetainSize > 0) {
    setOperatorIssue("Vectorizer isBigTableOnlyResults smallTableRetainSize > 0 returning false");
    return false;
  }
  LOG.info("Vectorizer isBigTableOnlyResults returning true");
  return true;
}
/**
 * Creates the specialized native vector map join operator for a MapJoin:
 * chooses the hash table implementation (FAST vs OPTIMIZED), the hash table
 * kind (map / multi-set / set), the key type (single-column long/string
 * family vs multi-key), and the join variation, then instantiates the
 * matching VectorMapJoin*Operator class.
 *
 * Fixes relative to the prior version:
 *  - the STRING/CHAR/VARCHAR/BINARY case was missing a {@code break} and fell
 *    through into {@code default} (harmless only because default was empty);
 *  - the single-column key optimization cast the key TypeInfo to
 *    PrimitiveTypeInfo unconditionally, which would throw
 *    ClassCastException for a complex-typed key; now a non-primitive key
 *    simply stays with the MULTI_KEY default.
 *
 * @param op         the row-mode map join operator being replaced
 * @param vContext   vectorization context for the new operator
 * @param desc       the map join descriptor
 * @param vectorDesc vector-specific descriptor, filled in with the choices made
 * @return the new vectorized map join operator
 * @throws HiveException for an unknown join type or operator variation
 */
Operator<? extends OperatorDesc> specializeMapJoinOperator(Operator<? extends OperatorDesc> op,
    VectorizationContext vContext, MapJoinDesc desc, VectorMapJoinDesc vectorDesc)
    throws HiveException {
  Class<? extends Operator<?>> opClass = null;
  VectorMapJoinInfo vectorMapJoinInfo = vectorDesc.getVectorMapJoinInfo();
  HashTableKind hashTableKind = HashTableKind.NONE;
  HashTableKeyType hashTableKeyType = HashTableKeyType.NONE;
  VectorMapJoinVariation vectorMapJoinVariation = null;

  final HashTableImplementationType hashTableImplementationType =
      vectorDesc.getIsFastHashTableEnabled() ?
          HashTableImplementationType.FAST : HashTableImplementationType.OPTIMIZED;

  int joinType = desc.getConds()[0].getType();

  // INNER joins whose output references only big-table columns can use the
  // cheaper multi-set based INNER_BIG_ONLY variation.
  boolean isInnerBigOnly = false;
  if (joinType == JoinDesc.INNER_JOIN && isBigTableOnlyResults(desc)) {
    isInnerBigOnly = true;
  }

  // By default, we can always use the multi-key class.
  hashTableKeyType = HashTableKeyType.MULTI_KEY;

  if (!HiveConf.getBoolVar(hiveConf,
      HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_MULTIKEY_ONLY_ENABLED)) {
    // Look for single column optimization.
    byte posBigTable = (byte) desc.getPosBigTable();
    Map<Byte, List<ExprNodeDesc>> keyExprs = desc.getKeys();
    List<ExprNodeDesc> bigTableKeyExprs = keyExprs.get(posBigTable);
    if (bigTableKeyExprs.size() == 1) {
      TypeInfo typeInfo = bigTableKeyExprs.get(0).getTypeInfo();
      LOG.info("Vectorizer vectorizeOperator map join typeName " + typeInfo.getTypeName());
      // FIX: guard the cast — a complex-typed single key previously caused a
      // ClassCastException; it now stays with MULTI_KEY.
      if (typeInfo instanceof PrimitiveTypeInfo) {
        switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case BOOLEAN:
          hashTableKeyType = HashTableKeyType.BOOLEAN;
          break;
        case BYTE:
          hashTableKeyType = HashTableKeyType.BYTE;
          break;
        case SHORT:
          hashTableKeyType = HashTableKeyType.SHORT;
          break;
        case INT:
          hashTableKeyType = HashTableKeyType.INT;
          break;
        case LONG:
          hashTableKeyType = HashTableKeyType.LONG;
          break;
        case STRING:
        case CHAR:
        case VARCHAR:
        case BINARY:
          hashTableKeyType = HashTableKeyType.STRING;
          // FIX: break was missing; control fell through into default.
          break;
        default:
          // Stay with multi-key.
        }
      }
    }
  }

  // The join variation determines what the hash table must store:
  // full values (map), counts (multi-set), or mere existence (set).
  switch (joinType) {
  case JoinDesc.INNER_JOIN:
    if (!isInnerBigOnly) {
      vectorMapJoinVariation = VectorMapJoinVariation.INNER;
      hashTableKind = HashTableKind.HASH_MAP;
    } else {
      vectorMapJoinVariation = VectorMapJoinVariation.INNER_BIG_ONLY;
      hashTableKind = HashTableKind.HASH_MULTISET;
    }
    break;
  case JoinDesc.LEFT_OUTER_JOIN:
  case JoinDesc.RIGHT_OUTER_JOIN:
    vectorMapJoinVariation = VectorMapJoinVariation.OUTER;
    hashTableKind = HashTableKind.HASH_MAP;
    break;
  case JoinDesc.FULL_OUTER_JOIN:
    vectorMapJoinVariation = VectorMapJoinVariation.FULL_OUTER;
    hashTableKind = HashTableKind.HASH_MAP;
    break;
  case JoinDesc.LEFT_SEMI_JOIN:
    vectorMapJoinVariation = VectorMapJoinVariation.LEFT_SEMI;
    hashTableKind = HashTableKind.HASH_SET;
    break;
  default:
    throw new HiveException("Unknown join type " + joinType);
  }

  LOG.info("Vectorizer vectorizeOperator map join hashTableKind " + hashTableKind.name() + " hashTableKeyType " + hashTableKeyType.name());

  // Pick the operator class specialized for (key type family, join variation).
  switch (hashTableKeyType) {
  case BOOLEAN:
  case BYTE:
  case SHORT:
  case INT:
  case LONG:
    switch (vectorMapJoinVariation) {
    case INNER:
      opClass = VectorMapJoinInnerLongOperator.class;
      break;
    case INNER_BIG_ONLY:
      opClass = VectorMapJoinInnerBigOnlyLongOperator.class;
      break;
    case LEFT_SEMI:
      opClass = VectorMapJoinLeftSemiLongOperator.class;
      break;
    case OUTER:
      opClass = VectorMapJoinOuterLongOperator.class;
      break;
    case FULL_OUTER:
      opClass = VectorMapJoinFullOuterLongOperator.class;
      break;
    default:
      throw new HiveException("Unknown operator variation " + vectorMapJoinVariation);
    }
    break;
  case STRING:
    switch (vectorMapJoinVariation) {
    case INNER:
      opClass = VectorMapJoinInnerStringOperator.class;
      break;
    case INNER_BIG_ONLY:
      opClass = VectorMapJoinInnerBigOnlyStringOperator.class;
      break;
    case LEFT_SEMI:
      opClass = VectorMapJoinLeftSemiStringOperator.class;
      break;
    case OUTER:
      opClass = VectorMapJoinOuterStringOperator.class;
      break;
    case FULL_OUTER:
      opClass = VectorMapJoinFullOuterStringOperator.class;
      break;
    default:
      throw new HiveException("Unknown operator variation " + vectorMapJoinVariation);
    }
    break;
  case MULTI_KEY:
    switch (vectorMapJoinVariation) {
    case INNER:
      opClass = VectorMapJoinInnerMultiKeyOperator.class;
      break;
    case INNER_BIG_ONLY:
      opClass = VectorMapJoinInnerBigOnlyMultiKeyOperator.class;
      break;
    case LEFT_SEMI:
      opClass = VectorMapJoinLeftSemiMultiKeyOperator.class;
      break;
    case OUTER:
      opClass = VectorMapJoinOuterMultiKeyOperator.class;
      break;
    case FULL_OUTER:
      opClass = VectorMapJoinFullOuterMultiKeyOperator.class;
      break;
    default:
      throw new HiveException("Unknown operator variation " + vectorMapJoinVariation);
    }
    break;
  default:
    throw new RuntimeException("Unexpected hash table key type " + hashTableKeyType.name());
  }

  boolean minMaxEnabled = HiveConf.getBoolVar(hiveConf,
      HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_MINMAX_ENABLED);

  // Record all decisions on the vector descriptor before operator creation.
  vectorDesc.setHashTableImplementationType(hashTableImplementationType);
  vectorDesc.setHashTableKind(hashTableKind);
  vectorDesc.setHashTableKeyType(hashTableKeyType);
  vectorDesc.setVectorMapJoinVariation(vectorMapJoinVariation);
  if (vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) {
    vectorDesc.setIsFullOuter(true);
  }
  vectorDesc.setMinMaxEnabled(minMaxEnabled);
  vectorDesc.setVectorMapJoinInfo(vectorMapJoinInfo);

  Operator<? extends OperatorDesc> vectorOp = OperatorFactory.getVectorOperator(
      opClass, op.getCompilationOpContext(), op.getConf(), vContext, vectorDesc);
  LOG.info("Vectorizer vectorizeOperator map join class " + vectorOp.getClass().getSimpleName());
  return vectorOp;
}
/**
 * Returns true when any join key comparison in the MapJoinDesc is marked
 * null-safe; false when there are none or the flags array is absent.
 */
public static boolean onExpressionHasNullSafes(MapJoinDesc desc) {
  final boolean[] nullSafes = desc.getNullSafes();
  if (nullSafes == null) {
    return false;
  }
  for (int i = 0; i < nullSafes.length; i++) {
    if (nullSafes[i]) {
      return true;
    }
  }
  return false;
}
private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoinDesc desc,
boolean isTezOrSpark, VectorizationContext vContext, VectorMapJoinDesc vectorDesc)
throws HiveException {
Preconditions.checkState(op instanceof MapJoinOperator);
VectorMapJoinInfo vectorMapJoinInfo = new VectorMapJoinInfo();
boolean isVectorizationMapJoinNativeEnabled = HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_ENABLED);
String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
boolean oneMapJoinCondition = (desc.getConds().length == 1);
boolean hasNullSafes = onExpressionHasNullSafes(desc);
byte posBigTable = (byte) desc.getPosBigTable();
// Since we want to display all the met and not met conditions in EXPLAIN, we determine all
// information first....
List<ExprNodeDesc> keyDesc = desc.getKeys().get(posBigTable);
boolean outerJoinHasNoKeys = (!desc.isNoOuterJoin() && keyDesc.size() == 0);
// For now, we don't support joins on or using DECIMAL_64.
VectorExpression[] allBigTableKeyExpressions =
vContext.getVectorExpressionsUpConvertDecimal64(keyDesc);
final int allBigTableKeyExpressionsLength = allBigTableKeyExpressions.length;
boolean supportsKeyTypes = true; // Assume.
HashSet<String> notSupportedKeyTypes = new HashSet<String>();
// Since a key expression can be a calculation and the key will go into a scratch column,
// we need the mapping and type information.
int[] bigTableKeyColumnMap = new int[allBigTableKeyExpressionsLength];
String[] bigTableKeyColumnNames = new String[allBigTableKeyExpressionsLength];
TypeInfo[] bigTableKeyTypeInfos = new TypeInfo[allBigTableKeyExpressionsLength];
ArrayList<VectorExpression> bigTableKeyExpressionsList = new ArrayList<VectorExpression>();
VectorExpression[] slimmedBigTableKeyExpressions;
for (int i = 0; i < allBigTableKeyExpressionsLength; i++) {
VectorExpression ve = allBigTableKeyExpressions[i];
if (!IdentityExpression.isColumnOnly(ve)) {
bigTableKeyExpressionsList.add(ve);
}
bigTableKeyColumnMap[i] = ve.getOutputColumnNum();
ExprNodeDesc exprNode = keyDesc.get(i);
bigTableKeyColumnNames[i] = exprNode.toString();
TypeInfo typeInfo = exprNode.getTypeInfo();
// Verify we handle the key column types for an optimized table. This is the effectively the
// same check used in HashTableLoader.
if (!MapJoinKey.isSupportedField(typeInfo)) {
supportsKeyTypes = false;
Category category = typeInfo.getCategory();
notSupportedKeyTypes.add(
(category != Category.PRIMITIVE ? category.toString() :
((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory().toString()));
}
bigTableKeyTypeInfos[i] = typeInfo;
}
if (bigTableKeyExpressionsList.size() == 0) {
slimmedBigTableKeyExpressions = null;
} else {
slimmedBigTableKeyExpressions = bigTableKeyExpressionsList.toArray(new VectorExpression[0]);
}
List<ExprNodeDesc> bigTableExprs = desc.getExprs().get(posBigTable);
// For now, we don't support joins on or using DECIMAL_64.
VectorExpression[] allBigTableValueExpressions =
vContext.getVectorExpressions(bigTableExprs);
boolean isFastHashTableEnabled =
HiveConf.getBoolVar(hiveConf,
HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_FAST_HASHTABLE_ENABLED);
// Especially since LLAP is prone to turn it off in the MapJoinDesc in later
// physical optimizer stages...
boolean isHybridHashJoin = desc.isHybridHashJoin();
/*
* Populate vectorMapJoininfo.
*/
/*
* Similarly, we need a mapping since a value expression can be a calculation and the value
* will go into a scratch column.
*
* Value expressions include keys? YES.
*/
boolean supportsValueTypes = true; // Assume.
HashSet<String> notSupportedValueTypes = new HashSet<String>();
int[] bigTableValueColumnMap = new int[allBigTableValueExpressions.length];
String[] bigTableValueColumnNames = new String[allBigTableValueExpressions.length];
TypeInfo[] bigTableValueTypeInfos = new TypeInfo[allBigTableValueExpressions.length];
ArrayList<VectorExpression> bigTableValueExpressionsList = new ArrayList<VectorExpression>();
VectorExpression[] slimmedBigTableValueExpressions;
for (int i = 0; i < bigTableValueColumnMap.length; i++) {
VectorExpression ve = allBigTableValueExpressions[i];
if (!IdentityExpression.isColumnOnly(ve)) {
bigTableValueExpressionsList.add(ve);
}
bigTableValueColumnMap[i] = ve.getOutputColumnNum();
ExprNodeDesc exprNode = bigTableExprs.get(i);
bigTableValueColumnNames[i] = exprNode.toString();
TypeInfo typeInfo = exprNode.getTypeInfo();
if (!(typeInfo instanceof PrimitiveTypeInfo)) {
supportsValueTypes = false;
Category category = typeInfo.getCategory();
notSupportedValueTypes.add(category.toString());
}
bigTableValueTypeInfos[i] = typeInfo;
}
if (bigTableValueExpressionsList.size() == 0) {
slimmedBigTableValueExpressions = null;
} else {
slimmedBigTableValueExpressions =
bigTableValueExpressionsList.toArray(new VectorExpression[0]);
}
vectorMapJoinInfo.setBigTableKeyColumnMap(bigTableKeyColumnMap);
vectorMapJoinInfo.setBigTableKeyColumnNames(bigTableKeyColumnNames);
vectorMapJoinInfo.setBigTableKeyTypeInfos(bigTableKeyTypeInfos);
vectorMapJoinInfo.setSlimmedBigTableKeyExpressions(slimmedBigTableKeyExpressions);
vectorDesc.setAllBigTableKeyExpressions(allBigTableKeyExpressions);
vectorMapJoinInfo.setBigTableValueColumnMap(bigTableValueColumnMap);
vectorMapJoinInfo.setBigTableValueColumnNames(bigTableValueColumnNames);
vectorMapJoinInfo.setBigTableValueTypeInfos(bigTableValueTypeInfos);
vectorMapJoinInfo.setSlimmedBigTableValueExpressions(slimmedBigTableValueExpressions);
vectorDesc.setAllBigTableValueExpressions(allBigTableValueExpressions);
/*
* Column mapping.
*/
VectorColumnOutputMapping bigTableRetainMapping =
new VectorColumnOutputMapping("Big Table Retain Mapping");
VectorColumnOutputMapping nonOuterSmallTableKeyMapping =
new VectorColumnOutputMapping("Non Outer Small Table Key Key Mapping");
VectorColumnOutputMapping outerSmallTableKeyMapping =
new VectorColumnOutputMapping("Outer Small Table Key Mapping");
VectorColumnSourceMapping fullOuterSmallTableKeyMapping =
new VectorColumnSourceMapping("Full Outer Small Table Key Mapping");
// The order of the fields in the LazyBinary small table value must be used, so
// we use the source ordering flavor for the mapping.
VectorColumnSourceMapping smallTableValueMapping =
new VectorColumnSourceMapping("Small Table Value Mapping");
Byte[] order = desc.getTagOrder();
Byte posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? order[1] : order[0]);
boolean isOuterJoin = !desc.getNoOuterJoin();
/*
* Gather up big and small table output result information from the MapJoinDesc.
*/
List<Integer> bigTableRetainList = desc.getRetainList().get(posBigTable);
int[] smallTableIndices;
int smallTableIndicesSize;
List<ExprNodeDesc> smallTableExprs = desc.getExprs().get(posSingleVectorMapJoinSmallTable);
if (desc.getValueIndices() != null && desc.getValueIndices().get(posSingleVectorMapJoinSmallTable) != null) {
smallTableIndices = desc.getValueIndices().get(posSingleVectorMapJoinSmallTable);
smallTableIndicesSize = smallTableIndices.length;
} else {
smallTableIndices = null;
smallTableIndicesSize = 0;
}
List<Integer> smallTableRetainList = desc.getRetainList().get(posSingleVectorMapJoinSmallTable);
int smallTableRetainSize = smallTableRetainList.size();
int smallTableResultSize = 0;
if (smallTableIndicesSize > 0) {
smallTableResultSize = smallTableIndicesSize;
} else if (smallTableRetainSize > 0) {
smallTableResultSize = smallTableRetainSize;
}
/*
* Determine the big table retained mapping first so we can optimize out (with
* projection) copying inner join big table keys in the subsequent small table results section.
*/
// We use a mapping object here so we can build the projection in any order and
// get the ordered by 0 to n-1 output columns at the end.
//
// Also, to avoid copying a big table key into the small table result area for inner joins,
// we reference it with the projection so there can be duplicate output columns
// in the projection.
VectorColumnSourceMapping projectionMapping = new VectorColumnSourceMapping("Projection Mapping");
int nextOutputColumn = (order[0] == posBigTable ? 0 : smallTableResultSize);
final int bigTableRetainSize = bigTableRetainList.size();
for (int i = 0; i < bigTableRetainSize; i++) {
// Since bigTableValueExpressions may do a calculation and produce a scratch column, we
// need to map to the right batch column.
int retainColumn = bigTableRetainList.get(i);
int batchColumnIndex = bigTableValueColumnMap[retainColumn];
TypeInfo typeInfo = bigTableValueTypeInfos[i];
// With this map we project the big table batch to make it look like an output batch.
projectionMapping.add(nextOutputColumn, batchColumnIndex, typeInfo);
// Collect columns we copy from the big table batch to the overflow batch.
if (!bigTableRetainMapping.containsOutputColumn(batchColumnIndex)) {
// Tolerate repeated use of a big table column.
bigTableRetainMapping.add(batchColumnIndex, batchColumnIndex, typeInfo);
}
nextOutputColumn++;
}
/*
* Now determine the small table results.
*/
boolean smallTableExprVectorizes = true;
int firstSmallTableOutputColumn;
firstSmallTableOutputColumn = (order[0] == posBigTable ? bigTableRetainSize : 0);
int smallTableOutputCount = 0;
nextOutputColumn = firstSmallTableOutputColumn;
// Small table indices has more information (i.e. keys) than retain, so use it if it exists...
if (smallTableIndicesSize > 0) {
smallTableOutputCount = smallTableIndicesSize;
for (int i = 0; i < smallTableIndicesSize; i++) {
if (smallTableIndices[i] >= 0) {
// Zero and above numbers indicate a big table key is needed for
// small table result "area".
int keyIndex = smallTableIndices[i];
// Since bigTableKeyExpressions may do a calculation and produce a scratch column, we
// need to map the right column.
int bigTableKeyColumn = bigTableKeyColumnMap[keyIndex];
TypeInfo typeInfo = bigTableKeyTypeInfos[keyIndex];
if (!isOuterJoin) {
// Optimize inner join keys of small table results.
// Project the big table key into the small table result "area".
projectionMapping.add(nextOutputColumn, bigTableKeyColumn, typeInfo);
if (!bigTableRetainMapping.containsOutputColumn(bigTableKeyColumn)) {
// When the Big Key is not retained in the output result, we do need to copy the
// Big Table key into the overflow batch so the projection of it (Big Table key) to
// the Small Table key will work properly...
//
nonOuterSmallTableKeyMapping.add(bigTableKeyColumn, bigTableKeyColumn, typeInfo);
}
} else {
// For outer joins, since the small table key can be null when there for NOMATCH,
// we must have a physical (scratch) column for those keys. We cannot use the
// projection optimization used by non-[FULL} OUTER joins above.
int scratchColumn = vContext.allocateScratchColumn(typeInfo);
projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
outerSmallTableKeyMapping.add(bigTableKeyColumn, scratchColumn, typeInfo);
// For FULL OUTER MapJoin, we need to be able to deserialize a Small Table key
// into the output result.
fullOuterSmallTableKeyMapping.add(keyIndex, scratchColumn, typeInfo);
}
} else {
// Negative numbers indicate a column to be (deserialize) read from the small table's
// LazyBinary value row.
int smallTableValueIndex = -smallTableIndices[i] - 1;
ExprNodeDesc smallTableExprNode = smallTableExprs.get(i);
if (!validateExprNodeDesc(smallTableExprNode, "Small Table")) {
clearNotVectorizedReason();
smallTableExprVectorizes = false;
}
TypeInfo typeInfo = smallTableExprNode.getTypeInfo();
// Make a new big table scratch column for the small table value.
int scratchColumn = vContext.allocateScratchColumn(typeInfo);
projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo);
}
nextOutputColumn++;
}
} else if (smallTableRetainSize > 0) {
smallTableOutputCount = smallTableRetainSize;
// Only small table values appear in join output result.
for (int i = 0; i < smallTableRetainSize; i++) {
int smallTableValueIndex = smallTableRetainList.get(i);
ExprNodeDesc smallTableExprNode = smallTableExprs.get(i);
if (!validateExprNodeDesc(smallTableExprNode, "Small Table")) {
clearNotVectorizedReason();
smallTableExprVectorizes = false;
}
// Make a new big table scratch column for the small table value.
TypeInfo typeInfo = smallTableExprNode.getTypeInfo();
int scratchColumn = vContext.allocateScratchColumn(typeInfo);
projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo);
smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo);
nextOutputColumn++;
}
}
Map<Byte, List<ExprNodeDesc>> filterExpressions = desc.getFilters();
VectorExpression[] bigTableFilterExpressions =
vContext.getVectorExpressions(
filterExpressions.get(posBigTable),
VectorExpressionDescriptor.Mode.FILTER);
vectorMapJoinInfo.setBigTableFilterExpressions(bigTableFilterExpressions);
boolean useOptimizedTable =
HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEMAPJOINUSEOPTIMIZEDTABLE);
// Remember the condition variables for EXPLAIN regardless of whether we specialize or not.
vectorDesc.setVectorMapJoinInfo(vectorMapJoinInfo);
vectorDesc.setUseOptimizedTable(useOptimizedTable);
vectorDesc.setIsVectorizationMapJoinNativeEnabled(isVectorizationMapJoinNativeEnabled);
vectorDesc.setEngine(engine);
vectorDesc.setOneMapJoinCondition(oneMapJoinCondition);
vectorDesc.setHasNullSafes(hasNullSafes);
vectorDesc.setSmallTableExprVectorizes(smallTableExprVectorizes);
vectorDesc.setOuterJoinHasNoKeys(outerJoinHasNoKeys);
vectorDesc.setIsFastHashTableEnabled(isFastHashTableEnabled);
vectorDesc.setIsHybridHashJoin(isHybridHashJoin);
vectorDesc.setSupportsKeyTypes(supportsKeyTypes);
if (!supportsKeyTypes) {
vectorDesc.setNotSupportedKeyTypes(new ArrayList(notSupportedKeyTypes));
}
vectorDesc.setSupportsValueTypes(supportsValueTypes);
if (!supportsValueTypes) {
vectorDesc.setNotSupportedValueTypes(new ArrayList(notSupportedValueTypes));
}
// Check common conditions for both Optimized and Fast Hash Tables.
boolean result = true; // Assume.
if (!useOptimizedTable ||
!isVectorizationMapJoinNativeEnabled ||
!isTezOrSpark ||
!oneMapJoinCondition ||
hasNullSafes ||
!smallTableExprVectorizes ||
outerJoinHasNoKeys ||
!supportsValueTypes) {
result = false;
}
// supportsKeyTypes
if (!isFastHashTableEnabled) {
// Check optimized-only hash table restrictions.
if (!supportsKeyTypes) {
result = false;
}
} else {
// With the fast hash table implementation, we currently do not support
// Hybrid Grace Hash Join.
if (isHybridHashJoin) {
result = false;
}
}
// Convert dynamic arrays and maps to simple arrays.
bigTableRetainMapping.finalize();
vectorMapJoinInfo.setBigTableRetainColumnMap(bigTableRetainMapping.getOutputColumns());
vectorMapJoinInfo.setBigTableRetainTypeInfos(bigTableRetainMapping.getTypeInfos());
nonOuterSmallTableKeyMapping.finalize();
vectorMapJoinInfo.setNonOuterSmallTableKeyColumnMap(nonOuterSmallTableKeyMapping.getOutputColumns());
vectorMapJoinInfo.setNonOuterSmallTableKeyTypeInfos(nonOuterSmallTableKeyMapping.getTypeInfos());
outerSmallTableKeyMapping.finalize();
fullOuterSmallTableKeyMapping.finalize();
vectorMapJoinInfo.setOuterSmallTableKeyMapping(outerSmallTableKeyMapping);
vectorMapJoinInfo.setFullOuterSmallTableKeyMapping(fullOuterSmallTableKeyMapping);
smallTableValueMapping.finalize();
vectorMapJoinInfo.setSmallTableValueMapping(smallTableValueMapping);
projectionMapping.finalize();
// Verify we added an entry for each output.
assert projectionMapping.isSourceSequenceGood();
vectorMapJoinInfo.setProjectionMapping(projectionMapping);
return result;
}
/**
 * Create the specialized native Vector ReduceSink operator for an already-validated
 * ReduceSinkOperator.
 *
 * Chooses the most specific operator class based on the (single) key column vector type
 * and the uniform-hash / empty-key properties recorded in the VectorReduceSinkInfo,
 * then instantiates it via OperatorFactory and carries over the bucketing version.
 *
 * @param op the original (row-mode) ReduceSinkOperator
 * @param vContext vectorization context for the new operator
 * @param desc the ReduceSinkDesc of the original operator
 * @param vectorDesc vector descriptor; must already hold a populated VectorReduceSinkInfo
 * @return the new native vector ReduceSink operator
 * @throws HiveException if the operator cannot be instantiated or the key type is unknown
 */
private Operator<? extends OperatorDesc> specializeReduceSinkOperator(
    Operator<? extends OperatorDesc> op, VectorizationContext vContext, ReduceSinkDesc desc,
    VectorReduceSinkDesc vectorDesc) throws HiveException {
  VectorReduceSinkInfo vectorReduceSinkInfo = vectorDesc.getVectorReduceSinkInfo();
  Type[] reduceSinkKeyColumnVectorTypes = vectorReduceSinkInfo.getReduceSinkKeyColumnVectorTypes();

  // By default, we can always use the multi-key class.
  VectorReduceSinkDesc.ReduceSinkKeyType reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.MULTI_KEY;

  // Look for single column optimization.
  if (reduceSinkKeyColumnVectorTypes != null && reduceSinkKeyColumnVectorTypes.length == 1) {
    LOG.info("Vectorizer vectorizeOperator groupby typeName " + vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()[0]);
    Type columnVectorType = reduceSinkKeyColumnVectorTypes[0];
    switch (columnVectorType) {
    case LONG:
      {
        PrimitiveCategory primitiveCategory =
            ((PrimitiveTypeInfo) vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()[0]).getPrimitiveCategory();
        switch (primitiveCategory) {
        case BOOLEAN:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
          reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.LONG;
          break;
        default:
          // Other integer types not supported yet.
          break;
        }
      }
      break;
    case BYTES:
      reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.STRING;
      // FIX: an explicit break was missing here, silently falling through into the
      // default case. The fall-through happened to be harmless (default only breaks),
      // but it was fragile and flagged by static analysis.
      break;
    default:
      // Stay with multi-key.
      break;
    }
  }

  Class<? extends Operator<?>> opClass = null;
  if (vectorReduceSinkInfo.getUseUniformHash()) {
    if (vectorDesc.getIsEmptyKey()) {
      opClass = VectorReduceSinkEmptyKeyOperator.class;
    } else {
      switch (reduceSinkKeyType) {
      case LONG:
        opClass = VectorReduceSinkLongOperator.class;
        break;
      case STRING:
        opClass = VectorReduceSinkStringOperator.class;
        break;
      case MULTI_KEY:
        opClass = VectorReduceSinkMultiKeyOperator.class;
        break;
      default:
        throw new HiveException("Unknown reduce sink key type " + reduceSinkKeyType);
      }
    }
  } else {
    // Non-uniform hash: the object-hash operator handles keys, buckets and partitions;
    // only when all three are empty can we use the cheaper empty-key operator.
    if (vectorDesc.getIsEmptyKey() && vectorDesc.getIsEmptyBuckets() && vectorDesc.getIsEmptyPartitions()) {
      opClass = VectorReduceSinkEmptyKeyOperator.class;
    } else {
      opClass = VectorReduceSinkObjectHashOperator.class;
    }
  }

  vectorDesc.setReduceSinkKeyType(reduceSinkKeyType);
  vectorDesc.setVectorReduceSinkInfo(vectorReduceSinkInfo);

  LOG.info("Vectorizer vectorizeOperator reduce sink class " + opClass.getSimpleName());

  // Get the bucketing version
  int bucketingVersion = ((ReduceSinkOperator)op).getBucketingVersion();

  Operator<? extends OperatorDesc> vectorOp = null;
  try {
    vectorOp = OperatorFactory.getVectorOperator(
        opClass, op.getCompilationOpContext(), op.getConf(),
        vContext, vectorDesc);
  } catch (Exception e) {
    LOG.info("Vectorizer vectorizeOperator reduce sink class exception " + opClass.getSimpleName() +
        " exception " + e);
    throw new HiveException(e);
  }

  // Set the bucketing version
  Preconditions.checkArgument(vectorOp instanceof VectorReduceSinkCommonOperator);
  vectorOp.setBucketingVersion(bucketingVersion);

  return vectorOp;
}
/**
 * Decide whether this ReduceSink can be specialized to a native vector ReduceSink
 * operator, collecting key / value / bucket / partition column and expression
 * information into a VectorReduceSinkInfo on the way.
 *
 * The gathered info and the restriction flags are always recorded in {@code vectorDesc}
 * (for EXPLAIN output), even when the method returns false.
 *
 * NOTE(review): scratch columns may be allocated during expression vectorization here;
 * per the comment below, if we later decide not to specialize those columns are wasted.
 *
 * @param desc the ReduceSinkDesc to inspect
 * @param isTezOrSpark whether the execution engine is Tez or Spark (a native requirement)
 * @param vContext vectorization context used to vectorize key/value/bucket/partition exprs
 * @param vectorDesc output descriptor; receives the VectorReduceSinkInfo and all flags
 * @return true when every native ReduceSink restriction is satisfied
 * @throws HiveException on expression vectorization failure
 */
private boolean canSpecializeReduceSink(ReduceSinkDesc desc,
    boolean isTezOrSpark, VectorizationContext vContext,
    VectorReduceSinkDesc vectorDesc) throws HiveException {

  VectorReduceSinkInfo vectorReduceSinkInfo = new VectorReduceSinkInfo();

  // Various restrictions.

  // Set this if we encounter a condition we were not expecting.
  boolean isUnexpectedCondition = false;

  boolean isVectorizationReduceSinkNativeEnabled =
      HiveConf.getBoolVar(hiveConf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCESINK_NEW_ENABLED);

  String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);

  // PTF top-N (limit with memory usage on a PTF reduce sink) is not supported natively.
  int limit = desc.getTopN();
  float memUsage = desc.getTopNMemoryUsage();
  boolean hasPTFTopN = (limit >= 0 && memUsage > 0 && desc.isPTFReduceSink());

  boolean hasDistinctColumns = (desc.getDistinctColumnIndices().size() > 0);

  // Native ReduceSink requires BinarySortable keys and LazyBinary values.
  TableDesc keyTableDesc = desc.getKeySerializeInfo();
  Class<? extends Deserializer> keySerializerClass = keyTableDesc.getDeserializerClass();
  boolean isKeyBinarySortable = (keySerializerClass == org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe.class);

  TableDesc valueTableDesc = desc.getValueSerializeInfo();
  Class<? extends Deserializer> valueDeserializerClass = valueTableDesc.getDeserializerClass();
  boolean isValueLazyBinary = (valueDeserializerClass == org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe.class);

  // We are doing work here we'd normally do in VectorGroupByCommonOperator's constructor.
  // So if we later decide not to specialize, we'll just waste any scratch columns allocated...

  // ----- Key columns: vectorize, record column map / type infos / vector types. -----
  List<ExprNodeDesc> keysDescs = desc.getKeyCols();
  final boolean isEmptyKey = (keysDescs.size() == 0);
  if (!isEmptyKey) {

    VectorExpression[] allKeyExpressions = vContext.getVectorExpressions(keysDescs);

    final int[] reduceSinkKeyColumnMap = new int[allKeyExpressions.length];
    final TypeInfo[] reduceSinkKeyTypeInfos = new TypeInfo[allKeyExpressions.length];
    final Type[] reduceSinkKeyColumnVectorTypes = new Type[allKeyExpressions.length];
    final VectorExpression[] reduceSinkKeyExpressions;

    // Since a key expression can be a calculation and the key will go into a scratch column,
    // we need the mapping and type information.
    ArrayList<VectorExpression> groupByKeyExpressionsList = new ArrayList<VectorExpression>();
    for (int i = 0; i < reduceSinkKeyColumnMap.length; i++) {
      VectorExpression ve = allKeyExpressions[i];
      reduceSinkKeyColumnMap[i] = ve.getOutputColumnNum();
      reduceSinkKeyTypeInfos[i] = keysDescs.get(i).getTypeInfo();
      reduceSinkKeyColumnVectorTypes[i] =
          VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkKeyTypeInfos[i]);
      // Identity (plain column) expressions need no evaluation at run time;
      // keep only real calculations.
      if (!IdentityExpression.isColumnOnly(ve)) {
        groupByKeyExpressionsList.add(ve);
      }
    }
    if (groupByKeyExpressionsList.size() == 0) {
      reduceSinkKeyExpressions = null;
    } else {
      reduceSinkKeyExpressions = groupByKeyExpressionsList.toArray(new VectorExpression[0]);
    }

    vectorReduceSinkInfo.setReduceSinkKeyColumnMap(reduceSinkKeyColumnMap);
    vectorReduceSinkInfo.setReduceSinkKeyTypeInfos(reduceSinkKeyTypeInfos);
    vectorReduceSinkInfo.setReduceSinkKeyColumnVectorTypes(reduceSinkKeyColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkKeyExpressions(reduceSinkKeyExpressions);

  }

  // ----- Value columns: same treatment as keys. -----
  ArrayList<ExprNodeDesc> valueDescs = desc.getValueCols();
  final boolean isEmptyValue = (valueDescs.size() == 0);
  if (!isEmptyValue) {

    VectorExpression[] allValueExpressions = vContext.getVectorExpressions(valueDescs);

    final int[] reduceSinkValueColumnMap = new int[allValueExpressions.length];
    final TypeInfo[] reduceSinkValueTypeInfos = new TypeInfo[allValueExpressions.length];
    final Type[] reduceSinkValueColumnVectorTypes = new Type[allValueExpressions.length];
    VectorExpression[] reduceSinkValueExpressions;

    ArrayList<VectorExpression> reduceSinkValueExpressionsList = new ArrayList<VectorExpression>();
    for (int i = 0; i < valueDescs.size(); ++i) {
      VectorExpression ve = allValueExpressions[i];
      reduceSinkValueColumnMap[i] = ve.getOutputColumnNum();
      reduceSinkValueTypeInfos[i] = valueDescs.get(i).getTypeInfo();
      reduceSinkValueColumnVectorTypes[i] =
          VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkValueTypeInfos[i]);
      if (!IdentityExpression.isColumnOnly(ve)) {
        reduceSinkValueExpressionsList.add(ve);
      }
    }
    if (reduceSinkValueExpressionsList.size() == 0) {
      reduceSinkValueExpressions = null;
    } else {
      reduceSinkValueExpressions = reduceSinkValueExpressionsList.toArray(new VectorExpression[0]);
    }

    vectorReduceSinkInfo.setReduceSinkValueColumnMap(reduceSinkValueColumnMap);
    vectorReduceSinkInfo.setReduceSinkValueTypeInfos(reduceSinkValueTypeInfos);
    vectorReduceSinkInfo.setReduceSinkValueColumnVectorTypes(reduceSinkValueColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkValueExpressions(reduceSinkValueExpressions);

  }

  boolean useUniformHash = desc.getReducerTraits().contains(UNIFORM);
  vectorReduceSinkInfo.setUseUniformHash(useUniformHash);

  // ----- Bucket / partition columns: only needed for object hashing. -----
  List<ExprNodeDesc> bucketDescs = desc.getBucketCols();
  final boolean isEmptyBuckets = (bucketDescs == null || bucketDescs.size() == 0);
  List<ExprNodeDesc> partitionDescs = desc.getPartitionCols();
  final boolean isEmptyPartitions = (partitionDescs == null || partitionDescs.size() == 0);

  if (useUniformHash || (isEmptyKey && isEmptyBuckets && isEmptyPartitions)) {

    // NOTE: For Uniform Hash or no buckets/partitions, when the key is empty, we will use the VectorReduceSinkEmptyKeyOperator instead.

  } else {

    // Collect bucket and/or partition information for object hashing.

    int[] reduceSinkBucketColumnMap = null;
    TypeInfo[] reduceSinkBucketTypeInfos = null;
    Type[] reduceSinkBucketColumnVectorTypes = null;
    VectorExpression[] reduceSinkBucketExpressions = null;

    if (!isEmptyBuckets) {
      VectorExpression[] allBucketExpressions = vContext.getVectorExpressions(bucketDescs);

      reduceSinkBucketColumnMap = new int[bucketDescs.size()];
      reduceSinkBucketTypeInfos = new TypeInfo[bucketDescs.size()];
      reduceSinkBucketColumnVectorTypes = new Type[bucketDescs.size()];
      ArrayList<VectorExpression> reduceSinkBucketExpressionsList = new ArrayList<VectorExpression>();
      for (int i = 0; i < bucketDescs.size(); ++i) {
        VectorExpression ve = allBucketExpressions[i];
        reduceSinkBucketColumnMap[i] = ve.getOutputColumnNum();
        reduceSinkBucketTypeInfos[i] = bucketDescs.get(i).getTypeInfo();
        reduceSinkBucketColumnVectorTypes[i] =
            VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkBucketTypeInfos[i]);
        if (!IdentityExpression.isColumnOnly(ve)) {
          reduceSinkBucketExpressionsList.add(ve);
        }
      }
      if (reduceSinkBucketExpressionsList.size() == 0) {
        reduceSinkBucketExpressions = null;
      } else {
        reduceSinkBucketExpressions = reduceSinkBucketExpressionsList.toArray(new VectorExpression[0]);
      }
    }

    int[] reduceSinkPartitionColumnMap = null;
    TypeInfo[] reduceSinkPartitionTypeInfos = null;
    Type[] reduceSinkPartitionColumnVectorTypes = null;
    VectorExpression[] reduceSinkPartitionExpressions = null;

    if (!isEmptyPartitions) {
      VectorExpression[] allPartitionExpressions = vContext.getVectorExpressions(partitionDescs);

      reduceSinkPartitionColumnMap = new int[partitionDescs.size()];
      reduceSinkPartitionTypeInfos = new TypeInfo[partitionDescs.size()];
      reduceSinkPartitionColumnVectorTypes = new Type[partitionDescs.size()];
      ArrayList<VectorExpression> reduceSinkPartitionExpressionsList = new ArrayList<VectorExpression>();
      for (int i = 0; i < partitionDescs.size(); ++i) {
        VectorExpression ve = allPartitionExpressions[i];
        reduceSinkPartitionColumnMap[i] = ve.getOutputColumnNum();
        reduceSinkPartitionTypeInfos[i] = partitionDescs.get(i).getTypeInfo();
        reduceSinkPartitionColumnVectorTypes[i] =
            VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkPartitionTypeInfos[i]);
        if (!IdentityExpression.isColumnOnly(ve)) {
          reduceSinkPartitionExpressionsList.add(ve);
        }
      }
      if (reduceSinkPartitionExpressionsList.size() == 0) {
        reduceSinkPartitionExpressions = null;
      } else {
        reduceSinkPartitionExpressions = reduceSinkPartitionExpressionsList.toArray(new VectorExpression[0]);
      }
    }

    vectorReduceSinkInfo.setReduceSinkBucketColumnMap(reduceSinkBucketColumnMap);
    vectorReduceSinkInfo.setReduceSinkBucketTypeInfos(reduceSinkBucketTypeInfos);
    vectorReduceSinkInfo.setReduceSinkBucketColumnVectorTypes(reduceSinkBucketColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkBucketExpressions(reduceSinkBucketExpressions);
    vectorReduceSinkInfo.setReduceSinkPartitionColumnMap(reduceSinkPartitionColumnMap);
    vectorReduceSinkInfo.setReduceSinkPartitionTypeInfos(reduceSinkPartitionTypeInfos);
    vectorReduceSinkInfo.setReduceSinkPartitionColumnVectorTypes(reduceSinkPartitionColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkPartitionExpressions(reduceSinkPartitionExpressions);
  }

  // Remember the condition variables for EXPLAIN regardless.
  vectorDesc.setVectorReduceSinkInfo(vectorReduceSinkInfo);

  vectorDesc.setIsVectorizationReduceSinkNativeEnabled(isVectorizationReduceSinkNativeEnabled);
  vectorDesc.setEngine(engine);
  vectorDesc.setIsEmptyKey(isEmptyKey);
  vectorDesc.setIsEmptyValue(isEmptyValue);
  vectorDesc.setIsEmptyBuckets(isEmptyBuckets);
  vectorDesc.setIsEmptyPartitions(isEmptyPartitions);
  vectorDesc.setHasPTFTopN(hasPTFTopN);
  vectorDesc.setHasDistinctColumns(hasDistinctColumns);
  vectorDesc.setIsKeyBinarySortable(isKeyBinarySortable);
  vectorDesc.setIsValueLazyBinary(isValueLazyBinary);
  vectorDesc.setIsAcidChange(desc.getWriteType() == AcidUtils.Operation.DELETE ||
                             desc.getWriteType() == AcidUtils.Operation.UPDATE);

  // This indicates we logged an inconsistency (from our point-of-view) and will not make this
  // operator native...
  vectorDesc.setIsUnexpectedCondition(isUnexpectedCondition);

  // Many restrictions.
  if (!isVectorizationReduceSinkNativeEnabled ||
      !isTezOrSpark ||
      hasPTFTopN ||
      hasDistinctColumns ||
      !isKeyBinarySortable ||
      !isValueLazyBinary ||
      isUnexpectedCondition) {
    return false;
  }

  return true;
}
/**
 * Check whether this FileSink can use the native Arrow file sink operator.
 *
 * All three conditions must hold: the table's SerDe is the Arrow columnar batch SerDe,
 * the Arrow-native file sink feature flag is enabled, and the execution engine is Tez.
 *
 * @return true when the native Arrow file sink can be used
 */
private boolean checkForArrowFileSink(FileSinkDesc fileSinkDesc,
    boolean isTezOrSpark, VectorizationContext vContext,
    VectorFileSinkDesc vectorDesc) throws HiveException {

  // The table must be declared with the Arrow columnar batch SerDe.
  final String serdeClass = fileSinkDesc.getTableInfo().getSerdeClassName();
  if (!serdeClass.equals("org.apache.hadoop.hive.ql.io.arrow.ArrowColumnarBatchSerDe")) {
    return false;
  }

  // The feature must be switched on and we must be running on Tez.
  final boolean arrowNativeEnabled =
      HiveConf.getBoolVar(hiveConf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_FILESINK_ARROW_NATIVE_ENABLED);
  final String executionEngine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);

  return arrowNativeEnabled && executionEngine.equalsIgnoreCase("tez");
}
/**
 * Replace a row-mode FileSink with the native Arrow vector file sink operator.
 *
 * @param op the original FileSinkOperator
 * @param vContext vectorization context for the new operator
 * @param desc the FileSinkDesc of the original operator
 * @param vectorDesc vector file sink descriptor to attach
 * @return the new VectorFileSinkArrowOperator
 * @throws HiveException if operator construction fails
 */
private Operator<? extends OperatorDesc> specializeArrowFileSinkOperator(
    Operator<? extends OperatorDesc> op, VectorizationContext vContext, FileSinkDesc desc,
    VectorFileSinkDesc vectorDesc) throws HiveException {
  final Class<? extends Operator<?>> arrowSinkClass = VectorFileSinkArrowOperator.class;
  try {
    return OperatorFactory.getVectorOperator(
        arrowSinkClass, op.getCompilationOpContext(), op.getConf(),
        vContext, vectorDesc);
  } catch (Exception e) {
    // Log before rethrowing so the failing operator class shows up in the vectorizer log.
    LOG.info("Vectorizer vectorizeOperator file sink class exception " + arrowSinkClass.getSimpleName() +
        " exception " + e);
    throw new HiveException(e);
  }
}
/**
 * Check whether this vector expression, or any expression in its child tree,
 * is a VectorUDFAdaptor (i.e. falls back to row-mode UDF evaluation).
 *
 * @param vecExpr expression to inspect; may be null
 * @return true when a VectorUDFAdaptor occurs anywhere in the expression tree
 */
private boolean usesVectorUDFAdaptor(VectorExpression vecExpr) {
  // Null expressions trivially contain no adaptor; otherwise check this node
  // and recurse into the children via the array overload.
  return vecExpr != null
      && (vecExpr instanceof VectorUDFAdaptor
          || usesVectorUDFAdaptor(vecExpr.getChildExpressions()));
}
/**
 * Check whether any expression in the given array (or its subtrees) is a
 * VectorUDFAdaptor.
 *
 * @param vecExprs expressions to inspect; may be null
 * @return true when a VectorUDFAdaptor occurs anywhere in any expression tree
 */
private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) {
  if (vecExprs != null) {
    for (VectorExpression childExpr : vecExprs) {
      if (usesVectorUDFAdaptor(childExpr)) {
        return true;
      }
    }
  }
  return false;
}
/**
 * Build the vectorized replacement for a FilterOperator.
 *
 * Vectorizes the filter predicate in FILTER mode, records it on the vector
 * descriptor, and constructs the vector filter operator.
 *
 * @param filterOp the original row-mode FilterOperator
 * @param vContext vectorization context used to vectorize the predicate
 * @param vectorFilterDesc receives the vectorized predicate expression
 * @return the new vector filter operator
 * @throws HiveException if predicate vectorization or operator creation fails
 */
public static Operator<? extends OperatorDesc> vectorizeFilterOperator(
    Operator<? extends OperatorDesc> filterOp, VectorizationContext vContext,
    VectorFilterDesc vectorFilterDesc)
        throws HiveException {
  final FilterDesc rowFilterDesc = (FilterDesc) filterOp.getConf();
  final ExprNodeDesc predicate = rowFilterDesc.getPredicate();
  // Predicates are vectorized in FILTER mode (selects rows rather than producing a column).
  final VectorExpression vectorizedPredicate =
      vContext.getVectorExpression(predicate, VectorExpressionDescriptor.Mode.FILTER);
  vectorFilterDesc.setPredicateExpression(vectorizedPredicate);
  return OperatorFactory.getVectorOperator(
      filterOp.getCompilationOpContext(), rowFilterDesc,
      vContext, vectorFilterDesc);
}
/**
 * Build the vectorized replacement for a TopNKeyOperator.
 *
 * Vectorizes the key columns, records them on the vector descriptor, and
 * constructs the vector top-n-key operator.
 *
 * @param topNKeyOperator the original row-mode TopNKeyOperator
 * @param vContext vectorization context used to vectorize the key columns
 * @param vectorTopNKeyDesc receives the vectorized key expressions
 * @return the new vector top-n-key operator
 * @throws HiveException if key vectorization or operator creation fails
 */
private static Operator<? extends OperatorDesc> vectorizeTopNKeyOperator(
    Operator<? extends OperatorDesc> topNKeyOperator, VectorizationContext vContext,
    VectorTopNKeyDesc vectorTopNKeyDesc) throws HiveException {
  final TopNKeyDesc rowTopNKeyDesc = (TopNKeyDesc) topNKeyOperator.getConf();
  final List<ExprNodeDesc> keyColumnDescs = rowTopNKeyDesc.getKeyColumns();
  final VectorExpression[] vectorizedKeys = vContext.getVectorExpressions(keyColumnDescs);
  vectorTopNKeyDesc.setKeyExpressions(vectorizedKeys);
  return OperatorFactory.getVectorOperator(
      topNKeyOperator.getCompilationOpContext(), rowTopNKeyDesc,
      vContext, vectorTopNKeyDesc);
}
/**
 * Find the first vector aggregation class from the annotation list that matches the
 * aggregation name, input/output column vector types, and UDAF evaluator mode.
 *
 * Each candidate class is instantiated (no-arg constructor) so its matches() method
 * can be queried.
 *
 * @param vecAggrClasses candidate classes from the evaluator's VectorizedUDAFs annotation
 * @param aggregateName name of the aggregation (e.g. "sum", "count")
 * @param inputColVectorType column vector type of the input parameter; may be null for COUNT(*)
 * @param outputColumnVecType column vector type of the aggregation output
 * @param udafEvaluatorMode the UDAF evaluator mode (PARTIAL1, FINAL, etc.)
 * @return the first matching class, or null when none matches
 * @throws HiveException if a candidate class cannot be instantiated
 */
private static Class<? extends VectorAggregateExpression> findVecAggrClass(
    Class<? extends VectorAggregateExpression>[] vecAggrClasses,
    String aggregateName, ColumnVector.Type inputColVectorType,
    ColumnVector.Type outputColumnVecType, GenericUDAFEvaluator.Mode udafEvaluatorMode)
        throws HiveException {
  for (Class<? extends VectorAggregateExpression> vecAggrClass : vecAggrClasses) {

    VectorAggregateExpression vecAggrExprCheck;
    try {
      // FIX: replaced the deprecated Class.newInstance() (deprecated since Java 9;
      // it also propagates checked constructor exceptions unsoundly) with the
      // recommended getDeclaredConstructor().newInstance(). Any reflective failure
      // is still caught by the Exception handler below.
      vecAggrExprCheck = vecAggrClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
      throw new HiveException(
          vecAggrClass.getSimpleName() + "() failed to initialize", e);
    }

    if (vecAggrExprCheck.matches(
        aggregateName, inputColVectorType, outputColumnVecType, udafEvaluatorMode)) {
      return vecAggrClass;
    }
  }
  return null;
}
/**
 * Determine the vector aggregation description for an AggregationDesc.
 *
 * Initializes the generic UDAF evaluator with standard writable object inspectors
 * for the parameters in order to discover the output type, then delegates to the
 * overload that performs class selection.
 *
 * @param aggrDesc the aggregation to vectorize
 * @param vContext vectorization context for parameter expressions
 * @return pair of (VectorAggregationDesc, null) on success or (null, issue message) on failure
 * @throws HiveException if evaluator initialization fails
 */
private static ImmutablePair<VectorAggregationDesc,String> getVectorAggregationDesc(
    AggregationDesc aggrDesc, VectorizationContext vContext) throws HiveException {

  final String udafName = aggrDesc.getGenericUDAFName();
  final List<ExprNodeDesc> params = aggrDesc.getParameters();
  final GenericUDAFEvaluator.Mode evaluatorMode = aggrDesc.getMode();
  final GenericUDAFEvaluator evaluator = aggrDesc.getGenericUDAFEvaluator();

  /*
   * Look at evaluator to get output type info.
   */
  final int paramCount = params.size();
  final ObjectInspector[] paramInspectors = new ObjectInspector[paramCount];
  for (int paramIndex = 0; paramIndex < paramCount; paramIndex++) {
    final TypeInfo paramTypeInfo = params.get(paramIndex).getTypeInfo();
    paramInspectors[paramIndex] =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(paramTypeInfo);
  }

  // The only way to get the return object inspector (and its return type) is to
  // initialize it...
  final ObjectInspector returnInspector =
      evaluator.init(
          aggrDesc.getMode(),
          paramInspectors);
  final TypeInfo outputTypeInfo =
      TypeInfoUtils.getTypeInfoFromTypeString(returnInspector.getTypeName());

  return getVectorAggregationDesc(
      udafName, params, evaluator, outputTypeInfo, evaluatorMode, vContext);
}
public static ImmutablePair<VectorAggregationDesc,String> getVectorAggregationDesc(
String aggregationName, List<ExprNodeDesc> parameterList,
GenericUDAFEvaluator evaluator, TypeInfo outputTypeInfo,
GenericUDAFEvaluator.Mode udafEvaluatorMode,
VectorizationContext vContext)
throws HiveException {
VectorizedUDAFs annotation =
AnnotationUtils.getAnnotation(evaluator.getClass(), VectorizedUDAFs.class);
if (annotation == null) {
String issue =
"Evaluator " + evaluator.getClass().getSimpleName() + " does not have a " +
"vectorized UDAF annotation (aggregation: \"" + aggregationName + "\"). " +
"Vectorization not supported";
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}
final Class<? extends VectorAggregateExpression>[] vecAggrClasses = annotation.value();
// Not final since it may change later due to DECIMAL_64.
ColumnVector.Type outputColVectorType =
VectorizationContext.getColumnVectorTypeFromTypeInfo(outputTypeInfo);
/*
* Determine input type info.
*/
final TypeInfo inputTypeInfo;
// Not final since it may change later due to DECIMAL_64.
VectorExpression inputExpression;
ColumnVector.Type inputColVectorType;
final int parameterCount = parameterList.size();
if (parameterCount == 0) {
// COUNT(*)
inputTypeInfo = null;
inputColVectorType = null;
inputExpression = null;
} else if (parameterCount == 1) {
ExprNodeDesc exprNodeDesc = parameterList.get(0);
inputTypeInfo = exprNodeDesc.getTypeInfo();
if (inputTypeInfo == null) {
String issue ="Aggregations with null parameter type not supported " +
aggregationName + "(" + parameterList.toString() + ")";
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}
/*
* Determine an *initial* input vector expression.
*
* Note: we may have to convert it later from DECIMAL_64 to regular decimal.
*/
inputExpression =
vContext.getVectorExpression(
exprNodeDesc, VectorExpressionDescriptor.Mode.PROJECTION);
if (inputExpression == null) {
String issue ="Parameter expression " + exprNodeDesc.toString() + " not supported " +
aggregationName + "(" + parameterList.toString() + ")";
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}
if (inputExpression.getOutputTypeInfo() == null) {
String issue ="Parameter expression " + exprNodeDesc.toString() + " with null type not supported " +
aggregationName + "(" + parameterList.toString() + ")";
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}
inputColVectorType = inputExpression.getOutputColumnVectorType();
} else {
// No multi-parameter aggregations supported.
String issue ="Aggregations with > 1 parameter are not supported " +
aggregationName + "(" + parameterList.toString() + ")";
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}
/*
* When we have DECIMAL_64 as the input parameter then we have to see if there is a special
* vector UDAF for it. If not we will need to convert the input parameter.
*/
if (inputTypeInfo != null && inputColVectorType == ColumnVector.Type.DECIMAL_64) {
if (outputColVectorType == ColumnVector.Type.DECIMAL) {
DecimalTypeInfo outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo;
if (HiveDecimalWritable.isPrecisionDecimal64(outputDecimalTypeInfo.getPrecision())) {
// Try with DECIMAL_64 input and DECIMAL_64 output.
final Class<? extends VectorAggregateExpression> vecAggrClass =
findVecAggrClass(
vecAggrClasses, aggregationName, inputColVectorType,
ColumnVector.Type.DECIMAL_64, udafEvaluatorMode);
if (vecAggrClass != null) {
final VectorAggregationDesc vecAggrDesc =
new VectorAggregationDesc(
aggregationName, evaluator, udafEvaluatorMode,
inputTypeInfo, inputColVectorType, inputExpression,
outputTypeInfo, ColumnVector.Type.DECIMAL_64, vecAggrClass);
return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null);
}
}
// Try with regular DECIMAL output type.
final Class<? extends VectorAggregateExpression> vecAggrClass =
findVecAggrClass(
vecAggrClasses, aggregationName, inputColVectorType,
outputColVectorType, udafEvaluatorMode);
if (vecAggrClass != null) {
final VectorAggregationDesc vecAggrDesc =
new VectorAggregationDesc(
aggregationName, evaluator, udafEvaluatorMode,
inputTypeInfo, inputColVectorType, inputExpression,
outputTypeInfo, outputColVectorType, vecAggrClass);
return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null);
}
// No support for DECIMAL_64 input. We must convert.
inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression);
inputColVectorType = ColumnVector.Type.DECIMAL;
// Fall through...
} else {
// Try with with DECIMAL_64 input and desired output type.
final Class<? extends VectorAggregateExpression> vecAggrClass =
findVecAggrClass(
vecAggrClasses, aggregationName, inputColVectorType,
outputColVectorType, udafEvaluatorMode);
if (vecAggrClass != null) {
// for now, disable operating on decimal64 column vectors for semijoin reduction as
// we have to make sure same decimal type should be used during bloom filter creation
// and bloom filter probing
if (aggregationName.equals("bloom_filter")) {
inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression);
inputColVectorType = ColumnVector.Type.DECIMAL;
}
final VectorAggregationDesc vecAggrDesc =
new VectorAggregationDesc(
aggregationName, evaluator, udafEvaluatorMode,
inputTypeInfo, inputColVectorType, inputExpression,
outputTypeInfo, outputColVectorType, vecAggrClass);
return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null);
}
// No support for DECIMAL_64 input. We must convert.
inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression);
inputColVectorType = ColumnVector.Type.DECIMAL;
// Fall through...
}
}
/*
* Look for normal match.
*/
Class<? extends VectorAggregateExpression> vecAggrClass =
findVecAggrClass(
vecAggrClasses, aggregationName, inputColVectorType,
outputColVectorType, udafEvaluatorMode);
if (vecAggrClass != null) {
final VectorAggregationDesc vecAggrDesc =
new VectorAggregationDesc(
aggregationName, evaluator, udafEvaluatorMode,
inputTypeInfo, inputColVectorType, inputExpression,
outputTypeInfo, outputColVectorType, vecAggrClass);
return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null);
}
// No match?
String issue =
"Vector aggregation : \"" + aggregationName + "\" " +
"for input type: " +
(inputColVectorType == null ? "any" : "\"" + inputColVectorType) + "\" " +
"and output type: \"" + outputColVectorType + "\" " +
"and mode: " + udafEvaluatorMode + " not supported for " +
"evaluator " + evaluator.getClass().getSimpleName();
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}
public static Operator<? extends OperatorDesc> vectorizeGroupByOperator(
Operator<? extends OperatorDesc> groupByOp, VectorizationContext vContext,
VectorGroupByDesc vectorGroupByDesc)
throws HiveException {
ImmutablePair<Operator<? extends OperatorDesc>,String> pair =
doVectorizeGroupByOperator(
groupByOp, vContext, vectorGroupByDesc);
return pair.left;
}
  /*
   * NOTE: The VectorGroupByDesc has already been allocated and will be updated here.
   *
   * Builds the vector key expressions and per-aggregation VectorAggregationDescs, stores
   * them on vectorGroupByDesc, and instantiates the vectorized GroupBy operator.
   * Returns (operator, null) on success or (null, reason) when an aggregation cannot be
   * vectorized.
   */
  private static ImmutablePair<Operator<? extends OperatorDesc>,String> doVectorizeGroupByOperator(
      Operator<? extends OperatorDesc> groupByOp, VectorizationContext vContext,
      VectorGroupByDesc vectorGroupByDesc)
          throws HiveException {
    GroupByDesc groupByDesc = (GroupByDesc) groupByOp.getConf();
    List<ExprNodeDesc> keysDesc = groupByDesc.getKeys();
    // For now, we don't support group by on DECIMAL_64 keys.
    VectorExpression[] vecKeyExpressions =
        vContext.getVectorExpressionsUpConvertDecimal64(keysDesc);
    ArrayList<AggregationDesc> aggrDesc = groupByDesc.getAggregators();
    final int size = aggrDesc.size();
    VectorAggregationDesc[] vecAggrDescs = new VectorAggregationDesc[size];
    int[] projectedOutputColumns = new int[size];
    for (int i = 0; i < size; ++i) {
      AggregationDesc aggDesc = aggrDesc.get(i);
      ImmutablePair<VectorAggregationDesc,String> pair =
          getVectorAggregationDesc(aggDesc, vContext);
      if (pair.left == null) {
        // One unsupported aggregation vetoes the whole operator; surface the reason.
        return new ImmutablePair<Operator<? extends OperatorDesc>, String>(null, pair.right);
      }
      vecAggrDescs[i] = pair.left;
      // GroupBy generates a new vectorized row batch...
      projectedOutputColumns[i] = i;
    }
    vectorGroupByDesc.setKeyExpressions(vecKeyExpressions);
    vectorGroupByDesc.setVecAggrDescs(vecAggrDescs);
    vectorGroupByDesc.setProjectedOutputColumns(projectedOutputColumns);
    Operator<GroupByDesc> vectorOp =
        OperatorFactory.getVectorOperator(
            groupByOp.getCompilationOpContext(), groupByDesc,
            vContext, vectorGroupByDesc);
    return new ImmutablePair<Operator<? extends OperatorDesc>, String>(vectorOp, null);
  }
  /**
   * Vectorizes a SELECT operator: builds a vector expression for each select column,
   * records the projected output column numbers, and instantiates the vectorized operator.
   * Identity expressions (plain column pass-throughs) are projected but not evaluated.
   */
  public static Operator<? extends OperatorDesc> vectorizeSelectOperator(
      Operator<? extends OperatorDesc> selectOp, VectorizationContext vContext,
      VectorSelectDesc vectorSelectDesc)
          throws HiveException {
    SelectDesc selectDesc = (SelectDesc) selectOp.getConf();
    List<ExprNodeDesc> colList = selectDesc.getColList();
    int index = 0;
    final int size = colList.size();
    // this will mark all actual computed columns
    vContext.markActualScratchColumns();
    VectorExpression[] vectorSelectExprs = new VectorExpression[size];
    int[] projectedOutputColumns = new int[size];
    for (int i = 0; i < size; i++) {
      ExprNodeDesc expr = colList.get(i);
      VectorExpression ve = vContext.getVectorExpression(expr);
      projectedOutputColumns[i] = ve.getOutputColumnNum();
      if (ve instanceof IdentityExpression) {
        // Suppress useless evaluation.
        continue;
      }
      // Only non-identity expressions need to be evaluated at run time.
      vectorSelectExprs[index++] = ve;
    }
    // Trim the expression array when identity expressions were skipped above.
    if (index < size) {
      vectorSelectExprs = Arrays.copyOf(vectorSelectExprs, index);
    }
    // Fix up the case where parent expression's output data type physical variations is DECIMAL whereas
    // at least one of its children is DECIMAL_64. Some expressions like x % y for example only accepts DECIMAL
    // for x and y (at this time there is only DecimalColModuloDecimalColumn so both x and y has to be DECIMAL).
    // The following method introduces a cast if x or y is DECIMAL_64 and parent expression (x % y) is DECIMAL.
    try {
      fixDecimalDataTypePhysicalVariations(vContext, vectorSelectExprs);
    } finally {
      // Always release the scratch columns marked above, even if the fix-up throws.
      vContext.freeMarkedScratchColumns();
    }
    vectorSelectDesc.setSelectExpressions(vectorSelectExprs);
    vectorSelectDesc.setProjectedOutputColumns(projectedOutputColumns);
    return OperatorFactory.getVectorOperator(
        selectOp.getCompilationOpContext(), selectDesc,
        vContext, vectorSelectDesc);
  }
private static void fixDecimalDataTypePhysicalVariations(final VectorizationContext vContext,
final VectorExpression[] vectorSelectExprs) throws HiveException {
for (int i = 0; i < vectorSelectExprs.length; i++) {
VectorExpression parent = vectorSelectExprs[i];
VectorExpression newParent = fixDecimalDataTypePhysicalVariations(parent, parent.getChildExpressions(),
vContext);
if (parent.getClass() == newParent.getClass() && parent != newParent) {
vectorSelectExprs[i] = newParent;
}
}
}
  /**
   * Recursively fixes up an expression tree where a parent expects plain DECIMAL inputs
   * but a child produces DECIMAL_64: each mismatching child is wrapped with a
   * DECIMAL_64-to-DECIMAL conversion, and the parent is re-instantiated (or, for
   * VectorUDFAdaptor, patched in place) so its input column numbers reference the
   * conversion outputs.
   *
   * Returns the (possibly re-instantiated) parent; returns the original parent unchanged
   * when there are no children or no fix-up was needed.
   */
  private static VectorExpression fixDecimalDataTypePhysicalVariations(final VectorExpression parent,
      final VectorExpression[] children, final VectorizationContext vContext) throws HiveException {
    if (children == null || children.length == 0) {
      return parent;
    }
    // Depth-first: fix grandchildren before deciding whether this parent needs fixing.
    for (int i = 0; i < children.length; i++) {
      VectorExpression child = children[i];
      VectorExpression newChild = fixDecimalDataTypePhysicalVariations(child, child.getChildExpressions(), vContext);
      if (child.getClass() == newChild.getClass() && child != newChild) {
        children[i] = newChild;
      }
    }
    if (parent.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.NONE) {
      boolean inputArgsChanged = false;
      DataTypePhysicalVariation[] dataTypePhysicalVariations = parent.getInputDataTypePhysicalVariations();
      VectorExpression oldExpression = null;
      VectorExpression newExpression = null;
      for (int i = 0; i < children.length; i++) {
        oldExpression = children[i];
        // we found at least one children with mismatch
        if (oldExpression.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) {
          newExpression = vContext.wrapWithDecimal64ToDecimalConversion(oldExpression);
          children[i] = newExpression;
          inputArgsChanged = true;
          dataTypePhysicalVariations[i] = DataTypePhysicalVariation.NONE;
        }
      }
      // fix up the input column numbers and output column numbers
      if (inputArgsChanged) {
        if (parent instanceof VectorUDFAdaptor) {
          VectorUDFAdaptor parentAdaptor = (VectorUDFAdaptor) parent;
          VectorUDFArgDesc[] argDescs = parentAdaptor.getArgDescs();
          // NOTE(review): oldExpression/newExpression here hold the values from the *last*
          // loop iteration above, not necessarily the (only) converted child. This appears
          // to assume at most one DECIMAL_64 child for adaptors, or that the converted
          // child is iterated last -- TODO confirm against callers.
          for (VectorUDFArgDesc argDesc : argDescs) {
            if (argDesc.getColumnNum() == oldExpression.getOutputColumnNum()) {
              argDesc.setColumnNum(newExpression.getOutputColumnNum());
              break;
            }
          }
        } else {
          // Rebuild the argument list: one input column number per child, plus the parent's
          // output column number (when it has one) as the trailing argument.
          int argumentCount = children.length + (parent.getOutputColumnNum() == -1 ? 0 : 1);
          Object[] arguments = new Object[argumentCount];
          // new input column numbers
          for (int i = 0; i < children.length; i++) {
            VectorExpression vce = children[i];
            arguments[i] = vce.getOutputColumnNum();
          }
          // retain output column number from parent
          if (parent.getOutputColumnNum() != -1) {
            arguments[arguments.length - 1] = parent.getOutputColumnNum();
          }
          // re-instantiate the parent expression with new arguments
          VectorExpression newParent = vContext.instantiateExpression(parent.getClass(), parent.getOutputTypeInfo(),
              parent.getOutputDataTypePhysicalVariation(), arguments);
          // Carry over all type metadata from the old parent, except the per-input physical
          // variations, which were reset to NONE for every converted child above.
          newParent.setOutputTypeInfo(parent.getOutputTypeInfo());
          newParent.setOutputDataTypePhysicalVariation(parent.getOutputDataTypePhysicalVariation());
          newParent.setInputTypeInfos(parent.getInputTypeInfos());
          newParent.setInputDataTypePhysicalVariations(dataTypePhysicalVariations);
          newParent.setChildExpressions(parent.getChildExpressions());
          return newParent;
        }
      }
    }
    return parent;
  }
private static void fillInPTFEvaluators(
List<WindowFunctionDef> windowsFunctions,
String[] evaluatorFunctionNames,
WindowFrameDef[] evaluatorWindowFrameDefs,
List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists) throws HiveException {
final int functionCount = windowsFunctions.size();
for (int i = 0; i < functionCount; i++) {
WindowFunctionDef winFunc = windowsFunctions.get(i);
evaluatorFunctionNames[i] = winFunc.getName();
evaluatorWindowFrameDefs[i] = winFunc.getWindowFrame();
List<PTFExpressionDef> args = winFunc.getArgs();
if (args != null) {
List<ExprNodeDesc> exprNodeDescList = new ArrayList<ExprNodeDesc>();
for (PTFExpressionDef arg : args) {
exprNodeDescList.add(arg.getExprNode());
}
evaluatorInputExprNodeDescLists[i] = exprNodeDescList;
}
}
}
private static ExprNodeDesc[] getPartitionExprNodeDescs(List<PTFExpressionDef> partitionExpressions) {
final int size = partitionExpressions.size();
ExprNodeDesc[] exprNodeDescs = new ExprNodeDesc[size];
for (int i = 0; i < size; i++) {
exprNodeDescs[i] = partitionExpressions.get(i).getExprNode();
}
return exprNodeDescs;
}
private static ExprNodeDesc[] getOrderExprNodeDescs(List<OrderExpressionDef> orderExpressions) {
final int size = orderExpressions.size();
ExprNodeDesc[] exprNodeDescs = new ExprNodeDesc[size];
for (int i = 0; i < size; i++) {
exprNodeDescs[i] = orderExpressions.get(i).getExprNode();
}
return exprNodeDescs;
}
  /*
   * Update the VectorPTFDesc with data that is used during validation and that doesn't rely on
   * VectorizationContext to lookup column names, etc.
   *
   * Collects, from the window table function definition and the operator's output schema:
   * output column names/types (evaluator results first, then key/non-key input columns),
   * partition and order ExprNodeDescs, whether PARTITION BY differs from ORDER BY, and the
   * per-evaluator function names, window frames, and argument expression lists.
   */
  private static void createVectorPTFDesc(Operator<? extends OperatorDesc> ptfOp,
      PTFDesc ptfDesc, VectorizationContext vContext, VectorPTFDesc vectorPTFDesc,
      int vectorizedPTFMaxMemoryBufferingBatchCount)
          throws HiveException {
    PartitionedTableFunctionDef funcDef = ptfDesc.getFuncDef();
    WindowTableFunctionDef windowTableFunctionDef = (WindowTableFunctionDef) funcDef;
    List<WindowFunctionDef> windowsFunctions = windowTableFunctionDef.getWindowFunctions();
    final int functionCount = windowsFunctions.size();
    ArrayList<ColumnInfo> outputSignature = ptfOp.getSchema().getSignature();
    final int outputSize = outputSignature.size();
    /*
     * Output columns.
     */
    // Evaluator results are first.
    String[] outputColumnNames = new String[outputSize];
    TypeInfo[] outputTypeInfos = new TypeInfo[outputSize];
    for (int i = 0; i < functionCount; i++) {
      ColumnInfo colInfo = outputSignature.get(i);
      TypeInfo typeInfo = colInfo.getType();
      outputColumnNames[i] = colInfo.getInternalName();
      outputTypeInfos[i] = typeInfo;
    }
    // Followed by key and non-key input columns (some may be missing).
    for (int i = functionCount; i < outputSize; i++) {
      ColumnInfo colInfo = outputSignature.get(i);
      outputColumnNames[i] = colInfo.getInternalName();
      outputTypeInfos[i] = colInfo.getType();
    }
    List<PTFExpressionDef> partitionExpressions = funcDef.getPartition().getExpressions();
    final int partitionKeyCount = partitionExpressions.size();
    ExprNodeDesc[] partitionExprNodeDescs = getPartitionExprNodeDescs(partitionExpressions);
    List<OrderExpressionDef> orderExpressions = funcDef.getOrder().getExpressions();
    final int orderKeyCount = orderExpressions.size();
    ExprNodeDesc[] orderExprNodeDescs = getOrderExprNodeDescs(orderExpressions);
    // When there are PARTITION and ORDER BY clauses, will have different partition expressions.
    // Otherwise, only order by expressions.
    boolean isPartitionOrderBy = false;
    if (partitionKeyCount != orderKeyCount) {
      // Obviously different expressions.
      isPartitionOrderBy = true;
    } else {
      // Check each ExprNodeDesc.
      // Same counts: compare pairwise via the equality wrapper; any mismatch means the
      // partition spec is distinct from the order spec.
      for (int i = 0; i < partitionKeyCount; i++) {
        final ExprNodeDescEqualityWrapper partitionExprEqualityWrapper =
            new ExprNodeDesc.ExprNodeDescEqualityWrapper(partitionExprNodeDescs[i]);
        final ExprNodeDescEqualityWrapper orderExprEqualityWrapper =
            new ExprNodeDesc.ExprNodeDescEqualityWrapper(orderExprNodeDescs[i]);
        if (!partitionExprEqualityWrapper.equals(orderExprEqualityWrapper)) {
          isPartitionOrderBy = true;
          break;
        }
      }
    }
    String[] evaluatorFunctionNames = new String[functionCount];
    WindowFrameDef[] evaluatorWindowFrameDefs = new WindowFrameDef[functionCount];
    // Unchecked generic-array creation is unavoidable here; slots are filled below.
    List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists = (List<ExprNodeDesc>[]) new List<?>[functionCount];
    fillInPTFEvaluators(
        windowsFunctions,
        evaluatorFunctionNames,
        evaluatorWindowFrameDefs,
        evaluatorInputExprNodeDescLists);
    TypeInfo[] reducerBatchTypeInfos = vContext.getAllTypeInfos();
    vectorPTFDesc.setReducerBatchTypeInfos(reducerBatchTypeInfos);
    vectorPTFDesc.setIsPartitionOrderBy(isPartitionOrderBy);
    vectorPTFDesc.setOrderExprNodeDescs(orderExprNodeDescs);
    vectorPTFDesc.setPartitionExprNodeDescs(partitionExprNodeDescs);
    vectorPTFDesc.setEvaluatorFunctionNames(evaluatorFunctionNames);
    vectorPTFDesc.setEvaluatorWindowFrameDefs(evaluatorWindowFrameDefs);
    vectorPTFDesc.setEvaluatorInputExprNodeDescLists(evaluatorInputExprNodeDescLists);
    vectorPTFDesc.setOutputColumnNames(outputColumnNames);
    vectorPTFDesc.setOutputTypeInfos(outputTypeInfos);
    vectorPTFDesc.setVectorizedPTFMaxMemoryBufferingBatchCount(
        vectorizedPTFMaxMemoryBufferingBatchCount);
  }
private static void determineKeyAndNonKeyInputColumnMap(int[] outputColumnProjectionMap,
boolean isPartitionOrderBy, int[] orderColumnMap, int[] partitionColumnMap,
int evaluatorCount, ArrayList<Integer> keyInputColumns,
ArrayList<Integer> nonKeyInputColumns) {
final int outputSize = outputColumnProjectionMap.length;
final int orderKeyCount = orderColumnMap.length;
final int partitionKeyCount = (isPartitionOrderBy ? partitionColumnMap.length : 0);
for (int i = evaluatorCount; i < outputSize; i++) {
final int nonEvalColumnNum = outputColumnProjectionMap[i];
boolean isKey = false;
for (int o = 0; o < orderKeyCount; o++) {
if (nonEvalColumnNum == orderColumnMap[o]) {
isKey = true;
break;
}
}
if (!isKey && isPartitionOrderBy) {
for (int p = 0; p < partitionKeyCount; p++) {
if (nonEvalColumnNum == partitionColumnMap[p]) {
isKey = true;
break;
}
}
}
if (isKey) {
keyInputColumns.add(nonEvalColumnNum);
} else {
nonKeyInputColumns.add(nonEvalColumnNum);
}
}
}
/*
* Create the additional vectorization PTF information needed by the VectorPTFOperator during
* execution.
*/
private static VectorPTFInfo createVectorPTFInfo(Operator<? extends OperatorDesc> ptfOp,
PTFDesc ptfDesc, VectorizationContext vContext, VectorPTFDesc vectorPTFDesc)
throws HiveException {
PartitionedTableFunctionDef funcDef = ptfDesc.getFuncDef();
ArrayList<ColumnInfo> outputSignature = ptfOp.getSchema().getSignature();
final int outputSize = outputSignature.size();
boolean isPartitionOrderBy = vectorPTFDesc.getIsPartitionOrderBy();
ExprNodeDesc[] orderExprNodeDescs = vectorPTFDesc.getOrderExprNodeDescs();
ExprNodeDesc[] partitionExprNodeDescs = vectorPTFDesc.getPartitionExprNodeDescs();
String[] evaluatorFunctionNames = vectorPTFDesc.getEvaluatorFunctionNames();
final int evaluatorCount = evaluatorFunctionNames.length;
WindowFrameDef[] evaluatorWindowFrameDefs = vectorPTFDesc.getEvaluatorWindowFrameDefs();
List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists = vectorPTFDesc.getEvaluatorInputExprNodeDescLists();
/*
* Output columns.
*/
int[] outputColumnProjectionMap = new int[outputSize];
// Evaluator results are first.
for (int i = 0; i < evaluatorCount; i++) {
ColumnInfo colInfo = outputSignature.get(i);
TypeInfo typeInfo = colInfo.getType();
final int outputColumnNum;
outputColumnNum = vContext.allocateScratchColumn(typeInfo);
outputColumnProjectionMap[i] = outputColumnNum;
}
// Followed by key and non-key input columns (some may be missing).
for (int i = evaluatorCount; i < outputSize; i++) {
ColumnInfo colInfo = outputSignature.get(i);
outputColumnProjectionMap[i] = vContext.getInputColumnIndex(colInfo.getInternalName());
}
/*
* Partition and order by.
*/
int[] partitionColumnMap;
Type[] partitionColumnVectorTypes;
VectorExpression[] partitionExpressions;
if (!isPartitionOrderBy) {
partitionColumnMap = null;
partitionColumnVectorTypes = null;
partitionExpressions = null;
} else {
final int partitionKeyCount = partitionExprNodeDescs.length;
partitionColumnMap = new int[partitionKeyCount];
partitionColumnVectorTypes = new Type[partitionKeyCount];
partitionExpressions = new VectorExpression[partitionKeyCount];
for (int i = 0; i < partitionKeyCount; i++) {
VectorExpression partitionExpression = vContext.getVectorExpression(partitionExprNodeDescs[i]);
TypeInfo typeInfo = partitionExpression.getOutputTypeInfo();
Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
partitionColumnVectorTypes[i] = columnVectorType;
partitionColumnMap[i] = partitionExpression.getOutputColumnNum();
partitionExpressions[i] = partitionExpression;
}
}
final int orderKeyCount = orderExprNodeDescs.length;
int[] orderColumnMap = new int[orderKeyCount];
Type[] orderColumnVectorTypes = new Type[orderKeyCount];
VectorExpression[] orderExpressions = new VectorExpression[orderKeyCount];
for (int i = 0; i < orderKeyCount; i++) {
VectorExpression orderExpression = vContext.getVectorExpression(orderExprNodeDescs[i]);
TypeInfo typeInfo = orderExpression.getOutputTypeInfo();
Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
orderColumnVectorTypes[i] = columnVectorType;
orderColumnMap[i] = orderExpression.getOutputColumnNum();
orderExpressions[i] = orderExpression;
}
ArrayList<Integer> keyInputColumns = new ArrayList<Integer>();
ArrayList<Integer> nonKeyInputColumns = new ArrayList<Integer>();
determineKeyAndNonKeyInputColumnMap(outputColumnProjectionMap, isPartitionOrderBy, orderColumnMap,
partitionColumnMap, evaluatorCount, keyInputColumns, nonKeyInputColumns);
int[] keyInputColumnMap = ArrayUtils.toPrimitive(keyInputColumns.toArray(new Integer[0]));
int[] nonKeyInputColumnMap = ArrayUtils.toPrimitive(nonKeyInputColumns.toArray(new Integer[0]));
VectorExpression[] evaluatorInputExpressions = new VectorExpression[evaluatorCount];
Type[] evaluatorInputColumnVectorTypes = new Type[evaluatorCount];
for (int i = 0; i < evaluatorCount; i++) {
String functionName = evaluatorFunctionNames[i];
WindowFrameDef windowFrameDef = evaluatorWindowFrameDefs[i];
SupportedFunctionType functionType = VectorPTFDesc.supportedFunctionsMap.get(functionName);
List<ExprNodeDesc> exprNodeDescList = evaluatorInputExprNodeDescLists[i];
VectorExpression inputVectorExpression;
final Type columnVectorType;
if (exprNodeDescList != null) {
// Validation has limited evaluatorInputExprNodeDescLists to size 1.
ExprNodeDesc exprNodeDesc = exprNodeDescList.get(0);
// Determine input vector expression using the VectorizationContext.
inputVectorExpression = vContext.getVectorExpression(exprNodeDesc);
TypeInfo typeInfo = exprNodeDesc.getTypeInfo();
PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
} else {
inputVectorExpression = null;
columnVectorType = ColumnVector.Type.NONE;
}
evaluatorInputExpressions[i] = inputVectorExpression;
evaluatorInputColumnVectorTypes[i] = columnVectorType;
}
VectorPTFInfo vectorPTFInfo = new VectorPTFInfo();
vectorPTFInfo.setOutputColumnMap(outputColumnProjectionMap);
vectorPTFInfo.setPartitionColumnMap(partitionColumnMap);
vectorPTFInfo.setPartitionColumnVectorTypes(partitionColumnVectorTypes);
vectorPTFInfo.setPartitionExpressions(partitionExpressions);
vectorPTFInfo.setOrderColumnMap(orderColumnMap);
vectorPTFInfo.setOrderColumnVectorTypes(orderColumnVectorTypes);
vectorPTFInfo.setOrderExpressions(orderExpressions);
vectorPTFInfo.setEvaluatorInputExpressions(evaluatorInputExpressions);
vectorPTFInfo.setEvaluatorInputColumnVectorTypes(evaluatorInputColumnVectorTypes);
vectorPTFInfo.setKeyInputColumnMap(keyInputColumnMap);
vectorPTFInfo.setNonKeyInputColumnMap(nonKeyInputColumnMap);
return vectorPTFInfo;
}
/*
* NOTE: The VectorPTFDesc has already been allocated and populated.
*/
public static Operator<? extends OperatorDesc> vectorizePTFOperator(
Operator<? extends OperatorDesc> ptfOp, VectorizationContext vContext,
VectorPTFDesc vectorPTFDesc)
throws HiveException {
PTFDesc ptfDesc = (PTFDesc) ptfOp.getConf();
VectorPTFInfo vectorPTFInfo = createVectorPTFInfo(ptfOp, ptfDesc, vContext, vectorPTFDesc);
vectorPTFDesc.setVectorPTFInfo(vectorPTFInfo);
Class<? extends Operator<?>> opClass = VectorPTFOperator.class;
return OperatorFactory.getVectorOperator(
opClass, ptfOp.getCompilationOpContext(), ptfOp.getConf(),
vContext, vectorPTFDesc);
}
// UNDONE: Used by tests...
public Operator<? extends OperatorDesc> vectorizeOperator(Operator<? extends OperatorDesc> op,
VectorizationContext vContext, boolean isReduce, boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo)
throws HiveException, VectorizerCannotVectorizeException {
Operator<? extends OperatorDesc> vectorOp =
validateAndVectorizeOperator(op, vContext, isReduce, isTezOrSpark, vectorTaskColumnInfo);
if (vectorOp != op) {
fixupParentChildOperators(op, vectorOp);
}
return vectorOp;
}
public Operator<? extends OperatorDesc> validateAndVectorizeOperator(Operator<? extends OperatorDesc> op,
VectorizationContext vContext, boolean isReduce, boolean isTezOrSpark,
VectorTaskColumnInfo vectorTaskColumnInfo)
throws HiveException, VectorizerCannotVectorizeException {
Operator<? extends OperatorDesc> vectorOp = null;
// This "global" allows various validation methods to set the "not vectorized" reason.
currentOperator = op;
boolean isNative;
try {
switch (op.getType()) {
case MAPJOIN:
{
if (op instanceof MapJoinOperator) {
if (!validateMapJoinOperator((MapJoinOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
} else if (op instanceof SMBMapJoinOperator) {
if (!validateSMBMapJoinOperator((SMBMapJoinOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
} else {
setOperatorNotSupported(op);
throw new VectorizerCannotVectorizeException();
}
if (op instanceof MapJoinOperator) {
MapJoinDesc desc = (MapJoinDesc) op.getConf();
VectorMapJoinDesc vectorMapJoinDesc = new VectorMapJoinDesc();
boolean specialize =
canSpecializeMapJoin(op, desc, isTezOrSpark, vContext, vectorMapJoinDesc);
if (!specialize) {
Class<? extends Operator<?>> opClass = null;
// *NON-NATIVE* vector map differences for LEFT OUTER JOIN and Filtered...
List<ExprNodeDesc> bigTableFilters = desc.getFilters().get((byte) desc.getPosBigTable());
boolean isOuterAndFiltered = (!desc.isNoOuterJoin() && bigTableFilters.size() > 0);
if (!isOuterAndFiltered) {
opClass = VectorMapJoinOperator.class;
} else {
opClass = VectorMapJoinOuterFilteredOperator.class;
}
vectorOp = OperatorFactory.getVectorOperator(
opClass, op.getCompilationOpContext(), desc,
vContext, vectorMapJoinDesc);
isNative = false;
} else {
// TEMPORARY Until Native Vector Map Join with Hybrid passes tests...
// HiveConf.setBoolVar(physicalContext.getConf(),
// HiveConf.ConfVars.HIVEUSEHYBRIDGRACEHASHJOIN, false);
vectorOp = specializeMapJoinOperator(op, vContext, desc, vectorMapJoinDesc);
isNative = true;
if (vectorTaskColumnInfo != null) {
VectorMapJoinInfo vectorMapJoinInfo = vectorMapJoinDesc.getVectorMapJoinInfo();
if (usesVectorUDFAdaptor(vectorMapJoinDesc.getAllBigTableKeyExpressions())) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
if (usesVectorUDFAdaptor(vectorMapJoinDesc.getAllBigTableValueExpressions())) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
}
}
} else {
Preconditions.checkState(op instanceof SMBMapJoinOperator);
SMBJoinDesc smbJoinSinkDesc = (SMBJoinDesc) op.getConf();
// Check additional constraint.
if (smbJoinSinkDesc.getFilterMap() != null) {
setOperatorIssue("FilterMaps not supported for Vector Pass-Thru SMB MapJoin");
throw new VectorizerCannotVectorizeException();
}
VectorSMBJoinDesc vectorSMBJoinDesc = new VectorSMBJoinDesc();
vectorOp = OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), smbJoinSinkDesc, vContext, vectorSMBJoinDesc);
isNative = false;
}
}
break;
case REDUCESINK:
{
if (!validateReduceSinkOperator((ReduceSinkOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
ReduceSinkDesc reduceDesc = (ReduceSinkDesc) op.getConf();
VectorReduceSinkDesc vectorReduceSinkDesc = new VectorReduceSinkDesc();
boolean specialize =
canSpecializeReduceSink(reduceDesc, isTezOrSpark, vContext, vectorReduceSinkDesc);
if (!specialize) {
vectorOp = OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), reduceDesc, vContext, vectorReduceSinkDesc);
isNative = false;
} else {
vectorOp = specializeReduceSinkOperator(op, vContext, reduceDesc, vectorReduceSinkDesc);
isNative = true;
if (vectorTaskColumnInfo != null) {
VectorReduceSinkInfo vectorReduceSinkInfo = vectorReduceSinkDesc.getVectorReduceSinkInfo();
if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkKeyExpressions())) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkValueExpressions())) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
}
}
}
break;
case FILTER:
{
if (!validateFilterOperator((FilterOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
VectorFilterDesc vectorFilterDesc = new VectorFilterDesc();
vectorOp = vectorizeFilterOperator(op, vContext, vectorFilterDesc);
isNative = true;
if (vectorTaskColumnInfo != null) {
VectorExpression vectorPredicateExpr = vectorFilterDesc.getPredicateExpression();
if (usesVectorUDFAdaptor(vectorPredicateExpr)) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
}
}
break;
case TOPNKEY:
{
if (!validateTopNKeyOperator((TopNKeyOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
VectorTopNKeyDesc vectorTopNKeyDesc = new VectorTopNKeyDesc();
vectorOp = vectorizeTopNKeyOperator(op, vContext, vectorTopNKeyDesc);
isNative = true;
if (vectorTaskColumnInfo != null) {
VectorExpression[] keyExpressions = vectorTopNKeyDesc.getKeyExpressions();
if (usesVectorUDFAdaptor(keyExpressions)) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
}
}
break;
case SELECT:
{
if (!validateSelectOperator((SelectOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
vectorOp = vectorizeSelectOperator(op, vContext, vectorSelectDesc);
isNative = true;
if (vectorTaskColumnInfo != null) {
VectorExpression[] vectorSelectExprs = vectorSelectDesc.getSelectExpressions();
if (usesVectorUDFAdaptor(vectorSelectExprs)) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
}
}
break;
case GROUPBY:
{
// The validateGroupByOperator method will update vectorGroupByDesc.
VectorGroupByDesc vectorGroupByDesc = new VectorGroupByDesc();
if (!validateGroupByOperator((GroupByOperator) op, isReduce, isTezOrSpark,
vectorGroupByDesc)) {
throw new VectorizerCannotVectorizeException();
}
ImmutablePair<Operator<? extends OperatorDesc>,String> pair =
doVectorizeGroupByOperator(op, vContext, vectorGroupByDesc);
if (pair.left == null) {
setOperatorIssue(pair.right);
throw new VectorizerCannotVectorizeException();
}
vectorOp = pair.left;
isNative = false;
if (vectorTaskColumnInfo != null) {
VectorExpression[] vecKeyExpressions = vectorGroupByDesc.getKeyExpressions();
if (usesVectorUDFAdaptor(vecKeyExpressions)) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
VectorAggregationDesc[] vecAggrDescs = vectorGroupByDesc.getVecAggrDescs();
for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) {
if (usesVectorUDFAdaptor(vecAggrDesc.getInputExpression())) {
vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
}
}
}
}
break;
case FILESINK:
{
if (!validateFileSinkOperator((FileSinkOperator) op)) {
throw new VectorizerCannotVectorizeException();
}
FileSinkDesc fileSinkDesc = (FileSinkDesc) op.getConf();
VectorFileSinkDesc vectorFileSinkDesc = new VectorFileSinkDesc();
boolean isArrowSpecialization =
checkForArrowFileSink(fileSinkDesc, isTezOrSpark, vContext, vectorFileSinkDesc);
if (isArrowSpecialization) {
vectorOp =
specializeArrowFileSinkOperator(
op, vContext, fileSinkDesc, vectorFileSinkDesc);
isNative = true;
} else {
vectorOp =
OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), fileSinkDesc, vContext, vectorFileSinkDesc);
isNative = false;
}
}
break;
case LIMIT:
{
// No validation.
LimitDesc limitDesc = (LimitDesc) op.getConf();
VectorLimitDesc vectorLimitDesc = new VectorLimitDesc();
vectorOp = OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), limitDesc, vContext, vectorLimitDesc);
isNative = true;
}
break;
case EVENT:
{
// No validation.
AppMasterEventDesc eventDesc = (AppMasterEventDesc) op.getConf();
VectorAppMasterEventDesc vectorEventDesc = new VectorAppMasterEventDesc();
vectorOp = OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), eventDesc, vContext, vectorEventDesc);
isNative = true;
}
break;
case PTF:
{
// The validatePTFOperator method will update vectorPTFDesc.
VectorPTFDesc vectorPTFDesc = new VectorPTFDesc();
if (!validatePTFOperator((PTFOperator) op, vContext, vectorPTFDesc)) {
throw new VectorizerCannotVectorizeException();
}
vectorOp = vectorizePTFOperator(op, vContext, vectorPTFDesc);
isNative = true;
}
break;
case HASHTABLESINK:
{
// No validation.
SparkHashTableSinkDesc sparkHashTableSinkDesc = (SparkHashTableSinkDesc) op.getConf();
VectorSparkHashTableSinkDesc vectorSparkHashTableSinkDesc = new VectorSparkHashTableSinkDesc();
vectorOp = OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), sparkHashTableSinkDesc,
vContext, vectorSparkHashTableSinkDesc);
isNative = true;
}
break;
case SPARKPRUNINGSINK:
{
// No validation.
SparkPartitionPruningSinkDesc sparkPartitionPruningSinkDesc =
(SparkPartitionPruningSinkDesc) op.getConf();
VectorSparkPartitionPruningSinkDesc vectorSparkPartitionPruningSinkDesc =
new VectorSparkPartitionPruningSinkDesc();
vectorOp = OperatorFactory.getVectorOperator(
op.getCompilationOpContext(), sparkPartitionPruningSinkDesc,
vContext, vectorSparkPartitionPruningSinkDesc);
// need to maintain the unique ID so that target map works can
// read the output
((SparkPartitionPruningSinkOperator) vectorOp).setUniqueId(
((SparkPartitionPruningSinkOperator) op).getUniqueId());
isNative = true;
}
break;
default:
setOperatorNotSupported(op);
throw new VectorizerCannotVectorizeException();
}
} catch (HiveException e) {
setOperatorIssue(e.getMessage());
throw new VectorizerCannotVectorizeException();
}
Preconditions.checkState(vectorOp != null);
if (vectorTaskColumnInfo != null && !isNative) {
vectorTaskColumnInfo.setAllNative(false);
}
LOG.debug("vectorizeOperator " + vectorOp.getClass().getName());
LOG.debug("vectorizeOperator " + vectorOp.getConf().getClass().getName());
// These operators need to be linked to enable runtime statistics to be gathered/used correctly
planMapper.link(op, vectorOp);
return vectorOp;
}
}
| apache-2.0 |
zhaoqian/study-test | credo-test/src/main/java/org/credo/jdk/font/FontTest.java | 506 | package org.credo.jdk.font;
import java.awt.GraphicsEnvironment;
/**
 * Small diagnostic program: enumerates the font families known to the local
 * graphics environment and prints the {@code SystemRoot} environment variable
 * (e.g. {@code C:\Windows} on Windows, {@code null} elsewhere).
 */
public class FontTest {
    public static void main(String[] args) {
        // Query the platform's available font family names.
        String[] families = GraphicsEnvironment.getLocalGraphicsEnvironment()
                .getAvailableFontFamilyNames();
        // Walk every family; the print is intentionally disabled, so this
        // loop produces no output — preserved from the original behaviour.
        for (String family : families) {
            //System.out.println(family);
        }
        String windowsRoot = System.getenv("SystemRoot");
        System.out.println(windowsRoot);
    }
}
| apache-2.0 |
Amplifino/obelix | com.amplifino.obelix.sequences/test/com/amplifino/obelix/sequences/SequenceTest.java | 3048 | package com.amplifino.obelix.sequences;
import static org.junit.Assert.assertEquals;
import java.nio.file.FileSystems;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import org.junit.Before;
import org.junit.Test;
import com.amplifino.counters.Counts;
import com.amplifino.obelix.injections.RawInjections;
import com.amplifino.obelix.pairs.LongKeyPair;
import com.amplifino.obelix.pairs.OrderedPair;
import com.amplifino.obelix.space.ByteSpace;
import com.amplifino.obelix.space.DirectorySpace;
/**
 * Integration test for obelix sequence spaces: writes one period of sine and
 * cosine samples into a disk-backed {@link ByteSpace} and verifies several
 * analytic identities over the stored data. Counter snapshots are taken
 * between phases to print per-phase space activity; the snapshot chaining
 * makes the statement order significant.
 */
public class SequenceTest {
	// Number of samples written to each sequence.
	private final static long LENGTH = 10_000_000L;
	// Angular step so that the ordinals [0, LENGTH) cover one full period.
	private final static double STEP = Math.PI * 2 / LENGTH;
	private DoubleSequence cosine;
	private DoubleSequence sine;
	private ByteSpace space;
	// Rolling counter snapshot; each phase prints its delta against this.
	private Counts snapshot;
	@Before
	public void setup() {
		//this.space = new HeapSpace();
		// NOTE(review): writes under $HOME/.obelix on every run — TODO confirm
		// this is intended for an automated test environment.
		this.space = DirectorySpace.on(FileSystems.getDefault().getPath(System.getProperty("user.home"), ".obelix" ));
		snapshot = space.counts();
		// Mixer combines 32 bits of the sequence key with 29 bits of the
		// ordinal to form the byte-space address.
		LongMixer mixer = LongMixer.builder()
			.fromKey(0, 32)
			.fromValue(0,29)
			.build();
		RawSequenceSpace rawSequenceSpace = new RawSequenceSpace(space, mixer, Double.BYTES);
		SequenceSpace<Double> sequenceSpace = new ObjectSequenceSpace<>(rawSequenceSpace, RawInjections.doubles());
		// Two sequences share the same space, distinguished by keys 1 and 2.
		this.cosine = new DoubleSequence(sequenceSpace, 1L);
		this.sine = new DoubleSequence(sequenceSpace, 2L);
	}
	// All sample ordinals [0, LENGTH).
	private LongStream ordinals() {
		return LongStream.range(0, LENGTH );
	}
	// Stores cos/sin of the ordinal's angle at the given position.
	private void put(Long value) {
		cosine.put(value, Math.cos(STEP * value));
		sine.put(value, Math.sin(STEP * value));
	}
	@Test
	public void test() {
		// Phase 1: populate both sequences in parallel.
		ordinals()
			.parallel()
			.forEach(this::put);
		snapshot = space.counts().delta(snapshot, Counts::print);
		// Phase 2: cosine sums to ~0 over a full period.
		double cosineSum = ordinals()
			.parallel()
			.mapToDouble(cosine::get)
			.sum();
		snapshot = space.counts().delta(snapshot, Counts::print);
		assertEquals(0d, cosineSum, 0.00000001d);
		// Phase 3: sine sums to ~0 over a full period.
		double sineSum = ordinals()
			.parallel()
			.mapToDouble(sine::get)
			.sum();
		snapshot = space.counts().delta(snapshot, Counts::print);
		assertEquals(0d, sineSum, 0.00000001d);
		// Phase 4: cos^2 + sin^2 == 1 (within tolerance) for every sample.
		long matches = ordinals()
			.parallel()
			.mapToObj( key -> OrderedPair.of(cosine.get(key), sine.get(key)))
			.mapToDouble(pair -> pair.key() * pair.key() + pair.value() * pair.value())
			.filter(value -> value >= 0.9999)
			.filter(value -> value <= 1.0001)
			.count();
		snapshot = space.counts().delta(snapshot, Counts::print);
		assertEquals(LENGTH, matches);
		// Phase 5: the finite-difference derivative of sine approximates cosine.
		Stream<LongKeyPair<Double>> delta = sine.delta(ordinals(), (previous,current) -> (current - previous) / STEP);
		snapshot = space.counts().delta(snapshot, Counts::print);
		matches = delta
			.parallel()
			.map( pair -> OrderedPair.of(pair.value() , cosine.get(pair.key())))
			.filter(pair -> Math.abs(pair.key() - pair.value()) < 0.01)
			.count();
		// LENGTH - 1 because differencing consumes one ordinal.
		assertEquals(LENGTH - 1, matches);
		snapshot = space.counts().delta(snapshot, Counts::print);
	}
}
| apache-2.0 |
google/fest | third_party/fest-swing-junit/src/test/java/org/fest/swing/junit/ant/ImageHandler_encodeBase64_decodeBase64AndSaveAsPng_Test.java | 1799 | /*
* Created on Aug 24, 2009
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
* Copyright @2009 the original author or authors.
*/
package org.fest.swing.junit.ant;
import static java.io.File.separator;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.assertions.ImageAssert.read;
import static org.fest.util.Files.newTemporaryFolder;
import static org.fest.util.Strings.concat;
import java.awt.image.BufferedImage;
import java.io.IOException;
import org.junit.Test;
/**
 * Tests for <code>{@link ImageHandler#encodeBase64(java.awt.image.BufferedImage)}</code> and
 * <code>{@link ImageHandler#decodeBase64AndSaveAsPng(String, String)}</code>.
 *
 * @author Alex Ruiz
 */
public class ImageHandler_encodeBase64_decodeBase64AndSaveAsPng_Test extends ImageHandler_TestCase {

  /**
   * Round-trips a desktop screenshot through Base64: encodes the image,
   * decodes it back into a PNG file, and verifies the saved file's pixels
   * match the original screenshot.
   */
  @Test
  public void should_encode_image_and_save_it_decoded_as_file() throws IOException {
    BufferedImage original = screenshotTaker.takeDesktopScreenshot();
    String base64 = ImageHandler.encodeBase64(original);
    String targetPath = concat(newTemporaryFolder(), separator, "image.png");
    // An empty return value means the decode-and-save step reported no error.
    assertThat(ImageHandler.decodeBase64AndSaveAsPng(base64, targetPath)).isEmpty();
    BufferedImage saved = read(targetPath);
    assertThat(saved).isEqualTo(original);
  }
}
| apache-2.0 |
BlucePan/MyBlog | blog/src/com/blog/model/BlogMenu.java | 3487 | package com.blog.model;
import java.io.Serializable;
/**
 * Menu table entity.
 * @author panzhi
 * @date 2017-3-15
 * @version 1.0.0
 */
public class BlogMenu implements Serializable {

    private static final long serialVersionUID = -3769190932542602472L;

    private String id;           // menu id
    private String menuName;     // menu name
    private String resKey;       // resource key
    private String url;          // menu action link
    private String menuType;     // menu type: 1 = directory, 2 = menu, 3 = button
    private String priority;     // sort order: 1, 2, 3, 4, 5, 6...
    private String superior;     // parent menu id
    private String createTime;   // creation time
    private String createUserId; // creator
    private String updateTime;   // last-modification time
    private String updateUserId; // last modifier
    private String flag;         // 1 = enabled, 2 = disabled
    private String remake;       // menu description
    private String iScommon;     // shared/common menu: "yes" or "no"
    private String imgUrl;       // menu image link
    private String superiorName; // parent menu name

    public String getSuperiorName() {
        return superiorName;
    }

    public void setSuperiorName(String superiorName) {
        this.superiorName = superiorName;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getMenuName() {
        return menuName;
    }

    public void setMenuName(String menuName) {
        this.menuName = menuName;
    }

    public String getResKey() {
        return resKey;
    }

    public void setResKey(String resKey) {
        this.resKey = resKey;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getMenuType() {
        return menuType;
    }

    public void setMenuType(String menuType) {
        this.menuType = menuType;
    }

    public String getPriority() {
        return priority;
    }

    public void setPriority(String priority) {
        this.priority = priority;
    }

    public String getSuperior() {
        return superior;
    }

    public void setSuperior(String superior) {
        this.superior = superior;
    }

    public String getCreateTime() {
        return createTime;
    }

    public void setCreateTime(String createTime) {
        this.createTime = createTime;
    }

    public String getCreateUserId() {
        return createUserId;
    }

    public void setCreateUserId(String createUserId) {
        this.createUserId = createUserId;
    }

    public String getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(String updateTime) {
        this.updateTime = updateTime;
    }

    public String getUpdateUserId() {
        return updateUserId;
    }

    public void setUpdateUserId(String updateUserId) {
        this.updateUserId = updateUserId;
    }

    public String getFlag() {
        return flag;
    }

    public void setFlag(String flag) {
        this.flag = flag;
    }

    public String getRemake() {
        return remake;
    }

    public void setRemake(String remake) {
        this.remake = remake;
    }

    public String getiScommon() {
        return iScommon;
    }

    public void setiScommon(String iScommon) {
        this.iScommon = iScommon;
    }

    public String getImgUrl() {
        return imgUrl;
    }

    public void setImgUrl(String imgUrl) {
        this.imgUrl = imgUrl;
    }

    /**
     * Debug representation; field order and formatting are kept exactly as
     * in the original concatenation-based implementation.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("BlogMenu [createTime=");
        sb.append(createTime)
                .append(", createUserId=").append(createUserId)
                .append(", flag=").append(flag)
                .append(", iScommon=").append(iScommon)
                .append(", id=").append(id)
                .append(", imgUrl=").append(imgUrl)
                .append(", menuName=").append(menuName)
                .append(", menuType=").append(menuType)
                .append(", priority=").append(priority)
                .append(", remake=").append(remake)
                .append(", resKey=").append(resKey)
                .append(", superior=").append(superior)
                .append(", updateTime=").append(updateTime)
                .append(", updateUserId=").append(updateUserId)
                .append(", url=").append(url)
                .append("]");
        return sb.toString();
    }
}
| apache-2.0 |
openwide-java/owsi-core-parent | owsi-core/owsi-core-components/owsi-core-component-wicket-more/src/test/java/fr/openwide/core/test/wicket/more/model/AbstractTestMapModel.java | 1490 | package fr.openwide.core.test.wicket.more.model;
import java.util.Map;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import com.google.common.base.Equivalence;
import com.google.common.base.Supplier;
public abstract class AbstractTestMapModel<M extends Map<?, ?>>
extends AbstractTestModel<M> {
protected static final Equivalence<Map<?, ?>> UNORDERED_MAP_EQUIVALENCE = new Equivalence<Map<?, ?>>() {
@Override
protected boolean doEquivalent(Map<?, ?> a, Map<?, ?> b) {
return a.size() == b.size() && b.entrySet().containsAll(a.entrySet()); // No constraint on Map order
}
@Override
protected int doHash(Map<?, ?> t) {
return t.hashCode();
}
@Override
public String toString() {
return "UNORDERED_SET_EQUIVALENCE";
}
};
protected final Supplier<? extends M> mapSupplier;
public AbstractTestMapModel(Supplier<? extends M> mapSupplier, Equivalence<? super M> equivalence) {
super(equivalence);
this.mapSupplier = mapSupplier;
}
@SuppressWarnings({ "rawtypes", "unchecked" })
protected M clone(M map) {
M clone = mapSupplier.get();
clone.putAll((Map)map);
return clone;
}
protected Matcher<M> isEmpty() {
return new TypeSafeMatcher<M>() {
@Override
public void describeTo(Description description) {
description.appendText("an empty collection");
}
@Override
protected boolean matchesSafely(M item) {
return item.isEmpty();
}
};
}
} | apache-2.0 |
Ravmouse/vvasilyev | chapter_005/src/main/java/ru/job4j/h2generic/t2store/RoleStore.java | 1161 | package ru.job4j.h2generic.t2store;
/**
* @param <T> is the name of type parameter.
* Обобщенный класс, который расширяет класс AbstractStore; в нем хранятся экз.класса Role.
*/
public class RoleStore<T extends Base> extends AbstractStore<T> {
/**
* @param size of the RoleStore.
*/
public RoleStore(int size) {
super(size);
}
/**
* По id находится нужная модель (User) и к ее id добавляется idAdd.
* @param model that is consisted in the UserStore.
* @param idAdd that should be added to the User's existed id.
*/
public void addIdToExisted(T model, String idAdd) {
for (int i = 0; i < this.getUserArray().getObjects().length; i++) {
if (this.getUserArray().get(i).getId().equals(model.getId())) {
final String str = this.getUserArray().get(i).getId();
StringBuilder sb = new StringBuilder(str);
sb.append(idAdd);
this.getUserArray().get(i).setId(sb.toString());
break;
}
}
}
} | apache-2.0 |
kikermo/PotsOverBLE | apps/BLEPotController/app/src/main/java/org/kikermo/blepotcontroller/adapter/BluetoothDeviceAdapter.java | 1413 | package org.kikermo.blepotcontroller.adapter;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
/**
* Created by EnriqueR on 10/12/2016.
*/
/**
 * ArrayAdapter listing discovered Bluetooth devices by name, de-duplicated
 * by MAC address. Uses the convert-view tag to cache the row's TextView.
 */
public class BluetoothDeviceAdapter extends ArrayAdapter<BluetoothDevice> {

    private int resource;

    public BluetoothDeviceAdapter(Context context, int resource) {
        super(context, resource);
        this.resource = resource;
    }

    /** Adds the device only when no entry with the same address exists yet. */
    @Override
    public void add(BluetoothDevice itemToAdd) {
        int count = getCount();
        for (int i = 0; i < count; i++) {
            if (getItem(i).getAddress().equals(itemToAdd.getAddress())) {
                return; // duplicate address — ignore
            }
        }
        super.add(itemToAdd);
    }

    @NonNull
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        View row = convertView;
        if (row == null) {
            // Inflate the row once and stash its TextView in the tag so
            // recycled rows skip the findViewById lookup.
            row = LayoutInflater.from(getContext()).inflate(resource, parent, false);
            TextView label = (TextView) row.findViewById(android.R.id.text1);
            row.setTag(label);
        }
        TextView label = (TextView) row.getTag();
        label.setText(getItem(position).getName());
        return row;
    }
}
| apache-2.0 |
yiwent/Mobike | app/src/main/java/com/yiwen/mobike/views/LoadingPageView.java | 3161 | package com.yiwen.mobike.views;
import android.content.Context;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.yiwen.mobike.R;
import net.frakbot.jumpingbeans.JumpingBeans;
/**
* Created by yiwen (https://github.com/yiwent)
* Date:2017/6/13
* Time: 23:40
*/
/**
 * Loading overlay: a rotating icon plus a label with animated jumping dots.
 * The child views are resolved in {@link #onFinishInflate()}, so the layout
 * is expected to come from XML.
 */
public class LoadingPageView extends LinearLayout {

    private ImageView mImageView;       // rotating loading icon
    private Animation mAnimation;       // rotation animation for the icon
    private TextView mTextView;         // loading label
    private JumpingBeans mJumpingBeans; // animates the trailing dots on the label
    private LayoutInflater mInflater;

    public LoadingPageView(Context context) {
        this(context, null);
    }

    public LoadingPageView(Context context, @Nullable AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public LoadingPageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        mInflater = LayoutInflater.from(context);
        initView();
    }

    /**
     * Historical hook for programmatic inflation; the real wiring now happens
     * in {@link #onFinishInflate()}, so this is intentionally empty.
     */
    private void initView() {
        // View view = mInflater.inflate(R.layout.loading_page, this, true);
        // mImageView = ((ImageView) view.findViewById(R.id.loading_page_img));
        // mTextView = ((TextView) view.findViewById(R.id.loading_text));
        // mAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.loading);
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        mImageView = ((ImageView) findViewById(R.id.loading_page_img));
        mTextView = ((TextView) findViewById(R.id.loading_text));
        mAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.loading);
    }

    /** Makes the view visible and starts the icon rotation and jumping dots. */
    public void show() {
        if (this.mImageView != null) {
            this.mImageView.startAnimation(mAnimation);
            startJumpingDots();
        }
        this.setVisibility(VISIBLE);
    }

    /** Hides the view and stops all running animations. */
    public void hide() {
        if (this.mImageView != null) {
            this.mImageView.clearAnimation();
        }
        if (mJumpingBeans != null) {
            mJumpingBeans.stopJumping();
        }
        this.setVisibility(GONE);
    }

    /** Updates the label text and restarts the jumping-dots animation. */
    public void setLoadingText(String text) {
        if (this.mTextView != null) {
            this.mTextView.setText(text);
            startJumpingDots();
        }
    }

    /** Same as {@link #setLoadingText(String)} but with a string resource id. */
    public void setLoadingText(int resId) {
        if (this.mTextView != null) {
            this.mTextView.setText(resId);
            startJumpingDots();
        }
    }

    /**
     * (Re)builds the jumping-dots animation on the label. Extracted because
     * the identical builder chain was duplicated in show() and both
     * setLoadingText overloads.
     */
    private void startJumpingDots() {
        mJumpingBeans = JumpingBeans.with(mTextView)
                .appendJumpingDots()
                .build();
    }
}
| apache-2.0 |
talenguyen/UDer | app/src/integrationTests/java/com/tale/uder/UderIntegrationRobolectricTestRunner.java | 1365 | package com.tale.uder;
import android.support.annotation.NonNull;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import java.lang.reflect.Method;
// Custom runner allows us set config in one place instead of setting it in each test class.
// Custom runner allows us set config in one place instead of setting it in each test class.
public class UderIntegrationRobolectricTestRunner extends RobolectricGradleTestRunner {

    // Robolectric trails the newest Android APIs; raise this value as soon
    // as a newer SDK level is supported.
    private static final int SDK_EMULATE_LEVEL = 21;

    public UderIntegrationRobolectricTestRunner(@NonNull Class<?> klass) throws Exception {
        super(klass);
    }

    /**
     * Wraps the inherited config, pinning the emulated SDK level, the
     * integration-test Application class, and a BuildConfig fallback for the
     * constants class.
     */
    @Override
    public Config getConfig(@NonNull Method method) {
        Config base = super.getConfig(method);
        Class<?> constants = base.constants() == Void.class
                ? BuildConfig.class
                : base.constants();
        return new Config.Implementation(
                new int[]{SDK_EMULATE_LEVEL},
                base.manifest(),
                base.qualifiers(),
                base.packageName(),
                base.resourceDir(),
                base.assetDir(),
                base.shadows(),
                UderIntegrationTestApp.class,
                base.libraries(),
                constants);
    }

    @NonNull
    public static UderApp udersApp() {
        return (UderApp) RuntimeEnvironment.application;
    }
}
| apache-2.0 |
Artur-/foobar | src/org/vaadin/netbeans/maven/ui/wizard/VaadinProjectWizardIterator.java | 21960 | /*
* To change this template, choose Tools | Templates and open the template in
* the editor.
*/
package org.vaadin.netbeans.maven.ui.wizard;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeListener;
import javax.xml.namespace.QName;
import org.netbeans.api.annotations.common.StaticResource;
import org.netbeans.api.progress.ProgressUtils;
import org.netbeans.api.templates.TemplateRegistration;
import org.netbeans.modules.maven.api.archetype.Archetype;
import org.netbeans.modules.maven.api.archetype.ArchetypeProvider;
import org.netbeans.modules.maven.api.archetype.ArchetypeWizards;
import org.netbeans.modules.maven.model.ModelOperation;
import org.netbeans.modules.maven.model.Utilities;
import org.netbeans.modules.maven.model.pom.Build;
import org.netbeans.modules.maven.model.pom.Configuration;
import org.netbeans.modules.maven.model.pom.POMExtensibilityElement;
import org.netbeans.modules.maven.model.pom.POMModel;
import org.netbeans.modules.maven.model.pom.POMQName;
import org.netbeans.modules.maven.model.pom.Plugin;
import org.netbeans.modules.maven.model.pom.Project;
import org.openide.WizardDescriptor;
import org.openide.WizardDescriptor.InstantiatingIterator;
import org.openide.WizardDescriptor.Panel;
import org.openide.filesystems.FileObject;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
/**
* @author denis
*/
@NbBundle.Messages("retrieveLatestVersion=Retrieve Latest Version...")
public class VaadinProjectWizardIterator implements InstantiatingIterator {
private static final String NAME_SEPARATOR = " - ";// NOI18N
private static final String RUN_TARGET = "runTarget";// NOI18N
private static final Logger LOG = Logger
.getLogger(VaadinProjectWizardIterator.class.getName());
private static final String APPLICATION_ARTIFACT_ID = "vaadin-archetype-application";// NOI18N
private static final String PORTLET_ARTIFACT_ID = "vaadin-archetype-portlet"; // NOI18N
private static final String ADD_ON_ARTIFACT_ID = "vaadin-archetype-widget";// NOI18N
private static final String TOUCHKIT_ARTIFACT_ID = "vaadin-archetype-touchkit";// NOI18N
private static final String GROUP_ID = "com.vaadin"; // NOI18N
private static final String MAVEN_ARTIFACT_ID = "vaadin-maven-plugin";// NOI18N
private static final String REPOSITORY = "http://repo.maven.apache.org/maven2/";// NOI18N
private static final String APPLICATION_VERSION = "7.0.7";// NOI18N
private static final String PORTLET_VERSION = "1.0.0";// NOI18N
public static final int APPLICATION_MIN_VERSION = Integer
.parseInt(APPLICATION_VERSION.substring(0, 1));
private static final int PORTLET_MIN_VERSION = Integer
.parseInt(PORTLET_VERSION.substring(0, 1));
private static final String JETTY_ARTIFACT_ID = "jetty-maven-plugin";
private static final String JETTY_GROUP_ID = "org.mortbay.jetty";
private static final String WEB_APP = "webApp";// NOI18N
private static final String CONTEXT_PATH = "contextPath";// NOI18N
private static final String UTF_8 = "UTF-8";// NOI18N
    /**
     * Wraps the Maven archetype wizard iterator so that project/POM
     * post-processing can run in {@link #instantiate()}.
     */
    private VaadinProjectWizardIterator( InstantiatingIterator<?> iterator ) {
        delegate = iterator;
    }
    @StaticResource
    public static final String PROJECT_ICON = "org/vaadin/netbeans/maven/ui/resources/vaadin.png"; // NOI18N
    // Entry point for the "Vaadin Web Application" project template.
    @TemplateRegistration(folder = "Project/Vaadin",
            displayName = "#VaadinNewServletProject",
            description = "../resources/VaadinServletProjectDescription.html",
            iconBase = PROJECT_ICON, position = 100)
    @NbBundle.Messages({
            "VaadinNewServletProject=Vaadin Web Application Project",
            "vaadinNewProjectTitle=Vaadin Web Application" })
    public static WizardDescriptor.InstantiatingIterator<?> newServletProject()
    {
        return newProject(APPLICATION_ARTIFACT_ID,
                Bundle.vaadinNewProjectTitle());
    }
    /**
     * Portlet archetype is not available for 7.+ Vaadin versions
     *
     * @TemplateRegistration(folder = "Project/Vaadin", displayName =
     * "#VaadinNewPortletProject", description =
     * "../resources/VaadinPortletProjectDescription.html"
     * , iconBase = PROJECT_ICON, position = 300)
     */
    @NbBundle.Messages({
            "VaadinNewPortletProject=Vaadin Portlet Application Project",
            "vaadinNewPortletTitle=Vaadin Portlet Application" })
    public static WizardDescriptor.InstantiatingIterator<?> newPortletProject()
    {
        return newProject(PORTLET_ARTIFACT_ID, Bundle.vaadinNewPortletTitle());
    }
    // Entry point for the "Vaadin TouchKit" project template.
    @TemplateRegistration(folder = "Project/Vaadin",
            displayName = "#VaadinTouchkitProject",
            description = "../resources/VaadinTouchkitProjectDescription.html",
            iconBase = PROJECT_ICON, position = 400)
    @NbBundle.Messages({ "VaadinTouchkitProject=Vaadin TouchKit Project",
            "vaadinNewTouckitTitle=Vaadin TouchKit Application" })
    public static WizardDescriptor.InstantiatingIterator<?> newTouckitProject()
    {
        return newProject(TOUCHKIT_ARTIFACT_ID, Bundle.vaadinNewTouckitTitle());
    }
    // Entry point for the "Vaadin Add-On" project template.
    @TemplateRegistration(folder = "Project/Vaadin",
            displayName = "#VaadinAddOnProject",
            description = "../resources/VaadinAddOnProjectDescription.html",
            iconBase = PROJECT_ICON, position = 200)
    @NbBundle.Messages({ "VaadinAddOnProject=Vaadin Add-On Project",
            "vaadinNewAddOnTitle=Vaadin Add-On Project with Test Application" })
    public static WizardDescriptor.InstantiatingIterator<?> newAddOnProject() {
        return newProject(ADD_ON_ARTIFACT_ID, Bundle.vaadinNewAddOnTitle());
    }
    // --- Pure delegation: wizard-panel navigation and lifecycle calls are
    // --- forwarded unchanged to the wrapped Maven archetype iterator.
    @Override
    public void addChangeListener( ChangeListener listener ) {
        delegate.addChangeListener(listener);
    }
    @Override
    public Panel<?> current() {
        return delegate.current();
    }
    @Override
    public boolean hasNext() {
        return delegate.hasNext();
    }
    @Override
    public boolean hasPrevious() {
        return delegate.hasPrevious();
    }
    @Override
    public String name() {
        return delegate.name();
    }
    @Override
    public void nextPanel() {
        delegate.nextPanel();
    }
    @Override
    public void previousPanel() {
        delegate.previousPanel();
    }
    @Override
    public void removeChangeListener( ChangeListener listener ) {
        delegate.removeChangeListener(listener);
    }
    @Override
    public void initialize( WizardDescriptor descriptor ) {
        // Keep the descriptor: instantiate() reads its "name" property later.
        wizard = descriptor;
        delegate.initialize(descriptor);
    }
    /**
     * Creates the project(s) via the delegate archetype iterator, then
     * post-processes the generated POMs: the WAR project gets the wizard's
     * display name plus Jetty contextPath / vaadin runTarget entries; every
     * other module gets the display name as a prefix to its own name.
     */
    @Override
    public Set<?> instantiate() throws IOException {
        Set<?> result = delegate.instantiate();
        if (!result.isEmpty()) {
            FileObject warPom = null;
            List<FileObject> poms = new LinkedList<>();
            // Split the generated modules into the single WAR POM and the rest,
            // identified by the <packaging> element of each pom.xml.
            for (Object project : result) {
                if (project instanceof FileObject) {
                    FileObject pom = ((FileObject) project)
                            .getFileObject("pom.xml"); //NOI18N
                    if (pom == null) {
                        continue;
                    }
                    // One-element array lets the anonymous ModelOperation
                    // smuggle the packaging value out of the callback.
                    final String[] packaging = new String[1];
                    ModelOperation<POMModel> operation = new ModelOperation<POMModel>()
                    {
                        @Override
                        public void performOperation( POMModel model ) {
                            Project project = model.getProject();
                            if (project != null) {
                                packaging[0] = project.getPackaging();
                            }
                        }
                    };
                    Utilities.performPOMModelOperations(pom,
                            Collections.singletonList(operation));
                    if ("war".equals(packaging[0])) { //NOI18N
                        warPom = pom;
                    }
                    else {
                        poms.add(pom);
                    }
                }
            }
            if (warPom == null) {
                LOG.warning("Instantiated set doesn't contain WAR project folder");
            }
            else {
                // Prefix the WAR module name only in multi-module layouts.
                final boolean prefixName = !poms.isEmpty();
                final String name = wizard.getProperty("name").toString();// NOI18N
                ModelOperation<POMModel> operation = new ModelOperation<POMModel>()
                {
                    @Override
                    public void performOperation( POMModel model ) {
                        Project project = model.getProject();
                        if (prefixName) {
                            project.setName(name + NAME_SEPARATOR
                                    + project.getName());
                        }
                        else {
                            project.setName(name);
                        }
                        try {
                            // URL-encode the name before using it as a context
                            // path / run-target URL fragment.
                            String uri = URLEncoder.encode(name, UTF_8);
                            setJettyContextPath(uri, model);
                            setRunTarget(uri, model);
                        }
                        catch (UnsupportedEncodingException ignore) {
                            // UTF-8 is always available; log at FINE only.
                            LOG.log(Level.FINE, null, ignore);
                        }
                    }
                };
                Utilities.performPOMModelOperations(warPom,
                        Collections.singletonList(operation));
                // modify name for other projects
                operation = new ModelOperation<POMModel>() {
                    @Override
                    public void performOperation( POMModel model ) {
                        Project project = model.getProject();
                        project.setName(name + NAME_SEPARATOR
                                + project.getName());
                    }
                };
                for (FileObject pom : poms) {
                    Utilities.performPOMModelOperations(pom,
                            Collections.singletonList(operation));
                }
            }
        }
        else {
            LOG.warning("Instantiated set is empty"); // NOI18N
        }
        return result;
    }
    /**
     * Ensures the vaadin-maven-plugin configuration carries a
     * {@code <runTarget>} URL ending in the given (already URL-encoded)
     * project name. Creates the configuration or element when missing;
     * when a runTarget already exists, appends the name only if the URL
     * has an empty or root ("/") path.
     */
    private void setRunTarget( String name, POMModel model ) {
        Plugin plugin = getVaadinPlugin(model);
        if (plugin == null) {
            return;
        }
        Configuration configuration = plugin.getConfiguration();
        if (configuration == null) {
            configuration = model.getFactory().createConfiguration();
            configuration.addExtensibilityElement(createRunTarget(name, model));
            plugin.setConfiguration(configuration);
        }
        else {
            List<POMExtensibilityElement> children = configuration
                    .getExtensibilityElements();
            for (POMExtensibilityElement child : children) {
                if (RUN_TARGET.equals(child.getQName().getLocalPart())) {
                    String target = child.getElementText();
                    URI uri;
                    try {
                        uri = new URI(target);
                        URL url = uri.toURL();
                        String file = url.getFile();
                        if (file != null) {
                            if (file.length() == 0) {
                                // No path at all: add "/name".
                                target = target + '/' + name;
                            }
                            else if (file.length() == 1
                                    && file.charAt(0) == '/')
                            {
                                // Bare root path: just append the name.
                                target += name;
                            }
                            child.setElementText(target);
                        }
                    }
                    catch (URISyntaxException | MalformedURLException e) {
                        // Existing runTarget is not a valid URL — leave it be.
                        LOG.log(Level.INFO, null, e);
                    }
                    return;
                }
            }
            // Configuration existed but had no runTarget element yet.
            configuration.addExtensibilityElement(createRunTarget(name, model));
        }
    }
private POMExtensibilityElement createRunTarget( String name, POMModel model )
{
QName qname = POMQName.createQName(RUN_TARGET, model.getPOMQNames()
.isNSAware());
POMExtensibilityElement runTarget = model.getFactory()
.createPOMExtensibilityElement(qname);
runTarget.setElementText("http://localhost:8080/" + name); // NOI18N
return runTarget;
}
    /**
     * Ensures the jetty-maven-plugin configuration contains a
     * {@code <webApp>/<contextPath>} pointing at "/" + name (already
     * URL-encoded). Builds whichever level of the configuration tree is
     * missing; an existing non-root contextPath is left untouched.
     */
    private void setJettyContextPath( String name, POMModel model ) {
        Project project = model.getProject();
        Build build = project.getBuild();
        if (build == null) {
            return;
        }
        List<Plugin> plugins = build.getPlugins();
        for (Plugin plugin : plugins) {
            if (JETTY_ARTIFACT_ID.equals(plugin.getArtifactId())
                    && JETTY_GROUP_ID.equals(plugin.getGroupId()))
            {
                Configuration configuration = plugin.getConfiguration();
                if (configuration == null) {
                    // No configuration at all: create the whole subtree.
                    plugin.setConfiguration(createConfiguration(name, model));
                    return;
                }
                List<POMExtensibilityElement> children = configuration
                        .getExtensibilityElements();
                POMExtensibilityElement webApp = null;
                for (POMExtensibilityElement component : children) {
                    if (WEB_APP.equals(component.getQName().getLocalPart())) {
                        webApp = component;
                        break;
                    }
                }
                if (webApp == null) {
                    // Configuration exists but lacks <webApp>.
                    configuration.addExtensibilityElement(createWebApp(name,
                            model));
                    return;
                }
                children = webApp.getExtensibilityElements();
                POMExtensibilityElement contextPath = null;
                for (POMExtensibilityElement component : children) {
                    if (CONTEXT_PATH
                            .equals(component.getQName().getLocalPart()))
                    {
                        contextPath = component;
                        break;
                    }
                }
                if (contextPath == null) {
                    // <webApp> exists but lacks <contextPath>.
                    webApp.addExtensibilityElement(createContextPath(name,
                            model));
                    return;
                }
                String root = "/"; // NOI18N
                // Only overwrite an empty or root context path.
                if (contextPath.getElementText() == null
                        || contextPath.getElementText().trim().equals(root))
                {
                    String cPath = root + name;
                    contextPath.setElementText(cPath);
                }
            }
        }
    }
private POMExtensibilityElement createContextPath( String name,
POMModel model )
{
QName qname = POMQName.createQName(CONTEXT_PATH, model.getPOMQNames()
.isNSAware());
POMExtensibilityElement contextPath = model.getFactory()
.createPOMExtensibilityElement(qname);
String cPath = "/";
try {
cPath += URLEncoder.encode(name, UTF_8);
}
catch (UnsupportedEncodingException ignore) {
}
contextPath.setElementText(cPath);
return contextPath;
}
private POMExtensibilityElement createWebApp( String name, POMModel model )
{
POMExtensibilityElement contextPath = createContextPath(name, model);
QName qname = POMQName.createQName(WEB_APP, model.getPOMQNames()
.isNSAware());
POMExtensibilityElement webApp = model.getFactory()
.createPOMExtensibilityElement(qname);
webApp.addExtensibilityElement(contextPath);
return webApp;
}
private Configuration createConfiguration( String name, POMModel model ) {
POMExtensibilityElement webApp = createWebApp(name, model);
Configuration configuration = model.getFactory().createConfiguration();
configuration.addExtensibilityElement(webApp);
return configuration;
}
private Plugin getVaadinPlugin( POMModel model ) {
Project project = model.getProject();
Build build = project.getBuild();
if (build == null) {
return null;
}
List<Plugin> plugins = build.getPlugins();
for (Plugin plugin : plugins) {
if (MAVEN_ARTIFACT_ID.equals(plugin.getArtifactId())
&& GROUP_ID.equals(plugin.getGroupId()))
{
return plugin;
}
}
return null;
}
    // Forwards wizard teardown to the wrapped archetype iterator.
    @Override
    public void uninitialize( WizardDescriptor descriptor ) {
        delegate.uninitialize(descriptor);
    }
/**
 * Creates a new-project wizard iterator for the given archetype, always
 * requesting the "LATEST" published version from the default repository.
 *
 * @param artifactId archetype artifact id
 * @param title wizard title
 * @return iterator driving the new-project wizard
 */
private static WizardDescriptor.InstantiatingIterator<?> newProject(
        final String artifactId, String title )
{
InstantiatingIterator<?> iterator = ArchetypeWizards.definedArchetype(
        GROUP_ID, artifactId, "LATEST", REPOSITORY, title); //NOI18N
return new VaadinProjectWizardIterator(iterator);
}
/**
 * Creates a new-project wizard iterator for the given archetype, first
 * trying to resolve the latest published archetype version off the EDT and
 * falling back to {@code version} when retrieval fails or is cancelled.
 * Code could be used for wizard having additional panel with Vaadin version
 * selection.
 *
 * @param artifactId archetype artifact id
 * @param minVersion minimal acceptable major version
 * @param version fallback version used when retrieval fails or is cancelled
 * @param title wizard title
 * @return iterator driving the new-project wizard
 */
private static WizardDescriptor.InstantiatingIterator<?> newProject(
        final String artifactId, final int minVersion, String version,
        String title )
{
    assert SwingUtilities.isEventDispatchThread();
    AtomicBoolean isCancelled = new AtomicBoolean();
    final Archetype[] archetype = new Archetype[1];
    // Resolve the latest archetype without blocking the EDT.
    ProgressUtils.runOffEventDispatchThread(new Runnable() {

        @Override
        public void run() {
            archetype[0] = getLatestArchetype(artifactId, minVersion, null);
        }
    }, Bundle.retrieveLatestVersion(), isCancelled, false);
    String repository = REPOSITORY;
    String useVersion = version;
    if (isCancelled.get()) {
        LOG.log(Level.INFO, "Latest version retrieval is interrupted, "
                + "using default version: {0}", version); // NOI18N
    }
    else if (archetype[0] == null) {
        // Doubled apostrophes are intentional: MessageFormat treats a single
        // quote specially in patterns containing {0}-style placeholders.
        // Fixed typo in message: "dtermined" -> "determined".
        LOG.log(Level.WARNING,
                "Latest version retrieval is not interrupted but version "
                        + "hasn''t been determined, using default version: {0}",
                version); // NOI18N
    }
    if (!isCancelled.get() && archetype[0] != null) {
        useVersion = archetype[0].getVersion();
        repository = archetype[0].getRepository();
    }
    InstantiatingIterator<?> iterator = ArchetypeWizards.definedArchetype(
            GROUP_ID, artifactId, useVersion, repository, title);
    return new VaadinProjectWizardIterator(iterator);
}
/**
 * Finds the newest archetype for {@code artifactId} whose major version is
 * at least {@code minVersion}.
 *
 * @param artifactId archetype artifact id to look for
 * @param minVersion minimal acceptable major version
 * @param versions if non-null, collects every acceptable version found
 * @return the archetype with the highest version, or {@code null} when none
 *         matches
 */
private static Archetype getLatestArchetype( String artifactId,
        int minVersion, List<String> versions )
{
    Archetype result = null;
    for (ArchetypeProvider provider : Lookup.getDefault().lookupAll(
            ArchetypeProvider.class))
    {
        for (Archetype archetype : provider.getArchetypes()) {
            if (!GROUP_ID.equals(archetype.getGroupId())
                    || !artifactId.equals(archetype.getArtifactId()))
            {
                continue;
            }
            String archVersion = archetype.getVersion();
            LOG.log(Level.FINE,
                    "Found archetype with appropriate group id "
                            + "and artifactId: {0}. It''s version : {1}",
                    new Object[] { artifactId, archVersion });
            int majorVersion = parseMajorVersion(archVersion);
            if (majorVersion < minVersion) {
                LOG.log(Level.FINE,
                        "Found archetype version {0} is skipped. "
                                + "It''s less than default version {1}",
                        new Object[] { archVersion, minVersion });
                continue;
            }
            if (versions != null) {
                versions.add(archVersion);
            }
            // Bug fix: the original compared the candidate's major-version
            // prefix against the current best's FULL version string, and did
            // so lexicographically (so "10" < "9"). Compare full versions
            // numerically, segment by segment.
            if (result == null
                    || compareVersions(archVersion, result.getVersion()) > 0)
            {
                result = archetype;
            }
        }
    }
    return result;
}

/**
 * Extracts the numeric major version ("7" from "7.3.1"); returns 0 and logs
 * a warning when it cannot be parsed.
 */
private static int parseMajorVersion( String version ) {
    int index = version.indexOf('.');
    String major = index > -1 ? version.substring(0, index) : version;
    try {
        return Integer.parseInt(major);
    }
    catch (NumberFormatException ignore) {
        LOG.log(Level.WARNING, "Unable to parse version :{0}", major);
        return 0;
    }
}

/**
 * Numerically compares dot-separated version strings segment by segment.
 * Missing segments count as "0"; non-numeric segments fall back to
 * lexicographic comparison.
 */
private static int compareVersions( String left, String right ) {
    String[] leftParts = left.split("\\.");
    String[] rightParts = right.split("\\.");
    int length = Math.max(leftParts.length, rightParts.length);
    for (int i = 0; i < length; i++) {
        String l = i < leftParts.length ? leftParts[i] : "0"; // NOI18N
        String r = i < rightParts.length ? rightParts[i] : "0"; // NOI18N
        int cmp;
        try {
            cmp = Integer.compare(Integer.parseInt(l), Integer.parseInt(r));
        }
        catch (NumberFormatException ignore) {
            cmp = l.compareTo(r);
        }
        if (cmp != 0) {
            return cmp;
        }
    }
    return 0;
}
/** Wrapped iterator that performs the actual project instantiation. */
private final InstantiatingIterator<?> delegate;
// NOTE(review): assignment of this descriptor is not visible in this chunk;
// presumably set in initialize(WizardDescriptor) — confirm it is still used.
private WizardDescriptor wizard;
}
| apache-2.0 |
pdsoftplan/zap-maven-plugin | zap-client-api/src/test/java/br/com/softplan/security/zap/api/exception/AuthenticationInfoValidationExceptionTest.java | 1105 | package br.com.softplan.security.zap.api.exception;
import static org.testng.Assert.assertEquals;
import org.testng.annotations.Test;
/**
 * Verifies that each {@code AuthenticationInfoValidationException}
 * constructor propagates its message and/or cause unchanged.
 */
public class AuthenticationInfoValidationExceptionTest {

    @Test
    public void testMessageConstructor() {
        String expectedMessage = "message";
        try {
            throw new AuthenticationInfoValidationException(expectedMessage);
        } catch (AuthenticationInfoValidationException caught) {
            assertEquals(caught.getMessage(), expectedMessage);
        }
    }

    @Test
    public void testThrowableConstructor() {
        Throwable expectedCause = new NullPointerException("test");
        try {
            throw new AuthenticationInfoValidationException(expectedCause);
        } catch (AuthenticationInfoValidationException caught) {
            assertEquals(caught.getCause(), expectedCause);
        }
    }

    @Test
    public void testMessageAndThrowableConstructor() {
        String expectedMessage = "message";
        Throwable expectedCause = new NullPointerException("test");
        try {
            throw new AuthenticationInfoValidationException(expectedMessage,
                    expectedCause);
        } catch (AuthenticationInfoValidationException caught) {
            assertEquals(caught.getMessage(), expectedMessage);
            assertEquals(caught.getCause(), expectedCause);
        }
    }
}
| apache-2.0 |
PurelyApplied/geode | extensions/geode-modules/src/main/java/org/apache/geode/modules/session/catalina/DeltaSessionManager.java | 32163 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.modules.session.catalina;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.catalina.Container;
import org.apache.catalina.Context;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.Loader;
import org.apache.catalina.Pipeline;
import org.apache.catalina.Session;
import org.apache.catalina.Valve;
import org.apache.catalina.session.ManagerBase;
import org.apache.catalina.session.StandardSession;
import org.apache.catalina.util.CustomObjectInputStream;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.modules.session.catalina.internal.DeltaSessionStatistics;
import org.apache.geode.modules.util.ContextMapper;
import org.apache.geode.modules.util.RegionConfiguration;
import org.apache.geode.modules.util.RegionHelper;
public abstract class DeltaSessionManager extends ManagerBase
implements Lifecycle, PropertyChangeListener, SessionManager {
/**
 * The number of rejected sessions. Initialized at declaration: within this
 * class the field is otherwise never assigned, so getRejectedSessions(),
 * setRejectedSessions() and incrementRejectedSessions() would throw a
 * NullPointerException on first use.
 */
private AtomicInteger rejectedSessions = new AtomicInteger(0);
/**
* The maximum number of active Sessions allowed, or -1 for no limit.
*/
protected int maxActiveSessions = -1;
/**
* Has this <code>Manager</code> been started?
*/
protected AtomicBoolean started = new AtomicBoolean(false);
/**
* The name of this <code>Manager</code>
*/
protected String name;
protected Valve jvmRouteBinderValve;
protected Valve commitSessionValve;
protected SessionCache sessionCache;
protected static final String DEFAULT_REGION_NAME = RegionHelper.NAME + "_sessions";
protected static final boolean DEFAULT_ENABLE_GATEWAY_DELTA_REPLICATION = false;
protected static final boolean DEFAULT_ENABLE_GATEWAY_REPLICATION = false;
protected static final boolean DEFAULT_ENABLE_DEBUG_LISTENER = false;
protected static final boolean DEFAULT_ENABLE_COMMIT_VALVE = true;
protected static final boolean DEFAULT_ENABLE_COMMIT_VALVE_FAILFAST = false;
protected static final boolean DEFAULT_PREFER_DESERIALIZED_FORM = true;
/*
* This *MUST* only be assigned during start/startInternal otherwise it will be associated with
* the incorrect context class loader.
*/
protected Log LOGGER;
protected String regionName = DEFAULT_REGION_NAME;
protected String regionAttributesId; // the default is different for client-server and
// peer-to-peer
protected Boolean enableLocalCache; // the default is different for client-server and peer-to-peer
protected boolean enableCommitValve = DEFAULT_ENABLE_COMMIT_VALVE;
protected boolean enableCommitValveFailfast = DEFAULT_ENABLE_COMMIT_VALVE_FAILFAST;
protected boolean enableGatewayDeltaReplication = DEFAULT_ENABLE_GATEWAY_DELTA_REPLICATION;
protected boolean enableGatewayReplication = DEFAULT_ENABLE_GATEWAY_REPLICATION;
protected boolean enableDebugListener = DEFAULT_ENABLE_DEBUG_LISTENER;
protected boolean preferDeserializedForm = DEFAULT_PREFER_DESERIALIZED_FORM;
private Timer timer;
private final Set<String> sessionsToTouch;
private static final long TIMER_TASK_PERIOD =
Long.getLong("gemfiremodules.sessionTimerTaskPeriod", 10000);
private static final long TIMER_TASK_DELAY =
Long.getLong("gemfiremodules.sessionTimerTaskDelay", 10000);
public DeltaSessionManager() {
// Create the set to store sessions to be touched after get attribute requests
this.sessionsToTouch = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
}
@Override
public String getRegionName() {
return this.regionName;
}
public void setRegionName(String regionName) {
this.regionName = regionName;
}
@Override
public void setMaxInactiveInterval(final int interval) {
super.setMaxInactiveInterval(interval);
}
@Override
public String getRegionAttributesId() {
// This property will be null if it hasn't been set in the context.xml file.
// Since its default is dependent on the session cache, get the default from
// the session cache.
if (this.regionAttributesId == null) {
this.regionAttributesId = getSessionCache().getDefaultRegionAttributesId();
}
return this.regionAttributesId;
}
public void setRegionAttributesId(String regionType) {
this.regionAttributesId = regionType;
}
@Override
public boolean getEnableLocalCache() {
// This property will be null if it hasn't been set in the context.xml file.
// Since its default is dependent on the session cache, get the default from
// the session cache.
if (this.enableLocalCache == null) {
this.enableLocalCache = getSessionCache().getDefaultEnableLocalCache();
}
return this.enableLocalCache;
}
public void setEnableLocalCache(boolean enableLocalCache) {
this.enableLocalCache = enableLocalCache;
}
public int getMaxActiveSessions() {
return this.maxActiveSessions;
}
/**
 * Sets the maximum number of active sessions allowed (-1 for no limit) and
 * fires a {@code maxActiveSessions} property change event with the old and
 * new values.
 *
 * @param maxActiveSessions new limit, or -1 for unlimited
 */
public void setMaxActiveSessions(int maxActiveSessions) {
    int oldMaxActiveSessions = this.maxActiveSessions;
    this.maxActiveSessions = maxActiveSessions;
    // Integer.valueOf avoids the deprecated Integer(int) constructor and
    // benefits from the cached small-integer pool.
    support.firePropertyChange("maxActiveSessions",
            Integer.valueOf(oldMaxActiveSessions),
            Integer.valueOf(this.maxActiveSessions));
}
@Override
public boolean getEnableGatewayDeltaReplication() {
// return this.enableGatewayDeltaReplication;
return false; // disabled
}
public void setEnableGatewayDeltaReplication(boolean enableGatewayDeltaReplication) {
this.enableGatewayDeltaReplication = enableGatewayDeltaReplication;
}
@Override
public boolean getEnableGatewayReplication() {
return this.enableGatewayReplication;
}
public void setEnableGatewayReplication(boolean enableGatewayReplication) {
this.enableGatewayReplication = enableGatewayReplication;
}
@Override
public boolean getEnableDebugListener() {
return this.enableDebugListener;
}
public void setEnableDebugListener(boolean enableDebugListener) {
this.enableDebugListener = enableDebugListener;
}
@Override
public boolean isCommitValveEnabled() {
return this.enableCommitValve;
}
public void setEnableCommitValve(boolean enable) {
this.enableCommitValve = enable;
}
@Override
public boolean isCommitValveFailfastEnabled() {
return this.enableCommitValveFailfast;
}
public void setEnableCommitValveFailfast(boolean enable) {
this.enableCommitValveFailfast = enable;
}
@Override
public boolean isBackingCacheAvailable() {
return sessionCache.isBackingCacheAvailable();
}
public void setPreferDeserializedForm(boolean enable) {
this.preferDeserializedForm = enable;
}
@Override
public boolean getPreferDeserializedForm() {
return this.preferDeserializedForm;
}
@Override
public String getStatisticsName() {
return getContextName().replace("/", "");
}
/**
 * Returns the Tomcat logger for this manager, creating it lazily.
 * NOTE(review): the lazy initialization is unsynchronized; concurrent first
 * calls may each invoke LogFactory.getLog. This looks benign since getLog
 * is idempotent — confirm that is acceptable here.
 */
@Override
public Log getLogger() {
if (LOGGER == null) {
LOGGER = LogFactory.getLog(DeltaSessionManager.class);
}
return LOGGER;
}
public SessionCache getSessionCache() {
return this.sessionCache;
}
public DeltaSessionStatistics getStatistics() {
return getSessionCache().getStatistics();
}
public boolean isPeerToPeer() {
return getSessionCache().isPeerToPeer();
}
public boolean isClientServer() {
return getSessionCache().isClientServer();
}
/**
* This method was taken from StandardManager to set the default maxInactiveInterval based on the
* container (to 30 minutes).
* <p>
* Set the Container with which this Manager has been associated. If it is a Context (the usual
* case), listen for changes to the session timeout property.
*
* @param container The associated Container
*/
@Override
public void setContainer(Container container) {
// De-register from the old Container (if any)
if ((this.container != null) && (this.container instanceof Context)) {
((Context) this.container).removePropertyChangeListener(this);
}
// Default processing provided by our superclass
super.setContainer(container);
// Register with the new Container (if any)
if ((this.container != null) && (this.container instanceof Context)) {
// Overwrite the max inactive interval with the context's session timeout.
setMaxInactiveInterval(((Context) this.container).getSessionTimeout() * 60);
((Context) this.container).addPropertyChangeListener(this);
}
}
@Override
public Session findSession(String id) throws IOException {
if (id == null) {
return null;
}
if (getLogger().isDebugEnabled()) {
getLogger().debug(
this + ": Finding session " + id + " in " + getSessionCache().getOperatingRegionName());
}
DeltaSessionInterface session = (DeltaSessionInterface) getSessionCache().getSession(id);
/*
* Check that the context name for this session is the same as this manager's. This comes into
* play when multiple versions of a webapp are deployed and active at the same time; the context
* name will contain an embedded version number; something like /test###2.
*/
if (session != null && !session.getContextName().isEmpty()
&& !getContextName().equals(session.getContextName())) {
getLogger()
.info(this + ": Session " + id + " rejected as container name and context do not match: "
+ getContextName() + " != " + session.getContextName());
session = null;
}
if (session == null) {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Did not find session " + id + " in "
+ getSessionCache().getOperatingRegionName());
}
} else {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Found session " + id + " in "
+ getSessionCache().getOperatingRegionName() + ": " + session);
}
// The session was previously stored. Set new to false.
session.setNew(false);
// Check the manager.
// If the manager is null, the session was replicated and this is a
// failover situation. Reset the manager and activate the session.
if (session.getManager() == null) {
DeltaSessionInterface ds = (DeltaSessionInterface) session;
ds.setOwner(this);
ds.activate();
}
}
return session;
}
protected void initializeSessionCache() {
// Retrieve the cache
GemFireCacheImpl cache = (GemFireCacheImpl) CacheFactory.getAnyInstance();
if (cache == null) {
throw new IllegalStateException(
"No cache exists. Please configure either a PeerToPeerCacheLifecycleListener or ClientServerCacheLifecycleListener in the server.xml file.");
}
// Create the appropriate session cache
this.sessionCache = cache.isClient() ? new ClientServerSessionCache(this, cache)
: new PeerToPeerSessionCache(this, cache);
// Initialize the session cache
this.sessionCache.initialize();
}
@Override
protected StandardSession getNewSession() {
return new DeltaSession(this);
}
@Override
public void remove(Session session) {
remove(session, false);
}
public void remove(Session session, boolean update) {
// super.remove(session);
// Remove the session from the region if necessary.
// It will have already been removed if it expired implicitly.
DeltaSessionInterface ds = (DeltaSessionInterface) session;
if (ds.getExpired()) {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Expired session " + session.getId() + " from "
+ getSessionCache().getOperatingRegionName());
}
} else {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Destroying session " + session.getId() + " from "
+ getSessionCache().getOperatingRegionName());
}
getSessionCache().destroySession(session.getId());
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Destroyed session " + session.getId() + " from "
+ getSessionCache().getOperatingRegionName());
}
}
}
@Override
public void add(Session session) {
// super.add(session);
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Storing session " + session.getId() + " into "
+ getSessionCache().getOperatingRegionName());
}
getSessionCache().putSession(session);
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Stored session " + session.getId() + " into "
+ getSessionCache().getOperatingRegionName());
}
getSessionCache().getStatistics().incSessionsCreated();
}
@Override
public int getRejectedSessions() {
return this.rejectedSessions.get();
}
@Override
public void setRejectedSessions(int rejectedSessions) {
this.rejectedSessions.set(rejectedSessions);
}
private void incrementRejectedSessions() {
this.rejectedSessions.incrementAndGet();
}
/**
* Returns the number of active sessions
*
* @return number of sessions active
*/
@Override
public int getActiveSessions() {
return getSessionCache().size();
}
/**
 * For debugging: returns all currently active session ids as a single
 * space-separated string.
 *
 * @return space-separated session ids; empty string when there are none
 */
@Override
public String listSessionIds() {
    // String.join replaces the manual StringBuilder/iterator loop and
    // produces the identical single-space-separated result.
    return String.join(" ", getSessionCache().keySet());
}
/*
* If local caching is enabled, add the session to the set of sessions to be touched. A timer task
* will be periodically invoked to get the session in the session region to update its last
* accessed time. This prevents the session from expiring in the case where the application is
* only getting attributes from the session and never putting attributes into the session. If
* local caching is disabled. the session's last accessed time would already have been updated
* properly in the sessions region.
*
* Note: Due to issues in GemFire expiry, sessions are always asynchronously touched using a
* function regardless whether or not local caching is enabled. This prevents premature
* expiration.
*/
protected void addSessionToTouch(String sessionId) {
this.sessionsToTouch.add(sessionId);
}
protected Set<String> getSessionsToTouch() {
return this.sessionsToTouch;
}
protected boolean removeTouchedSession(String sessionId) {
return this.sessionsToTouch.remove(sessionId);
}
protected void scheduleTimerTasks() {
// Create the timer
this.timer = new Timer("Timer for " + toString(), true);
// Schedule the task to handle sessions to be touched
scheduleTouchSessionsTask();
// Schedule the task to maintain the maxActive sessions
scheduleDetermineMaxActiveSessionsTask();
}
/**
 * Schedules a repeating timer task that "touches" (updates the last
 * accessed time of) all sessions accumulated since the previous run, so
 * read-only sessions are not prematurely expired.
 */
private void scheduleTouchSessionsTask() {
    TimerTask task = new TimerTask() {
        @Override
        public void run() {
            // Snapshot the pending ids; the backing set is a concurrent
            // set, so no explicit locking is required.
            Set<String> sessionIds = new HashSet<String>(getSessionsToTouch());
            // Remove only the snapshotted ids. The original cleared the
            // whole set, silently dropping ids added between the copy and
            // the clear; those now survive until the next run.
            getSessionsToTouch().removeAll(sessionIds);
            // Touch the sessions we currently have
            if (!sessionIds.isEmpty()) {
                getSessionCache().touchSessions(sessionIds);
                if (getLogger().isDebugEnabled()) {
                    getLogger().debug(DeltaSessionManager.this + ": Touched sessions: " + sessionIds);
                }
            }
        }
    };
    this.timer.schedule(task, TIMER_TASK_DELAY, TIMER_TASK_PERIOD);
}
/** Cancels the session timer if it has been created. */
protected void cancelTimer() {
    Timer activeTimer = this.timer;
    if (activeTimer != null) {
        activeTimer.cancel();
    }
}
private void scheduleDetermineMaxActiveSessionsTask() {
TimerTask task = new TimerTask() {
@Override
public void run() {
int currentActiveSessions = getSessionCache().size();
if (currentActiveSessions > getMaxActive()) {
setMaxActive(currentActiveSessions);
if (getLogger().isDebugEnabled()) {
getLogger().debug(
DeltaSessionManager.this + ": Set max active sessions: " + currentActiveSessions);
}
}
}
};
this.timer.schedule(task, TIMER_TASK_DELAY, TIMER_TASK_PERIOD);
}
@Override
public void load() throws ClassNotFoundException, IOException {
doLoad();
ContextMapper.addContext(getContextName(), this);
}
@Override
public void unload() throws IOException {
doUnload();
ContextMapper.removeContext(getContextName());
}
protected void registerJvmRouteBinderValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Registering JVM route binder valve");
}
jvmRouteBinderValve = new JvmRouteBinderValve();
getPipeline().addValve(jvmRouteBinderValve);
}
protected Pipeline getPipeline() {
return getContainer().getPipeline();
}
protected void unregisterJvmRouteBinderValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Unregistering JVM route binder valve");
}
if (jvmRouteBinderValve != null) {
getPipeline().removeValve(jvmRouteBinderValve);
}
}
protected void registerCommitSessionValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Registering CommitSessionValve");
}
commitSessionValve = new CommitSessionValve();
getPipeline().addValve(commitSessionValve);
}
protected void unregisterCommitSessionValve() {
if (getLogger().isDebugEnabled()) {
getLogger().debug(this + ": Unregistering CommitSessionValve");
}
if (commitSessionValve != null) {
getPipeline().removeValve(commitSessionValve);
}
}
// ------------------------------ Lifecycle Methods
/**
* Process property change events from our associated Context.
* <p>
* Part of this method implementation was taken from StandardManager. The sessionTimeout can be
* changed in the web.xml which is processed after the context.xml. The context (and the default
* session timeout) would already have been set in this Manager. This is the way to get the new
* session timeout value specified in the web.xml.
* <p>
* The precedence order for setting the session timeout value is:
* <p>
* <ol>
* <li>the max inactive interval is set based on the Manager defined in the context.xml
* <li>the max inactive interval is then overwritten by the value of the Context's session timeout
* when setContainer is called
* <li>the max inactive interval is then overwritten by the value of the session-timeout specified
* in the web.xml (if any)
* </ol>
*
* @param event The property change event that has occurred
*/
@Override
public void propertyChange(PropertyChangeEvent event) {
    // Validate the source of this event; only Context changes are relevant.
    if (!(event.getSource() instanceof Context)) {
        return;
    }
    // (The original also declared an unused local `context`; removed.)
    // Process a relevant property change
    if (event.getPropertyName().equals("sessionTimeout")) {
        try {
            int interval = ((Integer) event.getNewValue()).intValue();
            if (interval < RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL) {
                getLogger().warn("The configured session timeout of " + interval
                        + " minutes is invalid. Using the original value of " + event.getOldValue()
                        + " minutes.");
                interval = ((Integer) event.getOldValue()).intValue();
            }
            // StandardContext.setSessionTimeout passes -1 if the configured timeout
            // is 0; otherwise it passes the value set in web.xml. If the interval
            // parameter equals the default, set the max inactive interval to the
            // default (no expiration); otherwise set it in seconds.
            setMaxInactiveInterval(interval == RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL
                    ? RegionConfiguration.DEFAULT_MAX_INACTIVE_INTERVAL : interval * 60);
        } catch (NumberFormatException e) {
            getLogger()
                    .error(sm.getString("standardManager.sessionTimeout", event.getNewValue().toString()));
        }
    }
}
/**
* Save any currently active sessions in the appropriate persistence mechanism, if any. If
* persistence is not supported, this method returns without doing anything.
*
* @throws IOException if an input/output error occurs
*/
protected void doUnload() throws IOException {
QueryService querySvc = sessionCache.getCache().getQueryService();
Context context = getTheContext();
if (context == null) {
return;
}
String regionName;
if (getRegionName().startsWith("/")) {
regionName = getRegionName();
} else {
regionName = "/" + getRegionName();
}
Query query = querySvc.newQuery("select s.id from " + regionName
+ " as s where s.contextName = '" + context.getPath() + "'");
getLogger().debug("Query: " + query.getQueryString());
SelectResults results;
try {
results = (SelectResults) query.execute();
} catch (Exception ex) {
getLogger().error("Unable to perform query during doUnload", ex);
return;
}
if (results.isEmpty()) {
getLogger().debug("No sessions to unload for context " + context.getPath());
return; // nothing to do
}
// Open an output stream to the specified pathname, if any
File store = sessionStore(context.getPath());
if (store == null) {
return;
}
if (getLogger().isDebugEnabled()) {
getLogger().debug("Unloading sessions to " + store.getAbsolutePath());
}
FileOutputStream fos = null;
BufferedOutputStream bos = null;
ObjectOutputStream oos = null;
boolean error = false;
try {
fos = new FileOutputStream(store.getAbsolutePath());
bos = new BufferedOutputStream(fos);
oos = new ObjectOutputStream(bos);
} catch (IOException e) {
error = true;
getLogger().error("Exception unloading sessions", e);
throw e;
} finally {
if (error) {
if (oos != null) {
try {
oos.close();
} catch (IOException ioe) {
// Ignore
}
}
if (bos != null) {
try {
bos.close();
} catch (IOException ioe) {
// Ignore
}
}
if (fos != null) {
try {
fos.close();
} catch (IOException ioe) {
// Ignore
}
}
}
}
ArrayList<DeltaSessionInterface> list = new ArrayList<DeltaSessionInterface>();
Iterator<String> elements = results.iterator();
while (elements.hasNext()) {
String id = elements.next();
DeltaSessionInterface session = (DeltaSessionInterface) findSession(id);
if (session != null) {
list.add(session);
}
}
// Write the number of active sessions, followed by the details
if (getLogger().isDebugEnabled())
getLogger().debug("Unloading " + list.size() + " sessions");
try {
oos.writeObject(new Integer(list.size()));
for (DeltaSessionInterface session : list) {
if (session instanceof StandardSession) {
StandardSession standardSession = (StandardSession) session;
standardSession.passivate();
standardSession.writeObjectData(oos);
} else {
// All DeltaSessionInterfaces as of Geode 1.0 should be based on StandardSession
throw new IOException("Session should be of type StandardSession");
}
}
} catch (IOException e) {
getLogger().error("Exception unloading sessions", e);
try {
oos.close();
} catch (IOException f) {
// Ignore
}
throw e;
}
// Flush and close the output stream
try {
oos.flush();
} finally {
try {
oos.close();
} catch (IOException f) {
// Ignore
}
}
// Locally destroy the sessions we just wrote
if (getSessionCache().isClientServer()) {
for (DeltaSessionInterface session : list) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Locally destroying session " + session.getId());
}
getSessionCache().getOperatingRegion().localDestroy(session.getId());
}
}
// // Expire all the sessions we just wrote
// if (getLogger().isDebugEnabled()) {
// getLogger().debug("Expiring " + list.size() + " persisted sessions");
// }
// Iterator<StandardSession> expires = list.iterator();
// while (expires.hasNext()) {
// StandardSession session = expires.next();
// try {
// session.expire(false);
// } catch (Throwable t) {
//// ExceptionUtils.handleThrowable(t);
// } finally {
// session.recycle();
// }
// }
if (getLogger().isDebugEnabled()) {
getLogger().debug("Unloading complete");
}
}
/**
* Load any currently active sessions that were previously unloaded to the appropriate persistence
* mechanism, if any. If persistence is not supported, this method returns without doing anything.
*
* @throws ClassNotFoundException if a serialized class cannot be found during the reload
* @throws IOException if an input/output error occurs
*/
protected void doLoad() throws ClassNotFoundException, IOException {
Context context = getTheContext();
if (context == null) {
return;
}
// Open an input stream to the specified pathname, if any
File store = sessionStore(context.getPath());
if (store == null) {
getLogger().debug("No session store file found");
return;
}
if (getLogger().isDebugEnabled()) {
getLogger().debug("Loading sessions from " + store.getAbsolutePath());
}
FileInputStream fis = null;
BufferedInputStream bis = null;
ObjectInputStream ois = null;
Loader loader = null;
ClassLoader classLoader = null;
try {
fis = new FileInputStream(store.getAbsolutePath());
bis = new BufferedInputStream(fis);
if (getTheContext() != null) {
loader = getTheContext().getLoader();
}
if (loader != null) {
classLoader = loader.getClassLoader();
}
if (classLoader != null) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Creating custom object input stream for class loader");
}
ois = new CustomObjectInputStream(bis, classLoader);
} else {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Creating standard object input stream");
}
ois = new ObjectInputStream(bis);
}
} catch (FileNotFoundException e) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("No persisted data file found");
}
return;
} catch (IOException e) {
getLogger().error("Exception loading sessions", e);
try {
fis.close();
} catch (IOException f) {
// Ignore
}
try {
bis.close();
} catch (IOException f) {
// Ignore
}
throw e;
}
// Load the previously unloaded active sessions
try {
Integer count = (Integer) ois.readObject();
int n = count.intValue();
if (getLogger().isDebugEnabled()) {
getLogger().debug("Loading " + n + " persisted sessions");
}
for (int i = 0; i < n; i++) {
StandardSession session = getNewSession();
session.readObjectData(ois);
session.setManager(this);
Region region = getSessionCache().getOperatingRegion();
DeltaSessionInterface existingSession = (DeltaSessionInterface) region.get(session.getId());
// Check whether the existing session is newer
if (existingSession != null
&& existingSession.getLastAccessedTime() > session.getLastAccessedTime()) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Loaded session " + session.getId() + " is older than cached copy");
}
continue;
}
// Check whether the new session has already expired
if (!session.isValid()) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Loaded session " + session.getId() + " is invalid");
}
continue;
}
getLogger().debug("Loading session " + session.getId());
session.activate();
add(session);
}
} catch (ClassNotFoundException e) {
getLogger().error(e);
try {
ois.close();
} catch (IOException f) {
// Ignore
}
throw e;
} catch (IOException e) {
getLogger().error(e);
try {
ois.close();
} catch (IOException f) {
// Ignore
}
throw e;
} finally {
// Close the input stream
try {
ois.close();
} catch (IOException f) {
// ignored
}
// Delete the persistent storage file
if (store.exists()) {
store.delete();
}
}
}
/**
 * Returns the File used to persist this context's sessions:
 * {@code ${catalina.base}/temp} (or {@code java.io.tmpdir} when
 * catalina.base is unset) plus a file name derived from the context path.
 *
 * @param ctxPath context path; slashes are replaced with underscores to
 *        form a flat file name
 * @return the session store file (it may not exist yet)
 */
private File sessionStore(String ctxPath) {
    String storeDir = System.getProperty("catalina.base");
    if (storeDir == null || storeDir.isEmpty()) {
        storeDir = System.getProperty("java.io.tmpdir");
    } else {
        // File.separator replaces the redundant "file.separator" property
        // lookup; the value is identical.
        storeDir += File.separator + "temp";
    }
    return new File(storeDir, ctxPath.replaceAll("/", "_") + ".sessions.ser");
}
@Override
public String toString() {
return new StringBuilder().append(getClass().getSimpleName()).append("[").append("container=")
.append(getTheContext()).append("; regionName=").append(this.regionName)
.append("; regionAttributesId=").append(this.regionAttributesId).append("]").toString();
}
  /**
   * Returns the name of the enclosing servlet context.
   * NOTE(review): throws NullPointerException when {@link #getTheContext()}
   * returns null (container is not a Context) -- confirm callers guard this.
   */
  protected String getContextName() {
    return getTheContext().getName();
  }
public Context getTheContext() {
if (getContainer() instanceof Context) {
return (Context) getContainer();
} else {
getLogger().error("Unable to unload sessions - container is of type "
+ getContainer().getClass().getName() + " instead of StandardContext");
return null;
}
}
}
| apache-2.0 |
OpenNTF/org.openntf.domino | domino/externals/tinkerpop/src/main/java/com/tinkerpop/pipes/filter/CyclicPathFilterPipe.java | 1288 | package com.tinkerpop.pipes.filter;
import com.tinkerpop.pipes.AbstractPipe;
import com.tinkerpop.pipes.Pipe;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
 * CyclicPathFilterPipe emits an object only when its transformation path contains
 * no repeated elements (i.e. no loops). Path calculation is required, so
 * enablePath(true) is invoked whenever the starts are set.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public class CyclicPathFilterPipe<S> extends AbstractPipe<S, S> implements FilterPipe<S> {

    // Reused scratch set; uniqueness of a path is detected by comparing sizes.
    private Set set = new LinkedHashSet();

    public void setStarts(Iterator<S> starts) {
        super.setStarts(starts);
        this.enablePath(true);
    }

    public S processNextStart() {
        for (;;) {
            final S candidate = this.starts.next();
            // Without an upstream Pipe there is no path to inspect; pass through.
            if (!(this.starts instanceof Pipe)) {
                return candidate;
            }
            final List currentPath = ((Pipe) this.starts).getCurrentPath();
            this.set.clear();
            this.set.addAll(currentPath);
            // Equal sizes means every path element was distinct.
            if (this.set.size() == currentPath.size()) {
                return candidate;
            }
        }
    }
}
| apache-2.0 |
yinyiliang/RabbitCloud | app/src/androidTest/java/yyl/rabbitcloud/ExampleInstrumentedTest.java | 734 | package yyl.rabbitcloud;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
 * Instrumentation test that runs on an Android device and verifies the
 * application package name of the context under test.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() throws Exception {
        // Target context belongs to the app under test, not the test APK.
        final Context targetContext = InstrumentationRegistry.getTargetContext();
        assertEquals("yyl.rabbitcloud", targetContext.getPackageName());
    }
}
| apache-2.0 |
rabbitmq/rabbitmq-tutorials | spring-amqp/src/main/java/org/springframework/amqp/tutorials/tut1/Tut1Sender.java | 1284 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.amqp.tutorials.tut1;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
/**
 * Periodically publishes a "Hello World!" message to the configured queue.
 *
 * @author Gary Russell
 * @author Scott Deeg
 */
public class Tut1Sender {

    @Autowired
    private RabbitTemplate template;

    @Autowired
    private Queue queue;

    /** Sends one message per second, starting half a second after startup. */
    @Scheduled(fixedDelay = 1000, initialDelay = 500)
    public void send() {
        final String payload = "Hello World!";
        template.convertAndSend(queue.getName(), payload);
        System.out.println(" [x] Sent '" + payload + "'");
    }
}
| apache-2.0 |
ow2-xlcloud/vcms | vcms-gui/framework/src/main/java/org/xlcloud/console/controllers/utils/ProjectsLazyLoader.java | 2205 | /*
* Copyright 2012 AMG.lab, a Bull Group Company
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xlcloud.console.controllers.utils;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Named;
import org.xlcloud.console.scope.ViewScoped;
import org.xlcloud.service.Project;
import org.xlcloud.service.api.ProjectsApi;
/**
 * Lazy loader for projects: caches Project instances fetched from the
 * projects API so each project is retrieved at most once per view.
 *
 * @author Konrad Król, AMG.net
 */
@Named
@ViewScoped
public class ProjectsLazyLoader implements Serializable {
    private static final long serialVersionUID = -5822897808977079642L;
    // Cache of already-loaded projects, keyed by project id.
    private Map<Long, Project> projects;
    @Inject
    private ProjectsApi projectsApi;
    /**
     * Initializes the bean with an empty collection of projects.
     */
    @PostConstruct
    public void initialize() {
        if (projects == null) {
            projects = new HashMap<Long, Project>();
        }
    }
    /**
     * Retrieves project data from the projects API and caches it, unless the
     * project is already cached or {@code projectId} is {@code null}.
     *
     * @param projectId
     *            requested project id
     */
    public void loadProject(Long projectId) {
        if (projectId != null && !projects.containsKey(projectId)) {
            projects.put(projectId, projectsApi.get(projectId));
        }
    }
    /**
     * Returns the requested project. The project has to be loaded first using
     * {@link #loadProject(Long)}, otherwise {@code null} is returned.
     *
     * @param projectId
     *            requested project id
     * @return the cached project, or {@code null} if it was never loaded
     */
    public Project getProject(Long projectId) {
        return projects.get(projectId);
    }
}
| apache-2.0 |
MiniPlayer/log-island | logisland-plugins/logisland-hbase-plugin/src/main/java/com/hurence/logisland/processor/hbase/util/ObjectSerDe.java | 1510 | package com.hurence.logisland.processor.hbase.util;
import com.hurence.logisland.serializer.DeserializationException;
import com.hurence.logisland.serializer.Deserializer;
import com.hurence.logisland.serializer.SerializationException;
import com.hurence.logisland.serializer.Serializer;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
/**
 * Serializer/Deserializer pair based on standard Java object serialization.
 */
public class ObjectSerDe implements Serializer<Object>, Deserializer<Object> {

    /**
     * Reconstructs an object from Java-serialized bytes.
     *
     * @param input serialized bytes; {@code null} or empty yields {@code null}
     * @return the deserialized object, or {@code null} for empty input
     * @throws DeserializationException if the serialized class is not on the classpath
     * @throws IOException on malformed or truncated stream data
     */
    @Override
    public Object deserialize(byte[] input) throws DeserializationException, IOException {
        if (input == null || input.length == 0) {
            return null;
        }
        try (final ByteArrayInputStream in = new ByteArrayInputStream(input);
             final ObjectInputStream objIn = new ObjectInputStream(in)) {
            return objIn.readObject();
        } catch (ClassNotFoundException e) {
            throw new DeserializationException("Could not deserialize object due to ClassNotFoundException", e);
        }
    }

    /**
     * Writes {@code value} to {@code output} using Java object serialization.
     *
     * @param value  object to serialize (must be Serializable)
     * @param output destination stream; not closed by this method
     * @throws IOException if writing fails
     */
    @Override
    public void serialize(Object value, OutputStream output) throws SerializationException, IOException {
        try (final ByteArrayOutputStream bOut = new ByteArrayOutputStream();
             final ObjectOutputStream objOut = new ObjectOutputStream(bOut)) {
            objOut.writeObject(value);
            // BUG FIX: ObjectOutputStream buffers block data internally; flush it
            // before snapshotting the byte array, otherwise the trailing bytes of
            // the object data may be missing from bOut.toByteArray().
            objOut.flush();
            output.write(bOut.toByteArray());
        }
    }
}
| apache-2.0 |
mnxfst/stream-analyzer | src/main/java/com/mnxfst/stream/server/StreamAnalyzerServer.java | 7893 | /**
* Copyright 2014 Christian Kreutzfeldt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mnxfst.stream.server;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import java.io.File;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
import com.mnxfst.stream.config.StreamAnalyzerConfiguration;
import com.mnxfst.stream.directory.ComponentRegistry;
import com.mnxfst.stream.dispatcher.StreamEventMessageDispatcher;
import com.mnxfst.stream.dispatcher.config.StreamEventMessageDispatcherConfiguration;
import com.mnxfst.stream.listener.StreamEventListenerConfiguration;
import com.mnxfst.stream.pipeline.PipelineRoot;
import com.mnxfst.stream.pipeline.PipelinesMaster;
import com.mnxfst.stream.pipeline.config.PipelineRootConfiguration;
import com.mnxfst.stream.pipeline.message.PipelineSetupMessage;
/**
 * Initializes the stream analyzer server and starts it up
 * @author mnxfst
 * @since 06.03.2014
 *
 */
public class StreamAnalyzerServer {

	private static final Logger logger = Logger.getLogger(StreamAnalyzerServer.class.getName());

	// Root of the actor hierarchy hosting listeners, dispatchers and pipelines.
	private ActorSystem rootActorSystem;
	private ActorRef componentRegistryRef;
	private ActorRef pipelineMasterRef;

	/**
	 * Reads the JSON configuration, boots the actor system (registry, pipelines,
	 * dispatchers, listeners) and then blocks serving the stats endpoint via netty.
	 *
	 * @param configurationFilename path to the JSON configuration file
	 * @param port TCP port for the netty stats handler
	 * @throws Exception if configuration parsing or server startup fails
	 */
	public void run(final String configurationFilename, final int port) throws Exception {

		ObjectMapper mapper = new ObjectMapper();
		StreamAnalyzerConfiguration streamAnalyzerConfiguration = mapper.readValue(new File(configurationFilename), StreamAnalyzerConfiguration.class);

		// set up the actor runtime environment
		this.rootActorSystem = ActorSystem.create("streamanalyzer");
		this.componentRegistryRef = componentRegistryInitialization();
		// pipelines must exist before dispatchers/listeners that reference them
		pipelineInitialization(streamAnalyzerConfiguration.getPipelines());
		dispatcherInitialization(streamAnalyzerConfiguration.getDispatchers(), componentRegistryRef);
		listenerInitialization(streamAnalyzerConfiguration.getListeners(), componentRegistryRef);

		// netty boss group accepts connections, worker group handles traffic
		EventLoopGroup bossGroup = new NioEventLoopGroup();
		EventLoopGroup workerGroup = new NioEventLoopGroup();
		try {
			ServerBootstrap b = new ServerBootstrap();
			b.group(bossGroup, workerGroup)
			.channel(NioServerSocketChannel.class)
			.childHandler(new ChannelInitializer<SocketChannel>() {
				@Override
				public void initChannel(SocketChannel ch) throws Exception {
					// each accepted channel gets its own stats handler
					ch.pipeline().addLast(new StreamAnalyzerStatsHandler());
				}
			})
			.option(ChannelOption.SO_BACKLOG, 128)
			.childOption(ChannelOption.SO_KEEPALIVE, true);

			// Bind and start to accept incoming connections.
			ChannelFuture f = b.bind(port).sync();

			// Wait until the server socket is closed.
			// In this example, this does not happen, but you can do that to gracefully
			// shut down your server.
			f.channel().closeFuture().sync();
		} finally {
			workerGroup.shutdownGracefully();
			bossGroup.shutdownGracefully();
		}

	}

	/**
	 * Initializes the component registry actor under the root actor system.
	 * @return reference to the newly created registry actor
	 */
	protected ActorRef componentRegistryInitialization() {
		return this.rootActorSystem.actorOf(Props.create(ComponentRegistry.class), "componentRegistry");
	}

	/**
	 * Initializes the {@link StreamEventListener stream event listeners} contained in the provided configuration.
	 * One actor is created per non-null listener configuration, named by its id.
	 * @param listenerConfigurations listener configurations read from the JSON file
	 * @param componentRegistryRef registry the listeners report to
	 * @throws Exception if a configured listener class cannot be loaded
	 */
	protected void listenerInitialization(final List<StreamEventListenerConfiguration> listenerConfigurations, final ActorRef componentRegistryRef) throws Exception {

		// step through configurations
		for(final StreamEventListenerConfiguration listenerCfg : listenerConfigurations) {
			if(listenerCfg != null) {
				logger.info("listener [id="+listenerCfg.getId()+", name="+listenerCfg.getName()+", class="+listenerCfg.getListenerClass()+", version=" + listenerCfg.getVersion()+"]");
				this.rootActorSystem.actorOf(Props.create(Class.forName(listenerCfg.getListenerClass()), listenerCfg, componentRegistryRef), listenerCfg.getId());
			}
		}

	}

	/**
	 * Initializes the {@link StreamEventMessageDispatcher dispatchers} contained in the provided configuration.
	 * One dispatcher actor is created per non-null configuration, named by its id.
	 * @param dispatcherConfigurations dispatcher configurations read from the JSON file
	 * @param componentRegistryRef registry used to resolve dispatch destinations
	 * @throws Exception if actor creation fails
	 */
	protected void dispatcherInitialization(final List<StreamEventMessageDispatcherConfiguration> dispatcherConfigurations, final ActorRef componentRegistryRef) throws Exception {

		// step through configurations
		for(final StreamEventMessageDispatcherConfiguration dispCfg : dispatcherConfigurations) {
			if(dispCfg != null) {
				logger.info("dispatcher [id="+dispCfg.getId()+", name="+dispCfg.getName()+", policy="+dispCfg.getDispatchPolicy().getPolicyClass()+"]");
				this.rootActorSystem.actorOf(Props.create(StreamEventMessageDispatcher.class, dispCfg, componentRegistryRef), dispCfg.getId());
			}
		}
	}

	/**
	 * Initializes the {@link PipelineRoot pipeline root} contained in the provided configuration.
	 * Creates the pipeline master actor and sends it one setup message per pipeline.
	 * @param pipelineConfigurations pipeline configurations read from the JSON file
	 * @throws Exception if actor creation fails
	 */
	protected void pipelineInitialization(final List<PipelineRootConfiguration> pipelineConfigurations) throws Exception {

		this.pipelineMasterRef = this.rootActorSystem.actorOf(Props.create(PipelinesMaster.class, componentRegistryRef), "pipelineMaster");

		// step through configurations
		for(final PipelineRootConfiguration pipeCfg : pipelineConfigurations) {
			if(pipeCfg != null) {
				logger.info("pipeline root [id="+pipeCfg.getPipelineId()+", initialReceiverId=" + pipeCfg.getInitialReceiverId()+"]");
				PipelineSetupMessage msg = new PipelineSetupMessage(pipeCfg);
				// fire-and-forget: no sender required for the setup message
				this.pipelineMasterRef.tell(msg, null);
//				this.rootActorSystem.actorOf(Props.create(PipelineRoot.class, pipeCfg), pipeCfg.getPipelineId());
			}
		}
	}

	/**
	 * Ramps up the server: parses command-line options and delegates to {@link #run(String, int)}.
	 * @param args command-line arguments; requires -cfg and -port
	 */
	public static void main(String[] args) throws Exception {

		CommandLineParser parser = new PosixParser();
		CommandLine cl = parser.parse(getOptions(), args);
		if(!cl.hasOption("cfg") || !cl.hasOption("port")) {
			HelpFormatter hf = new HelpFormatter();
			hf.printHelp("java " + StreamAnalyzerServer.class.getName(), getOptions());
			return;
		}

		(new StreamAnalyzerServer()).run(cl.getOptionValue("cfg"), Integer.parseInt(cl.getOptionValue("port")));
	}

	/**
	 * Return command-line options understood by this server.
	 * @return options describing the -cfg and -port switches
	 */
	protected static Options getOptions() {
		Options opts = new Options();
		opts.addOption("cfg", true, "Stream analyzer configuration for listeners, dispatchers, pipelines, ...");
		opts.addOption("port", true, "Server port");
		return opts;
	}
}
| apache-2.0 |
raoofm/etcd-viewer | src/main/java/org/github/etcd/viewer/html/resource/WebResources.java | 2853 | package org.github.etcd.viewer.html.resource;
import java.util.Arrays;
import org.apache.wicket.Application;
import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.HeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
import org.apache.wicket.request.Url;
import org.apache.wicket.request.resource.ResourceReference;
import org.apache.wicket.request.resource.UrlResourceReference;
import org.apache.wicket.resource.JQueryResourceReference;
/**
 * Static registry of CDN-hosted Bootstrap / Font Awesome resource references and
 * helpers that render them into a page's header response.
 */
public final class WebResources {

    private static final UrlResourceReference BOOTSTRAP_CSS = new UrlResourceReference(Url.parse("https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap" + getMinifiedPrefix() + ".css"));

    private static final UrlResourceReference BOOTSTRAP_THEME_CSS = new UrlResourceReference(Url.parse("https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap-theme" + getMinifiedPrefix() + ".css")) {
        private static final long serialVersionUID = 1L;
        @Override
        public Iterable<? extends HeaderItem> getDependencies() {
            // The theme stylesheet builds on the core Bootstrap stylesheet.
            return Arrays.asList(CssHeaderItem.forReference(BOOTSTRAP_CSS));
        }
    };

    private static final UrlResourceReference FONT_AWESOME_CSS = new UrlResourceReference(Url.parse("//maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome" + getMinifiedPrefix() + ".css"));

    private static final UrlResourceReference BOOTSTRAP_JS = new UrlResourceReference(Url.parse("https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap" + getMinifiedPrefix() + ".js")) {
        private static final long serialVersionUID = 1L;
        @Override
        public Iterable<? extends HeaderItem> getDependencies() {
            // Bootstrap JS needs jQuery; prefer the application-configured reference.
            final ResourceReference backingLibraryReference;
            if (Application.exists()) {
                backingLibraryReference = Application.get().getJavaScriptLibrarySettings().getJQueryReference();
            } else {
                backingLibraryReference = JQueryResourceReference.get();
            }
            return Arrays.asList(CssHeaderItem.forReference(BOOTSTRAP_CSS), JavaScriptHeaderItem.forReference(backingLibraryReference));
        }
    };

    /** Utility class: not instantiable. */
    private WebResources() {
    }

    private static String getMinifiedPrefix() {
        // NOTE(review): requires a bound Wicket Application; the static field
        // initializers above therefore fail if this class is loaded outside an
        // application thread -- confirm class loading only happens during requests.
        return Application.get().getResourceSettings().getUseMinifiedResources() ? ".min" : "";
    }

    /** Renders the Bootstrap JS (and, via dependencies, CSS and jQuery). */
    public static void renderBootstrapHeader(IHeaderResponse response) {
        response.render(JavaScriptHeaderItem.forReference(BOOTSTRAP_JS));
    }

    /** Renders the Bootstrap theme CSS (and, via dependency, the core CSS). */
    public static void renderBootstrapThemeHeader(IHeaderResponse response) {
        response.render(CssHeaderItem.forReference(BOOTSTRAP_THEME_CSS));
    }

    /** Renders the Font Awesome CSS. */
    public static void renderFontAwesomeHeader(IHeaderResponse response) {
        response.render(CssHeaderItem.forReference(FONT_AWESOME_CSS));
    }
}
| apache-2.0 |
summerofcode/Jijri | src/ir/summerofcode/DateConverter.java | 3002 | package ir.summerofcode;
/**
 * Converts dates between the Solar Hijri (Jalali / Persian) calendar and the
 * Gregorian calendar using the well-known arithmetic algorithm (epoch offset
 * 979, 33-year leap cycle, months of 31/30/29 days). Note: despite the name,
 * this is the Iranian solar calendar, not the lunar Islamic (Hijri Qamari)
 * calendar.
 */
public class DateConverter {

    /**
     * Converts a Solar Hijri (Jalali) date to a Gregorian date.
     *
     * @param year  Jalali year (e.g. 1379)
     * @param month Jalali month, 1-based (1 = Farvardin)
     * @param date  Jalali day of month, 1-based
     * @return an int[3] holding {gregorianYear, gregorianMonth, gregorianDay}
     */
    public static int[] HijriToGregorian(int year, int month, int date) {
        // calculation
        int[] gDaysInMonth = new int[]{31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
        int[] jDaysInMonth = new int[]{31, 31, 31, 31, 31, 31, 30, 30, 30, 30, 30, 29};
        // Work relative to the algorithm's epoch (Jalali 979 == Gregorian 1600-03-20).
        int jy = year - 979;
        int jm = month - 1;
        int jd = date - 1;
        int gm, gd, gy;
        // Day count since the Jalali epoch, including leap days of the 33-year cycle.
        int jDayNo = 365 * jy + (jy / 33) * 8 + ((jy % 33) + 3) / 4;
        for (int i = 0; i < jm; i++)
            jDayNo += jDaysInMonth[i];
        jDayNo += jd;
        // Shift to a day count since Gregorian 1600-01-01.
        int gDayNo = jDayNo + 79;
        gy = 1600 + 400 * (gDayNo / 146097); // 146097 days per 400-year cycle
        gDayNo = gDayNo % 146097;
        boolean leap = true;
        if (gDayNo >= 36525) {
            // Century handling: centuries are leap only when divisible by 400.
            gDayNo--;
            gy += 100 * (gDayNo / 36524);
            gDayNo = gDayNo % 36524;
            if (gDayNo >= 365)
                gDayNo = gDayNo + 1;
            else
                leap = false;
        }
        gy += 4 * (gDayNo / 1461); // 1461 days per 4-year cycle
        gDayNo %= 1461;
        if (gDayNo >= 366) {
            leap = false;
            gDayNo--;
            gy += gDayNo / 365;
            gDayNo = gDayNo % 365;
        }
        // Walk through the months, giving February an extra day in leap years.
        int i = 0;
        int tmp = 0;
        while (gDayNo >= (gDaysInMonth[i] + tmp)) {
            if (i == 1 && leap)
                tmp = 1;
            else
                tmp = 0;
            gDayNo -= gDaysInMonth[i] + tmp;
            i++;
        }
        gm = i + 1;
        gd = gDayNo + 1;
        // result
        int[] result = new int[3];
        result[0] = gy;
        result[1] = gm;
        result[2] = gd;
        return result;
    }

    /**
     * Converts a Gregorian date to a Solar Hijri (Jalali) date.
     *
     * @param year  Gregorian year (e.g. 2000)
     * @param month Gregorian month, 1-based (1 = January)
     * @param date  Gregorian day of month, 1-based
     * @return an int[3] holding {jalaliYear, jalaliMonth, jalaliDay}
     */
    public static int[] GregorianToHijri(int year, int month, int date) {
        // calculation
        int[] gDaysInMonth = new int[]{31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
        int[] jDaysInMonth = new int[]{31, 31, 31, 31, 31, 31, 30, 30, 30, 30, 30, 29};
        int gy = year - 1600;
        int gm = month - 1;
        int gd = date - 1;
        int jm, jd, jy;
        // Day count since Gregorian 1600-01-01, including leap days.
        int gDayNo = 365 * gy + (gy + 3) / 4 - (gy + 99) / 100 + (gy + 399) / 400;
        for (int i = 0; i < gm; i++)
            gDayNo += gDaysInMonth[i];
        // BUG FIX: the leap-year test must be parenthesized as a whole. The old
        // form "gm > 1 && (...) || (gy % 400 == 0)" added the leap day even for
        // January/February dates in years divisible by 400 (e.g. Jan 2000 was
        // off by one day). The leap day counts only for dates after February.
        if (gm > 1 && ((gy % 4 == 0 && gy % 100 != 0) || (gy % 400 == 0)))
            gDayNo++;
        gDayNo += gd;
        // Shift to a day count since the Jalali epoch.
        int jDayNo = gDayNo - 79;
        int jNp = jDayNo / 12053; // 12053 days per 33-year Jalali cycle
        jDayNo = jDayNo % 12053;
        jy = 979 + 33 * jNp + 4 * (jDayNo / 1461);
        jDayNo %= 1461;
        if (jDayNo >= 366) {
            jy += (jDayNo - 1) / 365;
            jDayNo = (jDayNo - 1) % 365;
        }
        // Walk through the Jalali months.
        int i = 0;
        while (i < 11 && jDayNo >= jDaysInMonth[i]) {
            jDayNo -= jDaysInMonth[i];
            i++;
        }
        jm = i + 1;
        jd = jDayNo + 1;
        // result
        int[] result = new int[3];
        result[0] = jy;
        result[1] = jm;
        result[2] = jd;
        return result;
    }
}
| apache-2.0 |
maven-nar/cpptasks-parallel | src/main/java/com/github/maven_nar/cpptasks/compiler/Linker.java | 2623 | /*
*
* Copyright 2001-2004 The Ant-Contrib project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.maven_nar.cpptasks.compiler;
import java.io.File;
import java.io.IOException;
import com.github.maven_nar.cpptasks.TargetMatcher;
import com.github.maven_nar.cpptasks.VersionInfo;
import com.github.maven_nar.cpptasks.types.LibraryTypeEnum;
/**
 * A linker for executables, and static and dynamic libraries.
 *
 * @author Adam Murdoch
 */
public interface Linker extends Processor {
    /**
     * Extracts the significant part of a library name to ensure there aren't
     * collisions.
     *
     * @param libname library file
     * @return the key identifying the library
     */
    String getLibraryKey(File libname);
    /**
     * Returns the library search path for the linker.
     *
     * @return directories searched for libraries
     */
    File[] getLibraryPath();
    /**
     * Returns a set of filename patterns corresponding to library names.
     *
     * For example, "advapi32" would be expanded to "advapi32.dll" by
     * DevStudioLinker and to "libadvapi32.a" and "libadvapi32.so" by
     * GccLinker.
     *
     * @param libnames
     *            array of library names
     * @param libraryType requested library type (static/shared), may constrain the patterns
     * @return filename patterns matching the given library names
     */
    String[] getLibraryPatterns(String[] libnames, LibraryTypeEnum libraryType);
    /**
     * Gets the linker for the specified link type.
     *
     * @return appropriate linker or null, will return this if this linker can
     *         handle the specified link type
     */
    Linker getLinker(LinkType linkType);
    /**
     * Returns true if the linker is case-sensitive.
     *
     * @return whether file name matching is case-sensitive for this linker
     */
    boolean isCaseSensitive();
    /**
     * Adds source or object files to the target fileset to
     * support version information.
     *
     * @param versionInfo version information
     * @param linkType link type
     * @param isDebug true if debug build
     * @param outputFile name of generated executable
     * @param objDir directory for generated files
     * @param matcher target fileset receiving the version files
     * @throws IOException if a version file cannot be generated
     */
    void addVersionFiles(final VersionInfo versionInfo,
                         final LinkType linkType,
                         final File outputFile,
                         final boolean isDebug,
                         final File objDir,
                         final TargetMatcher matcher) throws IOException;
}
| apache-2.0 |
Jackoder/progress-dispatcher | app/src/main/java/com/jackoder/sample/app/MainActivity.java | 4048 | package com.jackoder.sample.app;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.FragmentActivity;
import android.widget.SeekBar;
import android.widget.TextView;
import com.jackoder.progdispatcher.OnProgressListener;
import com.jackoder.progdispatcher.Progress;
import com.jackoder.progdispatcher.ProgressDispatcher;
import com.jackoder.sample.app.observer.NullIdService;
import com.jackoder.sample.app.observer.TargetIdBroadcastReceiver;
import com.jackoder.sample.app.observer.TargetIdService;
import com.jackoder.sample.app.utils.LogFormater;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
/**
 * Demo screen for the progress-dispatcher library: lets the user pick a target
 * id, set a progress value with a seek bar, and dispatch progress / error /
 * completion events, while logging every callback received.
 *
 * @author Jackoder
 * @version 2016/9/23
 */
public class MainActivity extends FragmentActivity implements OnProgressListener {

    @InjectView(R.id.tv_id)
    TextView mTvId;           // shows the currently selected target id
    @InjectView(R.id.tv_progress)
    TextView mTvProgress;     // mirrors the seek bar value
    @InjectView(R.id.sb_progress)
    SeekBar mSbProgress;      // progress value selector
    @InjectView(R.id.tv_log)
    TextView mTvLog;          // newest-first log of received callbacks

    // Index into mItems of the selected target id.
    private int mPosition;
    // Selectable target ids; "null" means dispatch without a specific id.
    private String[] mItems = new String[]{"null", TargetIdBroadcastReceiver.TAG, TargetIdService.TAG};

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        ButterKnife.inject(this, this);
        mSbProgress.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                // Keep the label in sync with the slider.
                mTvProgress.setText("" + progress);
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
        // Listen for events dispatched with a null target id.
        ProgressDispatcher.getInstance().addOnProgressListener(null, this);
        startServices();
    }

    // Starts the demo services that also observe progress events.
    private void startServices() {
        startService(new Intent(this, NullIdService.class));
        startService(new Intent(this, TargetIdService.class));
    }

    // Lets the user choose which target id subsequent events are sent to.
    @OnClick(R.id.tv_id)
    void showSingleChoiceButton() {
        new AlertDialog.Builder(this)
                .setTitle("Target Id")
                .setSingleChoiceItems(mItems, mPosition, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        mPosition = which;
                        mTvId.setText("" + mItems[mPosition]);
                    }
                }).show();
    }

    // Dispatches the current seek bar value as a progress event.
    @OnClick(R.id.btn_dispatch)
    void dispatch() {
        Progress progress = new Progress(getId());
        progress.setProgress(getProgress());
        ProgressDispatcher.getInstance().getProgressObserver(getId()).onNext(progress);
    }

    // Dispatches a synthetic error event.
    @OnClick(R.id.btn_error)
    void error() {
        Exception exception = new Exception("Custom error.");
        ProgressDispatcher.getInstance().getProgressObserver(getId()).onError(exception);
    }

    // Dispatches a completion event.
    @OnClick(R.id.btn_complete)
    void completed() {
        ProgressDispatcher.getInstance().getProgressObserver(getId()).onCompleted();
    }

    // Returns the selected target id, or null when "null" is selected.
    public String getId() {
        return mItems[mPosition].equals("null") ? null : mItems[mPosition];
    }

    public int getProgress() {
        return mSbProgress.getProgress();
    }

    // Prepends each received progress callback to the on-screen log.
    @Override
    public void onProgress(String id, int progress, Object context) {
        mTvLog.setText(LogFormater.format(id, progress, context) + "\n" + mTvLog.getText().toString());
    }

    @Override
    public void onError(String id, Throwable throwable) {
        mTvLog.setText(LogFormater.format(id, throwable) + "\n" + mTvLog.getText().toString());
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release dispatcher resources so listeners do not leak the activity.
        ProgressDispatcher.getInstance().release();
    }
}
| apache-2.0 |
iMDT/katharsis-framework-j6 | katharsis-core/src/main/java/io/katharsis/response/BaseResponseContext.java | 467 | package io.katharsis.response;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.katharsis.queryspec.internal.QueryAdapter;
import io.katharsis.request.path.JsonPath;
/**
 * Top-level JSON container's interface, used to generalize single and collection responses.
 */
public interface BaseResponseContext {

    /** HTTP status code to send with this response; excluded from JSON serialization. */
    @JsonIgnore
    int getHttpStatus();

    /** Returns the JSON-API response body wrapper. */
    JsonApiResponse getResponse();

    /** Returns the JSON path associated with this response. */
    JsonPath getJsonPath();

    /** Returns the query adapter associated with this response. */
    QueryAdapter getQueryAdapter();
}
| apache-2.0 |
mbezjak/vhdllab | vhdllab-client/src/main/java/hr/fer/zemris/vhdllab/platform/manager/workspace/WorkspaceAdapter.java | 1419 | /*******************************************************************************
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package hr.fer.zemris.vhdllab.platform.manager.workspace;
import hr.fer.zemris.vhdllab.entity.Project;
import hr.fer.zemris.vhdllab.service.workspace.FileReport;
/**
 * Convenience no-op implementation of {@link WorkspaceListener}; subclasses
 * override only the callbacks they are interested in (adapter pattern).
 */
public abstract class WorkspaceAdapter implements WorkspaceListener {

    /** Does nothing; override to react to file creation. */
    @Override
    public void fileCreated(FileReport report) {
    }

    /** Does nothing; override to react to file deletion. */
    @Override
    public void fileDeleted(FileReport report) {
    }

    /** Does nothing; override to react to file saves. */
    @Override
    public void fileSaved(FileReport report) {
    }

    /** Does nothing; override to react to project creation. */
    @Override
    public void projectCreated(Project project) {
    }

    /** Does nothing; override to react to project deletion. */
    @Override
    public void projectDeleted(Project project) {
    }
}
| apache-2.0 |
domaframework/simple-examples | dao-style-file/src/main/java/example/dao_style_file/domain/AgeConverter.java | 423 | package example.dao_style_file.domain;
import org.seasar.doma.ExternalDomain;
import org.seasar.doma.jdbc.domain.DomainConverter;
@ExternalDomain
public class AgeConverter implements DomainConverter<Age, Integer> {
@Override
public Integer fromDomainToValue(Age age) {
return age.getValue();
}
@Override
public Age fromValueToDomain(Integer value) {
return value == null ? null : new Age(value);
}
}
| apache-2.0 |
Taller/sqlworkbench-plus | src/workbench/gui/actions/CopyAsSqlDeleteAction.java | 1873 | /*
* CopyAsSqlDeleteAction.java
*
* This file is part of SQL Workbench/J, http://www.sql-workbench.net
*
* Copyright 2002-2015, Thomas Kellerer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* To contact the author please send an email to: support@sql-workbench.net
*
*/
package workbench.gui.actions;
import java.awt.event.ActionEvent;
import workbench.resource.ResourceMgr;
import workbench.gui.components.ClipBoardCopier;
import workbench.gui.components.WbTable;
/**
 * Action that copies the contents of a table to the clipboard as DELETE statements.
 *
 * @see workbench.gui.components.ClipBoardCopier
 *
 * @author Thomas Kellerer
 */
public class CopyAsSqlDeleteAction extends WbAction {

    private final WbTable client;

    public CopyAsSqlDeleteAction(WbTable aClient) {
        super();
        this.client = aClient;
        this.initMenuDefinition("MnuTxtCopyAsSqlDelete");
        this.setMenuItemName(ResourceMgr.MNU_TXT_DATA);
        this.setEnabled(false);
    }

    @Override
    public boolean hasCtrlModifier() {
        return true;
    }

    @Override
    public boolean hasShiftModifier() {
        return true;
    }

    @Override
    public void executeAction(ActionEvent e) {
        final ClipBoardCopier copier = new ClipBoardCopier(this.client);
        // Ctrl-click selects which columns to copy; keyboard invocation copies all.
        final boolean selectColumns = invokedByMouse(e) && isCtrlPressed(e);
        copier.copyAsSqlDelete(false, selectColumns);
    }
}
| apache-2.0 |
npgall/mobility-rpc | code/src/main/java/com/googlecode/mobilityrpc/protocol/pojo/ExecutionRequest.java | 2746 | /**
* Copyright 2011, 2012 Niall Gallagher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.mobilityrpc.protocol.pojo;
/**
 * Immutable value object describing a request to execute a serialized object:
 * the serialized payload itself, the format it was serialized with, the
 * requested execution mode and an identifier correlating request and response.
 * <p>
 * Instances are intentionally not usable as map keys: {@link #equals(Object)}
 * and {@link #hashCode()} both throw {@link UnsupportedOperationException}.
 *
 * @author Niall Gallagher
 */
public class ExecutionRequest {

    private final byte[] serializedExecutableObject;
    private final SerializationFormat serializationFormat;
    private final ExecutionMode executionMode;
    private final RequestIdentifier requestIdentifier;

    public ExecutionRequest(byte[] serializedExecutableObject, SerializationFormat serializationFormat, ExecutionMode executionMode, RequestIdentifier requestIdentifier) {
        this.serializedExecutableObject = serializedExecutableObject;
        this.serializationFormat = serializationFormat;
        this.executionMode = executionMode;
        this.requestIdentifier = requestIdentifier;
    }

    /** @return the serialized object to execute */
    public byte[] getSerializedExecutableObject() {
        return serializedExecutableObject;
    }

    /** @return the format used to serialize the executable object */
    public SerializationFormat getSerializationFormat() {
        return serializationFormat;
    }

    /** @return the requested execution mode */
    public ExecutionMode getExecutionMode() {
        return executionMode;
    }

    /** @return the identifier correlating this request with its response */
    public RequestIdentifier getRequestIdentifier() {
        return requestIdentifier;
    }

    /**
     * @throws UnsupportedOperationException always, as this object is not intended to be compared for equality
     * or used as a key in a hash map.
     */
    @SuppressWarnings({"EqualsWhichDoesntCheckParameterClass"})
    @Override
    public boolean equals(Object o) {
        throw new UnsupportedOperationException("Not supported.");
    }

    /**
     * @throws UnsupportedOperationException always, as this object is not intended to be compared for equality
     * or used as a key in a hash map.
     */
    @Override
    public int hashCode() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public String toString() {
        // Deliberately reports only the payload size, not its content.
        StringBuilder result = new StringBuilder("ExecutionRequest{");
        result.append("serializedExecutableObject=").append(serializedExecutableObject.length).append(" bytes");
        result.append(", serializationFormat=").append(serializationFormat);
        result.append(", executionMode=").append(executionMode);
        result.append(", requestIdentifier=").append(requestIdentifier);
        result.append('}');
        return result.toString();
    }
}
| apache-2.0 |
shivpun/spring-framework | spring-context/src/test/java/org/springframework/aop/framework/CglibProxyTests.java | 14096 | /*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.aop.framework;
import java.io.Serializable;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.junit.Test;
import test.mixin.LockMixinAdvisor;
import org.springframework.aop.ClassFilter;
import org.springframework.aop.MethodMatcher;
import org.springframework.aop.Pointcut;
import org.springframework.aop.support.AopUtils;
import org.springframework.aop.support.DefaultPointcutAdvisor;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextException;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.tests.aop.advice.CountingBeforeAdvice;
import org.springframework.tests.aop.interceptor.NopInterceptor;
import org.springframework.tests.sample.beans.ITestBean;
import org.springframework.tests.sample.beans.TestBean;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
/**
* Additional and overridden tests for CGLIB proxies.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @author Rob Harrop
* @author Ramnivas Laddad
* @author Chris Beams
*/
@SuppressWarnings("serial")
public class CglibProxyTests extends AbstractAopProxyTests implements Serializable {

	private static final String DEPENDENCY_CHECK_CONTEXT =
			CglibProxyTests.class.getSimpleName() + "-with-dependency-checking.xml";


	@Override
	protected Object createProxy(ProxyCreatorSupport as) {
		as.setProxyTargetClass(true);
		Object proxy = as.createAopProxy().getProxy();
		assertTrue(AopUtils.isCglibProxy(proxy));
		return proxy;
	}

	@Override
	protected AopProxy createAopProxy(AdvisedSupport as) {
		as.setProxyTargetClass(true);
		return new CglibAopProxy(as);
	}

	@Override
	protected boolean requiresTarget() {
		return true;
	}


	@Test(expected = IllegalArgumentException.class)
	public void testNullConfig() {
		new CglibAopProxy(null);
	}

	@Test(expected = AopConfigException.class)
	public void testNoTarget() {
		AdvisedSupport pc = new AdvisedSupport(ITestBean.class);
		pc.addAdvice(new NopInterceptor());
		AopProxy aop = createAopProxy(pc);
		aop.getProxy();
	}

	@Test
	public void testProtectedMethodInvocation() {
		ProtectedMethodTestBean bean = new ProtectedMethodTestBean();
		bean.value = "foo";
		mockTargetSource.setTarget(bean);
		AdvisedSupport as = new AdvisedSupport();
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		AopProxy aop = new CglibAopProxy(as);
		ProtectedMethodTestBean proxy = (ProtectedMethodTestBean) aop.getProxy();
		assertTrue(AopUtils.isCglibProxy(proxy));
		assertEquals(proxy.getClass().getClassLoader(), bean.getClass().getClassLoader());
		assertEquals("foo", proxy.getString());
	}

	@Test
	public void testPackageMethodInvocation() {
		PackageMethodTestBean bean = new PackageMethodTestBean();
		bean.value = "foo";
		mockTargetSource.setTarget(bean);
		AdvisedSupport as = new AdvisedSupport();
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		AopProxy aop = new CglibAopProxy(as);
		PackageMethodTestBean proxy = (PackageMethodTestBean) aop.getProxy();
		assertTrue(AopUtils.isCglibProxy(proxy));
		assertEquals(proxy.getClass().getClassLoader(), bean.getClass().getClassLoader());
		assertEquals("foo", proxy.getString());
	}

	@Test
	public void testPackageMethodInvocationWithDifferentClassLoader() {
		ClassLoader child = new ClassLoader(getClass().getClassLoader()) {
		};
		PackageMethodTestBean bean = new PackageMethodTestBean();
		bean.value = "foo";
		mockTargetSource.setTarget(bean);
		AdvisedSupport as = new AdvisedSupport();
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		AopProxy aop = new CglibAopProxy(as);
		PackageMethodTestBean proxy = (PackageMethodTestBean) aop.getProxy(child);
		assertTrue(AopUtils.isCglibProxy(proxy));
		// Package-visible methods are not delegated across class loaders.
		assertNotEquals(proxy.getClass().getClassLoader(), bean.getClass().getClassLoader());
		assertNull(proxy.getString()); // we're stuck in the proxy instance
	}

	@Test
	public void testProxyCanBeClassNotInterface() throws Exception {
		TestBean raw = new TestBean();
		raw.setAge(32);
		mockTargetSource.setTarget(raw);
		AdvisedSupport pc = new AdvisedSupport();
		pc.setTargetSource(mockTargetSource);
		AopProxy aop = new CglibAopProxy(pc);
		Object proxy = aop.getProxy();
		assertTrue(AopUtils.isCglibProxy(proxy));
		assertTrue(proxy instanceof ITestBean);
		assertTrue(proxy instanceof TestBean);
		TestBean tb = (TestBean) proxy;
		assertEquals(32, tb.getAge());
	}

	@Test
	public void testMethodInvocationDuringConstructor() {
		CglibTestBean bean = new CglibTestBean();
		bean.setName("Rob Harrop");
		AdvisedSupport as = new AdvisedSupport();
		as.setTarget(bean);
		as.addAdvice(new NopInterceptor());
		AopProxy aop = new CglibAopProxy(as);
		CglibTestBean proxy = (CglibTestBean) aop.getProxy();
		assertEquals("The name property has been overwritten by the constructor", "Rob Harrop", proxy.getName());
	}

	@Test
	public void testUnadvisedProxyCreationWithCallDuringConstructor() throws Exception {
		CglibTestBean target = new CglibTestBean();
		target.setName("Rob Harrop");
		AdvisedSupport pc = new AdvisedSupport();
		pc.setFrozen(true);
		pc.setTarget(target);
		CglibAopProxy aop = new CglibAopProxy(pc);
		CglibTestBean proxy = (CglibTestBean) aop.getProxy();
		assertNotNull("Proxy should not be null", proxy);
		assertEquals("Constructor overrode the value of name", "Rob Harrop", proxy.getName());
	}

	@Test
	public void testMultipleProxies() {
		TestBean target = new TestBean();
		target.setAge(20);
		TestBean target2 = new TestBean();
		target2.setAge(21);
		ITestBean proxy1 = getAdvisedProxy(target);
		ITestBean proxy2 = getAdvisedProxy(target2);
		// Equivalent proxy configurations must reuse the generated class.
		assertSame(proxy1.getClass(), proxy2.getClass());
		assertEquals(target.getAge(), proxy1.getAge());
		assertEquals(target2.getAge(), proxy2.getAge());
	}

	private ITestBean getAdvisedProxy(TestBean target) {
		ProxyFactory pf = new ProxyFactory(new Class<?>[]{ITestBean.class});
		pf.setProxyTargetClass(true);
		MethodInterceptor advice = new NopInterceptor();
		Pointcut pointcut = new Pointcut() {
			@Override
			public ClassFilter getClassFilter() {
				return ClassFilter.TRUE;
			}
			@Override
			public MethodMatcher getMethodMatcher() {
				return MethodMatcher.TRUE;
			}
			@Override
			public boolean equals(Object obj) {
				return true;
			}
			@Override
			public int hashCode() {
				return 0;
			}
		};
		pf.addAdvisor(new DefaultPointcutAdvisor(pointcut, advice));
		pf.setTarget(target);
		pf.setFrozen(true);
		pf.setExposeProxy(false);
		return (ITestBean) pf.getProxy();
	}

	@Test
	public void testMultipleProxiesForIntroductionAdvisor() {
		TestBean target1 = new TestBean();
		target1.setAge(20);
		TestBean target2 = new TestBean();
		target2.setAge(21);
		ITestBean proxy1 = getIntroductionAdvisorProxy(target1);
		ITestBean proxy2 = getIntroductionAdvisorProxy(target2);
		assertSame("Incorrect duplicate creation of proxy classes", proxy1.getClass(), proxy2.getClass());
	}

	private ITestBean getIntroductionAdvisorProxy(TestBean target) {
		ProxyFactory pf = new ProxyFactory(ITestBean.class);
		pf.setProxyTargetClass(true);
		pf.addAdvisor(new LockMixinAdvisor());
		pf.setTarget(target);
		pf.setFrozen(true);
		pf.setExposeProxy(false);
		return (ITestBean) pf.getProxy();
	}

	@Test
	public void testWithNoArgConstructor() {
		NoArgCtorTestBean target = new NoArgCtorTestBean("b", 1);
		target.reset();
		mockTargetSource.setTarget(target);
		AdvisedSupport pc = new AdvisedSupport();
		pc.setTargetSource(mockTargetSource);
		CglibAopProxy aop = new CglibAopProxy(pc);
		aop.setConstructorArguments(new Object[] {"Rob Harrop", 22}, new Class<?>[] {String.class, int.class});
		// Fixed: a redundant second getProxy() call was removed, and the
		// assertion message used to contradict the assertion itself
		// ("Proxy should be null" on an assertNotNull).
		NoArgCtorTestBean proxy = (NoArgCtorTestBean) aop.getProxy();
		assertNotNull("Proxy should not be null", proxy);
	}

	@Test
	public void testProxyAProxy() {
		ITestBean target = new TestBean();
		mockTargetSource.setTarget(target);
		AdvisedSupport as = new AdvisedSupport();
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		CglibAopProxy cglib = new CglibAopProxy(as);
		ITestBean proxy1 = (ITestBean) cglib.getProxy();
		// Now proxy the proxy itself.
		mockTargetSource.setTarget(proxy1);
		as = new AdvisedSupport(new Class<?>[]{});
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		cglib = new CglibAopProxy(as);
		assertThat(cglib.getProxy(), instanceOf(ITestBean.class));
	}

	@Test
	public void testProxyAProxyWithAdditionalInterface() {
		ITestBean target = new TestBean();
		mockTargetSource.setTarget(target);
		AdvisedSupport as = new AdvisedSupport();
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		as.addInterface(Serializable.class);
		CglibAopProxy cglib = new CglibAopProxy(as);
		ITestBean proxy1 = (ITestBean) cglib.getProxy();
		mockTargetSource.setTarget(proxy1);
		as = new AdvisedSupport(new Class<?>[]{});
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		cglib = new CglibAopProxy(as);
		ITestBean proxy2 = (ITestBean) cglib.getProxy();
		// The extra interface added to the first proxy must be retained.
		assertTrue(proxy2 instanceof Serializable);
	}

	@Test
	public void testExceptionHandling() {
		ExceptionThrower bean = new ExceptionThrower();
		mockTargetSource.setTarget(bean);
		AdvisedSupport as = new AdvisedSupport();
		as.setTargetSource(mockTargetSource);
		as.addAdvice(new NopInterceptor());
		AopProxy aop = new CglibAopProxy(as);
		ExceptionThrower proxy = (ExceptionThrower) aop.getProxy();
		try {
			proxy.doTest();
			// Fixed: previously the test passed silently if no exception was
			// thrown at all; fail() throws an Error, so it is not swallowed
			// by the catch block below.
			fail("doTest() should have thrown an exception");
		}
		catch (Exception ex) {
			assertTrue("Invalid exception class", ex instanceof ApplicationContextException);
		}
		assertTrue("Catch was not invoked", proxy.isCatchInvoked());
		assertTrue("Finally was not invoked", proxy.isFinallyInvoked());
	}

	@Test
	@SuppressWarnings("resource")
	public void testWithDependencyChecking() {
		ApplicationContext ctx = new ClassPathXmlApplicationContext(DEPENDENCY_CHECK_CONTEXT, getClass());
		ctx.getBean("testBean");
	}

	@Test
	public void testAddAdviceAtRuntime() {
		TestBean bean = new TestBean();
		CountingBeforeAdvice cba = new CountingBeforeAdvice();
		ProxyFactory pf = new ProxyFactory();
		pf.setTarget(bean);
		pf.setFrozen(false);
		pf.setOpaque(false);
		pf.setProxyTargetClass(true);
		TestBean proxy = (TestBean) pf.getProxy();
		assertTrue(AopUtils.isCglibProxy(proxy));
		proxy.getAge();
		assertEquals(0, cba.getCalls());
		// Advice added after proxy creation must take effect immediately.
		((Advised) proxy).addAdvice(cba);
		proxy.getAge();
		assertEquals(1, cba.getCalls());
	}

	@Test
	public void testProxyProtectedMethod() throws Exception {
		CountingBeforeAdvice advice = new CountingBeforeAdvice();
		ProxyFactory proxyFactory = new ProxyFactory(new MyBean());
		proxyFactory.addAdvice(advice);
		proxyFactory.setProxyTargetClass(true);
		MyBean proxy = (MyBean) proxyFactory.getProxy();
		assertEquals(4, proxy.add(1, 3));
		assertEquals(1, advice.getCalls("add"));
	}

	@Test
	public void testProxyTargetClassInCaseOfNoInterfaces() throws Exception {
		ProxyFactory proxyFactory = new ProxyFactory(new MyBean());
		MyBean proxy = (MyBean) proxyFactory.getProxy();
		assertEquals(4, proxy.add(1, 3));
	}

	@Test // SPR-13328
	public void testVarargsWithEnumArray() throws Exception {
		ProxyFactory proxyFactory = new ProxyFactory(new MyBean());
		MyBean proxy = (MyBean) proxyFactory.getProxy();
		assertTrue(proxy.doWithVarargs(MyEnum.A, MyOtherEnum.C));
	}


	public static class MyBean {

		private String name;

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}

		protected int add(int x, int y) {
			return x + y;
		}

		@SuppressWarnings("unchecked")
		public <V extends MyInterface> boolean doWithVarargs(V... args) {
			return true;
		}
	}


	public interface MyInterface {
	}


	public enum MyEnum implements MyInterface {
		A, B;
	}


	public enum MyOtherEnum implements MyInterface {
		C, D;
	}


	public static class ExceptionThrower {

		private boolean catchInvoked;

		private boolean finallyInvoked;

		public boolean isCatchInvoked() {
			return catchInvoked;
		}

		public boolean isFinallyInvoked() {
			return finallyInvoked;
		}

		public void doTest() throws Exception {
			try {
				throw new ApplicationContextException("foo");
			}
			catch (Exception ex) {
				catchInvoked = true;
				throw ex;
			}
			finally {
				finallyInvoked = true;
			}
		}
	}


	public static class NoArgCtorTestBean {

		private boolean called = false;

		public NoArgCtorTestBean(String x, int y) {
			called = true;
		}

		public boolean wasCalled() {
			return called;
		}

		public void reset() {
			called = false;
		}
	}


	public static class ProtectedMethodTestBean {

		public String value;

		protected String getString() {
			return this.value;
		}
	}


	public static class PackageMethodTestBean {

		public String value;

		String getString() {
			return this.value;
		}
	}
}
/**
 * Simple bean used by the proxy tests: its constructor deliberately invokes
 * the overridable {@link #setName(String)} so tests can observe method
 * interception during construction.
 */
class CglibTestBean {

	private String name;

	public CglibTestBean() {
		// Intentionally call the overridable setter from the constructor.
		setName("Some Default");
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getName() {
		return name;
	}
}
/**
 * Interceptor that rejects every invocation: it always throws an
 * {@link UnsupportedOperationException} carrying the invoked method's name.
 */
class UnsupportedInterceptor implements MethodInterceptor {

	@Override
	public Object invoke(MethodInvocation mi) throws Throwable {
		String methodName = mi.getMethod().getName();
		throw new UnsupportedOperationException(methodName);
	}
}
| apache-2.0 |
andrefio/Rx.Widgets | app/src/test/java/io/andref/rx/widget/example/ExampleUnitTest.java | 409 | package io.andref.rx.widget.example;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest
{
// Template sanity check generated by Android Studio: verifies only that the
// local JUnit harness is wired up correctly.
@Test
public void addition_isCorrect() throws Exception
{
assertEquals(4, 2 + 2);
}
}
jnidzwetzki/scalephant | bboxdb-server/src/main/java/org/bboxdb/storage/queryprocessor/predicate/PredicateJoinedTupleFilterIterator.java | 2170 | /*******************************************************************************
*
* Copyright (C) 2015-2021 the BBoxDB project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package org.bboxdb.storage.queryprocessor.predicate;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.bboxdb.storage.entity.MultiTuple;
/**
 * Iterator decorator that returns only those tuples of the base iterator
 * which match the given predicate. Each element of the base iterator must be
 * a {@link MultiTuple} wrapping exactly one tuple.
 */
public class PredicateJoinedTupleFilterIterator implements Iterator<MultiTuple> {

	/**
	 * The base iterator
	 */
	private final Iterator<MultiTuple> baseIterator;

	/**
	 * The filter predicate
	 */
	private final Predicate predicate;

	/**
	 * The next matching tuple, or null if none has been fetched yet
	 */
	private MultiTuple nextTuple = null;

	public PredicateJoinedTupleFilterIterator(final Iterator<MultiTuple> baseIterator, final Predicate predicate) {
		this.baseIterator = baseIterator;
		this.predicate = predicate;
	}

	@Override
	public boolean hasNext() {
		if(nextTuple != null) {
			return true;
		}

		// Search for the next predicate matching tuple
		while(baseIterator.hasNext()) {
			final MultiTuple tuple = baseIterator.next();

			// Only joined tuples wrapping exactly one tuple can be filtered here
			if(tuple.getNumberOfTuples() != 1) {
				throw new IllegalArgumentException("Unable to filter tuple: " + tuple);
			}

			if(predicate.matches(tuple.getTuple(0))) {
				nextTuple = tuple;
				return true;
			}
		}

		return false;
	}

	@Override
	public MultiTuple next() {
		// Fetch the next element ourselves so that next() also works without a
		// preceding hasNext() call. Fixed: the Iterator contract requires a
		// NoSuchElementException (not IllegalArgumentException) on exhaustion.
		if(nextTuple == null && ! hasNext()) {
			throw new NoSuchElementException("Iterator is exhausted");
		}

		final MultiTuple resultTuple = nextTuple;
		nextTuple = null;
		return resultTuple;
	}
}
| apache-2.0 |
cdap-solutions/mmds | mmds-model/src/main/java/io/cdap/mmds/proto/ExperimentNotFoundException.java | 893 | /*
* Copyright © 2017-2018 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.mmds.proto;
/**
 * Thrown when no experiment exists under the requested name.
 */
public class ExperimentNotFoundException extends NotFoundException {

  /**
   * @param experimentName name of the experiment that could not be found
   */
  public ExperimentNotFoundException(String experimentName) {
    // Produces the exact same message text as the previous
    // String.format-based construction.
    super("Experiment '" + experimentName + "' not found.");
  }
}
| apache-2.0 |
degauhta/dgagarsky | chapter_010/1_todolist/src/main/java/ru/dega/servlets/package-info.java | 97 | /**
* Servlet classes of the todo-list web application.
*
* @author Denis
* @since 14.09.2017
*/
package ru.dega.servlets; | apache-2.0 |
vladmm/intellij-community | platform/platform-impl/src/com/intellij/openapi/editor/impl/softwrap/mapping/SoftWrapApplianceManager.java | 59763 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl.softwrap.mapping;
import com.intellij.diagnostic.Dumpable;
import com.intellij.diagnostic.LogMessageEx;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.VisibleAreaEvent;
import com.intellij.openapi.editor.event.VisibleAreaListener;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.ScrollingModelEx;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.impl.*;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapDrawingType;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapImpl;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapPainter;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapsStorage;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.Segment;
import com.intellij.openapi.util.text.StringUtil;
import org.intellij.lang.annotations.JdkConstants;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
* The general idea of soft wraps processing is to build a cache to use for quick document dimensions mapping
* ({@code 'logical position -> visual position'}, {@code 'offset -> logical position'} etc) and update it incrementally
* on events like document modification fold region(s) expanding/collapsing etc.
* <p/>
* This class encapsulates document parsing logic. It notifies {@link SoftWrapAwareDocumentParsingListener registered listeners}
* about parsing and they are free to store necessary information for further usage.
* <p/>
* Not thread-safe.
*
* @author Denis Zhdanov
* @since Jul 5, 2010 10:01:27 AM
*/
public class SoftWrapApplianceManager implements Dumpable {
private static final Logger LOG = Logger.getInstance("#" + SoftWrapApplianceManager.class.getName());
/** Enumerates possible type of soft wrap indents to use. */
enum IndentType {
/** Don't apply special indent to soft-wrapped line at all. */
NONE,
/**
* Indent soft wraps for the {@link EditorSettings#getCustomSoftWrapIndent() user-defined number of columns}
* to the start of the previous visual line.
*/
CUSTOM
}
private final List<SoftWrapAwareDocumentParsingListener> myListeners = new ArrayList<SoftWrapAwareDocumentParsingListener>();
private final ProcessingContext myContext = new ProcessingContext();
private final FontTypesStorage myOffset2fontType = new FontTypesStorage();
private final WidthsStorage myOffset2widthInPixels = new WidthsStorage();
private final SoftWrapsStorage myStorage;
private final EditorImpl myEditor;
private SoftWrapPainter myPainter;
private final CachingSoftWrapDataMapper myDataMapper;
/**
* Visual area width change causes soft wraps addition/removal, so, we want to update <code>'y'</code> coordinate
* of the editor viewport then. For example, we observe particular text region at the 'vcs diff' control and change
* its width. We would like to see the same text range at the viewport then.
* <p/>
* This field holds offset of the text range that is shown at the top-left viewport position. It's used as an anchor
* during viewport's <code>'y'</code> coordinate adjustment on visual area width change.
*/
private int myLastTopLeftCornerOffset = -1;
private int myVerticalScrollBarWidth = -1;
private VisibleAreaWidthProvider myWidthProvider;
private LineWrapPositionStrategy myLineWrapPositionStrategy;
private IncrementalCacheUpdateEvent myEventBeingProcessed;
private boolean myVisualAreaListenerAttached;
private boolean myCustomIndentUsedLastTime;
private int myCustomIndentValueUsedLastTime;
private int myVisibleAreaWidth;
private boolean myInProgress;
private boolean myIsDirty = true;
private IncrementalCacheUpdateEvent myDocumentChangedEvent;
/**
 * Creates a manager that computes soft wraps for the given editor.
 *
 * @param storage    storage of registered soft wraps, queried and updated during recalculation
 * @param editor     editor whose document is processed
 * @param painter    used to obtain the minimum drawing width reserved for soft wrap signs
 * @param dataMapper NOTE(review): passed into range-based {@code IncrementalCacheUpdateEvent}s;
 *                   presumably the dimension-mapping cache being kept up to date -- confirm
 */
public SoftWrapApplianceManager(@NotNull SoftWrapsStorage storage,
@NotNull EditorImpl editor,
@NotNull SoftWrapPainter painter,
CachingSoftWrapDataMapper dataMapper)
{
myStorage = storage;
myEditor = editor;
myPainter = painter;
myDataMapper = dataMapper;
// A default width provider is installed here; it can apparently be replaced later
// (myWidthProvider is a mutable field).
myWidthProvider = new DefaultVisibleAreaWidthProvider(editor);
}
/**
 * Public entry point that ensures soft wrap data is up to date; simply
 * delegates to {@code recalculateIfNecessary()}.
 */
public void registerSoftWrapIfNecessary() {
recalculateIfNecessary();
}
/**
 * Marks all cached soft wrap data as dirty (forcing a full rebuild on the
 * next recalculation) and tells every registered parsing listener to drop
 * its own cached state.
 */
public void reset() {
myIsDirty = true;
for (SoftWrapAwareDocumentParsingListener listener : myListeners) {
listener.reset();
}
}
/**
 * Releases the cached line wrap position strategy; it will be re-created
 * lazily when needed again.
 */
public void release() {
myLineWrapPositionStrategy = null;
}
/**
 * Lazily attaches a visible-area listener that keeps
 * {@code myLastTopLeftCornerOffset} current. Attachment is deferred until
 * first use because the editor may still be incompletely initialized when
 * this manager is constructed (see comment below).
 */
private void initListenerIfNecessary() {
// We can't attach the listener during this object initialization because there is a big chance that the editor is in incomplete
// state there (e.g. it's scrolling model is not initialized yet).
if (myVisualAreaListenerAttached) {
return;
}
myVisualAreaListenerAttached = true;
myEditor.getScrollingModel().addVisibleAreaListener(new VisibleAreaListener() {
@Override
public void visibleAreaChanged(VisibleAreaEvent e) {
updateLastTopLeftCornerOffset();
}
});
// Seed the anchor offset immediately so it is valid before the first event.
updateLastTopLeftCornerOffset();
}
/**
 * Recalculates soft wraps for the range described by the given event.
 * Does nothing if the whole cache is already dirty (a full rebuild is
 * pending anyway); defers work by marking the cache dirty when the visible
 * area width is not yet known.
 *
 * @param e describes the document range to reprocess
 */
public void recalculate(IncrementalCacheUpdateEvent e) {
if (myIsDirty) {
return;
}
initListenerIfNecessary();
if (myVisibleAreaWidth <= 0) {
myIsDirty = true;
return;
}
recalculateSoftWraps(e);
onRecalculationEnd();
}
/**
 * Recalculates soft wraps for the given document ranges. The ranges are
 * sorted by start offset first, and a temporary listener tracks the offset
 * up to which recalculation has already progressed, so overlapping ranges
 * are not processed twice.
 *
 * @param ranges document ranges to reprocess; the list is sorted in place
 */
public void recalculate(List<? extends Segment> ranges) {
if (myIsDirty) {
return;
}
initListenerIfNecessary();
if (myVisibleAreaWidth <= 0) {
myIsDirty = true;
return;
}
// Sort by start offset; for equal starts, the longer range comes first.
Collections.sort(ranges, new Comparator<Segment>() {
@Override
public int compare(Segment o1, Segment o2) {
int startDiff = o1.getStartOffset() - o2.getStartOffset();
return startDiff == 0 ? o2.getEndOffset() - o1.getEndOffset() : startDiff;
}
});
// Single-element array so the anonymous listener below can mutate it.
final int[] lastRecalculatedOffset = new int[] {0};
SoftWrapAwareDocumentParsingListenerAdapter listener = new SoftWrapAwareDocumentParsingListenerAdapter() {
@Override
public void onRecalculationEnd(@NotNull IncrementalCacheUpdateEvent event) {
lastRecalculatedOffset[0] = event.getActualEndOffset();
}
};
myListeners.add(listener);
try {
for (Segment range : ranges) {
int lastOffset = lastRecalculatedOffset[0];
// Skip the part of the range already covered by a previous iteration.
if (range.getEndOffset() > lastOffset) {
recalculateSoftWraps(new IncrementalCacheUpdateEvent(myEditor.getDocument(),
Math.max(range.getStartOffset(), lastOffset), range.getEndOffset(),
myDataMapper, myEditor));
}
}
}
finally {
myListeners.remove(listener);
}
onRecalculationEnd();
}
/**
* @return <code>true</code> if soft wraps were really re-calculated;
* <code>false</code> if it's not possible to do at the moment (e.g. current editor is not shown and we don't
* have information about viewport width)
*/
private boolean recalculateSoftWraps() {
initListenerIfNecessary();
// Nothing pending: a previous pass already brought the cache up to date.
if (!myIsDirty) {
return true;
}
// Viewport width unknown (editor not shown yet) - keep the dirty flag and retry later.
if (myVisibleAreaWidth <= 0) {
return false;
}
myIsDirty = false;
// Full rebuild over the whole document.
recalculateSoftWraps(new IncrementalCacheUpdateEvent(myEditor.getDocument()));
onRecalculationEnd();
return true;
}
/**
 * Post-recalculation bookkeeping: refreshes the cached top-left corner
 * offset and notifies every listener that recalculation has finished.
 */
private void onRecalculationEnd() {
updateLastTopLeftCornerOffset();
for (SoftWrapAwareDocumentParsingListener listener : myListeners) {
listener.recalculationEnds();
}
}
/**
 * Guarded wrapper around {@link #doRecalculateSoftWraps}: logs an error for
 * documents with non-standard ('\r') line endings, detects re-entrant
 * invocations via {@code myInProgress}, and always clears the in-progress
 * flag when done.
 */
private void recalculateSoftWraps(@NotNull IncrementalCacheUpdateEvent event) {
if (myEditor.getDocument() instanceof DocumentImpl && ((DocumentImpl)myEditor.getDocument()).acceptsSlashR()) {
LOG.error("Soft wrapping is not supported for documents with non-standard line endings. File: " + myEditor.getVirtualFile());
}
// Re-entrancy here would mean concurrent recalculation - report with full editor state.
if (myInProgress) {
LogMessageEx.error(LOG, "Detected race condition at soft wraps recalculation",
(myEditor instanceof EditorImpl) ? ((EditorImpl)myEditor).dumpState() : "", event.toString());
}
myInProgress = true;
try {
doRecalculateSoftWraps(event);
}
finally {
myInProgress = false;
}
}
/**
 * Core recalculation routine: walks the document from the event's start
 * offset via an {@code IterationState}, processing plain-text tokens and
 * collapsed fold regions until either the iteration ends or a processing
 * method signals completion, then records the actual end offset on the
 * event and notifies listeners.
 */
private void doRecalculateSoftWraps(IncrementalCacheUpdateEvent event) {
myEventBeingProcessed = event;
notifyListenersOnCacheUpdateStart(event);
// Preparation.
myContext.reset();
myOffset2fontType.clear();
myOffset2widthInPixels.clear();
// Define start of the visual line that holds target range start.
final int start = event.getStartOffset();
final LogicalPosition logical = event.getStartLogicalPosition();
Document document = myEditor.getDocument();
myContext.text = document.getCharsSequence();
myContext.tokenStartOffset = start;
IterationState iterationState = new IterationState(myEditor, start, document.getTextLength(), false);
TextAttributes attributes = iterationState.getMergedAttributes();
myContext.fontType = attributes.getFontType();
myContext.rangeEndOffset = event.getMandatoryEndOffset();
EditorPosition position = myEditor.myUseNewRendering ? new EditorPosition(logical, event.getStartVisualPosition(), start, myEditor) :
new EditorPosition(logical, start, myEditor);
// Reserve room for the editor prefix only when starting at the document beginning.
position.x = start == 0 ? myEditor.getPrefixTextWidthInPixels() : 0;
int spaceWidth = EditorUtil.getSpaceWidth(myContext.fontType, myEditor);
int plainSpaceWidth = EditorUtil.getSpaceWidth(Font.PLAIN, myEditor);
myContext.logicalLineData.update(logical.line, spaceWidth, plainSpaceWidth);
myContext.currentPosition = position;
myContext.lineStartPosition = position.clone();
myContext.fontType2spaceWidth.put(myContext.fontType, spaceWidth);
myContext.softWrapStartOffset = position.offset;
myContext.reservedWidthInPixels = myPainter.getMinDrawingWidth(SoftWrapDrawingType.BEFORE_SOFT_WRAP_LINE_FEED);
// If a soft wrap is already registered exactly at the start offset,
// adjust the starting visual position/columns to account for its indent.
SoftWrap softWrapAtStartPosition = myStorage.getSoftWrap(start);
if (softWrapAtStartPosition != null) {
myContext.currentPosition.x = softWrapAtStartPosition.getIndentInPixels();
myContext.lineStartPosition.visualColumn = 0;
myContext.lineStartPosition.softWrapColumnDiff -= softWrapAtStartPosition.getIndentInColumns();
myContext.softWrapStartOffset++;
notifyListenersOnVisualLineStart(myContext.lineStartPosition);
}
// Perform soft wraps calculation.
while (!iterationState.atEnd()) {
FoldRegion currentFold = iterationState.getCurrentFold();
if (currentFold == null) {
myContext.tokenEndOffset = iterationState.getEndOffset();
// A 'true' return value from the token processors means "done for this event".
if (processNonFoldToken()) {
break;
}
}
else {
if (processCollapsedFoldRegion(currentFold)) {
break;
}
// 'myOffset2widthInPixels' contains information necessary to processing soft wraps that lay before the current offset.
// We do know that soft wraps are not allowed to go backward after processed collapsed fold region, hence, we drop
// information about processed symbols width.
myOffset2widthInPixels.clear();
}
iterationState.advance();
attributes = iterationState.getMergedAttributes();
myContext.fontType = attributes.getFontType();
myContext.tokenStartOffset = iterationState.getStartOffset();
myOffset2fontType.fill(myContext.tokenStartOffset, iterationState.getEndOffset(), myContext.fontType);
}
// Discard a soft wrap that was registered but never confirmed.
if (myContext.delayedSoftWrap != null) {
myStorage.remove(myContext.delayedSoftWrap);
}
notifyListenersOnVisualLineEnd();
event.setActualEndOffset(myContext.currentPosition.offset);
validateFinalPosition(event);
notifyListenersOnCacheUpdateEnd(event);
myEventBeingProcessed = null;
}
/**
 * Sanity check after recalculation: the actual end offset must not exceed an
 * upper estimate (the start of the line following the event's mandatory end
 * offset); otherwise an error with the soft wrap model state is logged.
 */
private void validateFinalPosition(IncrementalCacheUpdateEvent event) {
if (LOG.isDebugEnabled()) {
LOG.debug("Soft wrap recalculation done: " + event.toString() + ". " + (event.getActualEndOffset() - event.getStartOffset()) + " characters processed");
}
int endOffsetUpperEstimate = EditorUtil.getNotFoldedLineEndOffset(myEditor, event.getMandatoryEndOffset());
int line = myEditor.getDocument().getLineNumber(endOffsetUpperEstimate);
// Extend the estimate to the start of the next line when one exists.
if (line < myEditor.getDocument().getLineCount() - 1) {
endOffsetUpperEstimate = myEditor.getDocument().getLineStartOffset(line + 1);
}
if (event.getActualEndOffset() > endOffsetUpperEstimate) {
LOG.error("Unexpected error at soft wrap recalculation", new Attachment("softWrapModel.txt", myEditor.getSoftWrapModel().toString()));
}
}
/**
 * Encapsulates logic of processing given collapsed fold region.
 * <p/>
 * The region's placeholder text is measured in pixels; if it fits the current visual line it is simply
 * consumed, otherwise a soft wrap is registered either before the region or right at its start offset
 * and processing state is rewound accordingly.
 *
 * @param foldRegion target collapsed fold region to process
 * @return <code>true</code> if no further calculation is required
 */
private boolean processCollapsedFoldRegion(FoldRegion foldRegion) {
  Document document = myEditor.getDocument();
  // Defensive check: fold region may be invalid or out of sync with the current processing context/document.
  if (!foldRegion.isValid() ||
      foldRegion.getStartOffset() != myContext.tokenStartOffset
      || foldRegion.getEndOffset() > document.getTextLength()) {
    LOG.error("Inconsistent fold region state: fold region: " + foldRegion
              + ", soft wrap model state: " + myEditor.getSoftWrapModel()
              + ", folding model state: " + myEditor.getFoldingModel());
    return true;
  }
  // Measure the placeholder text width in pixels, symbol by symbol.
  String placeholder = foldRegion.getPlaceholderText();
  int placeholderWidthInPixels = 0;
  for (int i = 0; i < placeholder.length(); i++) {
    placeholderWidthInPixels += SoftWrapModelImpl.getEditorTextRepresentationHelper(myEditor)
      .charWidth(placeholder.charAt(i), myContext.fontType);
  }
  if (myContext.delayedSoftWrap == null) {
    int newX = myContext.currentPosition.x + placeholderWidthInPixels;
    notifyListenersOnVisualLineStart(myContext.lineStartPosition);
    // The placeholder fits the visual line (or the line is empty so far) - just consume it.
    if (!myContext.exceedsVisualEdge(newX) || myContext.currentPosition.offset == myContext.lineStartPosition.offset) {
      myContext.advance(foldRegion, placeholderWidthInPixels);
      return false;
    }
  }
  myContext.logicalLineData.update(foldRegion.getStartOffset());
  SoftWrap softWrap = null;
  // Try to wrap before the fold region when the reserved width would be exceeded.
  if (myContext.delayedSoftWrap == null && myContext.exceedsVisualEdge(myContext.currentPosition.x + myContext.reservedWidthInPixels)) {
    softWrap = registerSoftWrap(
      myContext.softWrapStartOffset, myContext.tokenStartOffset, myContext.tokenStartOffset, myContext.getSpaceWidth(),
      myContext.logicalLineData
    );
  }
  // A pending delayed soft wrap is superseded by the one created here.
  if (myContext.delayedSoftWrap != null) {
    myStorage.remove(myContext.delayedSoftWrap);
    myContext.delayedSoftWrap = null;
  }
  if (softWrap == null) {
    // If we're here that means that we can't find appropriate soft wrap offset before the fold region.
    // However, we expect that it's always possible to wrap collapsed fold region placeholder text
    softWrap = registerSoftWrap(foldRegion.getStartOffset(), myContext.getSpaceWidth(), myContext.logicalLineData);
  }
  myContext.softWrapStartOffset = softWrap.getStart();
  // The wrap landed before the current token - rewind listener state and position over the skipped symbols.
  if (softWrap.getStart() < myContext.tokenStartOffset) {
    revertListeners(softWrap.getStart(), myContext.currentPosition.visualLine);
    for (int j = foldRegion.getStartOffset() - 1; j >= softWrap.getStart(); j--) {
      int pixelsDiff = myOffset2widthInPixels.data[j - myOffset2widthInPixels.anchor];
      int columnsDiff = calculateWidthInColumns(myContext.text.charAt(j), pixelsDiff, myContext.getPlainSpaceWidth());
      myContext.currentPosition.offset--;
      myContext.currentPosition.logicalColumn -= columnsDiff;
      myContext.currentPosition.visualColumn -= columnsDiff;
    }
  }
  // Emulate a line feed at the soft wrap position (before/after listener notifications bracket the state change).
  notifyListenersOnSoftWrapLineFeed(true);
  myContext.currentPosition.visualColumn = 0;
  myContext.currentPosition.softWrapColumnDiff = myContext.currentPosition.visualColumn - myContext.currentPosition.foldingColumnDiff
                                                 - myContext.currentPosition.logicalColumn;
  myContext.currentPosition.softWrapLinesCurrent++;
  myContext.currentPosition.visualLine++;
  notifyListenersOnSoftWrapLineFeed(false);
  // Start the new visual line at the soft wrap indent.
  myContext.currentPosition.x = softWrap.getIndentInPixels();
  myContext.currentPosition.visualColumn = softWrap.getIndentInColumns();
  myContext.currentPosition.softWrapColumnDiff += softWrap.getIndentInColumns();
  myContext.clearLastFoldInfo();
  myContext.skipToLineEnd = false;
  if (checkIsDoneAfterSoftWrap()) {
    return true;
  }
  // Re-process the symbols between the soft wrap and the fold region start on the new visual line.
  for (int j = softWrap.getStart(); j < myContext.tokenStartOffset; j++) {
    char c = myContext.text.charAt(j);
    int newX = calculateNewX(c);
    myContext.onNonLineFeedSymbol(c, newX);
  }
  myOffset2fontType.clear();
  myContext.advance(foldRegion, placeholderWidthInPixels);
  return false;
}
/**
 * Encapsulates logic of processing target non-fold region token defined by the {@link #myContext current processing context}
 * (target token start offset is identified by {@link ProcessingContext#tokenStartOffset}; end offset is stored
 * at {@link ProcessingContext#tokenEndOffset}).
 * <p/>
 * <code>'Token'</code> here stands for the number of subsequent symbols that are represented using the same font by IJ editor.
 *
 * @return <code>true</code> if no further calculation is required
 */
private boolean processNonFoldToken() {
  // Safety guard against endless recalculation: allow at most 3 passes over the remaining token symbols.
  int limit = 3 * (myContext.tokenEndOffset - myContext.lineStartPosition.offset);
  int counter = 0;
  int startOffset = myContext.currentPosition.offset;
  while (myContext.currentPosition.offset < myContext.tokenEndOffset) {
    if (counter++ > limit) {
      // Cycle detected - report diagnostics and fall back to a plain, wrap-free pass over the remaining symbols.
      String editorInfo = myEditor instanceof EditorImpl ? ((EditorImpl)myEditor).dumpState() : myEditor.getClass().toString();
      LogMessageEx.error(LOG, "Cycled soft wraps recalculation detected", String.format(
        "Start recalculation offset: %d, visible area width: %d, calculation context: %s, editor info: %s",
        startOffset, myVisibleAreaWidth, myContext, editorInfo));
      for (int i = myContext.currentPosition.offset; i < myContext.tokenEndOffset; i++) {
        char c = myContext.text.charAt(i);
        if (c == '\n') {
          myContext.onNewLine();
          if (checkIsDoneAfterNewLine()) {
            return true;
          }
        }
        else {
          myContext.onNonLineFeedSymbol(c);
        }
      }
      return false;
    }
    int offset = myContext.currentPosition.offset;
    // Apply a previously delayed soft wrap once its target offset is reached.
    if (myContext.delayedSoftWrap != null && myContext.delayedSoftWrap.getStart() == offset) {
      processSoftWrap(myContext.delayedSoftWrap);
      myContext.delayedSoftWrap = null;
      if (checkIsDoneAfterSoftWrap()) {
        return true;
      }
    }
    char c = myContext.text.charAt(offset);
    if (c == '\n') {
      myContext.onNewLine();
      if (checkIsDoneAfterNewLine()) {
        return true;
      }
      continue;
    }
    if (myContext.skipToLineEnd) {
      myContext.skipToLineEnd = false; // Assuming that this flag is set if no soft wrap is registered during processing the call below
      if (createSoftWrapIfPossible()) {
        return true;
      }
      continue;
    }
    int newX = offsetToX(offset, c);
    // Wrap when the symbol would cross the visual edge (unless a delayed wrap is already pending).
    if (myContext.exceedsVisualEdge(newX) && myContext.delayedSoftWrap == null) {
      if (createSoftWrapIfPossible()) {
        return true;
      }
    }
    else {
      myContext.onNonLineFeedSymbol(c, newX);
    }
  }
  return false;
}
/**
 * @return <code>true</code> if, after a line feed, the current position has moved past the end of the range
 *         being recalculated (i.e. processing may stop)
 */
private boolean checkIsDoneAfterNewLine() {
  boolean pastRangeEnd = myContext.currentPosition.offset > myContext.rangeEndOffset;
  return pastRangeEnd;
}
/**
 * Checks whether processing may stop right after a soft wrap has been registered: that is the case when
 * the current position is past the recalculated range's end AND the just-added soft wrap matches the
 * corresponding old one, so the remaining cached data is still valid.
 *
 * @return <code>true</code> if no further processing is required
 */
private boolean checkIsDoneAfterSoftWrap() {
  SoftWrapImpl lastSoftWrap = myDataMapper.getLastSoftWrap();
  LOG.assertTrue(lastSoftWrap != null);
  boolean pastRangeEnd = myContext.currentPosition.offset > myContext.rangeEndOffset;
  if (!pastRangeEnd || !myDataMapper.matchesOldSoftWrap(lastSoftWrap, myEventBeingProcessed.getLengthDiff())) {
    return false;
  }
  // The trailing cache entry describes state past the point we stop at - drop it.
  myDataMapper.removeLastCacheEntry();
  return true;
}
/**
 * Allows to retrieve 'x' coordinate of the right edge of document symbol referenced by the given offset.
 *
 * @param offset target symbol offset
 * @param c      target symbol referenced by the given offset
 * @return 'x' coordinate of the right edge of document symbol referenced by the given offset
 */
private int offsetToX(int offset, char c) {
  // Fast path: reuse the cached per-symbol width when the offset falls inside the cached window.
  // NOTE(review): the first condition compares the cache entry count ('end') against an absolute
  // document offset, while WidthsStorage documents the key as 'array index plus anchor'. The intended
  // bounds check looks like 'anchor <= offset < anchor + end' - confirm before relying on this branch.
  if (myOffset2widthInPixels.end > offset
      && (myOffset2widthInPixels.anchor + myOffset2widthInPixels.end > offset)
      && myContext.currentPosition.symbol != '\t'/*we need to recalculate tabulation width after soft wrap*/)
  {
    return myContext.currentPosition.x + myOffset2widthInPixels.data[offset - myOffset2widthInPixels.anchor];
  }
  else {
    return calculateNewX(c);
  }
}
/**
 * Tries to register a soft wrap for the current position because the visual area width is exceeded.
 * <p/>
 * If no regular wrap position can be found, falls back to wrapping at the last collapsed fold region
 * boundary on the current visual line, and as a last resort shifts processing to the next line.
 * <p/>
 * Fix: the forward-processing loop below fed {@code charAt(offset)} on every iteration instead of
 * {@code charAt(j)}, i.e. it measured the same symbol repeatedly while advancing towards the wrap position.
 *
 * @return <code>true</code> if no further processing is required; <code>false</code> otherwise
 */
private boolean createSoftWrapIfPossible() {
  final int offset = myContext.currentPosition.offset;
  myContext.logicalLineData.update(offset);
  int softWrapStartOffset = myContext.softWrapStartOffset;
  int preferredOffset = Math.max(softWrapStartOffset, offset - 1 /* reserve a column for the soft wrap sign */);
  SoftWrapImpl softWrap = registerSoftWrap(
    softWrapStartOffset,
    preferredOffset,
    myContext.logicalLineData.endLineOffset,
    myContext.getSpaceWidth(),
    myContext.logicalLineData
  );
  FoldRegion revertedToFoldRegion = null;
  if (softWrap == null) {
    EditorPosition wrapPosition = null;
    // Try to insert soft wrap after the last collapsed fold region that is located on the current visual line.
    if (myContext.lastFoldEndPosition != null && myStorage.getSoftWrap(myContext.lastFoldEndPosition.offset) == null) {
      wrapPosition = myContext.lastFoldEndPosition;
    }
    // Otherwise try wrapping just before that fold region.
    if (wrapPosition == null && myContext.lastFoldStartPosition != null
        && myStorage.getSoftWrap(myContext.lastFoldStartPosition.offset) == null
        && myContext.lastFoldStartPosition.offset < myContext.currentPosition.offset)
    {
      wrapPosition = myContext.lastFoldStartPosition;
    }
    if (wrapPosition != null){
      // Rewind processing state to the chosen fold boundary and wrap there.
      revertListeners(wrapPosition.offset, wrapPosition.visualLine);
      myContext.currentPosition = wrapPosition;
      softWrap = registerSoftWrap(wrapPosition.offset, myContext.getSpaceWidth(), myContext.logicalLineData);
      myContext.tokenStartOffset = wrapPosition.offset;
      revertedToFoldRegion = myContext.lastFold;
    }
    else {
      // No wrap position at all - leave the long line as-is and continue from the next line.
      return myContext.tryToShiftToNextLine();
    }
  }
  myContext.skipToLineEnd = false;
  notifyListenersOnVisualLineStart(myContext.lineStartPosition);
  int actualSoftWrapOffset = softWrap.getStart();
  // There are three possible options:
  //   1. Soft wrap offset is located before the current offset;
  //   2. Soft wrap offset is located after the current offset but doesn't exceed current token end offset
  //      (it may occur if there are no convenient wrap positions before the current offset);
  //   3. Soft wrap offset is located after the current offset and exceeds current token end offset;
  // We should process that accordingly.
  if (actualSoftWrapOffset > myContext.tokenEndOffset) {
    // Case 3: remember the wrap and apply it later when its offset is reached.
    myContext.delayedSoftWrap = softWrap;
    myContext.onNonLineFeedSymbol(myContext.text.charAt(offset));
    return false;
  }
  else if (actualSoftWrapOffset < offset) {
    // Case 1: rewind position over the symbols between the wrap offset and the current offset.
    if (revertedToFoldRegion == null) {
      revertListeners(actualSoftWrapOffset, myContext.currentPosition.visualLine);
      for (int j = offset - 1; j >= actualSoftWrapOffset; j--) {
        int pixelsDiff = myOffset2widthInPixels.data[j - myOffset2widthInPixels.anchor];
        int columnsDiff = calculateWidthInColumns(myContext.text.charAt(j), pixelsDiff, myContext.getPlainSpaceWidth());
        myContext.currentPosition.offset--;
        myContext.currentPosition.logicalColumn -= columnsDiff;
        myContext.currentPosition.visualColumn -= columnsDiff;
        myContext.currentPosition.x -= pixelsDiff;
      }
    }
  }
  else if (actualSoftWrapOffset > offset) {
    // Case 2: process the symbols between the current offset and the wrap position first.
    myContext.onNonLineFeedSymbol(myContext.text.charAt(offset));
    for (int j = offset + 1; j < actualSoftWrapOffset; j++) {
      // Fixed: used to pass charAt(offset) here, re-measuring the same symbol on every iteration.
      myContext.onNonLineFeedSymbol(myContext.text.charAt(j));
    }
  }
  processSoftWrap(softWrap);
  myContext.currentPosition.offset = actualSoftWrapOffset;
  myOffset2fontType.clear();
  myOffset2widthInPixels.clear();
  if (checkIsDoneAfterSoftWrap()) {
    return true;
  }
  if (revertedToFoldRegion != null && myContext.currentPosition.offset == revertedToFoldRegion.getStartOffset()) {
    return processCollapsedFoldRegion(revertedToFoldRegion);
  }
  return false;
}
/**
 * Computes the 'x' coordinate that follows the given symbol when it is laid out at the current position.
 * Tabulation is expanded to the next tab stop; any other symbol advances by its rendered width.
 *
 * @param c symbol to measure
 * @return resulting 'x' coordinate after the symbol
 */
private int calculateNewX(char c) {
  return c == '\t'
         ? EditorUtil.nextTabStop(myContext.currentPosition.x, myEditor)
         : myContext.currentPosition.x + SoftWrapModelImpl.getEditorTextRepresentationHelper(myEditor).charWidth(c, myContext.fontType);
}
/**
 * Translates a symbol's pixel width into a column count: any non-tab symbol occupies exactly one column,
 * a tab occupies its pixel width divided by the plain-space width, rounded up.
 *
 * @param c                      symbol being measured
 * @param widthInPixels          symbol width in pixels
 * @param plainSpaceWithInPixels width of a plain-font space symbol in pixels
 * @return number of columns occupied by the symbol
 */
private static int calculateWidthInColumns(char c, int widthInPixels, int plainSpaceWithInPixels) {
  if (c != '\t') {
    return 1;
  }
  final int wholeColumns = widthInPixels / plainSpaceWithInPixels;
  final boolean hasRemainder = widthInPixels % plainSpaceWithInPixels > 0;
  return hasRemainder ? wholeColumns + 1 : wholeColumns;
}
/**
 * This method is assumed to be called in a situation when visible area width is exceeded. It tries to create and register
 * new soft wrap which data is defined in accordance with the given parameters.
 * <p/>
 * There is a possible case that no soft wrap is created and registered. That is true, for example, for a situation when
 * we have a long line of text that doesn't contain white spaces, operators or any other symbols that may be used
 * as a <code>'wrap points'</code>. We just left such lines as-is.
 *
 * @param minOffset       min line <code>'wrap point'</code> offset
 * @param preferredOffset preferred <code>'wrap point'</code> offset, i.e. max offset which symbol doesn't exceed right margin
 * @param maxOffset       max line <code>'wrap point'</code> offset
 * @param spaceSize       current space width in pixels
 * @param lineData        object that encapsulates information about currently processed logical line
 * @return newly created and registered soft wrap if any; <code>null</code> otherwise
 */
@Nullable
private SoftWrapImpl registerSoftWrap(int minOffset, int preferredOffset, int maxOffset, int spaceSize, LogicalLineData lineData) {
  // Cheap heuristics first: wrap after a nearby space, then after a nearby eastern-language symbol.
  int softWrapOffset = calculateBackwardSpaceOffsetIfPossible(minOffset, preferredOffset);
  if (softWrapOffset < 0) {
    softWrapOffset = calculateBackwardOffsetForEasternLanguageIfPossible(minOffset, preferredOffset);
  }
  if (softWrapOffset < 0) {
    Document document = myEditor.getDocument();
    // Performance optimization implied by profiling results analysis.
    if (myLineWrapPositionStrategy == null) {
      myLineWrapPositionStrategy = LanguageLineWrapPositionStrategy.INSTANCE.forEditor(myEditor);
    }
    softWrapOffset = myLineWrapPositionStrategy.calculateWrapPosition(
      document, myEditor.getProject(), minOffset, maxOffset, preferredOffset, true, true
    );
  }
  // Reject unusable offsets: past the line end, not found, degenerate (indent-only prefix under custom indent),
  // or worse than falling back to the known fold-region wrap position.
  if (softWrapOffset >= lineData.endLineOffset || softWrapOffset < 0
      || (myCustomIndentUsedLastTime && softWrapOffset == lineData.nonWhiteSpaceSymbolOffset)
      || (softWrapOffset > preferredOffset && myContext.lastFoldStartPosition != null // Prefer to wrap on fold region backwards
          && myContext.lastFoldStartPosition.offset <= preferredOffset))              // to wrapping forwards.
  {
    return null;
  }
  return registerSoftWrap(softWrapOffset, spaceSize, lineData);
}
/**
 * Creates a soft wrap at the given offset, computes its indent (honoring the custom-indent setting)
 * and stores it within the soft wraps storage.
 *
 * @param offset    offset to wrap at
 * @param spaceSize space width in pixels for the font currently in use
 * @param lineData  indent information about the logical line being processed
 * @return newly registered soft wrap
 */
@NotNull
private SoftWrapImpl registerSoftWrap(int offset, int spaceSize, LogicalLineData lineData) {
  int columns = 0;
  int pixels = myPainter.getMinDrawingWidth(SoftWrapDrawingType.AFTER_SOFT_WRAP);
  if (myCustomIndentUsedLastTime) {
    // Custom indent: the logical line's own indent plus the user-configured additional indent.
    columns = myCustomIndentValueUsedLastTime + lineData.indentInColumns;
    pixels += lineData.indentInPixels + (myCustomIndentValueUsedLastTime * spaceSize);
  }
  SoftWrapImpl softWrap = new SoftWrapImpl(
    new TextChangeImpl("\n" + StringUtil.repeatSymbol(' ', columns), offset, offset),
    columns + 1/* for 'after soft wrap' drawing */,
    pixels
  );
  myStorage.storeOrReplace(softWrap);
  return softWrap;
}
/**
 * It was found out that frequent soft wrap position calculation may become a performance bottleneck
 * (e.g. an application run under IJ that writes long strings to stdout non-stop). Hence this shortcut:
 * scan backwards from the preferred offset - at most a fixed number of symbols and never before
 * {@code minOffset} - and wrap just after the nearest space symbol, if one is found.
 *
 * @param minOffset       min offset to use (inclusive)
 * @param preferredOffset max offset to use (inclusive)
 * @return offset just after a space symbol within <code>[minOffset; preferredOffset]</code> if any;
 *         <code>'-1'</code> otherwise
 */
private int calculateBackwardSpaceOffsetIfPossible(int minOffset, int preferredOffset) {
  // Don't wander too far back from the preferred position - a distant wrap is worse than no shortcut.
  final int maxTrackBackSymbolsNumber = 10;
  final int lowerBound = Math.max(minOffset, preferredOffset - maxTrackBackSymbolsNumber);
  for (int candidate = preferredOffset - 1; candidate >= lowerBound; candidate--) {
    if (myContext.text.charAt(candidate) == ' ') {
      return candidate + 1;
    }
  }
  return -1;
}
/**
 * There is a possible case that the current line holds eastern language symbols (e.g. japanese text).
 * We want to allow a soft wrap just after such symbols; this method scans backwards from the preferred
 * offset - at most a fixed number of symbols and never before {@code minOffset} - looking for one.
 *
 * @param minOffset       min offset to use (inclusive)
 * @param preferredOffset max offset to use (inclusive)
 * @return soft wrap offset that belongs to <code>[minOffset; preferredOffset]</code> interval if any;
 *         <code>'-1'</code> otherwise
 */
public int calculateBackwardOffsetForEasternLanguageIfPossible(int minOffset, int preferredOffset) {
  // Same track-back cap as the space-based shortcut - don't wrap too far from the preferred position.
  final int maxTrackBackSymbolsNumber = 10;
  final int lowerBound = Math.max(minOffset, preferredOffset - maxTrackBackSymbolsNumber);
  for (int candidate = preferredOffset - 1; candidate >= lowerBound; candidate--) {
    // Check this document for eastern languages unicode ranges - http://www.unicode.org/charts
    if (myContext.text.charAt(candidate) >= 0x2f00) {
      return candidate + 1;
    }
  }
  return -1;
}
/**
 * Applies the given soft wrap to the current processing position: emulates a line feed (bracketed by
 * before/after listener notifications), starts the new visual line at the soft wrap indent, and advances
 * the minimal offset usable for the next soft wrap.
 *
 * @param softWrap soft wrap to apply at the current position
 */
private void processSoftWrap(SoftWrap softWrap) {
  notifyListenersOnSoftWrapLineFeed(true);
  EditorPosition position = myContext.currentPosition;
  position.visualColumn = 0;
  position.softWrapColumnDiff = position.visualColumn - position.foldingColumnDiff - position.logicalColumn;
  position.softWrapLinesCurrent++;
  position.visualLine++;
  notifyListenersOnSoftWrapLineFeed(false);
  // The wrapped-to line becomes the new visual line start.
  myContext.lineStartPosition.from(myContext.currentPosition);
  position.x = softWrap.getIndentInPixels();
  position.visualColumn = softWrap.getIndentInColumns();
  position.softWrapColumnDiff += softWrap.getIndentInColumns();
  // The next soft wrap must be strictly after this one.
  myContext.softWrapStartOffset = softWrap.getStart() + 1;
  myContext.clearLastFoldInfo();
}
/**
 * There is a possible case that we need to reparse the whole document (e.g. visible area width is changed or user-defined
 * soft wrap indent is changed etc). This method encapsulates that logic, i.e. it checks if necessary conditions are satisfied
 * and updates internal state as necessary.
 *
 * @return <code>true</code> if re-calculation logic was performed;
 *         <code>false</code> otherwise (e.g. we need to perform re-calculation but current editor is not shown, i.e. we don't
 *         have information about viewport width)
 */
public boolean recalculateIfNecessary() {
  // Re-entrancy guard: a recalculation pass is already running.
  if (myInProgress) {
    return false;
  }
  // Check if we need to recalculate soft wraps due to indent settings change.
  boolean indentChanged = false;
  IndentType currentIndentType = getIndentToUse();
  boolean useCustomIndent = currentIndentType == IndentType.CUSTOM;
  int currentCustomIndent = myEditor.getSettings().getCustomSoftWrapIndent();
  if (useCustomIndent ^ myCustomIndentUsedLastTime || (useCustomIndent && myCustomIndentValueUsedLastTime != currentCustomIndent)) {
    indentChanged = true;
  }
  myCustomIndentUsedLastTime = useCustomIndent;
  myCustomIndentValueUsedLastTime = currentCustomIndent;
  // Check if we need to recalculate soft wraps due to visible area width change.
  int currentVisibleAreaWidth = myWidthProvider.getVisibleAreaWidth();
  if (!indentChanged && myVisibleAreaWidth == currentVisibleAreaWidth) {
    return recalculateSoftWraps(); // Recalculate existing dirty regions if any.
  }
  // Lazily capture the vertical scroll bar width (falling back to the preferred size when not yet laid out).
  final JScrollBar scrollBar = myEditor.getScrollPane().getVerticalScrollBar();
  if (myVerticalScrollBarWidth < 0) {
    myVerticalScrollBarWidth = scrollBar.getWidth();
    if (myVerticalScrollBarWidth <= 0) {
      myVerticalScrollBarWidth = scrollBar.getPreferredSize().width;
    }
  }
  // We experienced the following situation:
  //   1. Editor is configured to show scroll bars only when necessary;
  //   2. Editor with active soft wraps is changed in order for the vertical scroll bar to appear;
  //   3. Vertical scrollbar consumes vertical space, hence, soft wraps are recalculated because of the visual area width change;
  //   4. Newly recalculated soft wraps trigger editor size update;
  //   5. Editor size update starts scroll pane update which, in turn, disables vertical scroll bar at first (the reason for that
  //      lays somewhere at the swing depth);
  //   6. Soft wraps are recalculated because of visible area width change caused by the disabled vertical scroll bar;
  //   7. Go to the step 4;
  // I.e. we have an endless EDT activity that stops only when editor is re-sized in a way to avoid vertical scroll bar.
  // That's why we don't recalculate soft wraps when visual area width is changed to the vertical scroll bar width value assuming
  // that such a situation is triggered by the scroll bar (dis)appearance.
  if (Math.abs(currentVisibleAreaWidth - myVisibleAreaWidth) == myVerticalScrollBarWidth) {
    myVisibleAreaWidth = currentVisibleAreaWidth;
    return recalculateSoftWraps();
  }
  // We want to adjust viewport's 'y' coordinate on complete recalculation, so, we remember number of soft-wrapped lines
  // before the target offset on recalculation start and compare it with the number of soft-wrapped lines before the same offset
  // after the recalculation.
  int softWrapsBefore = -1;
  final ScrollingModelEx scrollingModel = myEditor.getScrollingModel();
  int yScrollOffset = scrollingModel.getVerticalScrollOffset();
  int anchorOffset = myLastTopLeftCornerOffset;
  if (anchorOffset >= 0) {
    softWrapsBefore = getNumberOfSoftWrapsBefore(anchorOffset);
  }
  // Drop information about processed lines.
  reset();
  myStorage.removeAll();
  myVisibleAreaWidth = currentVisibleAreaWidth;
  final boolean result = recalculateSoftWraps();
  if (!result) {
    return false;
  }
  // Adjust viewport's 'y' coordinate if necessary.
  if (softWrapsBefore >= 0) {
    int softWrapsNow = getNumberOfSoftWrapsBefore(anchorOffset);
    if (softWrapsNow != softWrapsBefore) {
      scrollingModel.disableAnimation();
      try {
        scrollingModel.scrollVertically(yScrollOffset + (softWrapsNow - softWrapsBefore) * myEditor.getLineHeight());
      }
      finally {
        scrollingModel.enableAnimation();
      }
    }
  }
  updateLastTopLeftCornerOffset();
  return true;
}
/**
 * Remembers the document offset of the first visible visual line so that the viewport position can be
 * preserved across a complete soft wrap recalculation.
 */
private void updateLastTopLeftCornerOffset() {
  int visualLine = 1 + myEditor.getScrollingModel().getVisibleArea().y / myEditor.getLineHeight();
  if (myEditor.myUseNewRendering) {
    myLastTopLeftCornerOffset = myEditor.visualLineStartOffset(visualLine);
  }
  else {
    myLastTopLeftCornerOffset = myDataMapper.getVisualLineStartOffset(visualLine);
  }
}
/**
 * @param offset target document offset
 * @return number of registered soft wraps located before the given offset
 */
private int getNumberOfSoftWrapsBefore(int offset) {
  final int index = myStorage.getSoftWrapIndex(offset);
  if (index >= 0) {
    return index;
  }
  // A negative result encodes '-(insertion point) - 1'; decode it back to the insertion point.
  return -index - 1;
}
/**
 * @return soft wrap indent strategy configured in the editor settings
 */
private IndentType getIndentToUse() {
  if (myEditor.getSettings().isUseCustomSoftWrapIndent()) {
    return IndentType.CUSTOM;
  }
  return IndentType.NONE;
}
/**
 * Registers given listener within the current manager.
 *
 * @param listener listener to register
 * @return <code>true</code> if this collection changed as a result of the call; <code>false</code> otherwise
 */
public boolean addListener(@NotNull SoftWrapAwareDocumentParsingListener listener) {
  return myListeners.add(listener);
}
/**
 * De-registers given listener from the current manager.
 *
 * @param listener listener to remove
 * @return <code>true</code> if this collection changed as a result of the call; <code>false</code> otherwise
 */
public boolean removeListener(@NotNull SoftWrapAwareDocumentParsingListener listener) {
  return myListeners.remove(listener);
}
/**
 * Asks every registered listener to revert its state back to the given offset/visual line
 * (used when processing is rewound, e.g. to re-wrap before a fold region).
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void revertListeners(int offset, int visualLine) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.revertToOffset(offset, visualLine);
  }
}
/**
 * Notifies all registered listeners that a collapsed fold region was processed.
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnFoldRegion(@NotNull FoldRegion foldRegion, int collapsedFoldingWidthInColumns, int visualLine) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.onCollapsedFoldRegion(foldRegion, collapsedFoldingWidthInColumns, visualLine);
  }
}
/**
 * Notifies all registered listeners that processing of a visual line has started at the given position.
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnVisualLineStart(@NotNull EditorPosition position) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.onVisualLineStart(position);
  }
}
/**
 * Notifies all registered listeners that the current visual line ended at the current position.
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnVisualLineEnd() {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.onVisualLineEnd(myContext.currentPosition);
  }
}
/**
 * Notifies all registered listeners that a tabulation symbol occupying the given number of columns was processed.
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnTabulation(int widthInColumns) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.onTabulation(myContext.currentPosition, widthInColumns);
  }
}
/**
 * Notifies all registered listeners about a soft wrap induced line feed.
 *
 * @param before <code>true</code> to fire the 'before line feed' callback at the current position;
 *               <code>false</code> to fire the 'after line feed' callback
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnSoftWrapLineFeed(boolean before) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    if (before) {
      listener.beforeSoftWrapLineFeed(myContext.currentPosition);
    }
    else {
      listener.afterSoftWrapLineFeed(myContext.currentPosition);
    }
  }
}
/**
 * Notifies all registered listeners that an incremental cache update pass is starting.
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnCacheUpdateStart(IncrementalCacheUpdateEvent event) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.onCacheUpdateStart(event);
  }
}
/**
 * Notifies all registered listeners that an incremental cache update pass has finished.
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
private void notifyListenersOnCacheUpdateEnd(IncrementalCacheUpdateEvent event) {
  for (int i = 0; i < myListeners.size(); i++) {
    // Avoid unnecessary Iterator object construction as this method is expected to be called frequently.
    SoftWrapAwareDocumentParsingListener listener = myListeners.get(i);
    listener.onRecalculationEnd(event);
  }
}
/**
 * Captures the pre-change document state into an incremental update event; consumed by
 * {@link #documentChanged(DocumentEvent)} after the change is applied.
 */
public void beforeDocumentChange(DocumentEvent event) {
  myDocumentChangedEvent = new IncrementalCacheUpdateEvent(event, myDataMapper, myEditor);
}
/**
 * Completes the event captured in {@link #beforeDocumentChange(DocumentEvent)} with the post-change
 * document state and triggers an incremental soft wrap recalculation for the affected region.
 */
public void documentChanged(DocumentEvent event) {
  // beforeDocumentChange() must always have been called first.
  LOG.assertTrue(myDocumentChangedEvent != null);
  myDocumentChangedEvent.updateAfterDocumentChange(event.getDocument());
  recalculate(myDocumentChangedEvent);
  myDocumentChangedEvent = null;
}
/**
 * Replaces the strategy used to obtain the visible area width (primarily for tests) and drops all
 * previously computed soft wrap data, since it was based on the old width.
 *
 * @param widthProvider new visible area width provider
 */
public void setWidthProvider(@NotNull VisibleAreaWidthProvider widthProvider) {
  myWidthProvider = widthProvider;
  reset();
}
/**
 * @return short human-readable dump of the manager's processing state (used for diagnostics)
 */
@NotNull
@Override
public String dumpState() {
  // Plain concatenation producing exactly the same text as the previous String.format("%b"/"%s") version.
  return "recalculation in progress: " + myInProgress
         + "; event being processed: " + myEventBeingProcessed;
}
/**
 * Delegates to {@link #dumpState()} so that logging this manager shows its processing state.
 */
@Override
public String toString() {
  return dumpState();
}
/**
 * Test hook: replaces the painter used to compute the 'after soft wrap' drawing width.
 */
@TestOnly
public void setSoftWrapPainter(SoftWrapPainter painter) {
  myPainter = painter;
}
/**
 * We need to use correct indent for soft-wrapped lines, i.e. they should be indented to the start of the logical line.
 * This class stores information about logical line start indent.
 */
private class LogicalLineData {
  // Indent of the logical line measured in columns.
  public int indentInColumns;
  // Indent of the logical line measured in pixels.
  public int indentInPixels;
  // Offset of the logical line's end.
  public int endLineOffset;
  // Offset of the first non-white-space symbol on the line; -1 while none has been found.
  public int nonWhiteSpaceSymbolOffset;
  /**
   * Recomputes this object's state for the given logical line: measures its leading white space
   * (spaces and tabs) in columns and pixels and records the first non-white-space offset.
   *
   * @param logicalLine     index of the logical line to inspect
   * @param spaceWidth      width of a space symbol in pixels for the current font
   * @param plainSpaceWidth width of a plain-font space symbol in pixels (used for tab column math)
   */
  public void update(int logicalLine, int spaceWidth, int plainSpaceWidth) {
    Document document = myEditor.getDocument();
    int startLineOffset;
    // Line index past the document end - treat as an empty line at the document tail.
    if (logicalLine >= document.getLineCount()) {
      startLineOffset = endLineOffset = document.getTextLength();
    }
    else {
      startLineOffset = document.getLineStartOffset(logicalLine);
      endLineOffset = document.getLineEndOffset(logicalLine);
    }
    CharSequence text = document.getCharsSequence();
    indentInColumns = 0;
    indentInPixels = 0;
    nonWhiteSpaceSymbolOffset = -1;
    for (int i = startLineOffset; i < endLineOffset; i++) {
      char c = text.charAt(i);
      switch (c) {
        case ' ': indentInColumns += 1; indentInPixels += spaceWidth; break;
        case '\t':
          // Expand the tab to the next tab stop and convert the consumed pixels into columns.
          int x = EditorUtil.nextTabStop(indentInPixels, myEditor);
          indentInColumns += calculateWidthInColumns(c, x - indentInPixels, plainSpaceWidth);
          indentInPixels = x;
          break;
        // First non-white-space symbol ends the indent scan.
        default: nonWhiteSpaceSymbolOffset = i; return;
      }
    }
  }
  /**
   * There is a possible case that all document line symbols before the first soft wrap are white spaces. We don't want to use
   * such a big indent then.
   * <p/>
   * This method encapsulates logic that 'resets' indent to use if such a situation is detected.
   *
   * @param softWrapOffset offset of the soft wrap that occurred on document line which data is stored at the current object
   */
  public void update(int softWrapOffset) {
    if (nonWhiteSpaceSymbolOffset >= 0 && softWrapOffset > nonWhiteSpaceSymbolOffset) {
      return;
    }
    indentInColumns = 0;
    indentInPixels = 0;
  }
  /**
   * Clears the measured indent and line-end data.
   */
  public void reset() {
    indentInColumns = 0;
    indentInPixels = 0;
    endLineOffset = 0;
  }
}
/**
 * This interface is introduced mostly for encapsulating GUI-specific values retrieval and make it possible to write
 * tests for soft wraps processing.
 */
public interface VisibleAreaWidthProvider {
  /**
   * @return current width in pixels of the area available for text layout
   */
  int getVisibleAreaWidth();
}
/**
 * Production implementation of {@link VisibleAreaWidthProvider} backed by the editor's scrolling model.
 */
private static class DefaultVisibleAreaWidthProvider implements VisibleAreaWidthProvider {
  private final Editor myEditor;
  DefaultVisibleAreaWidthProvider(Editor editor) {
    myEditor = editor;
  }
  @Override
  public int getVisibleAreaWidth() {
    return myEditor.getScrollingModel().getVisibleArea().width;
  }
}
/**
 * Primitive array-based data structure that contain mappings like {@code int -> int}.
 * <p/>
 * The key is array index plus anchor; the value is array value.
 */
private static class WidthsStorage {
  // Stored width values; entry for key k lives at data[k - anchor].
  public int[] data = new int[256];
  // Key offset of the first stored entry.
  public int anchor;
  // Number of stored entries.
  public int end;
  public void clear() {
    anchor = 0;
    end = 0;
  }
}
/**
 *
 * We need to be able to track back font types to offsets mappings because text processing may be shifted back because of soft wrap.
 * <p/>
 * <b>Example</b>
 * Suppose with have this line of text that should be soft-wrapped
 * <pre>
 *                       | &lt;- right margin
 *     token1 token2-toke|n3
 *                       | &lt;- right margin
 * </pre>
 * It's possible that <code>'token1'</code>, white spaces and <code>'token2'</code> use different font types and
 * soft wrapping should be performed between <code>'token1'</code> and <code>'token2'</code>. We need to be able to
 * match offsets of <code>'token2'</code> to font types then.
 * <p/>
 * There is an additional trick here - there is a possible case that a bunch number of adjacent symbols use the same font
 * type (are marked by {@link IterationState} as a single token. That is often the case for plain text). We don't want to
 * store those huge mappings then (it may take over million records) because it's indicated by profiling as extremely expensive
 * and causing unnecessary garbage collections that dramatically reduce overall application throughput.
 * <p/>
 * Hence, we want to restrict ourselves by storing information about particular sub-sequence of overall token offsets.
 * <p/>
 * This is primitive array-based data structure that contains {@code offset -> font type} mappings.
 */
private static class FontTypesStorage {
  // Parallel arrays: [myStarts[i]; myEnds[i]) maps to font type myData[i].
  private int[] myStarts = new int[256];
  private int[] myEnds = new int[256];
  private int[] myData = new int[256];
  // Index of the last used entry; -1 when the storage is empty.
  private int myLastIndex = -1;
  /**
   * Records that the offsets of <code>[start; end)</code> use the given font type value.
   * Adjacent ranges with the same value are merged into the last stored entry.
   */
  public void fill(int start, int end, int value) {
    // Extend the previous range when it has the same value and is contiguous with the new one.
    if (myLastIndex >= 0 && myData[myLastIndex] == value && myEnds[myLastIndex] == start) {
      myEnds[myLastIndex] = end;
      return;
    }
    if (++myLastIndex >= myData.length) {
      expand();
    }
    myStarts[myLastIndex] = start;
    myEnds[myLastIndex] = end;
    myData[myLastIndex] = value;
  }
  /**
   * Tries to retrieve stored value for the given offset if any;
   *
   * @param offset target offset
   * @return target value if any is stored; <code>-1</code> otherwise
   */
  public int get(int offset) {
    // Scan ranges from the most recently stored backwards; ranges are stored in ascending start order,
    // so the scan may stop as soon as a range ends before the requested offset.
    if (myLastIndex < 0) {
      return -1;
    }
    for (int i = myLastIndex; i >= 0 && myEnds[i] >= offset; i--) {
      if (myStarts[i] <= offset) {
        return myData[i];
      }
    }
    return -1;
  }
  public void clear() {
    myLastIndex = -1;
  }
  // Doubles the capacity of all three parallel arrays, preserving their content.
  private void expand() {
    int[] tmp = new int[myStarts.length * 2];
    System.arraycopy(myStarts, 0, tmp, 0, myStarts.length);
    myStarts = tmp;
    tmp = new int[myEnds.length * 2];
    System.arraycopy(myEnds, 0, tmp, 0, myEnds.length);
    myEnds = tmp;
    tmp = new int[myData.length * 2];
    System.arraycopy(myData, 0, tmp, 0, myData.length);
    myData = tmp;
  }
}
// Mutable state of a single soft wrap recalculation pass. (Class continues below.)
private class ProcessingContext {
  // Cached space widths per font style (avoids repeated EditorUtil lookups).
  public final PrimitiveIntMap fontType2spaceWidth = new PrimitiveIntMap();
  // Indent information about the logical line currently being processed.
  public final LogicalLineData logicalLineData = new LogicalLineData();
  // Document text being processed.
  public CharSequence text;
  // Position of the current visual line's start.
  public EditorPosition lineStartPosition;
  // Position being advanced during processing.
  public EditorPosition currentPosition;
  /**
   * Start position of the last collapsed fold region that is located at the current visual line and can be used as a fall back
   * position for soft wrapping.
   */
  public EditorPosition lastFoldStartPosition;
  // End position of that same fold region, if any.
  public EditorPosition lastFoldEndPosition;
  /** A fold region referenced by the {@link #lastFoldStartPosition}. */
  public FoldRegion lastFold;
  // Soft wrap whose start offset lies ahead of the current position; applied once that offset is reached.
  public SoftWrapImpl delayedSoftWrap;
  // Horizontal space (in pixels) reserved at the right edge, e.g. for the soft wrap sign.
  public int reservedWidthInPixels;
  /**
   * Min offset to use when new soft wrap should be introduced. I.e. every time we detect that text exceeds visual width,
   */
  public int softWrapStartOffset;
  // End offset (inclusive bound) of the range being recalculated.
  public int rangeEndOffset;
  // Offsets of the token currently being processed (same-font symbol run).
  public int tokenStartOffset;
  public int tokenEndOffset;
  @JdkConstants.FontStyle
  public int fontType;
  // Set when remaining symbols of the line should be skipped until a wrap is created.
  public boolean skipToLineEnd;
@Override
public String toString() {
return "reserved width: " + reservedWidthInPixels + ", soft wrap start offset: " + softWrapStartOffset + ", range end offset: "
+ rangeEndOffset + ", token offsets: [" + tokenStartOffset + "; " + tokenEndOffset + "], font type: " + fontType
+ ", skip to line end: " + skipToLineEnd + ", delayed soft wrap: " + delayedSoftWrap + ", current position: "+ currentPosition
+ "line start position: " + lineStartPosition;
}
public void reset() {
text = null;
lineStartPosition = null;
currentPosition = null;
clearLastFoldInfo();
delayedSoftWrap = null;
reservedWidthInPixels = 0;
softWrapStartOffset = 0;
rangeEndOffset = 0;
tokenStartOffset = 0;
tokenEndOffset = 0;
fontType = 0;
skipToLineEnd = false;
fontType2spaceWidth.reset();
logicalLineData.reset();
}
public int getSpaceWidth() {
return getSpaceWidth(fontType);
}
public int getPlainSpaceWidth() {
return getSpaceWidth(Font.PLAIN);
}
private int getSpaceWidth(@JdkConstants.FontStyle int fontType) {
int result = fontType2spaceWidth.get(fontType);
if (result <= 0) {
result = EditorUtil.getSpaceWidth(fontType, myEditor);
fontType2spaceWidth.put(fontType, result);
}
assert result > 0;
return result;
}
/**
* Asks current context to update its state assuming that it begins to point to the line next to its current position.
*/
@SuppressWarnings("MagicConstant")
public void onNewLine() {
notifyListenersOnVisualLineEnd();
currentPosition.onNewLine();
softWrapStartOffset = currentPosition.offset;
clearLastFoldInfo();
lineStartPosition.from(currentPosition);
logicalLineData.update(currentPosition.logicalLine, getSpaceWidth(), getPlainSpaceWidth());
fontType = myOffset2fontType.get(currentPosition.offset);
myOffset2fontType.clear();
myOffset2widthInPixels.clear();
skipToLineEnd = false;
}
private void clearLastFoldInfo() {
lastFoldStartPosition = null;
lastFoldEndPosition = null;
lastFold = null;
}
public void onNonLineFeedSymbol(char c) {
int newX;
if (myOffset2widthInPixels.end > myContext.currentPosition.offset
&& (myOffset2widthInPixels.anchor + myOffset2widthInPixels.end > myContext.currentPosition.offset)
&& myContext.currentPosition.symbol != '\t'/*we need to recalculate tabulation width after soft wrap*/)
{
newX = myContext.currentPosition.x + myOffset2widthInPixels.data[myContext.currentPosition.offset - myOffset2widthInPixels.anchor];
}
else {
newX = calculateNewX(c);
}
onNonLineFeedSymbol(c, newX);
}
@SuppressWarnings("MagicConstant")
public void onNonLineFeedSymbol(char c, int newX) {
int widthInPixels = newX - myContext.currentPosition.x;
if (myOffset2widthInPixels.anchor <= 0) {
myOffset2widthInPixels.anchor = currentPosition.offset;
}
if (currentPosition.offset - myOffset2widthInPixels.anchor >= myOffset2widthInPixels.data.length) {
int newLength = Math.max(myOffset2widthInPixels.data.length * 2, currentPosition.offset - myOffset2widthInPixels.anchor + 1);
int[] newData = new int[newLength];
System.arraycopy(myOffset2widthInPixels.data, 0, newData, 0, myOffset2widthInPixels.data.length);
myOffset2widthInPixels.data = newData;
}
myOffset2widthInPixels.data[currentPosition.offset - myOffset2widthInPixels.anchor] = widthInPixels;
myOffset2widthInPixels.end++;
int widthInColumns = calculateWidthInColumns(c, widthInPixels, myContext.getPlainSpaceWidth());
if (c == '\t') {
notifyListenersOnVisualLineStart(myContext.lineStartPosition);
notifyListenersOnTabulation(widthInColumns);
}
currentPosition.logicalColumn += widthInColumns;
currentPosition.visualColumn += widthInColumns;
currentPosition.x = newX;
currentPosition.offset++;
fontType = myOffset2fontType.get(currentPosition.offset);
}
/**
* Updates state of the current context object in order to point to the end of the given collapsed fold region.
*
* @param foldRegion collapsed fold region to process
*/
private void advance(FoldRegion foldRegion, int placeHolderWidthInPixels) {
lastFoldStartPosition = currentPosition.clone();
lastFold = foldRegion;
int visualLineBefore = currentPosition.visualLine;
int logicalLineBefore = currentPosition.logicalLine;
int logicalColumnBefore = currentPosition.logicalColumn;
currentPosition.advance(foldRegion, -1);
currentPosition.x += placeHolderWidthInPixels;
int collapsedFoldingWidthInColumns = currentPosition.logicalColumn;
if (currentPosition.logicalLine <= logicalLineBefore) {
// Single-line fold region.
collapsedFoldingWidthInColumns = currentPosition.logicalColumn - logicalColumnBefore;
}
else {
final DocumentEx document = myEditor.getDocument();
int endFoldLine = document.getLineNumber(foldRegion.getEndOffset());
logicalLineData.endLineOffset = document.getLineEndOffset(endFoldLine);
}
notifyListenersOnFoldRegion(foldRegion, collapsedFoldingWidthInColumns, visualLineBefore);
tokenStartOffset = myContext.currentPosition.offset;
softWrapStartOffset = foldRegion.getEndOffset();
lastFoldEndPosition = currentPosition.clone();
}
/**
* Asks current context to update its state in order to show to the first symbol of the next visual line if it belongs to
* [{@link #tokenStartOffset}; {@link #skipToLineEnd} is set to <code>'true'</code> otherwise
*/
public boolean tryToShiftToNextLine() {
for (int i = currentPosition.offset; i < tokenEndOffset; i++) {
char c = text.charAt(i);
currentPosition.offset = i;
if (c == '\n') {
onNewLine(); // Assuming that offset is incremented during this method call
return checkIsDoneAfterNewLine();
}
else {
onNonLineFeedSymbol(c, offsetToX(i, c));
}
}
skipToLineEnd = true;
return false;
}
/**
* Allows to answer if point with the given <code>'x'</code> coordinate exceeds visual area's right edge.
*
* @param x target <code>'x'</code> coordinate to check
* @return <code>true</code> if given <code>'x'</code> coordinate exceeds visual area's right edge; <code>false</code> otherwise
*/
public boolean exceedsVisualEdge(int x) {
return x >= myVisibleAreaWidth;
}
}
/**
 * Primitive data structure to hold {@code int -> int} mappings assuming that the following is true:
 * <pre>
 * <ul>
 *   <li>number of entries is small;</li>
 *   <li>the keys are roughly adjacent (possibly negative);</li>
 * </ul>
 * </pre>
 * Note: {@link #get(int)} returns {@code -1} for keys outside the backing array, but {@code 0}
 * for in-range keys that were never {@link #put(int, int)} — callers are expected to treat any
 * non-positive result as "absent".
 */
private static class PrimitiveIntMap {

    // Backing storage; key k is stored at index k + myShift.
    private int[] myData = new int[16];
    // Offset applied to keys so that negative keys map to non-negative indices.
    private int myShift;

    /**
     * @param key target key
     * @return stored value, or {@code -1} when the key lies outside the backing array
     */
    public int get(int key) {
        int index = key + myShift;
        if (index < 0 || index >= myData.length) {
            return -1;
        }
        return myData[index];
    }

    /**
     * Stores the given mapping, growing the backing array as necessary.
     * <p>
     * Fix: the original implementation only grew the array for negative indices and threw
     * {@link ArrayIndexOutOfBoundsException} when {@code key + myShift} exceeded the current
     * capacity; large keys are now handled by doubling (or extending) the array.
     */
    public void put(int key, int value) {
        int index = key + myShift;
        if (index < 0) {
            // Shift the whole array right so the new smallest key lands at index 0.
            int[] tmp = new int[myData.length - index];
            System.arraycopy(myData, 0, tmp, -index, myData.length);
            myData = tmp;
            myShift -= index;
            index = 0;
        }
        else if (index >= myData.length) {
            // Grow on the positive side as well (at least doubling to amortize cost).
            myData = Arrays.copyOf(myData, Math.max(myData.length * 2, index + 1));
        }
        myData[index] = value;
    }

    /** Clears all stored values; capacity is retained. */
    public void reset() {
        myShift = 0;
        Arrays.fill(myData, 0);
    }
}
}
| apache-2.0 |
vsch/MissingInActions | src/com/vladsch/MissingInActions/actions/character/identifier/NextWordStartWithSelectionAction.java | 1539 | /*
* Copyright (c) 2016-2018 Vladimir Schneider <vladimir.schneider@gmail.com>
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: max
* Date: May 14, 2002
* Time: 6:49:27 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.vladsch.MissingInActions.actions.character.identifier;
import com.vladsch.MissingInActions.actions.DumbAwareTextComponentEditorAction;
import com.vladsch.MissingInActions.actions.LineSelectionAware;
/**
 * Editor action wired to a {@code NextOrPrevWordStartHandler}; judging by its name it moves the
 * caret to the start of the next word while extending the selection (behavior is defined entirely
 * by the handler). Dumb-aware, so it stays available during indexing.
 */
public class NextWordStartWithSelectionAction extends DumbAwareTextComponentEditorAction implements LineSelectionAware {
    public NextWordStartWithSelectionAction() {
        // NextOrPrevWordStartHandler(true, true, false): flag meanings are declared by the
        // handler's constructor (not visible here) — presumably next-vs-previous and
        // with-selection; TODO(review) confirm against NextOrPrevWordStartHandler.
        super(new NextOrPrevWordStartHandler(true, true, false));
    }
}
| apache-2.0 |
package org.swtk.commons.dict.wordnet.indexbyname.instance.p.l.i;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;

import org.swtk.common.dict.dto.wordnet.IndexNoun;

import com.trimc.blogger.commons.utils.GsonUtils;

/**
 * Static, eagerly-populated index of WordNet noun entries whose names fall in the
 * "p/l/i" shard. Each entry is parsed from its JSON form once, at class-load time,
 * and stored in a sorted map keyed by term.
 */
public final class WordnetNounIndexNameInstancePLI {

    private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>();

    static {
        add("{\"term\":\"pliability\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"04667087\", \"05029875\"]}");
        add("{\"term\":\"pliancy\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"04667087\", \"05030188\"]}");
        add("{\"term\":\"pliantness\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"04667087\", \"05030188\"]}");
        add("{\"term\":\"plica\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05499911\"]}");
        add("{\"term\":\"plica vocalis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05538062\"]}");
        add("{\"term\":\"plication\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00407621\", \"13930429\"]}");
        add("{\"term\":\"plicatoperipatus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02003008\"]}");
        add("{\"term\":\"plicatoperipatus jamaicensis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02003156\"]}");
        add("{\"term\":\"plier\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10461667\"]}");
        add("{\"term\":\"pliers\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03973308\"]}");
        add("{\"term\":\"plight\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07242765\", \"14432050\"]}");
        add("{\"term\":\"plimsoll\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"03973602\", \"08613892\"]}");
        add("{\"term\":\"plimsoll line\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08613892\"]}");
        add("{\"term\":\"plimsoll mark\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08613892\"]}");
        add("{\"term\":\"plinian eruption\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07420252\"]}");
        add("{\"term\":\"plinth\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03909987\"]}");
        add("{\"term\":\"pliny\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"11259557\", \"11259755\"]}");
        add("{\"term\":\"pliny the elder\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11259557\"]}");
        add("{\"term\":\"pliny the younger\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11259755\"]}");
        add("{\"term\":\"pliocene\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15150275\"]}");
        add("{\"term\":\"pliocene epoch\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15150275\"]}");
    }

    /** Parses one JSON entry and appends it to the list of senses registered for its term. */
    private static void add(final String json) {
        IndexNoun entry = GsonUtils.toObject(json, IndexNoun.class);
        String term = entry.getTerm();
        Collection<IndexNoun> senses = map.get(term);
        if (senses == null) {
            senses = new ArrayList<IndexNoun>();
        }
        senses.add(entry);
        map.put(term, senses);
    }

    /** @return all senses registered for the term, or {@code null} when the term is unknown */
    public static Collection<IndexNoun> get(final String TERM) {
        return map.get(TERM);
    }

    /** @return whether the term is present in this shard */
    public static boolean has(final String TERM) {
        return map.containsKey(TERM);
    }

    /** @return all indexed terms, in natural (sorted) order */
    public static Collection<String> terms() {
        return map.keySet();
    }
}
rndsolutions/hawkcd | Server/src/main/java/io/hawkcd/core/security/AuthorizationFactory.java | 229 | package io.hawkcd.core.security;
/**
* Created by rado on 14.11.16.
*/
/**
 * Factory for obtaining {@link IAuthorizationManager} implementations, decoupling callers
 * from the concrete {@code AuthorizationManager} class.
 */
public class AuthorizationFactory {

    /** Static-only factory: prevent instantiation. */
    private AuthorizationFactory() {
    }

    /**
     * @return a new {@link AuthorizationManager} instance on every call
     */
    public static IAuthorizationManager getAuthorizationManager() {
        return new AuthorizationManager();
    }
}
| apache-2.0 |
cocoJamboo/jboss-eap-6.4.0.GA-quickstarts | logging-tools/target/generated-sources/annotations/org/jboss/as/quickstarts/loggingToolsQS/exceptions/GreeterExceptionBundle_$bundle_de_DE.java | 1159 |
package org.jboss.as.quickstarts.loggingToolsQS.exceptions;
import javax.annotation.Generated;
/**
 * Warning this class consists of generated code.
 * <p>
 * German (de_DE) translation of the {@code GreeterExceptionBundle} messages, produced by the
 * JBoss Logging annotation processor. Do not edit by hand — changes will be overwritten on
 * regeneration.
 */
@Generated(value = "org.jboss.logging.processor.generator.model.MessageBundleTranslator", date = "2015-10-14T23:22:03+0200")
public class GreeterExceptionBundle_$bundle_de_DE
    extends GreeterExceptionBundle_$bundle_de
    implements GreeterExceptionBundle
{
    // Shared singleton; the bundle is stateless so one instance suffices.
    public final static GreeterExceptionBundle_$bundle_de_DE INSTANCE = new GreeterExceptionBundle_$bundle_de_DE();
    // Translated message templates (IDs GREETER00000x are assigned by the processor).
    private final static java.lang.String thrownOnPurpose = "GREETER000006: Diese Ausnahme absichtlich geworfen.";
    private final static java.lang.String localeNotValid = "GREETER000005: Angeforderte Gebietsschema nicht g\u00fcltig: %s";
    protected GreeterExceptionBundle_$bundle_de_DE() {
        super();
    }
    // Ensures deserialization resolves to the shared singleton.
    @Override
    protected Object readResolve() {
        return INSTANCE;
    }
    @Override
    protected java.lang.String thrownOnPurpose$str() {
        return thrownOnPurpose;
    }
    @Override
    protected java.lang.String localeNotValid$str() {
        return localeNotValid;
    }
}
| apache-2.0 |
not-my-name/HonoursProject2016 | src/main/java/za/redbridge/simulator/Novelty/Behaviour.java | 2364 | package za.redbridge.simulator;
import za.redbridge.simulator.ConstructionZone;
import za.redbridge.simulator.ConstructionTask;
import za.redbridge.simulator.object.ResourceObject;
import java.util.Arrays;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.*;
import za.redbridge.simulator.object.RobotObject;
import org.jbox2d.common.Vec2;
/*
 * Captures the behaviour exhibited by a robot team during one simulation run, for use in
 * novelty-score calculations: the construction zones that were built and how many blocks of
 * each resource type (A/B/C) ended up connected across all of them.
 */
public class Behaviour {

    private final ConstructionTask constructionTask;
    private final ArrayList<ConstructionZone> constructionZones;

    // Snapshot of the zone count taken at construction time.
    private final int numConstructionZones;

    // Schema configuration used for this run; currently stored but not read back —
    // retained to keep the constructor signature stable.
    private final int schemaConfigNum;

    // Totals of each block type connected across all construction zones.
    private int connectedA;
    private int connectedB;
    private int connectedC;

    public Behaviour(ConstructionTask constructionTask, int schemaConfigNum) {
        this.constructionTask = constructionTask;
        this.constructionZones = constructionTask.getConstructionZones();
        this.numConstructionZones = this.constructionZones.size();
        this.schemaConfigNum = schemaConfigNum;
        countConnected();
    }

    /*
     * Tallies how many blocks of each resource type were connected across all the
     * construction zones created during the run.
     */
    private void countConnected() {
        for (ConstructionZone cZone : constructionZones) {
            connectedA += cZone.getACount();
            connectedB += cZone.getBCount();
            connectedC += cZone.getCCount();
        }
    }

    public ArrayList<ConstructionZone> getConstructionZones() {
        return constructionZones;
    }

    public ConstructionTask getConstructionTask() {
        return this.constructionTask;
    }

    public int getNumConstructionZones() {
        return this.numConstructionZones;
    }

    public int getConnectedA() {
        return this.connectedA;
    }

    public int getConnectedB() {
        return this.connectedB;
    }

    public int getConnectedC() {
        return this.connectedC;
    }
}
| apache-2.0 |
wlzjdm/wolf | wolf-web/src/main/java/org/dm/web/base/dao/ComnDao.java | 26310 | package org.dm.web.base.dao;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import org.apache.log4j.Logger;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionTemplate;
import org.dm.web.base.cache.SysCacheManager;
import org.dm.web.base.dynamicds.CustomerContextHolder;
import org.dm.web.base.exception.DMPromptException;
import org.dm.web.base.exception.DMSqlException;
import org.dm.web.base.exception.DMSystemException;
import org.dm.web.base.sql.ISqlStatement;
import org.dm.web.base.sql.ITransaction;
import org.dm.web.base.sql.PreparedSqlStatement;
import org.dm.web.base.sql.SqlResult;
import org.dm.web.base.sql.SqlStatement;
import org.dm.web.base.sql.SqlUtil;
import org.dm.web.base.sql.SysSql;
import org.dm.web.base.sql.TResult;
import org.dm.web.base.sql.object.SqlCheck;
import org.dm.web.base.sql.object.SqlConfig;
import org.dm.web.base.sql.object.SqlExecute;
import org.dm.web.base.system.RequestSupport;
import org.dm.web.base.system.SysBeans;
import org.dm.web.base.util.Tools;
/** Generic DAO that executes configuration-driven (exeid/sqlid) queries and updates. */
public class ComnDao extends BaseDao {

    private static Logger log = Logger.getLogger(ComnDao.class);

    @Resource
    private TransactionTemplate transactionTemplate;

    // Whether to execute configured SQL through prepared (precompiled) statements.
    private static boolean isPrepared = false;
/**
 * Returns the database type (dialect key) of the data source currently selected
 * for this thread via {@link CustomerContextHolder}.
 *
 * @return database type string of the active data source
 */
public String getDBType() {
    return SysBeans.getSysSql().getDBType(CustomerContextHolder.getCustomerType());
}
/**
 * Executes a generic query and returns a {@link SqlResult} result set.<br />
 * <strong>If the SQL requires injected parameters, set them on {@code params} before
 * calling and include the {@code exeid} entry in {@code params}; alternatively use
 * {@code query(String, Map)}.</strong>
 *
 * @param params query parameters; must contain an {@code exeid} entry
 * @return query result set
 * @throws DMPromptException on validation-check failures meant to be shown to the user
 * @throws DMSqlException on SQL configuration/execution errors
 */
public SqlResult query(Map<String, Object> params) throws DMPromptException,
        DMSqlException {
    // Delegate: exeid is taken from params, no paging.
    return query(null, params, null, null);
}

/**
 * Executes a generic query and returns a {@link SqlResult} result set.
 *
 * @param exeid  id of the configured SQL execution to run
 * @param params query parameters
 * @return query result set
 * @throws DMPromptException on validation-check failures meant to be shown to the user
 * @throws DMSqlException on SQL configuration/execution errors
 */
public SqlResult query(String exeid, Map<String, Object> params)
        throws DMPromptException, DMSqlException {
    // Delegate without paging.
    return query(exeid, params, null, null);
}
/**
 * Executes a generic query and returns a {@link SqlResult} result set.<br />
 * {@code params} must be populated before calling.<br />
 * Generic queries are addressed by exeid only.
 *
 * @param exeid  id of the configured SQL execution; when {@code null}, the
 *               {@code exeid} entry of {@code params} is used instead
 * @param start  first record index; ignored when {@code null} or negative
 * @param limit  maximum number of records; ignored when {@code null} or &lt; 1
 * @return query result set
 * @throws DMSqlException when the exeid cannot be resolved or execution fails
 * @throws DMPromptException on validation-check failures meant to be shown to the user
 */
public SqlResult query(String exeid, Map<String, Object> params,
        Integer start, Integer limit) throws DMPromptException,
        DMSqlException {
    if (params == null)
        params = new HashMap<String, Object>();
    if (exeid == null)
        exeid = Tools.trimString(params.get("exeid"));
    else
        params.put("exeid", exeid);
    // Resolve the execution (exe) configuration.
    final SqlExecute execonfig = SysSql.getExeid(exeid);
    if (execonfig == null)
        throw new DMSqlException("找不到 exeid : " + exeid);
    // Run the query inside a transaction bound to the configured data source.
    TResult tr = doTransaction(new ITransaction() {
        @Override
        public TResult transaction(JdbcTemplate jdbcTemplate,
                Map<String, Object> params) throws DMPromptException,
                Exception {
            SqlResult result = exeQuery(execonfig, params);
            return new TResult(true, result, null, null);
        }
    }, execonfig.getDatasource(), params);
    return tr.getSresult();
}
/**
 * Runs the configured query identified by {@code exeid}; intended for use inside an
 * already-open transaction (no transaction is started here).
 *
 * @param exeid  id of the configured SQL execution
 * @param params query parameters
 * @return query result set
 * @throws DMSqlException when the exeid cannot be resolved or execution fails
 */
public SqlResult exeQuery(String exeid, Map<String, Object> params)
        throws DMPromptException, DMSqlException, DMSystemException,
        SQLException {
    SqlExecute config = SysSql.getExeid(exeid);
    if (config == null) {
        throw new DMSqlException("找不到 exeid : " + exeid);
    }
    return exeQuery(config, params);
}
/**
 * Executes every SQL statement of the given execution configuration in order, inside the
 * caller's transaction, and returns the result set of the last SELECT/WITH statement.
 *
 * @param execonfig resolved SQL execution configuration
 * @param params    query parameters (may carry {@code RequestSupport.DO_NOT_SAY_LOG})
 * @return result set of the last query statement
 * @throws DMSqlException when the configuration contains no SELECT statement
 */
private SqlResult exeQuery(final SqlExecute execonfig,
        final Map<String, Object> params) throws DMPromptException,
        DMSqlException, DMSystemException, SQLException {
    String dbtype = getDBType();
    boolean dolog = !params.containsKey(RequestSupport.DO_NOT_SAY_LOG);
    @SuppressWarnings("rawtypes")
    List sqllist = execonfig.getSqlinfos();
    if (sqllist == null || sqllist.size() == 0) {
        sqllist = execonfig.getSqlids();
    }
    SqlResult sr = null;// holds the last query result; returned at the end
    for (Object sqlobj : sqllist) {// iterate over all statements executed in sequence
        SqlConfig sqlconfig;
        if (sqlobj instanceof String) {
            sqlconfig = SysSql.getSqlid((String) sqlobj, dbtype);
            // sqlid currently being executed
            String exeSqlid = sqlconfig.getSqlid();
            if (dolog) {
                log.info("##### comn query exeid : " + execonfig.getExeid()
                        + ", execute sqlid : " + exeSqlid + ", " + dbtype);
            }
        } else {
            sqlconfig = (SqlConfig) sqlobj;
            if (dolog) {
                log.info("##### comn query exeid : " + execonfig.getExeid()
                        + ", execute sqlname : " + sqlconfig.getSqlname()
                        + ", " + dbtype);
            }
        }
        String sql_statement = sqlconfig.getSql(dbtype);
        // Prepare statement execution (prepared vs plain, per the global flag).
        ISqlStatement stmExecute;
        if (isPrepared) {
            stmExecute = new PreparedSqlStatement(sql_statement,
                    getJdbcTemplate());
        } else {
            stmExecute = new SqlStatement(sql_statement, getJdbcTemplate());
        }
        // Run pre-execution validation checks for this statement.
        if (sqlCheck(getJdbcTemplate(), sqlconfig, params)) {// validation passed
            // Distinguish queries from updates by the first keyword; parentheses are
            // stripped so "(SELECT ..." is recognized.
            // NOTE(review): assumes the statement is at least 6 characters long — TODO confirm.
            String check_sql = sql_statement;
            check_sql = check_sql.replaceAll("\\(", "").trim();
            if ("SELECT".equalsIgnoreCase(check_sql.substring(0, 6))
                    || "WITH".equalsIgnoreCase(check_sql.substring(0, 4))) {
                stmExecute.autoSetParams(params);
                // Execute the query statement.
                sr = stmExecute.executeQuery();
                sr.setQueryTree(sqlconfig.getQureytree());
                sr.setTransfers(sqlconfig.getTransfers());
            } else {
                // Set parameters, but skip paging parameters.
                stmExecute.autoSetParams(params, true);
                // Execute the update statement.
                stmExecute.executeUpdate();
            }
            // if ("SELECT".equalsIgnoreCase(sql_statement.substring(0, 6)))
            // {
            // stmExecute.autoSetParams(params);
            // // 执行查询语句
            // sr = stmExecute.executeQuery();
            // sr.setQueryTree(sqlconfig.getQureytree());
            // sr.setTransfers(sqlconfig.getTransfers());
            // } else {
            // // 设置参数,但不设置分页参数,
            // stmExecute.autoSetParams(params, true);
            // // 执行更新语句
            // stmExecute.executeUpdate();
            // }
        }
    }// end for
    if (sr == null) {
        throw new DMSqlException("执行查询操作里的SQL没有SELECT语句exeid="
                + execonfig.getExeid());
    }
    return sr;
}
/**
 * Executes a generic SQL update.<br />
 * <strong>If the SQL requires injected parameters, set them on {@code params} before
 * calling and include the {@code exeid} entry; alternatively use
 * {@code update(String, Map)}.</strong>
 *
 * @param params update parameters; must contain an {@code exeid} entry
 * @return human-readable result description
 * @throws DMPromptException on validation-check failures meant to be shown to the user
 * @throws DMSqlException on SQL configuration/execution errors
 */
public String update(final Map<String, Object> params)
        throws DMPromptException, DMSqlException {
    return update(null, params);
}

/**
 * Executes a generic SQL update.<br />
 * {@code params} must be populated before calling.<br />
 * Generic updates are addressed by exeid only.
 *
 * @param exeid id of the configured SQL execution; when {@code null}, the
 *              {@code exeid} entry of {@code params} is used instead
 * @return human-readable result description
 * @throws DMPromptException on validation-check failures meant to be shown to the user
 */
public String update(String exeid, final Map<String, Object> params)
        throws DMPromptException, DMSqlException {
    if (params != null) {
        // Keep exeid and params in sync: read it from params or write it back.
        if (exeid == null)
            exeid = Tools.trimString(params.get("exeid"));
        else
            params.put("exeid", exeid);
    }
    return doUpdate(exeid, params);
}

/**
 * Executes a generic SQL update.<br />
 * {@code params} must be populated before calling.<br />
 * Generic updates are addressed by exeid only.
 *
 * @param exeid id of the configured SQL execution; when {@code null}, the
 *              {@code exeid} entry of {@code params} is used instead
 * @param dolog whether this DAO should log the execution; pass {@code false} to suppress
 * @return human-readable result description
 */
public String update(String exeid, final Map<String, Object> params,
        boolean dolog) throws DMPromptException, DMSqlException {
    if (params != null) {
        if (exeid == null)
            exeid = Tools.trimString(params.get("exeid"));
        else
            params.put("exeid", exeid);
    }
    return doUpdate(exeid, params, dolog);
}

// Delegate: logging enabled by default.
private String doUpdate(String exeid, final Map<String, Object> params)
        throws DMPromptException, DMSqlException {
    return doUpdate(exeid, params, true);
}
/**
 * Executes a generic SQL update inside a transaction bound to the configured data source.
 *
 * @param exeid id of the configured SQL execution
 * @return human-readable result description
 * @throws DMPromptException when a validation check fails; callers must surface this
 *         message (e.g. show it to the user)
 * @throws DMSqlException when the exeid cannot be resolved
 */
private String doUpdate(String exeid, final Map<String, Object> params,
        final boolean dolog) throws DMPromptException, DMSqlException {
    // Load the SQL execution configuration.
    final SqlExecute execonfig = SysSql.getExeid(exeid);
    if (execonfig == null) {
        throw new DMSqlException(String.format("找不到exeid:%s", exeid));
    }
    // All statements of the configuration run in one transaction.
    TResult tr = doTransaction(new ITransaction() {
        @Override
        public TResult transaction(JdbcTemplate jdbcTemplate,
                Map<String, Object> params) throws DMPromptException,
                Exception {
            String result = exeUpdate(execonfig, params, dolog);
            return new TResult(true, null, null, result);
        }
    }, execonfig.getDatasource(), params);
    return (String) tr.getObject();
}
/**
 * Runs the configured update identified by {@code exeid} with logging enabled; intended
 * for use inside an already-open transaction.
 */
public String exeUpdate(final String exeid, final Map<String, Object> params)
        throws DMPromptException, DMSqlException, SQLException,
        DMSystemException {
    SqlExecute config = SysSql.getExeid(exeid);
    if (config == null) {
        throw new DMSqlException(String.format("找不到exeid:%s", exeid));
    }
    return exeUpdate(config, params, true);
}
/**
 * Executes every update statement of the given execution configuration in order, collecting
 * affected-row counts and the sqlids whose caches must be flushed after commit.
 * <p>
 * Fixes: removed the duplicate {@code DMSqlException} from the throws clause; the log line
 * for named statements previously said "comn query" inside this update path (copy-paste) and
 * now says "comn update".
 *
 * @param execonfig resolved SQL execution configuration
 * @param dolog     whether executions should be logged
 * @return human-readable result description ("&lt;exename&gt;成功")
 */
private String exeUpdate(final SqlExecute execonfig,
        final Map<String, Object> params, boolean dolog)
        throws DMPromptException, DMSqlException, SQLException,
        DMSystemException {
    String dbtype = getDBType();
    String descript = null;// operation description; originally intended to come from button config
    if (descript == null) {// fall back to the EXENAME of the execution configuration
        descript = execonfig.getExename();
    }
    // Collect executed sqlids so the cache manager can refresh them after the transaction commits.
    Set<String> executedSqlids = new HashSet<String>();
    // Collect the affected-row count of each executed update.
    List<Integer> fetchRows = new ArrayList<Integer>();
    @SuppressWarnings("rawtypes")
    List sqllist = execonfig.getSqlinfos();
    if (sqllist == null || sqllist.size() == 0) {
        sqllist = execonfig.getSqlids();
    }
    for (Object sqlobj : sqllist) {// iterate over all statements executed in sequence
        SqlConfig sqlconfig;
        if (sqlobj instanceof String) {
            sqlconfig = SysSql.getSqlid((String) sqlobj, dbtype);
            // sqlid currently being executed
            String exeSqlid = sqlconfig.getSqlid();
            if (dolog) {
                log.info("##### comn update exeid : "
                        + execonfig.getExeid() + ", execute sqlid : "
                        + exeSqlid + ", " + dbtype);
            }
        } else {
            sqlconfig = (SqlConfig) sqlobj;
            if (dolog) {
                log.info("##### comn update exeid : " + execonfig.getExeid()
                        + ", execute sqlname : " + sqlconfig.getSqlname()
                        + ", " + dbtype);
            }
        }
        // Prepare statement execution (prepared vs plain, per the global flag).
        ISqlStatement stmExecute;
        if (isPrepared) {
            stmExecute = new PreparedSqlStatement(sqlconfig.getSql(dbtype),
                    getJdbcTemplate());
        } else {
            stmExecute = new SqlStatement(sqlconfig.getSql(dbtype),
                    getJdbcTemplate());
        }
        // Run pre-execution validation checks for this statement.
        if (sqlCheck(getJdbcTemplate(), sqlconfig, params)) {// validation passed
            // Set parameters, but skip paging parameters.
            stmExecute.autoSetParams(params, false);
            // Execute the update statement.
            int ret = stmExecute.executeUpdate();
            executedSqlids.add(sqlconfig.getSqlid());// collect executed sqlid
            fetchRows.add(ret);// collect affected-row count
        }
    }// end for
    descript += "成功";
    // Expose affected-row counts via params so the action layer can return them.
    params.put(SqlUtil.COMN_UPDATE_FETCH_ROWS_KEY,
            fetchRows.toArray(Tools.emptyArrayInteger));
    // Refresh caches for the executed sqlids.
    SysCacheManager.flushSqlid(executedSqlids, params);
    return descript;
}
/**
 * Runs the pre-execution validation checks configured for a SQL statement.
 * <p>
 * Each check computes a value (via a configured exeid, a check SQL, or a literal
 * check string with injected parameters) and compares it with a configured value;
 * when the failure condition matches, the check either aborts the whole execution
 * ({@code exitall=true}, by throwing {@link DMPromptException}) or just skips the
 * single statement (return {@code false}).
 *
 * @return {@code true} when all checks pass; {@code false} when the statement must be skipped
 * @throws DMPromptException when a failed check is configured to abort execution
 * @throws DMSystemException when a check defines none of checksqlid/checksql/checkstring
 */
public boolean sqlCheck(JdbcTemplate jdbcTemplate, SqlConfig sqlinfo,
        Map<String, Object> params) throws DMPromptException, DMSqlException,
        DMSystemException, SQLException {
    // Read the validation configuration attached to this statement.
    String dbtype = getDBType();
    List<SqlCheck> checks = sqlinfo.getChecks();
    if (checks == null)
        return true;
    boolean checkOK = true;// false => skip this SQL update
    // Evaluate each configured check in turn.
    for (SqlCheck check : checks) {
        String checkname = check.getCheckname();// check name (for diagnostics)
        String checkSql = check.getChecksql(dbtype);// check query SQL
        String checkexeid = check.getChecksqlid();// generic-query exeid
        String checkString = check.getCheckstring();// literal check value template
        String compareVal = check.getCompareval();// comparison value
        String compareSign = check.getComparesign();// comparison operator
        String errText = check.getErrtext();// error message template
        Boolean exitAll = check.getExitall();// abort all remaining statements?
        Object checkVal = null;// computed check value
        if (!Tools.strIsEmpty(checkexeid)) {
            // Value comes from a configured generic query.
            SqlResult sr = this.query(checkexeid, params);
            if (sr.next()) {// query succeeded; inspect the result
                checkVal = sr.getByColumnIndex(0);// first row, first column only
            }
        } else if (!Tools.strIsEmpty(checkSql)) {
            // Value comes from an inline check SQL.
            SqlStatement sm = new SqlStatement(checkSql, jdbcTemplate);
            sm.autoSetParams(params, false);// set parameters, no paging
            SqlResult sr = sm.executeQuery();
            if (sr.next()) {
                checkVal = sr.getByColumnIndex(0);// first row, first column only
            }
        } else if (!Tools.strIsEmpty(checkString)) {
            // Value is a literal template with injected parameters.
            StringBuilder sb = new StringBuilder(checkString.trim());
            // boolean isNumber = sb.indexOf("$N{") == 0;
            SqlStatement.autoSetParams(sb, params, false);
            checkVal = sb.toString();
            // if (isNumber && Tools.isNumber((String) checkVal))
            if (Tools.isNumber((String) checkVal)) {
                checkVal = Tools.str2BigDecimal((String) checkVal);
            }
        } else {
            throw new DMSystemException(
                    "通用更新的SQL校验配置中有没有配置checksqlid或checksql或checkstring的配置checkname="
                            + checkname);
        }
        StringBuilder sbCompareVal = new StringBuilder(compareVal);
        // Inject parameter values into the comparison value (if any).
        SqlStatement.autoSetParams(sbCompareVal, params, false);
        if (compare(checkVal, sbCompareVal.toString(), compareSign)) {// failure condition matched
            StringBuilder sbErrText = new StringBuilder(errText);
            // Inject parameter values into the error message (if any).
            SqlStatement.autoSetParams(sbErrText, params, false);
            errText = sbErrText.toString();
            if (exitAll) {// abort the whole execution with the configured message
                throw new DMPromptException(errText);
            } else {// only skip the current statement
                log.info("通用SQL更新的SQL校验不通过,将跳过此更新的执行:checkname="
                        + checkname + ", errText:" + errText);
                checkOK = false;
            }
        }
    }
    return checkOK;
}
/**
 * Compares a value taken from a query result with {@code compareVal}:<br />
 * 1. using the comparison mode named by {@code sign};<br />
 * 2. lexicographically when the value is a {@code String}, otherwise numerically
 *    (both sides converted via {@code Tools.str2BigDecimal}).
 *
 * @param sign comparison mode: eql (==), uneql (!=), gt (&gt;), gteql (&gt;=), lt (&lt;), lteql (&lt;=)
 * @return whether the comparison holds; {@code false} when {@code sqlObj} is {@code null}
 */
public static boolean compare(Object sqlObj, String compareVal, String sign) {
    if (sqlObj == null) {
        return false;
    }
    // Strings compare lexicographically; everything else numerically.
    int cmp;
    if (sqlObj instanceof String) {
        cmp = ((String) sqlObj).compareTo(compareVal);
    } else {
        cmp = Tools.str2BigDecimal(String.valueOf(sqlObj))
                .compareTo(Tools.str2BigDecimal(compareVal));
    }
    sign = sign.trim();
    final boolean matched;
    if ("uneql".equals(sign)) {
        // "uneql" contains "eql", so the not-equal mode must be handled first.
        matched = cmp != 0;
    } else {
        matched = (sign.contains("eql") && cmp == 0)
                || (sign.contains("gt") && cmp > 0)
                || (sign.contains("lt") && cmp < 0);
    }
    log.info("##### compare : " + sqlObj + " " + sign + " " + compareVal
            + " " + matched);
    return matched;
}
/**
 * Executes the given SQL query without parameters or paging.
 * <p>
 * Fix: removed the duplicate {@code DMSqlException} from the throws clause.
 *
 * @param sql        SQL to execute
 * @param dataSource target data source; {@code null} selects the default data source
 * @return query result set
 */
public SqlResult sqlQuery(String sql, String dataSource)
        throws DMSqlException, DMSystemException, SQLException {
    return sqlQuery(sql, dataSource, null, null, null);
}

/**
 * Executes the given SQL query with parameters but without paging.
 * <p>
 * Fix: removed the duplicate {@code DMSqlException} from the throws clause.
 *
 * @param sql        SQL to execute
 * @param dataSource target data source; {@code null} selects the default data source
 * @param params     parameters injected into the SQL
 * @return query result set
 */
public SqlResult sqlQuery(String sql, String dataSource,
        Map<String, Object> params) throws DMSqlException, DMSystemException,
        SQLException {
    return sqlQuery(sql, dataSource, params, null, null);
}
/**
 * Executes the given SQL query against the selected data source, with optional
 * parameter injection and paging.
 * <p>
 * Fix: removed the duplicate {@code DMSqlException} from the throws clause.
 *
 * @param sql        SQL to execute
 * @param dataSource target data source; {@code null} selects the default data source
 * @param params     parameters injected into the SQL; created when {@code null}
 * @param start      first record index; ignored when {@code null} or negative
 * @param limit      maximum number of records; ignored when {@code null} or &lt; 1
 * @return query result set
 */
public SqlResult sqlQuery(String sql, String dataSource,
        Map<String, Object> params, Integer start, Integer limit)
        throws DMSqlException, DMSystemException, SQLException {
    if (params == null) {
        params = new HashMap<String, Object>();
    }
    // Select the data source for this thread (logging suppressed when requested).
    SqlUtil.selectDataSource(dataSource,
            params.containsKey(RequestSupport.DO_NOT_SAY_LOG));
    if (start != null && start > -1) {
        params.put("start", start);
    }
    if (limit != null && limit > 0) {
        params.put("limit", limit);
    }
    // Execute the query with injected parameters (paging read from params).
    SqlStatement stm = new SqlStatement(sql, getJdbcTemplate());
    stm.autoSetParams(params);
    return stm.executeQuery();
}
/**
 * Executes the given update SQL statement without extra parameters.
 *
 * @param sql the update SQL to run
 * @param dataSource the data source to use; {@code null} selects the default data source
 * @return the number of rows affected, or -1 on error
 * @throws DMSqlException if the SQL is invalid
 * @throws DMSystemException on an internal system error
 * @throws SQLException on a database access error
 */
// Transaction handling intentionally disabled (kept for reference):
// @Transactional(propagation = Propagation.REQUIRED, rollbackFor =
// KSqlException.class)
public int sqlUpdate(String sql, String dataSource) throws DMSqlException,
		DMSystemException, SQLException {
	// Delegate to the parameterized overload with no params.
	return sqlUpdate(sql, dataSource, null);
}
/**
 * Executes the given update SQL statement with optional named parameters.
 *
 * @param sql the update SQL to run
 * @param dataSource the data source to use; {@code null} selects the default data source
 * @param params named parameters for the statement; may be {@code null}
 * @return the number of rows affected, or -1 on error
 * @throws DMSqlException if the SQL is invalid
 * @throws DMSystemException on an internal system error
 * @throws SQLException on a database access error
 */
// Transaction handling intentionally disabled (kept for reference):
// @Transactional(propagation = Propagation.REQUIRED, rollbackFor =
// KSqlException.class)
public int sqlUpdate(String sql, String dataSource,
		final Map<String, Object> params) throws DMSqlException,
		DMSystemException, SQLException {
	// Suppress data-source selection logging when the caller asked for silence.
	boolean donotsaylog = params != null
			&& params.containsKey(RequestSupport.DO_NOT_SAY_LOG);
	// Select the data source.
	SqlUtil.selectDataSource(dataSource, donotsaylog);
	// Build and run the update statement.
	SqlStatement stm = new SqlStatement(sql, getJdbcTemplate());
	stm.autoSetParams(params);
	return stm.executeUpdate();
}
/**
 * Queries the database using a configured sqlid against the default
 * "dsSys" data source.
 *
 * @param sqlid identifier of the configured SQL statement
 * @param params named parameters for the statement; may be {@code null}
 * @return the query result
 * @throws DMPromptException propagated from the underlying query path
 * @throws DMSystemException on an internal system error
 * @throws SQLException on a database access error
 * @throws DMSqlException if no SQL configuration exists for the sqlid
 */
public SqlResult doQueryBySqlid(String sqlid,
		final Map<String, Object> params) throws DMPromptException,
		DMSystemException, SQLException, DMSqlException {
	return doQueryBySqlid("dsSys", sqlid, params);
}
/**
 * Queries the database using a configured sqlid against the given data source.
 *
 * @param dataSource the data source to use
 * @param sqlid identifier of the configured SQL statement
 * @param params named parameters for the statement; may be {@code null}
 * @return the query result
 * @throws DMPromptException propagated from the underlying query path
 * @throws DMSystemException on an internal system error
 * @throws SQLException on a database access error
 * @throws DMSqlException if no SQL configuration exists for the sqlid
 */
public SqlResult doQueryBySqlid(String dataSource, String sqlid,
		final Map<String, Object> params) throws DMPromptException,
		DMSystemException, SQLException, DMSqlException {
	String dbType = getDBType();
	// Look up the configured SQL for this sqlid and database type.
	final SqlConfig sqlconfig = SysSql.getSqlid(sqlid, dbType);
	if (sqlconfig == null)
		throw new DMSqlException("找不到 sqlid : " + sqlid);
	if (params != null
			&& !params.containsKey(RequestSupport.DO_NOT_SAY_LOG))
		log.info("##### doQueryBySqlid query sqlid : "
				+ sqlconfig.getSqlid());
	// Select the data source; optional when using the default one.
	SqlUtil.selectDataSource(dataSource);
	// Use the accessor for consistency with the sqlQuery/sqlUpdate methods.
	SqlStatement stm = new SqlStatement(sqlconfig.getSql(dbType),
			getJdbcTemplate());
	stm.autoSetParams(params);
	return stm.executeQuery();
}
/**
 * Updates the database using a configured sqlid against the default
 * "dsSys" data source.
 *
 * @param sqlid identifier of the configured SQL statement
 * @param params named parameters for the statement; may be {@code null}
 * @return the number of rows affected, or 0 when SQL validation fails
 * @throws DMPromptException propagated from the underlying update path
 * @throws DMSystemException on an internal system error
 * @throws SQLException on a database access error
 * @throws DMSqlException if no SQL configuration exists for the sqlid
 */
public int doUpdateBySqlid(String sqlid, final Map<String, Object> params)
		throws DMPromptException, DMSystemException, SQLException,
		DMSqlException {
	return doUpdateBySqlid("dsSys", sqlid, params);
}
/**
 * Updates the database using a configured sqlid against the given data source.
 * The statement is executed only when SQL validation succeeds.
 *
 * @param dataSource the data source to use
 * @param sqlid identifier of the configured SQL statement
 * @param params named parameters for the statement; may be {@code null}
 * @return the number of rows affected, or 0 when SQL validation fails
 * @throws DMPromptException propagated from the underlying update path
 * @throws DMSystemException on an internal system error
 * @throws SQLException on a database access error
 * @throws DMSqlException if no SQL configuration exists for the sqlid
 */
public int doUpdateBySqlid(String dataSource, String sqlid,
		final Map<String, Object> params) throws DMPromptException,
		DMSystemException, SQLException, DMSqlException {
	String dbType = getDBType();
	// Look up the configured SQL for this sqlid and database type.
	final SqlConfig sqlconfig = SysSql.getSqlid(sqlid, dbType);
	if (sqlconfig == null)
		throw new DMSqlException("找不到 sqlid : " + sqlid);
	if (params != null
			&& !params.containsKey(RequestSupport.DO_NOT_SAY_LOG))
		// Fixed copy-pasted message that previously named doQueryBySqlid.
		log.info("##### doUpdateBySqlid update sqlid : "
				+ sqlconfig.getSqlid());
	if (sqlCheck(getJdbcTemplate(), sqlconfig, params)) {// SQL validation succeeded
		// Select the data source; optional when using the default one.
		SqlUtil.selectDataSource(dataSource);
		// Use the accessor for consistency with the rest of the class.
		SqlStatement stm = new SqlStatement(sqlconfig.getSql(dbType),
				getJdbcTemplate());
		stm.autoSetParams(params);
		return stm.executeUpdate();
	}
	return 0;
}
}
| apache-2.0 |
actframework/actframework | src/main/java/act/handler/RequestHandlerBase.java | 4086 | package act.handler;
/*-
* #%L
* ACT Framework
* %%
* Copyright (C) 2014 - 2017 ActFramework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import act.app.ActionContext;
import act.security.CORS;
import act.security.CSRF;
import org.osgl.$;
import org.osgl.exception.NotAppliedException;
import org.osgl.logging.LogManager;
import org.osgl.logging.Logger;
import java.lang.annotation.Annotation;
import javax.enterprise.context.ApplicationScoped;
/**
 * Convenience base class for {@link RequestHandler} implementations. Provides
 * default answers for the handler contract (session-free flag, context
 * resolving, CORS/CSRF specs, CSP, destroy lifecycle) and adapts the handler
 * to the {@code $.F1} functional interface.
 */
public abstract class RequestHandlerBase extends $.F1<ActionContext, Void> implements RequestHandler {
    // Logger bound to the concrete handler class; available to subclasses.
    protected Logger logger = LogManager.get(getClass());
    // Guards destroy() so releaseResources() runs at most once.
    private boolean destroyed;
    // Reported by sessionFree(); false unless setSessionFree() is called.
    private boolean sessionFree;
    // Reported by requireResolveContext(); true unless noContextResoving() is called.
    private boolean requireContextResolving;
    // Reported by express(ActionContext) and skipEvents(ActionContext).
    private boolean express;
    public RequestHandlerBase() {
        // A handler counts as express when it implements the ExpressHandler
        // marker interface, or when setExpress() is called later.
        this.express = this instanceof ExpressHandler;
        this.sessionFree = false;
        this.requireContextResolving = true;
    }
    /**
     * Adapts the functional {@code $.F1} interface to {@link #handle(ActionContext)}.
     */
    @Override
    public final Void apply(ActionContext context) throws NotAppliedException, $.Break {
        handle(context);
        return null;
    }
    /** Marks this handler as express; returns {@code this} for chaining. */
    public RequestHandlerBase setExpress() {
        this.express = true;
        return this;
    }
    @Override
    public boolean express(ActionContext context) {
        // Note: the context argument is ignored; the flag is handler-wide.
        return express;
    }
    /**
     * By default an {@link #express(ActionContext) express} handler will
     * skip result commit events triggering.
     *
     * @param context the action context.
     * @return result of {@link #express(ActionContext)}
     */
    @Override
    public boolean skipEvents(ActionContext context) {
        return express;
    }
    /** All handlers live in the application scope. */
    @Override
    public final Class<? extends Annotation> scope() {
        return ApplicationScoped.class;
    }
    @Override
    public boolean supportPartialPath() {
        return false;
    }
    @Override
    public boolean requireResolveContext() {
        return requireContextResolving;
    }
    // NOTE(review): method name keeps the historical typo ("Resoving") because
    // renaming would break external callers.
    public RequestHandlerBase noContextResoving() {
        requireContextResolving = false;
        return this;
    }
    /** This base class is its own real handler; decorators may override. */
    public RequestHandler realHandler() {
        return this;
    }
    /** Marks this handler as session-free; returns {@code this} for chaining. */
    public RequestHandlerBase setSessionFree() {
        this.sessionFree = true;
        return this;
    }
    @Override
    public String toString() {
        return getClass().getName();
    }
    @Override
    public boolean sessionFree() {
        return sessionFree;
    }
    // Default security specs: the DUMB (no-op) CORS and CSRF specifications.
    @Override
    public CORS.Spec corsSpec() {
        return CORS.Spec.DUMB;
    }
    @Override
    public CSRF.Spec csrfSpec() {
        return CSRF.Spec.DUMB;
    }
    /** No Content-Security-Policy header by default. */
    @Override
    public String contentSecurityPolicy() {
        return null;
    }
    @Override
    public boolean disableContentSecurityPolicy() {
        return false;
    }
    /** Idempotent: releases resources only on the first call. */
    @Override
    public void destroy() {
        if (destroyed) return;
        destroyed = true;
        releaseResources();
    }
    @Override
    public boolean isDestroyed() {
        return destroyed;
    }
    /** Hook for subclasses to free resources on destroy; no-op by default. */
    protected void releaseResources() {}
    /**
     * Wraps a {@link SimpleRequestHandler} as a session-free handler that does
     * not require context resolving and has no authentication preparation.
     */
    public static RequestHandlerBase wrap(final SimpleRequestHandler handler) {
        return new RequestHandlerBase() {
            @Override
            public void handle(ActionContext context) {
                handler.handle(context);
            }
            @Override
            public void prepareAuthentication(ActionContext context) {
            }
            @Override
            public String toString() {
                return handler.toString();
            }
        }.setSessionFree().noContextResoving();
    }
}
| apache-2.0 |
kuali/kpme | tk-lm/impl/src/main/java/org/kuali/kpme/tklm/common/PersonInfoAction.java | 16142 | /**
* Copyright 2004-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kpme.tklm.common;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.kuali.kpme.core.api.accrualcategory.AccrualCategory;
import org.kuali.kpme.core.api.accrualcategory.AccrualEarnInterval;
import org.kuali.kpme.core.api.accrualcategory.rule.AccrualCategoryRule;
import org.kuali.kpme.core.api.assignment.Assignment;
import org.kuali.kpme.core.api.namespace.KPMENamespace;
import org.kuali.kpme.core.api.principal.PrincipalHRAttributes;
import org.kuali.kpme.core.role.KPMERole;
import org.kuali.kpme.core.service.HrServiceLocator;
import org.kuali.kpme.core.util.HrConstants;
import org.kuali.kpme.core.util.HrContext;
import org.kuali.kpme.core.web.KPMEAction;
import org.kuali.rice.kim.api.identity.IdentityService;
import org.kuali.rice.kim.api.identity.Person;
import org.kuali.rice.kim.api.identity.principal.EntityNamePrincipalName;
import org.kuali.rice.kim.api.role.RoleMember;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
public class PersonInfoAction extends KPMEAction {
private IdentityService identityService;
public ActionForward showInfo(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
return mapping.findForward("basic");
}
@Override
public ActionForward execute(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
ActionForward actForw = super.execute(mapping, form, request, response);
PersonInfoActionForm personForm = (PersonInfoActionForm)form;
personForm.setPrincipalId(HrContext.getTargetPrincipalId());
EntityNamePrincipalName name = KimApiServiceLocator.getIdentityService().getDefaultNamesForPrincipalId(personForm.getPrincipalId());
//Person person = KimApiServiceLocator.getPersonService().getPerson(personForm.getPrincipalId());
if (name != null) {
personForm.setPrincipalName(name.getPrincipalName());
// set name
personForm.setName(name.getDefaultName() != null ? name.getDefaultName().getCompositeName() : StringUtils.EMPTY);
}
personForm.setJobs(HrServiceLocator.getJobService().getJobs(HrContext.getTargetPrincipalId(), LocalDate.now()));
//KPME-1441
PrincipalHRAttributes principalHRAttributes = HrServiceLocator.getPrincipalHRAttributeService().getPrincipalCalendar(personForm.getPrincipalId(), LocalDate.now());
if ( principalHRAttributes != null && principalHRAttributes.getServiceLocalDate() != null ){
personForm.setServiceDate(principalHRAttributes.getServiceLocalDate().toString());
} else {
personForm.setServiceDate("");
}
// KPME-1441
if (principalHRAttributes != null && principalHRAttributes.getLeavePlan() != null) {
List<AccrualCategory> accrualCategories = new ArrayList<AccrualCategory>();
Map<String, BigDecimal> accrualCategoryRates = new HashMap<String, BigDecimal>();
Map<String, String> accrualEarnIntervals = new HashMap<String, String>();
Map<String, String> unitOfTime = new HashMap<String, String>();
List<AccrualCategory> allAccrualCategories = HrServiceLocator.getAccrualCategoryService().getActiveLeaveAccrualCategoriesForLeavePlan(principalHRAttributes.getLeavePlan(), LocalDate.now());
for (AccrualCategory accrualCategory : allAccrualCategories) {
if (StringUtils.equalsIgnoreCase(accrualCategory.getHasRules(), "Y")) {
AccrualCategoryRule accrualCategoryRule = HrServiceLocator.getAccrualCategoryRuleService().getAccrualCategoryRuleForDate(accrualCategory, LocalDate.now(), principalHRAttributes.getServiceLocalDate());
if (accrualCategoryRule != null) {
accrualCategories.add(accrualCategory);
accrualCategoryRates.put(accrualCategory.getAccrualCategory(), accrualCategoryRule.getAccrualRate());
for (AccrualEarnInterval entry : AccrualEarnInterval.values()) {
if (accrualCategory.getAccrualEarnInterval().equals(entry.getCode())) {
accrualEarnIntervals.put(accrualCategory.getAccrualCategory(), entry.getDescription());
break;
}
}
for (Map.Entry<String, String> entry : HrConstants.UNIT_OF_TIME.entrySet()) {
if (accrualCategory.getUnitOfTime().equals(entry.getKey()) ){
unitOfTime.put(accrualCategory.getAccrualCategory(), entry.getValue());
break;
}
}
}
}
}
personForm.setAccrualCategories(accrualCategories);
personForm.setAccrualCategoryRates(accrualCategoryRates);
personForm.setAccrualEarnIntervals(accrualEarnIntervals);
personForm.setUnitOfTime(unitOfTime);
}
setupRolesOnForm(personForm);
List<Assignment> assignments = HrServiceLocator.getAssignmentService().getAssignments(HrContext.getTargetPrincipalId(), LocalDate.now());
Map<Long, Set<Assignment>> jobNumberToListAssignments = new HashMap<Long,Set<Assignment>>();
Map<Long, Set<Person>> workAreaToApproverPerson = new HashMap<Long, Set<Person>>();
Map<String, Set<Person>> deptToDeptAdminPerson = new HashMap<String, Set<Person>>();
Map<String, Set<Person>> deptToPayrollPerson = new HashMap<String, Set<Person>>();
for (Assignment assignment : assignments) {
Set<Assignment> jobAssignments = jobNumberToListAssignments.get(assignment.getJobNumber());
if (jobAssignments == null) {
jobAssignments = new HashSet<Assignment>();
}
jobAssignments.add(assignment);
jobNumberToListAssignments.put(assignment.getJobNumber(), jobAssignments);
Set<Person> approvers = workAreaToApproverPerson.get(assignment.getWorkArea());
if (approvers == null) {
approvers = new TreeSet<Person>(new Comparator<Person>() {
@Override
public int compare(Person person1, Person person2) {
return ObjectUtils.compare(person1.getPrincipalId(), person2.getPrincipalId());
}
});
}
approvers.addAll(getApprovers(assignment.getWorkArea()));
workAreaToApproverPerson.put(assignment.getWorkArea(), approvers);
Set<Person> departmentAdmins = deptToDeptAdminPerson.get(assignment.getDept());
if (departmentAdmins == null) {
departmentAdmins = new TreeSet<Person>(new Comparator<Person>() {
@Override
public int compare(Person person1, Person person2) {
return ObjectUtils.compare(person1.getPrincipalId(), person2.getPrincipalId());
}
});
}
departmentAdmins.addAll(getDepartmentAdmins(assignment.getDept(), assignment.getGroupKeyCode()));
deptToDeptAdminPerson.put(assignment.getDept(), departmentAdmins);
Set<Person> payrollProcessors = deptToPayrollPerson.get(assignment.getDept());
if (payrollProcessors == null) {
payrollProcessors = new TreeSet<Person>(new Comparator<Person>() {
@Override
public int compare(Person person1, Person person2) {
return ObjectUtils.compare(person1.getPrincipalId(), person2.getPrincipalId());
}
});
}
payrollProcessors.addAll(getPayrollProcessors(assignment.getDept(), assignment.getGroupKeyCode()));
deptToPayrollPerson.put(assignment.getDept(), payrollProcessors);
}
personForm.setJobNumberToListAssignments(jobNumberToListAssignments);
personForm.setWorkAreaToApproverPerson(workAreaToApproverPerson);
personForm.setDeptToDeptAdminPerson(deptToDeptAdminPerson);
personForm.setDeptToPayrollPerson(deptToPayrollPerson);
return actForw;
}
private void setupRolesOnForm(PersonInfoActionForm personInfoActionForm) {
String principalId = HrContext.getTargetPrincipalId();
DateTime date = LocalDate.now().toDateTimeAtStartOfDay();
Set<Long> allApproverWorkAreas = new HashSet<Long>();
allApproverWorkAreas.addAll(HrServiceLocator.getKPMERoleService().getWorkAreasForPrincipalInRole(principalId, KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.APPROVER_DELEGATE.getRoleName(), date, true));
allApproverWorkAreas.addAll(HrServiceLocator.getKPMERoleService().getWorkAreasForPrincipalInRole(principalId, KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.APPROVER.getRoleName(), date, true));
personInfoActionForm.setApproverWorkAreas(new ArrayList<Long>(allApproverWorkAreas));
List<Long> reviewerWorkAreas = HrServiceLocator.getKPMERoleService().getWorkAreasForPrincipalInRole(principalId, KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.REVIEWER.getRoleName(), date, true);
personInfoActionForm.setReviewerWorkAreas(reviewerWorkAreas);
Set<String> allViewOnlyDepartments = new HashSet<String>();
allViewOnlyDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_TK.getNamespaceCode(), KPMERole.TIME_DEPARTMENT_VIEW_ONLY.getRoleName(), date, true));
allViewOnlyDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_LM.getNamespaceCode(), KPMERole.LEAVE_DEPARTMENT_VIEW_ONLY.getRoleName(), date, true));
allViewOnlyDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_TK.getNamespaceCode(), KPMERole.TIME_LOCATION_VIEW_ONLY.getRoleName(), date, true));
allViewOnlyDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_LM.getNamespaceCode(), KPMERole.LEAVE_LOCATION_VIEW_ONLY.getRoleName(), date, true));
personInfoActionForm.setDeptViewOnlyDepts(new ArrayList<String>(allViewOnlyDepartments));
Set<String> allAdministratorDepartments = new HashSet<String>();
allAdministratorDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_TK.getNamespaceCode(), KPMERole.TIME_DEPARTMENT_ADMINISTRATOR.getRoleName(), date, true));
allAdministratorDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_LM.getNamespaceCode(), KPMERole.LEAVE_DEPARTMENT_ADMINISTRATOR.getRoleName(), date, true));
allAdministratorDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_TK.getNamespaceCode(), KPMERole.TIME_LOCATION_ADMINISTRATOR.getRoleName(), date, true));
allAdministratorDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_LM.getNamespaceCode(), KPMERole.LEAVE_LOCATION_ADMINISTRATOR.getRoleName(), date, true));
personInfoActionForm.setDeptAdminDepts(new ArrayList<String>(allAdministratorDepartments));
Set<String> allPayrollProcessorDepartments = new HashSet<String>();
allPayrollProcessorDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.PAYROLL_PROCESSOR.getRoleName(), date, true));
allPayrollProcessorDepartments.addAll(HrServiceLocator.getKPMERoleService().getDepartmentsForPrincipalInRole(principalId, KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.PAYROLL_PROCESSOR_DELEGATE.getRoleName(), date, true));
personInfoActionForm.setPayrollProcessorDepts(new ArrayList<String>(allPayrollProcessorDepartments));
Set<String> allAdministratorLocations = new HashSet<String>();
allAdministratorLocations.addAll(HrServiceLocator.getKPMERoleService().getLocationsForPrincipalInRole(principalId, KPMENamespace.KPME_TK.getNamespaceCode(), KPMERole.TIME_LOCATION_ADMINISTRATOR.getRoleName(), date, true));
allAdministratorLocations.addAll(HrServiceLocator.getKPMERoleService().getLocationsForPrincipalInRole(principalId, KPMENamespace.KPME_LM.getNamespaceCode(), KPMERole.LEAVE_LOCATION_ADMINISTRATOR.getRoleName(), date, true));
personInfoActionForm.setLocationAdminDepts(new ArrayList<String>(allAdministratorLocations));
personInfoActionForm.setGlobalViewOnlyRoles(HrServiceLocator.getKPMEGroupService().isMemberOfSystemViewOnlyGroup(principalId, date));
personInfoActionForm.setSystemAdmin(HrServiceLocator.getKPMEGroupService().isMemberOfSystemAdministratorGroup(principalId, date));
}
private Set<Person> getDepartmentAdmins(String dept, String groupKeyCode) {
Set<Person> departmentAdmins = new HashSet<Person>();
DateTime date = LocalDate.now().toDateTimeAtStartOfDay();
List<RoleMember> roleMembers = new ArrayList<RoleMember>();
roleMembers.addAll(HrServiceLocator.getKPMERoleService().getRoleMembersInDepartment(KPMENamespace.KPME_TK.getNamespaceCode(), KPMERole.TIME_DEPARTMENT_ADMINISTRATOR.getRoleName(), dept, groupKeyCode, date, true));
roleMembers.addAll(HrServiceLocator.getKPMERoleService().getRoleMembersInDepartment(KPMENamespace.KPME_LM.getNamespaceCode(), KPMERole.LEAVE_DEPARTMENT_ADMINISTRATOR.getRoleName(), dept, groupKeyCode, date, true));
for (RoleMember roleMember : roleMembers) {
Person person = KimApiServiceLocator.getPersonService().getPerson(roleMember.getMemberId());
if (person != null) {
departmentAdmins.add(person);
}
}
return departmentAdmins;
}
private Set<Person> getPayrollProcessors(String dept, String groupKeyCode) {
Set<Person> payrollProcs = new HashSet<Person>();
DateTime date = LocalDate.now().toDateTimeAtStartOfDay();
List<RoleMember> roleMembers = new ArrayList<RoleMember>();
roleMembers.addAll(HrServiceLocator.getKPMERoleService().getRoleMembersInDepartment(KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.PAYROLL_PROCESSOR.getRoleName(), dept, groupKeyCode, date, true));
for (RoleMember roleMember : roleMembers) {
Person person = KimApiServiceLocator.getPersonService().getPerson(roleMember.getMemberId());
if (person != null) {
payrollProcs.add(person);
}
}
return payrollProcs;
}
private Set<Person> getApprovers(Long workArea) {
Set<Person> approvers = new HashSet<Person>();
DateTime date = LocalDate.now().toDateTimeAtStartOfDay();
List<RoleMember> roleMembers = HrServiceLocator.getKPMERoleService().getRoleMembersInWorkArea(KPMENamespace.KPME_HR.getNamespaceCode(), KPMERole.APPROVER.getRoleName(), workArea, date, true);
for (RoleMember roleMember : roleMembers) {
Person person = KimApiServiceLocator.getPersonService().getPerson(roleMember.getMemberId());
if (person != null) {
approvers.add(person);
}
}
return approvers;
}
} | apache-2.0 |
andrei-kuznetsov/fpf4mir | fpf4mir-core/src/main/java/ru/spbstu/icc/kspt/kuznetsov/fpf4mir/core/facts/artifact/impl/GenericTextFileArtifact.java | 411 | package ru.spbstu.icc.kspt.kuznetsov.fpf4mir.core.facts.artifact.impl;
import ru.spbstu.icc.kspt.kuznetsov.fpf4mir.core.facts.artifact.TextFileArtifact;
/**
 * Concrete {@link TextFileArtifact} with no state beyond its base class.
 */
public final class GenericTextFileArtifact extends TextFileArtifactBase implements TextFileArtifact {

    /** Serialization identifier for this artifact type. */
    private static final long serialVersionUID = -587939096854370958L;

    /** Creates an empty text file artifact via base-class initialization only. */
    public GenericTextFileArtifact() {
    }
}
| apache-2.0 |
burberius/eve-crest | src/test/java/net/troja/eve/crest/processors/StatusProcessorTest.java | 2341 | package net.troja.eve.crest.processors;
/*
* ========================================================================
* Library for the Eve Online CREST API
* ------------------------------------------------------------------------
* Copyright (C) 2014 - 2015 Jens Oberender <j.obi@troja.net>
* ------------------------------------------------------------------------
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================================
*/
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertThat;
import java.io.IOException;
import net.troja.eve.crest.CrestApiProcessorTest;
import net.troja.eve.crest.beans.Status;
import net.troja.eve.crest.beans.Status.State;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
/**
 * Verifies that {@link StatusProcessor} maps every field of a canned
 * Tranquility status payload onto the {@link Status} bean.
 */
public class StatusProcessorTest extends CrestApiProcessorTest {
    @Test
    public void testParsing() throws JsonProcessingException, IOException {
        // Parse the fixture payload.
        final StatusProcessor statusProcessor = new StatusProcessor();
        final JsonNode statusNode = loadAndParseData("Status.json");
        final Status parsedStatus = statusProcessor.parseEntry(statusNode);

        // Every mapped field must match the fixture.
        assertThat(parsedStatus.getServerName(), equalTo("TRANQUILITY"));
        assertThat(parsedStatus.getServerVersion(), startsWith("EVE-TRANQUILITY "));
        assertThat(parsedStatus.getServiceStatusDust(), equalTo(State.ONLINE));
        assertThat(parsedStatus.getServiceStatusEve(), equalTo(State.ONLINE));
        assertThat(parsedStatus.getServiceStatusServer(), equalTo(State.ONLINE));
        assertThat(parsedStatus.getUserCountsDust(), greaterThan(1));
        assertThat(parsedStatus.getUserCountsEve(), greaterThan(1));
    }
}
| apache-2.0 |
Talend/data-prep | dataprep-backend-service/src/main/java/org/talend/dataprep/schema/csv/CSVDetector.java | 4785 | // ============================================================================
//
// Copyright (C) 2006-2018 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// https://github.com/Talend/data-prep/blob/master/LICENSE
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.dataprep.schema.csv;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import org.apache.commons.lang3.StringUtils;
import org.apache.tika.io.IOUtils;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.mime.MimeTypes;
import org.apache.tika.parser.txt.UniversalEncodingDetector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.talend.dataprep.schema.Detector;
import org.talend.dataprep.schema.Format;
import org.talend.dataprep.schema.FormatUtils;
/**
 * Detector for CSV (plain text) content. Adapts the Tika {@link MimeTypes}
 * mime detection and {@link UniversalEncodingDetector} charset detection to
 * the data-prep {@link Detector} contract.
 */
@Component
@Order(value = 3)
public class CSVDetector implements Detector {

    private static final Logger LOGGER = LoggerFactory.getLogger(CSVDetector.class);

    /**
     * The media type returned by TIKA when a CSV is detected.
     */
    private static final String TEXT_PLAIN = "text/plain";

    /**
     * The TIKA MimeTypes {@link MimeTypes}
     */
    private final MimeTypes mimeTypes = MimeTypes.getDefaultMimeTypes();

    /**
     * The TIKA MimeTypes {@link UniversalEncodingDetector}
     */
    private final UniversalEncodingDetector encodingDetector = new UniversalEncodingDetector();

    /** The csv format family. */
    @Autowired
    private CSVFormatFamily csvFormatFamily;

    /**
     * Reads an input stream and checks if it has a CSV format.
     *
     * The general contract of a detector is to not close the specified stream before returning. It is to the
     * responsibility of the caller to close it. The detector should leverage the mark/reset feature of the specified
     * {@see TikaInputStream} in order to let the stream always return the same bytes.
     *
     * When a full-stream detection fails, a second attempt is made on a copy of
     * the first {@code META_TAG_BUFFER_SIZE} bytes only; mark/reset keeps the
     * original stream untouched for downstream detectors.
     *
     * @param metadata the specified TIKA {@link Metadata}
     * @param inputStream the specified input stream
     * @return either null or an CSV format
     * @throws IOException
     */
    @Override
    public Format detect(Metadata metadata, TikaInputStream inputStream) throws IOException {
        Format result = detectText(metadata, inputStream);
        if (result == null) {
            inputStream.mark(FormatUtils.META_TAG_BUFFER_SIZE);
            byte[] buffer = new byte[FormatUtils.META_TAG_BUFFER_SIZE];
            int n = 0;
            // Accumulate bytes until EOF (read returns -1) or the buffer is full;
            // n tracks how many bytes have been stored so far.
            for (int m = inputStream.read(buffer); m != -1 && n < buffer.length; m =
                    inputStream.read(buffer, n, buffer.length - n)) {
                n += m;
            }
            // Rewind so callers observe an unconsumed stream.
            inputStream.reset();
            String head = FormatUtils.readFromBuffer(buffer, 0, n);
            // Retry detection with fresh Metadata on the extracted head only.
            try (InputStream stream = TikaInputStream.get(IOUtils.toInputStream(head))) {
                result = detectText(new Metadata(), stream);
            }
        }
        return result;
    }

    /**
     * A private utility class used to detect format.
     *
     * Returns a CSV {@link Format} when Tika reports a text/plain media type;
     * the charset falls back to the default encoding when detection fails.
     *
     * @param metadata the specified TIKA {@link Metadata}
     * @param inputStream the specified input stream
     * @return either null or an CSV format
     * @throws IOException
     */
    private Format detectText(Metadata metadata, InputStream inputStream) throws IOException {
        MediaType mediaType = mimeTypes.detect(inputStream, metadata);
        if (mediaType != null) {
            String mediaTypeName = mediaType.toString();
            // text/plain (possibly with parameters such as charset) counts as CSV here.
            if (StringUtils.startsWith(mediaTypeName, TEXT_PLAIN)) {
                Charset charset = null;
                try {
                    charset = encodingDetector.detect(inputStream, metadata);
                } catch (IOException e) {
                    // Best effort: fall back to the default encoding below.
                    LOGGER.debug("Unable to detect the encoding for a data set in CSV format", e);
                }
                if (charset != null) {
                    return new Format(csvFormatFamily, charset.name());
                } else {
                    return new Format(csvFormatFamily, FormatUtils.DEFAULT_ENCODING);
                }
            }
        }
        return null;
    }
}
| apache-2.0 |
spasam/terremark-api | src/main/java/com/terremark/api/CpuComputeResourceSummary.java | 2669 | /**
* Copyright 2012 Terremark Worldwide Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.terremark.api;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>
* Java class for CpuComputeResourceSummary complex type.
* <p>
* The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="CpuComputeResourceSummary">
* <complexContent>
* <extension base="{}ComputeResourceSummary">
* <sequence>
* <element name="Count" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="PoweredOnCount" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*/
// NOTE: JAXB-generated binding class; keep edits minimal so regeneration diffs stay small.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CpuComputeResourceSummary", propOrder = {"count", "poweredOnCount"})
public class CpuComputeResourceSummary extends ComputeResourceSummary {

    // CPU count; nillable in the XML representation, hence the boxed Integer.
    @XmlElement(name = "Count", nillable = true)
    protected Integer count;

    // CPU count in the powered-on state (per the schema element name —
    // exact semantics defined by the Terremark API; confirm against its docs).
    @XmlElement(name = "PoweredOnCount", nillable = true)
    protected Integer poweredOnCount;

    /**
     * Gets the value of the count property.
     *
     * @return possible object is {@link Integer }
     */
    public Integer getCount() {
        return count;
    }

    /**
     * Sets the value of the count property.
     *
     * @param value allowed object is {@link Integer }
     */
    public void setCount(final Integer value) {
        this.count = value;
    }

    /**
     * Gets the value of the poweredOnCount property.
     *
     * @return possible object is {@link Integer }
     */
    public Integer getPoweredOnCount() {
        return poweredOnCount;
    }

    /**
     * Sets the value of the poweredOnCount property.
     *
     * @param value allowed object is {@link Integer }
     */
    public void setPoweredOnCount(final Integer value) {
        this.poweredOnCount = value;
    }
}
| apache-2.0 |
JimSeker/permissions | eclipse/setwallpaper/src/edu/cs4730/setwallpaper/MainFragment.java | 1383 | package edu.cs4730.setwallpaper;
import java.io.IOException;
import android.app.WallpaperManager;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
/**
* This is an example of how to setup the wall paper on the device.
*
*/
public class MainFragment extends Fragment {

    public MainFragment() {
        // Required empty public constructor.
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate this fragment's layout and wire up its two widgets.
        final View rootView = inflater.inflate(R.layout.fragment_main, container, false);

        // Preview the image that the button will install as the wallpaper.
        final ImageView preview = (ImageView) rootView.findViewById(R.id.preview);
        preview.setImageResource(R.drawable.ifixedit);

        final Button setWallpaperButton = (Button) rootView.findViewById(R.id.set);
        setWallpaperButton.setOnClickListener(new Button.OnClickListener() {
            @Override
            public void onClick(View view) {
                applyWallpaper();
            }
        });
        return rootView;
    }

    /**
     * Installs {@code R.drawable.ifixedit} as the device wallpaper via
     * {@link WallpaperManager}. Failures are reported and otherwise ignored,
     * matching the fragment's best-effort behavior.
     */
    private void applyWallpaper() {
        WallpaperManager wallpaperManager =
                WallpaperManager.getInstance(getActivity().getApplicationContext());
        try {
            wallpaperManager.setResource(R.drawable.ifixedit);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
| apache-2.0 |
lessthanoptimal/ejml | main/ejml-ddense/benchmarks/src/org/ejml/dense/row/decomposition/svd/BenchmarkDecompositionSvd_MT_DDRM.java | 2285 | /*
* Copyright (c) 2009-2020, Peter Abeles. All Rights Reserved.
*
* This file is part of Efficient Java Matrix Library (EJML).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ejml.dense.row.decomposition.svd;
import org.ejml.data.DMatrixRMaj;
import org.ejml.dense.row.RandomMatrices_DDRM;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.util.Random;
import java.util.concurrent.TimeUnit;
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 2)
@Measurement(iterations = 5)
@State(Scope.Benchmark)
@Fork(value = 2)
public class BenchmarkDecompositionSvd_MT_DDRM {
    // Alternative sweep: @Param({"100", "500", "1000", "5000", "10000"})
    @Param({"1000"})
    public int size;

    /** Input matrix plus storage for the singular vectors. */
    public DMatrixRMaj A, U, V;

    /** Concurrent implicit-QR SVD, configured to compute compact U and V. */
    SvdImplicitQrDecompose_MT_DDRM svd =
            new SvdImplicitQrDecompose_MT_DDRM(true, true, true, true);

    @Setup
    public void setup() {
        final Random rnd = new Random(234);
        // Tall rectangular input: (2*size) x (size/2), entries in [-1, 1].
        A = RandomMatrices_DDRM.rectangle(size * 2, size / 2, -1, 1, rnd);
        U = new DMatrixRMaj(size, size);
        V = new DMatrixRMaj(size, size);
    }

    /** Decompose and also extract U (untransposed) and V (transposed). */
    @Benchmark
    public void implicit_uv() {
        svd.decompose(A.copy());
        svd.getU(U, false);
        svd.getV(V, true);
    }

    /** Decomposition only, no vector extraction. */
    @Benchmark
    public void implicit() {
        svd.decompose(A.copy());
    }

    public static void main( String[] args ) throws RunnerException {
        final Options options = new OptionsBuilder()
                .include(BenchmarkDecompositionSvd_MT_DDRM.class.getSimpleName())
                .build();
        new Runner(options).run();
    }
}
| apache-2.0 |
toddlipcon/helenus | src/java/com/facebook/infrastructure/analytics/AnalyticsContext.java | 20665 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.infrastructure.analytics;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.log4j.Logger;
import com.facebook.infrastructure.config.DatabaseDescriptor;
import com.facebook.infrastructure.service.IComponentShutdown;
import com.facebook.infrastructure.service.StorageService;
import com.facebook.infrastructure.utils.LogUtil;
/**
* Context for sending metrics to Ganglia. This class drives the entire metric
* collection process.
*
* Author : Avinash Lakshman ( alakshman@facebook.com) & Prashant Malik (
* pmalik@facebook.com ) & Karthik Ranganathan ( kranganathan@facebook.com )
*/
public class AnalyticsContext implements IComponentShutdown {
private static Logger logger_ = Logger.getLogger(AnalyticsContext.class);
private static final String PERIOD_PROPERTY = "period";
private static final String SERVERS_PROPERTY = "servers";
private static final String UNITS_PROPERTY = "units";
private static final String SLOPE_PROPERTY = "slope";
private static final String TMAX_PROPERTY = "tmax";
private static final String DMAX_PROPERTY = "dmax";
private static final String DEFAULT_UNITS = "";
private static final String DEFAULT_SLOPE = "both";
private static final int DEFAULT_TMAX = 60;
private static final int DEFAULT_DMAX = 0;
private static final int DEFAULT_PORT = 8649;
private static final int BUFFER_SIZE = 1500; // as per libgmond.c
private static final Map<Class, String> typeTable_ = new HashMap<Class, String>(
5);
private Map<String, RecordMap> bufferedData_ = new HashMap<String, RecordMap>();
/*
* Keeps the MetricRecord for each abstraction that implements
* IAnalyticsSource
*/
private Map<String, MetricsRecord> recordMap_ = new HashMap<String, MetricsRecord>();
private Map<String, Object> attributeMap_ = new HashMap<String, Object>();
private Set<IAnalyticsSource> updaters = new HashSet<IAnalyticsSource>(1);
private List<InetSocketAddress> metricsServers_;
private Map<String, String> unitsTable_;
private Map<String, String> slopeTable_;
private Map<String, String> tmaxTable_;
private Map<String, String> dmaxTable_;
/* singleton instance */
private static AnalyticsContext instance_;
/* Used to lock the factory for creation of StorageService instance */
private static Lock createLock_ = new ReentrantLock();
/**
* Default period in seconds at which data is sent to the metrics system.
*/
private static final int DEFAULT_PERIOD = 5;
/**
* Port to which we should write the data.
*/
private int port_ = DEFAULT_PORT;
private Timer timer = null;
private int period_ = DEFAULT_PERIOD;
private volatile boolean isMonitoring = false;
private byte[] buffer_ = new byte[BUFFER_SIZE];
private int offset_;
private DatagramSocket datagramSocket_;
static class TagMap extends TreeMap<String, Object> {
private static final long serialVersionUID = 3546309335061952993L;
TagMap() {
super();
}
TagMap(TagMap orig) {
super(orig);
}
}
static class MetricMap extends TreeMap<String, Number> {
private static final long serialVersionUID = -7495051861141631609L;
}
static class RecordMap extends HashMap<TagMap, MetricMap> {
private static final long serialVersionUID = 259835619700264611L;
}
static {
typeTable_.put(String.class, "string");
typeTable_.put(Byte.class, "int8");
typeTable_.put(Short.class, "int16");
typeTable_.put(Integer.class, "int32");
typeTable_.put(Float.class, "float");
}
/**
* Creates a new instance of AnalyticsReporter
*/
public AnalyticsContext() {
StorageService.instance().registerComponentForShutdown(this);
}
/**
* Initializes the context.
*/
public void init(String contextName, String serverSpecList) {
String periodStr = getAttribute(PERIOD_PROPERTY);
if (periodStr != null) {
int period = 0;
try {
period = Integer.parseInt(periodStr);
} catch (NumberFormatException nfe) {
}
if (period <= 0) {
throw new AnalyticsException("Invalid period: " + periodStr);
}
setPeriod(period);
}
metricsServers_ = parse(serverSpecList, port_);
unitsTable_ = getAttributeTable(UNITS_PROPERTY);
slopeTable_ = getAttributeTable(SLOPE_PROPERTY);
tmaxTable_ = getAttributeTable(TMAX_PROPERTY);
dmaxTable_ = getAttributeTable(DMAX_PROPERTY);
try {
datagramSocket_ = new DatagramSocket();
} catch (SocketException se) {
se.printStackTrace();
}
}
/**
* Sends a record to the metrics system.
*/
public void emitRecord(String recordName, OutputRecord outRec)
throws IOException {
// emit each metric in turn
for (String metricName : outRec.getMetricNames()) {
Object metric = outRec.getMetric(metricName);
String type = (String) typeTable_.get(metric.getClass());
emitMetric(metricName, type, metric.toString());
}
}
/**
* Helper which actually writes the metric in XDR format.
*
* @param name
* @param type
* @param value
* @throws IOException
*/
private void emitMetric(String name, String type, String value)
throws IOException {
String units = getUnits(name);
int slope = getSlope(name);
int tmax = getTmax(name);
int dmax = getDmax(name);
offset_ = 0;
xdr_int(0); // metric_user_defined
xdr_string(type);
xdr_string(name);
xdr_string(value);
xdr_string(units);
xdr_int(slope);
xdr_int(tmax);
xdr_int(dmax);
for (InetSocketAddress socketAddress : metricsServers_) {
DatagramPacket packet = new DatagramPacket(buffer_, offset_,
socketAddress);
datagramSocket_.send(packet);
}
}
private String getUnits(String metricName) {
String result = (String) unitsTable_.get(metricName);
if (result == null) {
result = DEFAULT_UNITS;
}
return result;
}
private int getSlope(String metricName) {
String slopeString = (String) slopeTable_.get(metricName);
if (slopeString == null) {
slopeString = DEFAULT_SLOPE;
}
return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
}
private int getTmax(String metricName) {
String tmaxString = (String) tmaxTable_.get(metricName);
if (tmaxString == null) {
return DEFAULT_TMAX;
} else {
return Integer.parseInt(tmaxString);
}
}
private int getDmax(String metricName) {
String dmaxString = (String) dmaxTable_.get(metricName);
if (dmaxString == null) {
return DEFAULT_DMAX;
} else {
return Integer.parseInt(dmaxString);
}
}
/**
* Puts a string into the buffer by first writing the size of the string as an
* int, followed by the bytes of the string, padded if necessary to a multiple
* of 4.
*/
private void xdr_string(String s) {
byte[] bytes = s.getBytes();
int len = bytes.length;
xdr_int(len);
System.arraycopy(bytes, 0, buffer_, offset_, len);
offset_ += len;
pad();
}
/**
* Pads the buffer with zero bytes up to the nearest multiple of 4.
*/
private void pad() {
int newOffset = ((offset_ + 3) / 4) * 4;
while (offset_ < newOffset) {
buffer_[offset_++] = 0;
}
}
/**
* Puts an integer into the buffer as 4 bytes, big-endian.
*/
private void xdr_int(int i) {
buffer_[offset_++] = (byte) ((i >> 24) & 0xff);
buffer_[offset_++] = (byte) ((i >> 16) & 0xff);
buffer_[offset_++] = (byte) ((i >> 8) & 0xff);
buffer_[offset_++] = (byte) (i & 0xff);
}
/**
* Returns the names of all the factory's attributes.
*
* @return the attribute names
*/
public String[] getAttributeNames() {
String[] result = new String[attributeMap_.size()];
int i = 0;
// for (String attributeName : attributeMap.keySet()) {
Iterator<String> it = attributeMap_.keySet().iterator();
while (it.hasNext()) {
result[i++] = it.next();
}
return result;
}
/**
* Sets the named factory attribute to the specified value, creating it if it
* did not already exist. If the value is null, this is the same as calling
* removeAttribute.
*
* @param attributeName
* the attribute name
* @param value
* the new attribute value
*/
public void setAttribute(String attributeName, Object value) {
attributeMap_.put(attributeName, value);
}
/**
* Removes the named attribute if it exists.
*
* @param attributeName
* the attribute name
*/
public void removeAttribute(String attributeName) {
attributeMap_.remove(attributeName);
}
/**
* Returns the value of the named attribute, or null if there is no attribute
* of that name.
*
* @param attributeName
* the attribute name
* @return the attribute value
*/
public String getAttribute(String attributeName) {
return (String) attributeMap_.get(attributeName);
}
/**
* Returns an attribute-value map derived from the factory attributes by
* finding all factory attributes that begin with
* <i>contextName</i>.<i>tableName</i>. The returned map consists of those
* attributes with the contextName and tableName stripped off.
*/
protected Map<String, String> getAttributeTable(String tableName) {
String prefix = tableName + ".";
Map<String, String> result = new HashMap<String, String>();
for (String attributeName : getAttributeNames()) {
if (attributeName.startsWith(prefix)) {
String name = attributeName.substring(prefix.length());
String value = (String) getAttribute(attributeName);
result.put(name, value);
}
}
return result;
}
/**
* Starts or restarts monitoring, the emitting of metrics records.
*/
public void startMonitoring() throws IOException {
if (!isMonitoring) {
startTimer();
isMonitoring = true;
}
}
/**
* Stops monitoring. This does not free buffered data.
*
* @see #close()
*/
public void stopMonitoring() {
if (isMonitoring) {
shutdown();
isMonitoring = false;
}
}
/**
* Returns true if monitoring is currently in progress.
*/
public boolean isMonitoring() {
return isMonitoring;
}
/**
* Stops monitoring and frees buffered data, returning this object to its
* initial state.
*/
public void close() {
stopMonitoring();
clearUpdaters();
}
/**
* Creates a new AbstractMetricsRecord instance with the given
* <code>recordName</code>. Throws an exception if the metrics implementation
* is configured with a fixed set of record names and <code>recordName</code>
* is not in that set.
*
* @param recordName
* the name of the record
* @throws AnalyticsException
* if recordName conflicts with configuration data
*/
public final void createRecord(String recordName) {
if (bufferedData_.get(recordName) == null) {
bufferedData_.put(recordName, new RecordMap());
}
recordMap_.put(recordName, new MetricsRecord(recordName, this));
}
/**
* Return the MetricsRecord associated with this record name.
*
* @param recordName
* the name of the record
* @return newly created instance of MetricsRecordImpl or subclass
*/
public MetricsRecord getMetricsRecord(String recordName) {
return recordMap_.get(recordName);
}
/**
* Registers a callback to be called at time intervals determined by the
* configuration.
*
* @param updater
* object to be run periodically; it should update some metrics
* records
*/
public void registerUpdater(final IAnalyticsSource updater) {
if (!updaters.contains(updater)) {
updaters.add(updater);
}
}
/**
* Removes a callback, if it exists.
*
* @param updater
* object to be removed from the callback list
*/
public void unregisterUpdater(IAnalyticsSource updater) {
updaters.remove(updater);
}
private void clearUpdaters() {
updaters.clear();
}
/**
* Starts timer if it is not already started
*/
private void startTimer() {
if (timer == null) {
timer = new Timer("Timer thread for monitoring AnalyticsContext", true);
TimerTask task = new TimerTask() {
public void run() {
try {
timerEvent();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
};
long millis = period_ * 1000;
timer.scheduleAtFixedRate(task, millis, millis);
}
}
/**
* Stops timer if it is running
*/
public void shutdown() {
if (timer != null) {
timer.cancel();
timer = null;
}
}
/**
* Timer callback.
*/
private void timerEvent() throws IOException {
if (isMonitoring) {
Collection<IAnalyticsSource> myUpdaters;
// we dont need to synchronize as there will not be any
// addition or removal of listeners
myUpdaters = new ArrayList<IAnalyticsSource>(updaters);
// Run all the registered updates without holding a lock
// on this context
for (IAnalyticsSource updater : myUpdaters) {
try {
updater.doUpdates(this);
} catch (Throwable throwable) {
throwable.printStackTrace();
}
}
emitRecords();
}
}
/**
* Emits the records.
*/
private void emitRecords() throws IOException {
for (String recordName : bufferedData_.keySet()) {
RecordMap recordMap = bufferedData_.get(recordName);
synchronized (recordMap) {
for (TagMap tagMap : recordMap.keySet()) {
MetricMap metricMap = recordMap.get(tagMap);
OutputRecord outRec = new OutputRecord(tagMap, metricMap);
emitRecord(recordName, outRec);
}
}
}
flush();
}
/**
* Called each period after all records have been emitted, this method does
* nothing. Subclasses may override it in order to perform some kind of flush.
*/
protected void flush() throws IOException {
}
/**
* Called by MetricsRecordImpl.update(). Creates or updates a row in the
* internal table of metric data.
*/
protected void update(MetricsRecord record) {
String recordName = record.getRecordName();
TagMap tagTable = record.getTagTable();
Map<String, MetricValue> metricUpdates = record.getMetricTable();
RecordMap recordMap = getRecordMap(recordName);
synchronized (recordMap) {
MetricMap metricMap = recordMap.get(tagTable);
if (metricMap == null) {
metricMap = new MetricMap();
TagMap tagMap = new TagMap(tagTable); // clone tags
recordMap.put(tagMap, metricMap);
}
for (String metricName : metricUpdates.keySet()) {
MetricValue updateValue = metricUpdates.get(metricName);
Number updateNumber = updateValue.getNumber();
Number currentNumber = metricMap.get(metricName);
if (currentNumber == null || updateValue.isAbsolute()) {
metricMap.put(metricName, updateNumber);
} else {
Number newNumber = sum(updateNumber, currentNumber);
metricMap.put(metricName, newNumber);
}
}
}
}
private RecordMap getRecordMap(String recordName) {
return bufferedData_.get(recordName);
}
/**
* Adds two numbers, coercing the second to the type of the first.
*
*/
private Number sum(Number a, Number b) {
if (a instanceof Integer) {
return new Integer(a.intValue() + b.intValue());
} else if (a instanceof Float) {
return new Float(a.floatValue() + b.floatValue());
} else if (a instanceof Short) {
return new Short((short) (a.shortValue() + b.shortValue()));
} else if (a instanceof Byte) {
return new Byte((byte) (a.byteValue() + b.byteValue()));
} else {
// should never happen
throw new AnalyticsException("Invalid number type");
}
}
/**
* Called by MetricsRecordImpl.remove(). Removes any matching row in the
* internal table of metric data. A row matches if it has the same tag names
* and tag values.
*/
protected void remove(MetricsRecord record) {
String recordName = record.getRecordName();
TagMap tagTable = record.getTagTable();
RecordMap recordMap = getRecordMap(recordName);
recordMap.remove(tagTable);
}
/**
* Returns the timer period.
*/
public int getPeriod() {
return period_;
}
/**
* Sets the timer period
*/
protected void setPeriod(int period) {
this.period_ = period;
}
/**
* Sets the default port to listen on
*/
public void setPort(int port) {
port_ = port;
}
/**
* Parses a space and/or comma separated sequence of server specifications of
* the form <i>hostname</i> or <i>hostname:port</i>. If the specs string is
* null, defaults to localhost:defaultPort.
*
* @return a list of InetSocketAddress objects.
*/
private static List<InetSocketAddress> parse(String specs, int defaultPort) {
List<InetSocketAddress> result = new ArrayList<InetSocketAddress>(1);
if (specs == null) {
result.add(new InetSocketAddress("localhost", defaultPort));
} else {
String[] specStrings = specs.split("[ ,]+");
for (String specString : specStrings) {
int colon = specString.indexOf(':');
if (colon < 0 || colon == specString.length() - 1) {
result.add(new InetSocketAddress(specString, defaultPort));
} else {
String hostname = specString.substring(0, colon);
int port = Integer.parseInt(specString.substring(colon + 1));
result.add(new InetSocketAddress(hostname, port));
}
}
}
return result;
}
/**
* Starts up the analytics context and registers the VM metrics.
*/
public void start() {
// register the vm analytics object with the analytics context to update the
// data
registerUpdater(new VMAnalyticsSource());
init("analyticsContext", DatabaseDescriptor.getGangliaServers());
try {
startMonitoring();
} catch (IOException e) {
logger_.error(LogUtil.throwableToString(e));
}
}
public void stop() {
close();
}
/**
* Factory method that gets an instance of the StorageService class.
*/
public static AnalyticsContext instance() {
if (instance_ == null) {
AnalyticsContext.createLock_.lock();
try {
if (instance_ == null) {
instance_ = new AnalyticsContext();
}
} finally {
createLock_.unlock();
}
}
return instance_;
}
}
| apache-2.0 |
JaLandry/MeasureAuthoringTool_LatestSprint | mat/test/mat/sprint2Testcase/CodeListExportTest.java | 748 | package mat.sprint2Testcase;
import mat.dao.SpringInitializationTest;
import mat.dao.clause.MeasureDAO;
import mat.dao.clause.MeasureExportDAO;
import mat.server.service.impl.CodeListXLSGenerator;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
public class CodeListExportTest extends SpringInitializationTest {

    // DAOs autowired from the Spring context built by SpringInitializationTest;
    // currently unused by the tests below — presumably kept for future cases.
    @Autowired
    protected MeasureExportDAO measureExportDAO;
    @Autowired
    protected MeasureDAO measureDAO;

    /**
     * Empty placeholder; always passes.
     * NOTE(review): implement the measure-export assertions or remove this
     * method — an empty @Test gives a false sense of coverage.
     */
    @Test
    public void testMeasureExport() {
    }

    /**
     * Smoke test: builds the error workbook and resolves its first sheet by
     * name. Passes as long as no exception is thrown; the returned sheet is
     * intentionally ignored.
     */
    @Test
    public void testGetErrorXLS() {
        CodeListXLSGenerator clgen = new CodeListXLSGenerator();
        HSSFWorkbook wkbk = clgen.getErrorXLS();
        wkbk.getSheet(wkbk.getSheetName(0));
    }
}
| apache-2.0 |
barmintor/fcrepo4 | fcrepo-kernel/src/main/java/org/fcrepo/kernel/utils/FedoraTypesUtils.java | 16513 | /**
* Copyright 2013 DuraSpace, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.utils;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Throwables.propagate;
import static com.google.common.collect.Collections2.transform;
import static com.google.common.collect.ImmutableSet.copyOf;
import static com.hp.hpl.jena.rdf.model.ResourceFactory.createProperty;
import static javax.jcr.PropertyType.BINARY;
import static javax.jcr.query.Query.JCR_SQL2;
import static org.fcrepo.jcr.FedoraJcrTypes.CONTENT_SIZE;
import static org.fcrepo.jcr.FedoraJcrTypes.FEDORA_BINARY;
import static org.fcrepo.jcr.FedoraJcrTypes.FEDORA_DATASTREAM;
import static org.fcrepo.jcr.FedoraJcrTypes.FEDORA_OBJECT;
import static org.fcrepo.jcr.FedoraJcrTypes.FEDORA_RESOURCE;
import static org.fcrepo.kernel.rdf.JcrRdfTools.getRDFNamespaceForJcrNamespace;
import static org.modeshape.jcr.api.JcrConstants.JCR_DATA;
import static org.modeshape.jcr.api.JcrConstants.JCR_PATH;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.Collection;
import java.util.Iterator;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.PropertyIterator;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.nodetype.NodeType;
import javax.jcr.nodetype.NodeTypeManager;
import javax.jcr.nodetype.PropertyDefinition;
import javax.jcr.query.QueryManager;
import javax.jcr.query.QueryResult;
import javax.jcr.query.RowIterator;
import javax.jcr.version.Version;
import javax.jcr.version.VersionHistory;
import org.fcrepo.jcr.FedoraJcrTypes;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.modeshape.jcr.api.Namespaced;
import org.slf4j.Logger;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;
/**
* Convenience class with static methods for manipulating Fedora types in the
* JCR.
*
* @author ajs6f
* @date Feb 14, 2013
*/
public abstract class FedoraTypesUtils implements FedoraJcrTypes {
static final Logger LOGGER = getLogger(FedoraTypesUtils.class);
/**
* Predicate for determining whether this {@link Node} is a Fedora resource.
*/
public static Predicate<Node> isFedoraResource = new Predicate<Node>() {
@Override
public boolean apply(final Node node) {
checkArgument(node != null, "null cannot be a Fedora object!");
try {
return map(node.getMixinNodeTypes(), nodetype2name).contains(
FEDORA_RESOURCE);
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Predicate for determining whether this {@link Node} is a frozen node
* (a part of the system version history).
*/
public static Predicate<Node> isFrozen = new Predicate<Node>() {
@Override
public boolean apply(final Node node) {
checkArgument(node != null, "null cannot be a Frozen node!");
try {
return node.getPrimaryNodeType().getName().equals(FROZEN_NODE);
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Predicate for determining whether this {@link Node} is a Fedora resource
* or is a frozen node that was a fedora resource.
*/
public static Predicate<Node> isOrWasFedoraResource = new Predicate<Node>() {
@Override
public boolean apply(final Node node) {
checkArgument(node != null, "null cannot be a Fedora object!");
try {
if (node.getPrimaryNodeType().getName().equals(FROZEN_NODE)) {
PropertyIterator it = node.getProperties(FROZEN_MIXIN_TYPES);
while (it.hasNext()) {
for (Value v : it.nextProperty().getValues()) {
if (v.getString().equals(FEDORA_RESOURCE)) {
return true;
}
}
}
return false;
} else {
return map(node.getMixinNodeTypes(), nodetype2name).contains(
FEDORA_RESOURCE);
}
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Predicate for determining whether this {@link Node} is a Fedora object.
*/
public static Predicate<Node> isFedoraObject = new Predicate<Node>() {
@Override
public boolean apply(final Node node) {
checkArgument(node != null, "null cannot be a Fedora object!");
try {
return map(node.getMixinNodeTypes(), nodetype2name).contains(
FEDORA_OBJECT);
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Predicate for determining whether this {@link Node} is a Fedora
* datastream.
*/
public static Predicate<Node> isFedoraDatastream = new Predicate<Node>() {
@Override
public boolean apply(final Node node) {
checkArgument(node != null, "null cannot be a Fedora datastream!");
try {
return map(node.getMixinNodeTypes(), nodetype2name).contains(
FEDORA_DATASTREAM);
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Translates a {@link NodeType} to its {@link String} name.
*/
public static Function<NodeType, String> nodetype2name =
new Function<NodeType, String>() {
@Override
public String apply(final NodeType t) {
checkArgument(t != null, "null has no name!");
return t.getName();
}
};
/**
* Translates a JCR {@link Value} to its {@link String} expression.
*/
public static Function<Value, String> value2string =
new Function<Value, String>() {
@Override
public String apply(final Value v) {
try {
checkArgument(v != null, "null has no appropriate "
+ "String representation!");
return v.getString();
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Constructs an {@link Iterator} of {@link Value}s from any
* {@link Property}, multi-valued or not.
*/
public static Function<Property, Iterator<Value>> property2values =
new Function<Property, Iterator<Value>>() {
@Override
public Iterator<Value> apply(final Property p) {
try {
if (p.isMultiple()) {
LOGGER.debug("Found multi-valued property: {}", p);
return Iterators.forArray(p.getValues());
} else {
LOGGER.debug("Found single-valued property: {}", p);
return Iterators.forArray(new Value[] {p.getValue()});
}
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Check if a JCR property is a multivalued property or not
*/
public static Predicate<Property> isMultipleValuedProperty =
new Predicate<Property>() {
@Override
public boolean apply(final Property p) {
checkNotNull(p, "null is neither multiple nor not multiple!");
try {
return p.isMultiple();
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Check if a JCR property is a binary jcr:data property
*/
public static Predicate<Property> isBinaryContentProperty =
new Predicate<Property>() {
@Override
public boolean apply(final Property p) {
checkNotNull(p, "null is neither binary nor not binary!");
try {
return p.getType() == BINARY && p.getName().equals(JCR_DATA);
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Check if a node is "internal" and should not be exposed e.g. via the REST
* API
*/
public static Predicate<Node> isInternalNode = new Predicate<Node>() {
@Override
public boolean apply(final Node n) {
checkNotNull(n, "null is neither internal nor not internal!");
try {
final NodeType primaryNodeType = n.getPrimaryNodeType();
return primaryNodeType != null
&& primaryNodeType.isNodeType("mode:system");
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* Map a JCR property to an RDF property with the right namespace URI and
* local name
*/
public static Function<Property, com.hp.hpl.jena.rdf.model.Property> getPredicateForProperty =
new Function<Property, com.hp.hpl.jena.rdf.model.Property>() {
@Override
public com.hp.hpl.jena.rdf.model.Property apply(
final Property property) {
LOGGER.trace("Creating predicate for property: {}", property);
try {
if (property instanceof Namespaced) {
final Namespaced nsProperty = (Namespaced) property;
final String uri = nsProperty.getNamespaceURI();
return createProperty(
getRDFNamespaceForJcrNamespace(uri), nsProperty
.getLocalName());
} else {
return createProperty(property.getName());
}
} catch (final RepositoryException e) {
throw propagate(e);
}
}
};
/**
* ISODateTimeFormat is thread-safe and immutable, and the formatters it
* returns are as well.
*/
private static final DateTimeFormatter FMT = ISODateTimeFormat.dateTime();
/**
* Get the JCR Node Type manager
*
* @param node
* @return
* @throws RepositoryException
*/
public static NodeTypeManager getNodeTypeManager(final Node node) throws RepositoryException {
return node.getSession().getWorkspace().getNodeTypeManager();
}
/**
 * Resolve the property definition (carrying type and multi-value
 * information) for a named property.
 *
 * The node's primary node type is searched first; if no definition matches
 * there, each mixin node type is searched in declaration order.
 *
 * @param node the node to use for inferring the property definition
 * @param propertyName the property name to retrieve a definition for
 * @return a JCR PropertyDefinition, if available, or null
 * @throws javax.jcr.RepositoryException
 */
public static PropertyDefinition getDefinitionForPropertyName(final Node node,
        final String propertyName) throws RepositoryException {
    final PropertyDefinition[] primaryDefinitions =
            node.getPrimaryNodeType().getPropertyDefinitions();
    LOGGER.debug("Looking for property name: {}", propertyName);
    for (final PropertyDefinition candidate : primaryDefinitions) {
        LOGGER.debug("Checking property: {}", candidate.getName());
        if (candidate.getName().equals(propertyName)) {
            return candidate;
        }
    }
    // Not declared on the primary type: fall back to the mixin types.
    for (final NodeType mixinType : node.getMixinNodeTypes()) {
        for (final PropertyDefinition candidate : mixinType.getPropertyDefinitions()) {
            if (candidate.getName().equals(propertyName)) {
                return candidate;
            }
        }
    }
    return null;
}
/**
 * Convenience method for transforming arrays into {@link Collection}s
 * through a mapping {@link Function}.
 *
 * @param input an array of F elements (the original doc's "A Collection<F>"
 *        was inaccurate: the parameter is an array)
 * @param f A Function<F,T>.
 * @return input copied and transformed by f. NOTE(review): the result is
 *         Guava's lazily-transformed view of the copy; whether the copy is a
 *         set or a list depends on which statically-imported {@code copyOf}
 *         is in scope — confirm against the file's imports.
 */
public static <F, T> Collection<T> map(final F[] input,
        final Function<F, T> f) {
    return transform(copyOf(input), f);
}
/**
 * @param date milliseconds since the Unix epoch (the previous doc wrongly
 *             described this parameter as an instance of java.util.Date)
 * @return the lexical form of the XSD dateTime value in UTC, e.g.
 *         "2006-11-13T09:40:55.001Z".
 */
public static String convertDateToXSDString(final long date) {
    final DateTime dt = new DateTime(date, DateTimeZone.UTC);
    return FMT.print(dt);
}
/**
 * Get the JCR base version for a node.
 *
 * @param node the node whose base version is requested
 * @return the base version recorded by the workspace's version manager
 * @throws RepositoryException if the repository cannot be accessed
 */
public static Version getBaseVersion(final Node node) throws RepositoryException {
    final Session session = node.getSession();
    return session.getWorkspace().getVersionManager().getBaseVersion(node.getPath());
}
/**
 * Get the JCR VersionHistory for an existing node.
 *
 * Delegates to the (session, path) overload using the node's own session
 * and path.
 *
 * @param node the node whose version history is requested
 * @return the node's version history
 * @throws RepositoryException if the repository cannot be accessed
 */
public static VersionHistory getVersionHistory(final Node node) throws RepositoryException {
    return getVersionHistory(node.getSession(), node.getPath());
}
/**
 * Get the JCR VersionHistory for a node at a given JCR path.
 *
 * @param session the session used to reach the workspace's version manager
 * @param path absolute JCR path of the node
 * @return the version history for the node at that path
 * @throws RepositoryException if the repository cannot be accessed
 */
public static VersionHistory getVersionHistory(final Session session,
        final String path) throws RepositoryException {
    return session.getWorkspace().getVersionManager().getVersionHistory(
            path);
}
/**
 * Count the fedora object nodes in the repository.
 *
 * (The previous doc — "a double of the size of the fedora:datastream binary
 * content" — was copied from getRepositorySize and did not describe this
 * method; the query below selects FROM the fedora object type and returns
 * the row count.)
 *
 * @param repository the repository to query
 * @return the number of fedora object nodes found
 * @throws RepositoryException
 */
public static long getRepositoryCount(final Repository repository)
    throws RepositoryException {
    final Session session = repository.login();
    try {
        final QueryManager queryManager =
            session.getWorkspace().getQueryManager();
        final String querystring =
            "SELECT [" + JCR_PATH + "] FROM ["
                    + FEDORA_OBJECT + "]";
        final QueryResult queryResults =
            queryManager.createQuery(querystring, JCR_SQL2).execute();
        return queryResults.getRows().getSize();
    } finally {
        // Always release the session, even when the query fails.
        session.logout();
    }
}
/**
 * Sum the content sizes of all fedora binary nodes in the repository.
 *
 * @param repository the repository to query
 * @return the total of the recorded content-size values, in the units the
 *         repository stores (previous doc said "a double"; the return type
 *         is long)
 * @throws RepositoryException
 */
public static long getRepositorySize(final Repository repository)
    throws RepositoryException {
    final Session session = repository.login();
    // Bug fix: the session was previously logged out only on the success
    // path, leaking it whenever the query threw. Mirror the try/finally
    // pattern used by getRepositoryCount.
    try {
        long sum = 0;
        final QueryManager queryManager =
            session.getWorkspace().getQueryManager();
        final String querystring =
            "SELECT [" + CONTENT_SIZE + "] FROM [" +
                    FEDORA_BINARY + "]";
        final QueryResult queryResults =
            queryManager.createQuery(querystring, JCR_SQL2).execute();
        for (final RowIterator rows = queryResults.getRows(); rows.hasNext();) {
            final Value value =
                rows.nextRow().getValue(CONTENT_SIZE);
            sum += value.getLong();
        }
        return sum;
    } finally {
        session.logout();
    }
}
}
| apache-2.0 |
nagyist/marketcetera | trunk/core/src/main/java/org/marketcetera/trade/ExecutionTransType.java | 2198 | package org.marketcetera.trade;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.marketcetera.util.misc.ClassVersion;
import quickfix.field.ExecTransType;
/* $License$ */
/**
 * Specific execution report status.
 *
 * @author anshul@marketcetera.com
 * @version $Id: ExecutionTransType.java 16613 2013-07-03 19:28:31Z colin $
 * @since 1.0.0
 */
@ClassVersion("$Id: ExecutionTransType.java 16613 2013-07-03 19:28:31Z colin $") //$NON-NLS-1$
public enum ExecutionTransType {
    /**
     * Sentinel value for Execution Transaction Type that the system is not currently
     * aware of.
     */
    Unknown(Character.MIN_VALUE),
    New(ExecTransType.NEW),
    Cancel(ExecTransType.CANCEL),
    Correct(ExecTransType.CORRECT),
    Status(ExecTransType.STATUS);

    /**
     * The FIX char value for this instance.
     *
     * @return FIX char value for this instance.
     */
    public char getFIXValue() {
        return mFIXValue;
    }

    /**
     * Returns the ExecutionTransType instance given the FIX char value.
     *
     * @param inValue the FIX char value.
     *
     * @return the matching instance, or {@link #Unknown} if unrecognized.
     */
    public static ExecutionTransType getInstanceForFIXValue(char inValue) {
        final ExecutionTransType match = mFIXValueMap.get(inValue);
        if (match == null) {
            return Unknown;
        }
        return match;
    }

    /**
     * Creates an instance.
     *
     * @param inFIXValue the FIX char value for this instance.
     */
    private ExecutionTransType(char inFIXValue) {
        mFIXValue = inFIXValue;
    }

    /**
     * Builds the immutable FIX-value lookup table. Safe to call from the
     * static initializer because enum constants are constructed before
     * static fields are initialized.
     */
    private static Map<Character, ExecutionTransType> buildLookupTable() {
        final Map<Character, ExecutionTransType> lookup =
                new HashMap<Character, ExecutionTransType>();
        for (final ExecutionTransType type : values()) {
            lookup.put(type.getFIXValue(), type);
        }
        return Collections.unmodifiableMap(lookup);
    }

    /**
     * underlying FIX value
     */
    private final char mFIXValue;

    /**
     * all values by FIX value
     */
    private static final Map<Character, ExecutionTransType> mFIXValueMap = buildLookupTable();
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p335/Production6701.java | 2132 | package org.gradle.test.performance.mediummonolithicjavaproject.p335;
import org.gradle.test.performance.mediummonolithicjavaproject.p334.Production6698;
import org.gradle.test.performance.mediummonolithicjavaproject.p334.Production6699;
public class Production6701 {
private Production6698 property0;
public Production6698 getProperty0() {
return property0;
}
public void setProperty0(Production6698 value) {
property0 = value;
}
private Production6699 property1;
public Production6699 getProperty1() {
return property1;
}
public void setProperty1(Production6699 value) {
property1 = value;
}
private Production6700 property2;
public Production6700 getProperty2() {
return property2;
}
public void setProperty2(Production6700 value) {
property2 = value;
}
private String property3;
public String getProperty3() {
return property3;
}
public void setProperty3(String value) {
property3 = value;
}
private String property4;
public String getProperty4() {
return property4;
}
public void setProperty4(String value) {
property4 = value;
}
private String property5;
public String getProperty5() {
return property5;
}
public void setProperty5(String value) {
property5 = value;
}
private String property6;
public String getProperty6() {
return property6;
}
public void setProperty6(String value) {
property6 = value;
}
private String property7;
public String getProperty7() {
return property7;
}
public void setProperty7(String value) {
property7 = value;
}
private String property8;
public String getProperty8() {
return property8;
}
public void setProperty8(String value) {
property8 = value;
}
private String property9;
public String getProperty9() {
return property9;
}
public void setProperty9(String value) {
property9 = value;
}
} | apache-2.0 |
openmg/metagraph-driver-java | src/test/java/io/metagraph/driver/GraphTest.java | 1177 | package io.metagraph.driver;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URL;
/**
* Created by (zhaoliang@metagraph.io) on (17-2-15).
*/
public class GraphTest {
private static Metagraph metagraph;
private static Graph open;
private static Logger logger = LoggerFactory.getLogger(GraphTest.class);
@BeforeClass
public static void setUp() throws Exception {
metagraph = new Metagraph(new URL("http://192.168.199.189:8080"), "openmg", "openmg");
Graph testGraph = metagraph.create("testGraph3");
open = metagraph.open(testGraph.getGraphId());
}
@AfterClass
public static void tearDown() throws Exception {
}
@Test
public void gremlin() throws Exception {
logger.info("graphId={}", open.getGraphId());
open.gremlin("g.V().count()", "tp");
}
@Test
public void traversal() throws Exception {
String jsonFormat = "{\"gremlin\": \"%s\"}";
String parameters = String.format(jsonFormat, "g.V().count()");
open.traversal(parameters);
}
} | apache-2.0 |
akjava/akjava_gwtlib | src/com/akjava/lib/common/tag/ValidAttributeMapConstraint.java | 1551 | package com.akjava.lib.common.tag;
import com.google.common.base.CharMatcher;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.MapConstraint;
/**
 * Map constraint that rejects entries whose key is not a valid attribute
 * identifier (must start with an ASCII letter, followed only by letters,
 * digits or underscores). Values are not validated.
 */
public class ValidAttributeMapConstraint implements MapConstraint<String,String>{
	private ValidAttributeMapConstraint(){}

	// Fix: the singleton used to be lazily initialized without
	// synchronization, which could construct multiple instances under
	// concurrent first access. The class is stateless, so eager
	// initialization is free and inherently thread-safe.
	private static final ValidAttributeMapConstraint validAttributeMapConstraint =
			new ValidAttributeMapConstraint();

	public static ValidAttributeMapConstraint getValidAttributeMapConstraint(){
		return validAttributeMapConstraint;
	}

	/** ASCII letters A-Z / a-z. */
	public final static CharMatcher AZ=CharMatcher.inRange('A', 'Z').or(CharMatcher.inRange('a', 'z'));

	/** Characters allowed after the first character: letters, digits, '_'. */
	public final static CharMatcher IDENTIFIER_CHAR = CharMatcher.is('_')
			.or(AZ)
			.or(CharMatcher.inRange('0', '9'))
			.precomputed();

	/**
	 * Rejects the entry when the key is not a valid identifier; only the
	 * key is checked, the value is accepted as-is.
	 *
	 * @throws IllegalArgumentException when the key is invalid
	 */
	@Override
	public void checkKeyValue(String key, String value) {
		if(!getValidAttributePredicate().apply(key)){
			throw new IllegalArgumentException("invalid key:"+key);
		}
	}

	public ValidAttributePredicate getValidAttributePredicate(){
		return ValidAttributePredicate.INSTANCE;
	}

	/** Predicate: non-empty, starts with a letter, identifier chars only. */
	public enum ValidAttributePredicate implements Predicate<String>{
		INSTANCE;
		@Override
		public boolean apply(String input) {
			if(Strings.isNullOrEmpty(input)){
				return false;
			}
			if(!AZ.matches(input.charAt(0))){
				return false;
			}
			return IDENTIFIER_CHAR.matchesAllOf(input);
		}
	}
}
| apache-2.0 |
pqpo/analysis | src/chapter04/FindMaxSubarray.java | 1429 | package chapter04;
import util.ArrayUtils;
/**
 * Maximum contiguous-subarray sum, computed by divide and conquer in
 * O(n log n). (Class comment translated from the original Chinese:
 * "find the maximum subarray sum".)
 *
 * @author pqpo
 */
public class FindMaxSubarray {
	public static void main(String[] args) {
		int[] randomIntArray = ArrayUtils.getRandomIntArray(5);
		System.out.println(ArrayUtils.join(randomIntArray));
		System.out.println(FindMaxSubarray.findMaxSubarraySum(randomIntArray));
	}

	/**
	 * Returns the maximum sum over all non-empty contiguous subarrays.
	 *
	 * @param src the input array; must not be null
	 * @return the maximum subarray sum, or Integer.MIN_VALUE for an empty
	 *         array (sentinel kept for backward compatibility)
	 * @throws IllegalArgumentException if src is null
	 */
	public static int findMaxSubarraySum(int[] src){
		if(src==null){
			throw new IllegalArgumentException("src must not be null");
		}
		if(src.length==0){
			return Integer.MIN_VALUE;
		}
		return findMaxSubarraySum(src,0,src.length-1);
	}

	/** Recursive divide-and-conquer over the inclusive range [start, end]. */
	private static int findMaxSubarraySum(int[] src,int start,int end){
		if(start==end){
			return src[start];
		}
		// Fix: (start+end)/2 can overflow int for very large indices;
		// this form cannot.
		int mid = start + (end - start) / 2;
		int maxL = findMaxSubarraySum(src,start,mid);
		int maxR = findMaxSubarraySum(src,mid+1,end);
		int maxCross = findMaxCrossSubarraySum(src,start,mid,end);
		if(maxL>=maxR&&maxL>=maxCross){
			return maxL;
		}else if(maxR>=maxL&&maxR>=maxCross){
			return maxR;
		}else{
			return maxCross;
		}
	}

	/**
	 * Best sum of any subarray that crosses the midpoint: the best suffix
	 * ending at mid plus the best prefix starting at mid+1.
	 */
	private static int findMaxCrossSubarraySum(int[] src, int start, int mid, int end) {
		int maxL = Integer.MIN_VALUE;
		int sum = 0;
		for(int i=mid;i>=start;i--){
			sum += src[i];
			if(sum>maxL){
				maxL = sum;
			}
		}
		int maxR = Integer.MIN_VALUE;
		sum = 0;
		for(int i=mid+1;i<=end;i++){
			sum += src[i];
			if(sum>maxR){
				maxR = sum;
			}
		}
		return maxR+maxL;
	}
}
| apache-2.0 |
dernasherbrezon/jradio | src/main/java/ru/r2cloud/jradio/amical1/ComputingUnitFlags.java | 3704 | package ru.r2cloud.jradio.amical1;
/**
 * Status flags of the computing unit, unpacked from a 16-bit telemetry word.
 * Bit 0 is onyxOn, bit 15 is adcReady; see the constructor for the full
 * bit-to-flag mapping.
 */
public class ComputingUnitFlags {
    private boolean onyxOn;
    private boolean llcOnyxFault;
    private boolean llcSramFault;
    private boolean fault1v8r;
    private boolean fault1v8m;
    private boolean fault3v3v12;
    private boolean picReadyRaw;
    private boolean picReadyConv;
    private boolean picReadyCompressed;
    private boolean picReadyCompressed8;
    private boolean sdPicWriteOk;
    private boolean sdPicReadOk;
    private boolean sdGetInfoOk;
    private boolean sdEraseOk;
    private boolean sdFull;
    private boolean adcReady;

    /** Creates an instance with every flag cleared. */
    public ComputingUnitFlags() {
        // do nothing
    }

    /**
     * Unpacks the flags from the raw telemetry word, one flag per bit
     * starting at bit 0.
     *
     * @param value raw 16-bit flag word
     */
    public ComputingUnitFlags(int value) {
        // Refactor: the sixteen inline "(value >> n) & 0x1" expressions are
        // collapsed into a single helper, removing the duplication (and the
        // stylistic inconsistency of bit 0, which omitted the shift).
        onyxOn = bit(value, 0);
        llcOnyxFault = bit(value, 1);
        llcSramFault = bit(value, 2);
        fault1v8r = bit(value, 3);
        fault1v8m = bit(value, 4);
        fault3v3v12 = bit(value, 5);
        picReadyRaw = bit(value, 6);
        picReadyConv = bit(value, 7);
        picReadyCompressed = bit(value, 8);
        picReadyCompressed8 = bit(value, 9);
        sdPicWriteOk = bit(value, 10);
        sdPicReadOk = bit(value, 11);
        sdGetInfoOk = bit(value, 12);
        sdEraseOk = bit(value, 13);
        sdFull = bit(value, 14);
        adcReady = bit(value, 15);
    }

    /** Returns true when bit {@code position} of {@code value} is set. */
    private static boolean bit(int value, int position) {
        return ((value >> position) & 0x1) > 0;
    }

    public boolean isOnyxOn() {
        return onyxOn;
    }

    public void setOnyxOn(boolean onyxOn) {
        this.onyxOn = onyxOn;
    }

    public boolean isLlcOnyxFault() {
        return llcOnyxFault;
    }

    public void setLlcOnyxFault(boolean llcOnyxFault) {
        this.llcOnyxFault = llcOnyxFault;
    }

    public boolean isLlcSramFault() {
        return llcSramFault;
    }

    public void setLlcSramFault(boolean llcSramFault) {
        this.llcSramFault = llcSramFault;
    }

    public boolean isFault1v8r() {
        return fault1v8r;
    }

    public void setFault1v8r(boolean fault1v8r) {
        this.fault1v8r = fault1v8r;
    }

    public boolean isFault1v8m() {
        return fault1v8m;
    }

    public void setFault1v8m(boolean fault1v8m) {
        this.fault1v8m = fault1v8m;
    }

    public boolean isFault3v3v12() {
        return fault3v3v12;
    }

    public void setFault3v3v12(boolean fault3v3v12) {
        this.fault3v3v12 = fault3v3v12;
    }

    public boolean isPicReadyRaw() {
        return picReadyRaw;
    }

    public void setPicReadyRaw(boolean picReadyRaw) {
        this.picReadyRaw = picReadyRaw;
    }

    public boolean isPicReadyConv() {
        return picReadyConv;
    }

    public void setPicReadyConv(boolean picReadyConv) {
        this.picReadyConv = picReadyConv;
    }

    public boolean isPicReadyCompressed() {
        return picReadyCompressed;
    }

    public void setPicReadyCompressed(boolean picReadyCompressed) {
        this.picReadyCompressed = picReadyCompressed;
    }

    public boolean isPicReadyCompressed8() {
        return picReadyCompressed8;
    }

    public void setPicReadyCompressed8(boolean picReadyCompressed8) {
        this.picReadyCompressed8 = picReadyCompressed8;
    }

    public boolean isSdPicWriteOk() {
        return sdPicWriteOk;
    }

    public void setSdPicWriteOk(boolean sdPicWriteOk) {
        this.sdPicWriteOk = sdPicWriteOk;
    }

    public boolean isSdPicReadOk() {
        return sdPicReadOk;
    }

    public void setSdPicReadOk(boolean sdPicReadOk) {
        this.sdPicReadOk = sdPicReadOk;
    }

    public boolean isSdGetInfoOk() {
        return sdGetInfoOk;
    }

    public void setSdGetInfoOk(boolean sdGetInfoOk) {
        this.sdGetInfoOk = sdGetInfoOk;
    }

    public boolean isSdEraseOk() {
        return sdEraseOk;
    }

    public void setSdEraseOk(boolean sdEraseOk) {
        this.sdEraseOk = sdEraseOk;
    }

    public boolean isSdFull() {
        return sdFull;
    }

    public void setSdFull(boolean sdFull) {
        this.sdFull = sdFull;
    }

    public boolean isAdcReady() {
        return adcReady;
    }

    public void setAdcReady(boolean adcReady) {
        this.adcReady = adcReady;
    }
}
| apache-2.0 |
wangqi/gameserver | server/src/main/java/com/xinqihd/sns/gameserver/entity/rank/RankType.java | 257 | package com.xinqihd.sns.gameserver.entity.rank;
/**
 * Leaderboard / ranking type. (Doc translated from the original Chinese.)
 *
 * GLOBAL: overall ranking
 * FRIEND: friends ranking
 * GUILD:  guild ranking
 * ONLINE: online-players ranking
 * PVE / WORLD: presumably PvE and world-wide rankings — the original doc
 * did not describe these two constants; TODO confirm with callers.
 *
 * @author wangqi
 *
 */
public enum RankType {
    GLOBAL,
    FRIEND,
    GUILD,
    ONLINE,
    PVE,
    WORLD,
}
| apache-2.0 |
mesosphere/dcos-commons | sdk/scheduler/src/main/java/com/mesosphere/sdk/offer/evaluate/PlacementRuleEvaluationStage.java | 1286 | package com.mesosphere.sdk.offer.evaluate;
import com.mesosphere.sdk.offer.MesosResourcePool;
import com.mesosphere.sdk.offer.evaluate.placement.PlacementRule;
import org.apache.mesos.Protos;
import java.util.Collection;
/**
 * Offer evaluation stage that applies an optional
 * {@link com.mesosphere.sdk.offer.evaluate.placement.PlacementRule} to the
 * offer being evaluated, given the set of tasks already deployed. When no
 * rule is configured, the stage always passes.
 */
public class PlacementRuleEvaluationStage implements OfferEvaluationStage {
    // Tasks already running; placement rules may constrain placement
    // relative to them.
    private final Collection<Protos.TaskInfo> deployedTasks;
    // May be null, meaning "no placement constraint".
    private final PlacementRule placementRule;

    public PlacementRuleEvaluationStage(
            Collection<Protos.TaskInfo> deployedTasks,
            PlacementRule placementRule)
    {
        this.deployedTasks = deployedTasks;
        this.placementRule = placementRule;
    }

    @Override
    public EvaluationOutcome evaluate(
            MesosResourcePool mesosResourcePool,
            PodInfoBuilder podInfoBuilder)
    {
        if (placementRule != null) {
            return placementRule.filter(
                    mesosResourcePool.getOffer(),
                    podInfoBuilder.getPodInstance(),
                    deployedTasks);
        }
        return EvaluationOutcome.pass(this, "No placement rule defined").build();
    }
}
| apache-2.0 |
facebook/litho | litho-sections-annotations/src/main/java/com/facebook/litho/sections/annotations/OnBindService.java | 1518 | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.litho.sections.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * The method with this annotation will be called whenever the Service has been created or
 * transferred from the old tree to the new tree and is therefore ready to be used.
 *
 * <p>This method is the proper place to start something like a network request or register a
 * listener on the Service.
 *
 * <p>For example:
 *
 * <pre>
 *
 * {@literal @}DiffSectionSpec
 * public class MyChangeSetSpec {
 *
 *   {@literal @}OnBindService
 *   protected void onBindService(
 *       SectionContext c,
 *       SomeService someService,
 *      {@literal @}Prop SomeProp prop) {
 *     myService.startDoingSomething(prop);
 *     myService.registerListener(...);
 *   }
 * }
 * </pre>
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface OnBindService {}
| apache-2.0 |
Ooppa/iot-industrial-internet | iot-industrial-internet/src/test/java/fi/iot/iiframework/services/domain/ServiceIntegrationTest.java | 7164 | /*
* IoT - Industrial Internet Framework
* Apache License Version 2.0, January 2004
* Released as a part of Helsinki University
* Software Engineering Lab in summer 2015
*/
package fi.iot.iiframework.services.domain;
import fi.iot.iiframework.application.TestConfig;
import fi.iot.iiframework.domain.InformationSource;
import fi.iot.iiframework.domain.Readout;
import fi.iot.iiframework.domain.Sensor;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Restrictions;
import org.hibernate.exception.ConstraintViolationException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.transaction.TransactionConfiguration;
import org.springframework.transaction.annotation.Transactional;
@TransactionConfiguration(defaultRollback = true)
@Transactional
@SpringApplicationConfiguration(classes = {TestConfig.class})
@RunWith(SpringJUnit4ClassRunner.class)
public class ServiceIntegrationTest {

    // Two sources, two sensors (one per source), three readouts
    // (r1 and r2 on s1, r3 on s2) — built fresh before each test.
    InformationSource i1;
    InformationSource i2;
    Sensor s1;
    Sensor s2;
    Readout r1;
    Readout r2;
    Readout r3;

    @Autowired
    private InformationSourceService sourceService;
    @Autowired
    private ReadoutService readoutService;
    @Autowired
    private SensorService sensorService;

    @Before
    public void setUp() {
        i1 = new InformationSource();
        i2 = new InformationSource();
        s1 = InformationSourceObjectProvider.provideSensor();
        s2 = InformationSourceObjectProvider.provideSensor();
        s1.setSource(i1);
        s2.setSource(i2);
        r1 = InformationSourceObjectProvider.provideReadout();
        r2 = InformationSourceObjectProvider.provideReadout();
        r3 = InformationSourceObjectProvider.provideReadout();
        r1.setValue(21.0);
        r2.setValue(23.0);
        r3.setValue(22.1);
        r1.setSensor(s1);
        r2.setSensor(s1);
        r3.setSensor(s2);
        sourceService.save(i1);
        sourceService.save(i2);
    }

    /** Persists both test sensors. Extracted from the repeated sequence in most tests. */
    private void persistSensors() {
        sensorService.save(s1);
        sensorService.save(s2);
    }

    /** Persists both sensors and all three readouts. */
    private void persistSensorsAndReadouts() {
        persistSensors();
        readoutService.save(r1);
        readoutService.save(r2);
        readoutService.save(r3);
    }

    @Test
    public void informationSourceIsSaved() {
        assertEquals(2, (long) sourceService.count());
    }

    @Test
    public void sensorsCanBeFoundBySource() {
        persistSensors();
        List<Sensor> sensors = sensorService.getBy(i1);
        assertEquals(1, sensors.size());
        assertTrue(sensors.contains(s1));
        assertFalse(sensors.contains(s2));
    }

    @Test
    public void anIdIsGeneratedAutomaticallyWhenSaved() {
        sensorService.save(s1);
        assertNotNull(s1.getId());
    }

    @Test
    public void sensorCanBeRetrieved() {
        sensorService.save(s1);
        assertEquals(s1, sensorService.get(s1.getId()));
    }

    @Test
    public void allSensorsCanBeRetrieved() {
        persistSensors();
        List<Sensor> sensors = sensorService.getAll();
        assertTrue(sensors.contains(s1));
        assertTrue(sensors.contains(s2));
    }

    @Test
    public void sensorsCanBeFoundFromIndexToIndex() {
        persistSensors();
        List<Sensor> sensors = sensorService.get(0, 0);
        assertEquals(1, sensors.size());
    }

    @Test
    public void sensorsCanBeCounted() {
        persistSensors();
        assertEquals(2, (long) sensorService.count());
    }

    @Test
    public void sensorsCanBeCountedBySource() {
        persistSensors();
        assertEquals(1, (long) sensorService.countBy(i1));
    }

    @Test
    public void readoutCanBeSavedAndRetrieved() {
        sensorService.save(s1);
        readoutService.save(r1);
        assertEquals(r1, readoutService.get(r1.getId()));
    }

    @Test
    public void allReadoutsCanBeRetrieved() {
        persistSensorsAndReadouts();
        List<Readout> readouts = readoutService.getAll();
        assertTrue(readouts.contains(r1));
        assertTrue(readouts.contains(r2));
        assertTrue(readouts.contains(r3));
    }

    @Test
    public void readoutsCanBeFoundFromIndexToIndex() {
        persistSensorsAndReadouts();
        List<Readout> readouts = readoutService.get(1, 2);
        assertEquals(2, readouts.size());
    }

    @Test
    public void readoutsCanBeFoundBySensor() {
        persistSensorsAndReadouts();
        List<Readout> readReadouts = readoutService.getBy(s1);
        assertTrue(readReadouts.contains(r1));
        assertTrue(readReadouts.contains(r2));
    }

    @Test
    public void readoutsNotConnectedToSensorNotReturnedWhenSearchingBySensor() {
        persistSensorsAndReadouts();
        List<Readout> readReadouts = readoutService.getBy(s1);
        assertFalse(readReadouts.contains(r3));
    }

    @Test
    public void readoutsCanBeCounted() {
        persistSensorsAndReadouts();
        assertEquals(3, (long) readoutService.count());
    }

    @Test
    public void readoutsCanBeCountedBySensor() {
        persistSensorsAndReadouts();
        assertEquals(2, (long) readoutService.countBy(s1));
    }

    @Test
    public void readoutsCanBeFiltered() {
        persistSensorsAndReadouts();
        List<Criterion> criterions = new ArrayList<>();
        // r2 (23.0) and r3 (22.1) are >= 22.0; r1 (21.0) is filtered out.
        Criterion c1 = Restrictions.ge("value", 22.0);
        criterions.add(c1);
        assertEquals(2, (long) readoutService.getBy(0, 2, criterions).size());
    }

    @Test(expected = ConstraintViolationException.class)
    public void uniqueConstraintExceptionIsThrown() {
        sensorService.save(s1);
        readoutService.save(r1);
        // Same (time, sensor, value) as r1: violates the unique constraint.
        Readout r4 = new Readout();
        r4.setTime(r1.getTime());
        r4.setSensor(r1.getSensor());
        r4.setValue(r1.getValue());
        readoutService.save(r4);
    }
}
| apache-2.0 |
sdwilsh/buck | src/com/facebook/buck/jvm/java/JavaTest.java | 25125 | /*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.ExportDependencies;
import com.facebook.buck.rules.ExternalTestRunnerRule;
import com.facebook.buck.rules.ExternalTestRunnerTestSpec;
import com.facebook.buck.rules.HasPostBuildSteps;
import com.facebook.buck.rules.HasRuntimeDeps;
import com.facebook.buck.rules.Label;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TestRule;
import com.facebook.buck.step.AbstractExecutionStep;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.TargetDevice;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.test.TestCaseSummary;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.TestRunningOptions;
import com.facebook.buck.test.XmlTestResultParser;
import com.facebook.buck.test.result.type.ResultType;
import com.facebook.buck.test.selectors.TestSelectorList;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.ZipFileTraversal;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.annotation.Nullable;
@SuppressWarnings("PMD.TestClassWithoutTestCases")
public class JavaTest
extends AbstractBuildRule
implements TestRule, HasClasspathEntries, HasRuntimeDeps, HasPostBuildSteps,
ExternalTestRunnerRule, ExportDependencies {
  // Flavor identifying the library rule that holds the compiled test classes.
  public static final Flavor COMPILED_TESTS_LIBRARY_FLAVOR = ImmutableFlavor.of("testsjar");

  // TODO(#9027062): Migrate this to a PackagedResource so we don't make assumptions
  // about the ant build.
  private static final Path TESTRUNNER_CLASSES =
      Paths.get(
          System.getProperty(
              "buck.testrunner_classes",
              new File("build/testrunner/classes").getAbsolutePath()));

  // Library containing the compiled test classes this rule runs.
  private final JavaLibrary compiledTestsLibrary;

  // Extra classpath entries appended when launching the test JVM.
  private final ImmutableSet<Path> additionalClasspathEntries;
  @AddToRuleKey
  private final JavaRuntimeLauncher javaRuntimeLauncher;
  @AddToRuleKey
  private final ImmutableList<String> vmArgs;
  // Environment variables pointing the JVM at native libraries.
  private final ImmutableMap<String, String> nativeLibsEnvironment;
  // Lazily populated elsewhere in this class — null until first needed.
  @Nullable
  private CompiledClassFileFinder compiledClassFileFinder;
  private final ImmutableSet<Label> labels;
  private final ImmutableSet<String> contacts;
  private final Optional<Level> stdOutLogLevel;
  private final Optional<Level> stdErrLogLevel;
  @AddToRuleKey
  private final TestType testType;
  // Timeout for the whole rule vs. per test case, both optional.
  @AddToRuleKey
  private final Optional<Long> testRuleTimeoutMs;
  @AddToRuleKey
  private final Optional<Long> testCaseTimeoutMs;
  @AddToRuleKey
  private final ImmutableMap<String, String> env;
  private final Path pathToTestLogs;

  // Fixed seed so shuffled test-class order is reproducible across runs.
  private static final int TEST_CLASSES_SHUFFLE_SEED = 0xFACEB00C;

  private static final Logger LOG = Logger.get(JavaTest.class);

  // Populated when the JUnit steps are built; null before that.
  @Nullable
  private ImmutableList<JUnitStep> junits;

  @AddToRuleKey
  private final boolean runTestSeparately;

  @AddToRuleKey
  private final ForkMode forkMode;
  /**
   * Creates the test rule.
   *
   * @param compiledTestsLibrary library holding the compiled test classes
   * @param additionalClasspathEntries extra entries for the test JVM classpath
   * @param labels labels attached to this test rule
   * @param contacts owners/contacts for this test
   * @param testType the test framework type (e.g. JUnit/TestNG)
   * @param javaRuntimeLauncher launcher for the test JVM
   * @param vmArgs extra JVM arguments
   * @param nativeLibsEnvironment env vars pointing at native libraries
   * @param testRuleTimeoutMs optional timeout for the whole rule
   * @param testCaseTimeoutMs optional timeout per test case
   * @param env environment variables for the test process
   * @param runTestSeparately whether this test must run in isolation
   * @param forkMode per-test forking behavior
   * @param stdOutLogLevel optional log level for stdout capture
   * @param stdErrLogLevel optional log level for stderr capture
   */
  public JavaTest(
      BuildRuleParams params,
      SourcePathResolver resolver,
      JavaLibrary compiledTestsLibrary,
      ImmutableSet<Path> additionalClasspathEntries,
      Set<Label> labels,
      Set<String> contacts,
      TestType testType,
      JavaRuntimeLauncher javaRuntimeLauncher,
      List<String> vmArgs,
      Map<String, String> nativeLibsEnvironment,
      Optional<Long> testRuleTimeoutMs,
      Optional<Long> testCaseTimeoutMs,
      ImmutableMap<String, String> env,
      boolean runTestSeparately,
      ForkMode forkMode,
      Optional<Level> stdOutLogLevel,
      Optional<Level> stdErrLogLevel) {
    super(params, resolver);
    this.compiledTestsLibrary = compiledTestsLibrary;
    this.additionalClasspathEntries = additionalClasspathEntries;
    this.javaRuntimeLauncher = javaRuntimeLauncher;
    // Defensive copies so later mutation of the caller's collections
    // cannot affect this rule.
    this.vmArgs = ImmutableList.copyOf(vmArgs);
    this.nativeLibsEnvironment = ImmutableMap.copyOf(nativeLibsEnvironment);
    this.labels = ImmutableSet.copyOf(labels);
    this.contacts = ImmutableSet.copyOf(contacts);
    this.testType = testType;
    this.testRuleTimeoutMs = testRuleTimeoutMs;
    this.testCaseTimeoutMs = testCaseTimeoutMs;
    this.env = env;
    this.runTestSeparately = runTestSeparately;
    this.forkMode = forkMode;
    this.stdOutLogLevel = stdOutLogLevel;
    this.stdErrLogLevel = stdErrLogLevel;
    this.pathToTestLogs = getPathToTestOutputDirectory().resolve("logs.txt");
  }
  /** Returns the labels attached to this test rule. */
  @Override
  public ImmutableSet<Label> getLabels() {
    return labels;
  }
  /** Returns the contacts (owners) declared for this test. */
  @Override
  public ImmutableSet<String> getContacts() {
    return contacts;
  }
  /**
   * Bootclasspath entries for the test JVM; empty here, intended to be
   * overridden by subclasses that need them.
   *
   * @param context That may be useful in producing the bootclasspath entries.
   */
  protected ImmutableSet<Path> getBootClasspathEntries(ExecutionContext context) {
    return ImmutableSet.of();
  }
  /** Path of the generated file that lists the test JVM's classpath. */
  private Path getClassPathFile() {
    return BuildTargets.getGenPath(getProjectFilesystem(), getBuildTarget(), "%s/classpath-file");
  }
  /**
   * Assembles a single JUnit invocation for the given test classes.
   *
   * @param outDir directory the runner writes results into, if any
   * @param robolectricLogPath file to receive robolectric logging, if any
   * @param testClassNames classes to run; reordered first (shuffled or fixed-seed, per options)
   */
  private JUnitStep getJUnitStep(
      ExecutionContext executionContext,
      TestRunningOptions options,
      Optional<Path> outDir,
      Optional<Path> robolectricLogPath,
      Set<String> testClassNames) {
    Iterable<String> reorderedTestClasses =
        reorderClasses(testClassNames, options.isShufflingTests());
    // Let subclasses append device-specific VM args (see onAmendVmArgs()).
    ImmutableList<String> properVmArgs = amendVmArgs(
        this.vmArgs,
        executionContext.getTargetDevice());
    BuckEventBus buckEventBus = executionContext.getBuckEventBus();
    BuildId buildId = buckEventBus.getBuildId();
    TestSelectorList testSelectorList = options.getTestSelectorList();
    JUnitJvmArgs args = JUnitJvmArgs.builder()
        .setTestType(testType)
        .setDirectoryForTestResults(outDir)
        .setClasspathFile(getClassPathFile())
        .setTestRunnerClasspath(TESTRUNNER_CLASSES)
        .setCodeCoverageEnabled(executionContext.isCodeCoverageEnabled())
        .setInclNoLocationClassesEnabled(executionContext.isInclNoLocationClassesEnabled())
        .setDebugEnabled(executionContext.isDebugEnabled())
        .setPathToJavaAgent(options.getPathToJavaAgent())
        .setBuildId(buildId)
        .setBuckModuleBaseSourceCodePath(getBuildTarget().getBasePath())
        .setStdOutLogLevel(stdOutLogLevel)
        .setStdErrLogLevel(stdErrLogLevel)
        .setRobolectricLogPath(robolectricLogPath)
        .setExtraJvmArgs(properVmArgs)
        .addAllTestClasses(reorderedTestClasses)
        .setShouldExplainTestSelectorList(options.shouldExplainTestSelectorList())
        .setTestSelectorList(testSelectorList)
        .build();
    return new JUnitStep(
        getProjectFilesystem(),
        nativeLibsEnvironment,
        testRuleTimeoutMs,
        testCaseTimeoutMs,
        env,
        javaRuntimeLauncher,
        args);
  }
  /**
   * Returns the underlying java library containing the compiled tests.
   */
  public JavaLibrary getCompiledTestsLibrary() {
    return compiledTestsLibrary;
  }
/**
* Runs the tests specified by the "srcs" of this class. If this rule transitively depends on
* other {@code java_test()} rules, then they will be run separately.
*/
@Override
public ImmutableList<Step> runTests(
ExecutionContext executionContext,
TestRunningOptions options,
TestReportingCallback testReportingCallback) {
// If no classes were generated, then this is probably a java_test() that declares a number of
// other java_test() rules as deps, functioning as a test suite. In this case, simply return an
// empty list of commands.
Set<String> testClassNames = getClassNamesForSources();
LOG.debug("Testing these classes: %s", testClassNames.toString());
if (testClassNames.isEmpty()) {
return ImmutableList.of();
}
ImmutableList.Builder<Step> steps = ImmutableList.builder();
Path pathToTestOutput = getPathToTestOutputDirectory();
steps.add(new MakeCleanDirectoryStep(getProjectFilesystem(), pathToTestOutput));
if (forkMode() == ForkMode.PER_TEST) {
ImmutableList.Builder<JUnitStep> junitsBuilder = ImmutableList.builder();
for (String testClass: testClassNames) {
junitsBuilder.add(
getJUnitStep(
executionContext,
options,
Optional.of(pathToTestOutput),
Optional.of(pathToTestLogs),
Collections.singleton(testClass))
);
}
junits = junitsBuilder.build();
} else {
junits = ImmutableList.of(
getJUnitStep(
executionContext,
options,
Optional.of(pathToTestOutput),
Optional.of(pathToTestLogs),
testClassNames)
);
}
steps.addAll(junits);
return steps.build();
}
private static Iterable<String> reorderClasses(Set<String> testClassNames, boolean shuffle) {
Random rng;
if (shuffle) {
// This is a runtime-seed reorder, which always produces a new order.
rng = new Random(System.nanoTime());
} else {
// This is fixed-seed reorder, which always produces the same order.
// We still want to do this in order to decouple the test order from the
// filesystem/environment.
rng = new Random(TEST_CLASSES_SHUFFLE_SEED);
}
List<String> reorderedClassNames = Lists.newArrayList(testClassNames);
Collections.shuffle(reorderedClassNames, rng);
return reorderedClassNames;
}
@VisibleForTesting
ImmutableList<String> amendVmArgs(
ImmutableList<String> existingVmArgs,
Optional<TargetDevice> targetDevice) {
ImmutableList.Builder<String> vmArgs = ImmutableList.builder();
vmArgs.addAll(existingVmArgs);
onAmendVmArgs(vmArgs, targetDevice);
return vmArgs.build();
}
/**
* Override this method if you need to amend vm args. Subclasses are required
* to call super.onAmendVmArgs(...).
*/
protected void onAmendVmArgs(ImmutableList.Builder<String> vmArgsBuilder,
Optional<TargetDevice> targetDevice) {
if (!targetDevice.isPresent()) {
return;
}
TargetDevice device = targetDevice.get();
if (device.isEmulator()) {
vmArgsBuilder.add("-Dbuck.device=emulator");
} else {
vmArgsBuilder.add("-Dbuck.device=device");
}
if (device.hasIdentifier()) {
vmArgsBuilder.add("-Dbuck.device.id=" + device.getIdentifier());
}
}
@Override
public boolean hasTestResultFiles() {
// It is possible that this rule was not responsible for running any tests because all tests
// were run by its deps. In this case, return an empty TestResults.
Set<String> testClassNames = getClassNamesForSources();
if (testClassNames.isEmpty()) {
return true;
}
Path outputDirectory = getProjectFilesystem()
.getPathForRelativePath(getPathToTestOutputDirectory());
for (String testClass : testClassNames) {
// We never use cached results when using test selectors, so there's no need to incorporate
// the .test_selectors suffix here if we are using selectors.
Path testResultFile = outputDirectory.resolve(testClass + ".xml");
if (!Files.isRegularFile(testResultFile)) {
return false;
}
}
return true;
}
  /** Per-target gen-dir directory where XML test results and logs.txt are written. */
  @Override
  public Path getPathToTestOutputDirectory() {
    return BuildTargets.getGenPath(
        getProjectFilesystem(),
        getBuildTarget(),
        "__java_test_%s_output__");
  }
/**
* @return a test case result, named "main", signifying a failure of the entire test class.
*/
private TestCaseSummary getTestClassFailedSummary(
String testClass,
String message,
long time) {
return new TestCaseSummary(
testClass,
ImmutableList.of(
new TestResultSummary(
testClass,
"main",
ResultType.FAILURE,
time,
message,
"",
"",
"")));
}
  /**
   * Returns a callable that, when invoked, parses this rule's XML result files into a
   * {@link TestResults}. Classes whose result file is missing (and selectors are off) are
   * reported as whole-class failures; with selectors on, a missing file means the class was
   * filtered out and is skipped.
   */
  @Override
  public Callable<TestResults> interpretTestResults(
      final ExecutionContext context,
      final boolean isUsingTestSelectors) {
    final ImmutableSet<String> contacts = getContacts();
    return () -> {
      // It is possible that this rule was not responsible for running any tests because all tests
      // were run by its deps. In this case, return an empty TestResults.
      Set<String> testClassNames = getClassNamesForSources();
      if (testClassNames.isEmpty()) {
        return TestResults.of(
            getBuildTarget(),
            ImmutableList.of(),
            contacts,
            labels.stream()
                .map(Object::toString)
                .collect(MoreCollectors.toImmutableSet()));
      }
      List<TestCaseSummary> summaries = Lists.newArrayListWithCapacity(testClassNames.size());
      for (String testClass : testClassNames) {
        // Results produced under test selectors are written with a distinguishing suffix.
        String testSelectorSuffix = "";
        if (isUsingTestSelectors) {
          testSelectorSuffix += ".test_selectors";
        }
        String path = String.format("%s%s.xml", testClass, testSelectorSuffix);
        Path testResultFile = getProjectFilesystem().getPathForRelativePath(
            getPathToTestOutputDirectory().resolve(path));
        if (!isUsingTestSelectors && !Files.isRegularFile(testResultFile)) {
          // No result file and no selectors: the JVM exited or timed out before writing it.
          // NOTE(review): this loop adds one failure summary per JUnitStep for the same test
          // class; with ForkMode.PER_TEST (multiple steps in `junits`) that yields multiple
          // "main" failures per class — confirm whether a single summary was intended.
          String message;
          for (JUnitStep junit: Preconditions.checkNotNull(junits)) {
            if (junit.hasTimedOut()) {
              message = "test timed out before generating results file";
            } else {
              message = "test exited before generating results file";
            }
            summaries.add(
                getTestClassFailedSummary(
                    testClass,
                    message,
                    testRuleTimeoutMs.orElse(0L)));
          }
          // Not having a test result file at all (which only happens when we are using test
          // selectors) is interpreted as meaning a test didn't run at all, so we'll completely
          // ignore it. This is another result of the fact that JUnit is the only thing that can
          // definitively say whether or not a class should be run. It's not possible, for example,
          // to filter testClassNames here at the buck end.
        } else if (Files.isRegularFile(testResultFile)) {
          summaries.add(XmlTestResultParser.parse(testResultFile));
        }
      }
      return TestResults.builder()
          .setBuildTarget(getBuildTarget())
          .setTestCases(summaries)
          .setContacts(contacts)
          .setLabels(labels.stream()
              .map(Object::toString)
              .collect(MoreCollectors.toImmutableSet()))
          .addTestLogPaths(getProjectFilesystem().resolve(pathToTestLogs))
          .build();
    };
  }
  /**
   * Returns the test class names discovered in this rule's output jar. The scanner is created
   * lazily on first call and cached; no synchronization is performed.
   */
  private Set<String> getClassNamesForSources() {
    if (compiledClassFileFinder == null) {
      compiledClassFileFinder = new CompiledClassFileFinder(this);
    }
    return compiledClassFileFinder.getClassNamesForSources();
  }
  /** This rule performs no build work; compilation happens in the underlying library rule. */
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    // Nothing to build, this is a test-only rule
    return ImmutableList.of();
  }
  /** Delegates to the compiled tests library; this rule produces no output of its own. */
  @Nullable
  @Override
  public Path getPathToOutput() {
    return compiledTestsLibrary.getPathToOutput();
  }
  /** Delegates to the compiled tests library. */
  @Override
  public ImmutableSet<Path> getTransitiveClasspaths() {
    return compiledTestsLibrary.getTransitiveClasspaths();
  }
  /** Delegates to the compiled tests library. */
  @Override
  public ImmutableSet<JavaLibrary> getTransitiveClasspathDeps() {
    return compiledTestsLibrary.getTransitiveClasspathDeps();
  }
  /** Delegates to the compiled tests library. */
  @Override
  public ImmutableSet<Path> getImmediateClasspaths() {
    return compiledTestsLibrary.getImmediateClasspaths();
  }
  /** Delegates to the compiled tests library. */
  @Override
  public ImmutableSet<Path> getOutputClasspaths() {
    return compiledTestsLibrary.getOutputClasspaths();
  }
  /** Exports exactly the compiled tests library to dependents. */
  @Override
  public ImmutableSortedSet<BuildRule> getExportedDeps() {
    return ImmutableSortedSet.of(compiledTestsLibrary);
  }
  /** Scans a rule's output jar for .class files that correspond to its declared sources. */
  @VisibleForTesting
  static class CompiledClassFileFinder {
    // Fully-qualified names of top-level test classes found in the rule's output jar.
    private final Set<String> classNamesForSources;
    CompiledClassFileFinder(JavaTest rule) {
      Path outputPath;
      // A null output path means the rule produced no jar (e.g. a pure suite rule);
      // getClassNamesForSources() below then returns the empty set.
      Path relativeOutputPath = rule.getPathToOutput();
      if (relativeOutputPath != null) {
        outputPath = rule.getProjectFilesystem().resolve(relativeOutputPath);
      } else {
        outputPath = null;
      }
      classNamesForSources = getClassNamesForSources(
          rule.compiledTestsLibrary.getJavaSrcs(),
          outputPath,
          rule.getProjectFilesystem(),
          rule.getResolver());
    }
    public Set<String> getClassNamesForSources() {
      return classNamesForSources;
    }
    /**
     * When a collection of .java files is compiled into a directory, that directory will have a
     * subfolder structure that matches the package structure of the input .java files. In general,
     * the .java files will be 1:1 with the .class files with two notable exceptions:
     * (1) There will be an additional .class file for each inner/anonymous class generated. These
     * types of classes are easy to identify because they will contain a '$' in the name.
     * (2) A .java file that defines multiple top-level classes (yes, this can exist:
     * http://stackoverflow.com/questions/2336692/java-multiple-class-declarations-in-one-file)
     * will generate multiple .class files that do not have '$' in the name.
     * In this method, we perform a strict check for (1) and use a heuristic for (2). It is possible
     * to filter out the type (2) situation with a stricter check that aligns the package
     * directories of the .java files and the .class files, but it is a pain to implement.
     * If this heuristic turns out to be insufficient in practice, then we can fix it.
     *
     * @param sources paths to .java source files that were passed to javac
     * @param jarFilePath jar where the generated .class files were written
     */
    @VisibleForTesting
    static ImmutableSet<String> getClassNamesForSources(
        Set<SourcePath> sources,
        @Nullable Path jarFilePath,
        ProjectFilesystem projectFilesystem,
        SourcePathResolver resolver) {
      if (jarFilePath == null) {
        return ImmutableSet.of();
      }
      // Bare (package-less, extension-less) names of the declared sources.
      final Set<String> sourceClassNames = Sets.newHashSetWithExpectedSize(sources.size());
      for (SourcePath path : sources) {
        // We support multiple languages in this rule - the file extension doesn't matter so long
        // as the language supports filename == classname.
        sourceClassNames.add(MorePaths.getNameWithoutExtension(resolver.getRelativePath(path)));
      }
      final ImmutableSet.Builder<String> testClassNames = ImmutableSet.builder();
      Path jarFile = projectFilesystem.getPathForRelativePath(jarFilePath);
      ZipFileTraversal traversal = new ZipFileTraversal(jarFile) {
        @Override
        public void visit(ZipFile zipFile, ZipEntry zipEntry) {
          final String name = new File(zipEntry.getName()).getName();
          // Ignore non-.class files.
          if (!name.endsWith(".class")) {
            return;
          }
          // As a heuristic for case (2) as described in the Javadoc, make sure the name of the
          // .class file matches the name of a .java/.scala/.xxx file.
          String nameWithoutDotClass = name.substring(0, name.length() - ".class".length());
          if (!sourceClassNames.contains(nameWithoutDotClass)) {
            return;
          }
          // Make sure it is a .class file that corresponds to a top-level .class file and not an
          // inner class.
          if (!name.contains("$")) {
            // Convert the jar entry path into a fully-qualified class name.
            String fullyQualifiedNameWithDotClassSuffix = zipEntry.getName().replace('/', '.');
            String className = fullyQualifiedNameWithDotClassSuffix
                .substring(0, fullyQualifiedNameWithDotClassSuffix.length() - ".class".length());
            testClassNames.add(className);
          }
        }
      };
      try {
        traversal.traverse();
      } catch (IOException e) {
        // There's nothing sane to do here. The jar file really should exist.
        throw new RuntimeException(e);
      }
      return testClassNames.build();
    }
  }
  /** Returns the flag set at construction time; see the field's note on its semantics. */
  @Override
  public boolean runTestSeparately() {
    return runTestSeparately;
  }
  /** Returns the fork mode (one JVM for all classes vs. one per class); used by runTests(). */
  public ForkMode forkMode() {
    return forkMode;
  }
  /**
   * Runtime deps: the transitive Java library closure (minus this rule itself) plus the
   * library's first-order deps.
   */
  @Override
  public ImmutableSortedSet<BuildRule> getRuntimeDeps() {
    return ImmutableSortedSet.<BuildRule>naturalOrder()
        // By the end of the build, all the transitive Java library dependencies *must* be available
        // on disk, so signal this requirement via the {@link HasRuntimeDeps} interface.
        .addAll(
            compiledTestsLibrary.getTransitiveClasspathDeps().stream()
                .filter(rule -> !this.equals(rule))
                .iterator())
        // It's possible that the user added some tool as a dependency, so make sure we promote
        // this rules first-order deps to runtime deps, so that these potential tools are available
        // when this test runs.
        .addAll(compiledTestsLibrary.getDeps())
        .build();
  }
  /** Results are parsed from XML files after the run (see interpretTestResults), not streamed. */
  @Override
  public boolean supportsStreamingTests() {
    return false;
  }
@Override
public ExternalTestRunnerTestSpec getExternalTestRunnerSpec(
ExecutionContext executionContext,
TestRunningOptions options) {
JUnitStep jUnitStep =
getJUnitStep(
executionContext,
options,
Optional.empty(),
Optional.empty(),
getClassNamesForSources()
);
return ExternalTestRunnerTestSpec.builder()
.setTarget(getBuildTarget())
.setType("junit")
.setCommand(jUnitStep.getShellCommandInternal(executionContext))
.setEnv(jUnitStep.getEnvironmentVariables(executionContext))
.setLabels(getLabels())
.setContacts(getContacts())
.build();
}
  /**
   * After the build, writes the full test classpath (the library's transitive classpath, the
   * additional entries, and the bootclasspath) one entry per line to getClassPathFile().
   * The JUnit step is pointed at that file via setClasspathFile() — presumably to avoid
   * command-line length limits; confirm against JUnitStep's handling.
   */
  @Override
  public ImmutableList<Step> getPostBuildSteps() {
    return ImmutableList.<Step>builder()
        .add(new MkdirStep(getProjectFilesystem(), getClassPathFile().getParent()))
        .add(
            new AbstractExecutionStep("write classpath file") {
              @Override
              public StepExecutionResult execute(ExecutionContext context) throws IOException {
                ImmutableSet<Path> classpathEntries = ImmutableSet.<Path>builder()
                    .addAll(compiledTestsLibrary.getTransitiveClasspaths())
                    .addAll(additionalClasspathEntries)
                    .addAll(getBootClasspathEntries(context))
                    .build();
                getProjectFilesystem().writeLinesToPath(
                    Iterables.transform(classpathEntries, Object::toString),
                    getClassPathFile());
                return StepExecutionResult.SUCCESS;
              }
            })
        .build();
  }
}
| apache-2.0 |
nabilzhang/enunciate | examples/full-api-edge-cases/src/main/java/com/webcohesion/enunciate/examples/jaxwsrijersey/genealogy/services/impl/AdminServiceImpl.java | 1295 | /**
* Copyright © 2006-2016 Web Cohesion (info@webcohesion.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webcohesion.enunciate.examples.jaxwsrijersey.genealogy.services.impl;
import com.webcohesion.enunciate.examples.jaxwsrijersey.genealogy.data.PersonAdmin;
import com.webcohesion.enunciate.examples.jaxwsrijersey.genealogy.services.AdminService;
import javax.jws.WebService;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
// Exposes AdminService both as a SOAP endpoint (via JAX-WS @WebService) and as a JAX-RS resource.
@Path ("/admin")
@WebService(endpointInterface = "com.webcohesion.enunciate.examples.jaxwsrijersey.genealogy.services.AdminService" )
public class AdminServiceImpl implements AdminService {
  // NOTE(review): the class-level @Path is "/admin" and the method path repeats the prefix, so
  // the effective JAX-RS path is "/admin/admin/person/{id}". Also, no HTTP method designator
  // (@GET etc.) is visible here — confirm it is declared on the AdminService interface; this
  // repo is an "edge cases" example, so both may be deliberate.
  @Path("/admin/person/{id}")
  public PersonAdmin readAdminPerson(@PathParam("id") String id) {
    return new PersonAdmin();
  }
}
| apache-2.0 |
nicstrong/fest-assertions-android | fest-assert-android-test/src/main/java/org/fest/assertions/api/ShortAssert_isGreaterThanOrEqualTo_short_Test.java | 1582 | /*
* Created on Oct 20, 2010
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright @2010-2011 the original author or authors.
*/
package org.fest.assertions.api;
import static junit.framework.Assert.assertSame;
import static org.mockito.Mockito.*;
import org.fest.assertions.internal.Shorts;
import org.junit.*;
/**
* Tests for <code>{@link ShortAssert#isGreaterThanOrEqualTo(short)}</code>.
*
* @author Alex Ruiz
*/
/**
 * Tests for <code>{@link ShortAssert#isGreaterThanOrEqualTo(short)}</code>: the assertion must
 * delegate to its {@code Shorts} collaborator and return itself for chaining.
 *
 * @author Alex Ruiz
 */
public class ShortAssert_isGreaterThanOrEqualTo_short_Test {

  private Shorts shortsMock;
  private ShortAssert assertions;

  @Before
  public void setUp() {
    // Replace the internal Shorts helper with a mock so the delegation can be verified.
    shortsMock = mock(Shorts.class);
    assertions = new ShortAssert((short) 8);
    assertions.shorts = shortsMock;
  }

  @Test
  public void should_verify_that_actual_is_greater_than_expected() {
    assertions.isGreaterThanOrEqualTo((short) 6);
    verify(shortsMock).assertGreaterThanOrEqualTo(assertions.info, assertions.actual, (short) 6);
  }

  @Test
  public void should_return_this() {
    ShortAssert result = assertions.isGreaterThanOrEqualTo((short) 6);
    assertSame(assertions, result);
  }
}
| apache-2.0 |
sekruse/cashflow | cashflow/cashflow-core/src/main/java/org/github/sekruse/cashflow/controller/CashflowController.java | 670 | package org.github.sekruse.cashflow.controller;
import org.github.sekruse.cashflow.model.Account;
import org.github.sekruse.cashflow.model.Share;
import org.github.sekruse.cashflow.model.User;
import java.util.Collection;
import java.util.Properties;
/**
 * Central entry point for reading and mutating cashflow data: users, accounts, shares and
 * transactions.
 *
 * @author Sebastian
 * @since 03.02.2015.
 */
public interface CashflowController {
    /** Creates and persists a user with the given name. */
    User createUser(String name) throws CashflowWriteException;
    /** Returns all known users. */
    Collection<User> getAllUsers() throws CashflowReadException;
    // NOTE(review): unlike createUser, the two factory methods below declare no checked
    // exceptions — confirm whether CashflowWriteException should be declared here as well.
    /** Creates an account with the given name owned by the given user. */
    Account createAccount(String name, User user);
    /** Creates a share of the given account among the given users. */
    Share createShare(String name, Account account, Collection<User> users);
    /** Starts building a new transaction; see {@link TransactionBuilder} for the steps. */
    TransactionBuilder buildTransaction();
    /** Releases any resources held by this controller. */
    void close();
}
| apache-2.0 |
NotFound403/WePay | src/main/java/cn/felord/wepay/ali/sdk/api/request/AlipayOpenAppPackagetestRequest.java | 4754 | package cn.felord.wepay.ali.sdk.api.request;
import java.util.Map;
import cn.felord.wepay.ali.sdk.api.AlipayRequest;
import cn.felord.wepay.ali.sdk.api.internal.util.AlipayHashMap;
import cn.felord.wepay.ali.sdk.api.response.AlipayOpenAppPackagetestResponse;
import cn.felord.wepay.ali.sdk.api.AlipayObject;
/**
* ALIPAY API: alipay.open.app.packagetest request
*
* @author auto create
* @version $Id: $Id
*/
/**
 * ALIPAY API: alipay.open.app.packagetest request (ISP feature test).
 *
 * <p>Generated request wrapper: carries the JSON business payload ({@code biz_content}) and the
 * transport metadata (terminal info, notify/return URLs, product code, encryption flag) that the
 * Alipay client consumes when invoking the "alipay.open.app.packagetest" gateway method.
 *
 * @author auto create
 * @version $Id: $Id
 */
public class AlipayOpenAppPackagetestRequest implements AlipayRequest<AlipayOpenAppPackagetestResponse> {
    private AlipayHashMap udfParams; // add user-defined text parameters
    // Gateway API protocol version; defaults to "1.0".
    private String apiVersion="1.0";
    /**
     * ISP feature test interface: JSON string with the business parameters of this call,
     * sent to the gateway as "biz_content".
     */
    private String bizContent;
    /**
     * Sets the JSON business payload.
     *
     * @param bizContent a {@link java.lang.String} object.
     */
    public void setBizContent(String bizContent) {
        this.bizContent = bizContent;
    }
    /**
     * Returns the JSON business payload.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getBizContent() {
        return this.bizContent;
    }
    private String terminalType;
    private String terminalInfo;
    private String prodCode;
    private String notifyUrl;
    private String returnUrl;
    // Whether the business payload should be encrypted before sending; off by default.
    private boolean needEncrypt=false;
    private AlipayObject bizModel=null;
    /**
     * Returns the notify URL.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getNotifyUrl() {
        return this.notifyUrl;
    }
    /** {@inheritDoc} */
    public void setNotifyUrl(String notifyUrl) {
        this.notifyUrl = notifyUrl;
    }
    /**
     * Returns the return URL.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getReturnUrl() {
        return this.returnUrl;
    }
    /** {@inheritDoc} */
    public void setReturnUrl(String returnUrl) {
        this.returnUrl = returnUrl;
    }
    /**
     * Returns the API protocol version.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getApiVersion() {
        return this.apiVersion;
    }
    /** {@inheritDoc} */
    public void setApiVersion(String apiVersion) {
        this.apiVersion = apiVersion;
    }
    /** {@inheritDoc} */
    public void setTerminalType(String terminalType){
        this.terminalType=terminalType;
    }
    /**
     * Returns the terminal type.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getTerminalType(){
        return this.terminalType;
    }
    /** {@inheritDoc} */
    public void setTerminalInfo(String terminalInfo){
        this.terminalInfo=terminalInfo;
    }
    /**
     * Returns the terminal info.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getTerminalInfo(){
        return this.terminalInfo;
    }
    /** {@inheritDoc} */
    public void setProdCode(String prodCode) {
        this.prodCode=prodCode;
    }
    /**
     * Returns the product code.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getProdCode() {
        return this.prodCode;
    }
    /**
     * Returns the gateway method name this request maps to.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getApiMethodName() {
        return "alipay.open.app.packagetest";
    }
    /**
     * Builds the outgoing text-parameter map: "biz_content" plus any user-defined parameters
     * previously added via {@link #putOtherTextParam}.
     *
     * @return a {@link java.util.Map} object.
     */
    public Map<String, String> getTextParams() {
        AlipayHashMap txtParams = new AlipayHashMap();
        txtParams.put("biz_content", this.bizContent);
        if(udfParams != null) {
            txtParams.putAll(this.udfParams);
        }
        return txtParams;
    }
    /**
     * Adds a user-defined text parameter, lazily creating the backing map.
     *
     * @param key a {@link java.lang.String} object.
     * @param value a {@link java.lang.String} object.
     */
    public void putOtherTextParam(String key, String value) {
        if(this.udfParams == null) {
            this.udfParams = new AlipayHashMap();
        }
        this.udfParams.put(key, value);
    }
    /**
     * Returns the response class this request is paired with.
     *
     * @return a {@link java.lang.Class} object.
     */
    public Class<AlipayOpenAppPackagetestResponse> getResponseClass() {
        return AlipayOpenAppPackagetestResponse.class;
    }
    /**
     * Whether the business payload should be encrypted before sending.
     *
     * @return a boolean.
     */
    public boolean isNeedEncrypt() {
        return this.needEncrypt;
    }
    /** {@inheritDoc} */
    public void setNeedEncrypt(boolean needEncrypt) {
        this.needEncrypt=needEncrypt;
    }
    /**
     * Returns the typed business model, if one was set instead of the raw JSON payload.
     *
     * @return a {@link cn.felord.wepay.ali.sdk.api.AlipayObject} object.
     */
    public AlipayObject getBizModel() {
        return this.bizModel;
    }
    /** {@inheritDoc} */
    public void setBizModel(AlipayObject bizModel) {
        this.bizModel=bizModel;
    }
}
| apache-2.0 |