repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
seava/seava.mod.ad | seava.mod.ad.presenter/src/main/java/seava/ad/presenter/ext/security/qb/MenuItemRtLov_DsQb.java | 795 | /**
* DNet eBusiness Suite
* Copyright: 2010-2013 Nan21 Electronics SRL. All rights reserved.
* Use is subject to license terms.
*/
package seava.ad.presenter.ext.security.qb;
import seava.j4e.api.session.Session;
import seava.j4e.presenter.action.query.QueryBuilderWithJpql;
import seava.ad.presenter.impl.security.model.MenuItemRtLov_Ds;
/**
 * Query builder for the menu-item runtime LOV data source.
 * Non-administrator users only see menu items reachable through one of
 * their assigned roles; administrators see everything.
 */
public class MenuItemRtLov_DsQb extends
        QueryBuilderWithJpql<MenuItemRtLov_Ds, MenuItemRtLov_Ds, Object> {

    @Override
    protected void beforeBuildWhere() throws Exception {
        // Administrators are not filtered at all.
        if (Session.user.get().getProfile().isAdministrator()) {
            return;
        }
        // Restrict to menu items linked to at least one of the user's roles.
        addFilterCondition(" e.id in ( select p.id from MenuItem p, IN (p.roles) c where c.code in :pRoles ) ");
        addCustomFilterItem("pRoles", Session.user.get().getProfile().getRoles());
    }
}
| apache-2.0 |
Ro0kieY/iGank | app/src/main/java/com/ro0kiey/igank/mvp/view/IMainView.java | 364 | package com.ro0kiey.igank.mvp.view;
import com.ro0kiey.igank.model.Bean.MeiziBean;
import java.util.List;
/**
 * View contract for MainActivity in the MVP setup.
 * Created by Ro0kieY on 2017/7/19.
 */
public interface IMainView extends IBaseView {
    /** Shows the error view. */
    void showErrorView();
    /** Displays a list of Meizi items (initial load, judging by the name). */
    void showMeiziData(List<MeiziBean> meiziBean);
    /** Displays additional Meizi items ("load more", judging by the name). */
    void showMoreMeizi(List<MeiziBean> meiziBean);
}
| apache-2.0 |
collectivemedia/celos | celos-server/src/main/java/com/collective/celos/SlotStateStatusPredicate.java | 1402 | /*
* Copyright 2015 Collective, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.collective.celos;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.collections.Predicate;
import com.collective.celos.SlotState.Status;
/**
 * A commons-collections {@link Predicate} that matches {@link SlotState}s
 * whose status is one of a fixed set of target values.
 */
public class SlotStateStatusPredicate implements Predicate {

    /** The status values this predicate accepts; populated once, never mutated. */
    private final Set<Status> targetValues;

    /**
     * @param targetValues one or more status values to match against
     * @throws IllegalArgumentException if no status values are supplied
     */
    public SlotStateStatusPredicate(Status... targetValues) {
        // Validate before allocating the backing set.
        if (targetValues.length == 0) {
            throw new IllegalArgumentException("please specify some status values");
        }
        this.targetValues = new HashSet<Status>(Arrays.asList(targetValues));
    }

    /**
     * @param object expected to be a {@link SlotState}
     * @return true if the slot state's status is one of the target values
     */
    @Override
    public boolean evaluate(Object object) {
        SlotState state = (SlotState) object;
        return targetValues.contains(state.status);
    }
}
| apache-2.0 |
SHAF-WORK/shaf | core/src/main/java/org/shaf/core/security/AccessDeniedException.java | 1436 | /**
* Copyright 2014-2015 SHAF-WORK
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.shaf.core.security;
import org.shaf.core.util.StringUtils;
/**
 * The {@code AccessDeniedException} occurs, when access is denied by the
 * firewall.
 *
 * @author Mykola Galushka
 */
@SuppressWarnings("serial")
public class AccessDeniedException extends Exception {

    /** Shared prefix for both constructor messages. */
    private static final String BLOCKED_PREFIX = "The following roles are blocked: ";

    /**
     * Constructs a new {@code AccessDeniedException} object.
     *
     * @param roles
     *            the validated roles.
     */
    protected AccessDeniedException(final Role... roles) {
        super(BLOCKED_PREFIX + StringUtils.array2string(roles));
    }

    /**
     * Constructs a new {@code AccessDeniedException} object.
     *
     * @param names
     *            the validated roles names.
     */
    protected AccessDeniedException(final String... names) {
        super(BLOCKED_PREFIX + StringUtils.array2string(names));
    }
}
| apache-2.0 |
GoogleCloudPlatform/spring-cloud-gcp | spring-cloud-gcp-data-spanner/src/main/java/com/google/cloud/spring/data/spanner/repository/config/SpannerAuditingRegistrar.java | 2746 | /*
* Copyright 2017-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spring.data.spanner.repository.config;
import com.google.cloud.spring.data.spanner.repository.support.SpannerAuditingEventListener;
import java.lang.annotation.Annotation;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.data.auditing.AuditingHandler;
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
import org.springframework.data.auditing.config.AuditingConfiguration;
/**
 * Registers the annotations and classes for providing auditing support in Spring Data Cloud
 * Spanner.
 *
 * @since 1.2
 */
public class SpannerAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {

  private static final String AUDITING_HANDLER_BEAN_NAME = "spannerAuditingHandler";
  private static final String MAPPING_CONTEXT_BEAN_NAME = "spannerMappingContext";

  /** The annotation that enables Spanner auditing support. */
  @Override
  protected Class<? extends Annotation> getAnnotation() {
    return EnableSpannerAuditing.class;
  }

  /** Registers the auditing event listener wired to the auditing handler bean. */
  @Override
  protected void registerAuditListenerBeanDefinition(
      BeanDefinition auditingHandlerDefinition, BeanDefinitionRegistry registry) {
    BeanDefinitionBuilder listenerBuilder =
        BeanDefinitionBuilder.rootBeanDefinition(SpannerAuditingEventListener.class)
            .addConstructorArgReference(AUDITING_HANDLER_BEAN_NAME);
    registerInfrastructureBeanWithId(
        listenerBuilder.getRawBeanDefinition(),
        SpannerAuditingEventListener.class.getName(),
        registry);
  }

  /** Builds the auditing handler definition backed by the Spanner mapping context. */
  @Override
  protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(
      AuditingConfiguration configuration) {
    return configureDefaultAuditHandlerAttributes(
            configuration, BeanDefinitionBuilder.rootBeanDefinition(AuditingHandler.class))
        .addConstructorArgReference(MAPPING_CONTEXT_BEAN_NAME);
  }

  @Override
  protected String getAuditingHandlerBeanName() {
    return AUDITING_HANDLER_BEAN_NAME;
  }
}
| apache-2.0 |
dbflute-test/dbflute-test-dbms-mysql | src/main/java/org/docksidestage/mysql/dbflute/cbean/WithdrawalReasonCB.java | 462 | /*
* Copyright(c) DBFlute TestCo.,TestLtd. All Rights Reserved.
*/
package org.docksidestage.mysql.dbflute.cbean;
import org.docksidestage.mysql.dbflute.cbean.bs.BsWithdrawalReasonCB;
/**
 * The condition-bean of withdrawal_reason.
 * <p>
 * You can implement your original methods here.
 * This class remains when re-generating.
 * </p>
 * @author DBFlute(AutoGenerator)
 */
public class WithdrawalReasonCB extends BsWithdrawalReasonCB {
    // Intentionally empty: all generated condition-bean behavior lives in
    // BsWithdrawalReasonCB. Hand-written extension methods go here, because
    // this subclass is preserved across code re-generation.
}
| apache-2.0 |
Yell777/SeleniumMaven | SeleniumTest/ProjectTest/src/test/java/TestAddNewProduct.java | 4277 | import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.Select;
import java.io.File;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
/**
 * Admin UI test: creates a new product with a random name suffix and then
 * verifies it appears in the catalog.
 */
public class TestAddNewProduct extends TestBase {

    @Test
    public void testAddNewProduct() {
        /* Compute the validity dates: today and ten days from now. */
        Date day = new Date();
        // Renamed from "DateFormat" — the local previously shadowed its own type name.
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
        String startDate = dateFormat.format(day);
        Calendar calendar = GregorianCalendar.getInstance();
        calendar.add(Calendar.DAY_OF_YEAR, 10);
        Date tenDaysLater = calendar.getTime();
        String endDate = dateFormat.format(tenDaysLater);
        // Random suffix keeps the product name unique across runs.
        int num = (int) (Math.random() * 1000);
        loginAdmin();
        driver.get("http://localhost/litecart/admin/?app=countries&doc=countries");
        /* Fill in the first ("General") tab. */
        driver.findElement(By.xpath("//ul[@id='box-apps-menu']/li[2]")).click();
        driver.findElement(By.xpath("//ul[@id='box-apps-menu']/li[@class='selected']//li[@id='doc-catalog']")).click();
        driver.findElement(By.xpath("//div[@style='float: right;']/a[2]")).click();
        driver.findElement(By.xpath("//div[@id='tab-general']//label/input[@value='1']")).click();
        driver.findElement(By.xpath("//input[@name='name[en]']")).sendKeys("TestProduct" + num);
        driver.findElement(By.xpath("//input[@name='code']")).sendKeys("123" + num);
        driver.findElement(By.xpath("//div[@class='input-wrapper']//tr[2]//input[@name='categories[]']")).click();
        Select defaultCategory = new Select(driver.findElement(
                By.xpath("//div[@id='tab-general']//select[@name='default_category_id']")));
        defaultCategory.selectByValue("1");
        driver.findElement(By.xpath("//div[@class='input-wrapper']//input[@value='1-1']")).click();
        driver.findElement(By.xpath("//input[@name='quantity']")).sendKeys("2");
        Select soldOutStatus = new Select(driver.findElement(
                By.xpath("//select[@name='sold_out_status_id']")));
        soldOutStatus.selectByValue("2");
        File file = new File("src/Pictures/25192-759.jpg");
        driver.findElement(By.xpath("//input[@type='file']")).sendKeys(file.getAbsolutePath());
        driver.findElement(By.xpath("//input[@name='date_valid_from']")).sendKeys(startDate);
        driver.findElement(By.xpath("//input[@name='date_valid_to']")).sendKeys(endDate);
        /* Fill in the "Information" tab. */
        driver.findElement(By.xpath("//div[@class='tabs']/ul/li[2]")).click();
        Select manufacturer = new Select(driver.findElement(
                By.xpath("//select[@name='manufacturer_id']")));
        manufacturer.selectByValue("1");
        driver.findElement(By.xpath("//input[@name='keywords']")).sendKeys("Test");
        driver.findElement(By.xpath("//input[@name='short_description[en]']")).sendKeys("TestTest");
        driver.findElement(By.xpath("//div[@class='trumbowyg-editor']")).sendKeys("Тестовые описание");
        driver.findElement(By.xpath("//input[@name='head_title[en]']")).sendKeys("TestTest");
        driver.findElement(By.xpath("//input[@name='meta_description[en]']")).sendKeys("TestTest");
        /* Fill in the "Prices" tab. */
        driver.findElement(By.xpath("//div[@class='tabs']/ul/li[4]")).click();
        driver.findElement(By.xpath("//input[@name='purchase_price']")).sendKeys("100");
        Select purchaseCurrency = new Select(driver.findElement(
                By.xpath("//select[@name='purchase_price_currency_code']")));
        purchaseCurrency.selectByValue("USD");
        driver.findElement(By.xpath("//input[@name='prices[USD]']")).sendKeys("200");
        driver.findElement(By.xpath("//input[@name='prices[EUR]']")).sendKeys("200");
        driver.findElement(By.xpath("//button[@name='save']")).click();
        /* Verify the product shows up in the catalog.
         * BUG FIX: the original wrapped this lookup in a try/catch that only
         * printed a message, so the test passed even when the product was
         * missing. Letting NoSuchElementException propagate makes the test
         * actually fail on a missing product. */
        driver.findElement(By.xpath("//*[text()='TestProduct" + num + "']"));
    }
}
| apache-2.0 |
HyvelTjuven/KD405A_Andree_R | Uppgift 1/src/House.java | 648 | public class House {
private int yearBuilt = 0;
private int size;
private static int minSize = 10;
private static int maxSize = 1000;
private static int minYear = 1800;
private static int maxYear = 2015;
public House(int yearBuilt, int size) {
this.yearBuilt = yearBuilt;
this.size = size;
}
public int getYearBuilt() {
return this.yearBuilt;
}
public int getSize() {
return this.size;
}
public static int getminYear() {
return minYear;
}
public static int getmaxYear() {
return maxYear;
}
public static int getminSize() {
return minSize;
}
public static int getmaxSize() {
return maxSize;
}
}
| apache-2.0 |
xdrop/java-symbol-solver | java-symbol-solver-core/src/test/java/com/github/javaparser/symbolsolver/logic/FunctionInterfaceLogicTest.java | 2456 | /*
* Copyright 2016 Federico Tomassetti
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.javaparser.symbolsolver.logic;
import com.github.javaparser.symbolsolver.model.resolution.TypeSolver;
import com.github.javaparser.symbolsolver.model.typesystem.ReferenceTypeImpl;
import com.github.javaparser.symbolsolver.model.typesystem.Type;
import com.github.javaparser.symbolsolver.reflectionmodel.ReflectionClassDeclaration;
import com.github.javaparser.symbolsolver.reflectionmodel.ReflectionInterfaceDeclaration;
import com.github.javaparser.symbolsolver.resolution.typesolvers.ReflectionTypeSolver;
import org.junit.Test;
import java.util.function.Consumer;
import java.util.function.Function;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Tests for {@code FunctionalInterfaceLogic#getFunctionalMethod}: it must be
 * empty for plain classes and must resolve the single abstract method of a
 * functional interface.
 */
public class FunctionInterfaceLogicTest {

    @Test
    public void testGetFunctionalMethodNegativeCaseOnClass() {
        TypeSolver typeSolver = new ReflectionTypeSolver();
        Type string = new ReferenceTypeImpl(new ReflectionClassDeclaration(String.class, typeSolver), typeSolver);
        // A plain class is never a functional interface.
        // Idiom fix: assertFalse instead of assertEquals(false, ...).
        assertFalse(FunctionalInterfaceLogic.getFunctionalMethod(string).isPresent());
    }

    @Test
    public void testGetFunctionalMethodPositiveCasesOnInterfaces() {
        TypeSolver typeSolver = new ReflectionTypeSolver();
        // java.util.function.Function -> single abstract method "apply".
        Type function = new ReferenceTypeImpl(new ReflectionInterfaceDeclaration(Function.class, typeSolver), typeSolver);
        assertTrue(FunctionalInterfaceLogic.getFunctionalMethod(function).isPresent());
        assertEquals("apply", FunctionalInterfaceLogic.getFunctionalMethod(function).get().getName());
        // java.util.function.Consumer -> single abstract method "accept".
        Type consumer = new ReferenceTypeImpl(new ReflectionInterfaceDeclaration(Consumer.class, typeSolver), typeSolver);
        assertTrue(FunctionalInterfaceLogic.getFunctionalMethod(consumer).isPresent());
        assertEquals("accept", FunctionalInterfaceLogic.getFunctionalMethod(consumer).get().getName());
    }
}
| apache-2.0 |
shufudong/bboss | bboss-taglib/src/org/frameworkset/web/token/TokenMethodHelper.java | 2510 | package org.frameworkset.web.token;
import java.lang.reflect.Method;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
/**
 * Reflectively looks up the token-handling methods of
 * {@code org.frameworkset.web.token.TokenHelper} at class-load time, so the
 * taglib works whether or not the token module is on the classpath.
 * Each field stays {@code null} when its lookup fails.
 */
public abstract class TokenMethodHelper {

    /** TokenHelper#isEnableToken(), or null when unavailable at runtime. */
    public static Method isEnableToken;
    /** TokenHelper#doDTokencheck(ServletRequest, ServletResponse), or null when unavailable. */
    public static Method doDTokencheck;
    /** TokenHelper#buildDToken(String, String, HttpServletRequest, String, boolean), or null when unavailable. */
    public static Method buildDToken;

    private static final Logger logger = Logger.getLogger(TokenMethodHelper.class);

    static {
        Class clazz = null;
        // Each lookup is wrapped in its own try so one failure does not
        // prevent the remaining methods from being resolved.
        // FIX: the original had three catch blocks per try (ClassNotFound,
        // NoSuchMethod, Exception) that all logged the identical message;
        // a single catch (Exception) is equivalent and clearer.
        try {
            clazz = Class.forName("org.frameworkset.web.token.TokenHelper");
            isEnableToken = clazz.getMethod("isEnableToken");
        } catch (Exception e) {
            logger.info("get isEnableToken method from org.frameworkset.web.token.TokenHelper failed:", e);
        }
        try {
            if (clazz == null)
                clazz = Class.forName("org.frameworkset.web.token.TokenHelper");
            doDTokencheck = clazz.getMethod("doDTokencheck", ServletRequest.class,
                    ServletResponse.class);
        } catch (Exception e) {
            logger.info("get doDTokencheck method from org.frameworkset.web.token.TokenHelper failed:", e);
        }
        try {
            if (clazz == null)
                clazz = Class.forName("org.frameworkset.web.token.TokenHelper");
            buildDToken = clazz.getMethod("buildDToken", String.class, String.class,
                    HttpServletRequest.class, String.class, boolean.class);
        } catch (Exception e) {
            logger.info("get buildDToken method from org.frameworkset.web.token.TokenHelper failed:", e);
        }
    }

    public TokenMethodHelper() {
    }
}
| apache-2.0 |
xusheng1987/jeesite-lite | src/main/java/com/github/flying/jeelite/modules/sys/entity/Role.java | 4919 | /**
* Copyright © 2017-2018 <a href="https://github.com/xusheng1987/jeelite">jeelite</a> All rights reserved.
*/
package com.github.flying.jeelite.modules.sys.entity;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.validator.constraints.Length;
import com.baomidou.mybatisplus.annotations.TableField;
import com.baomidou.mybatisplus.annotations.TableName;
import com.google.common.collect.Lists;
import com.github.flying.jeelite.common.config.Global;
import com.github.flying.jeelite.common.persistence.DataEntity;
/**
 * Role entity (table {@code sys_role}): a named security role with an owning
 * office, a data-access scope, assigned menus (function permissions) and,
 * for the custom scope, an explicit list of offices (data permissions).
 *
 * @author flying
 * @version 2013-12-05
 */
@TableName("sys_role")
public class Role extends DataEntity<Role> {

    private static final long serialVersionUID = 1L;
    @TableField(value="office_id", el = "office.id")
    private Office office; // owning office
    private String name; // role name
    private String dataScope; // data-access scope, see the DATA_SCOPE_* constants
    @TableField(exist=false)
    private String oldName; // previous role name (kept around for renames)
    private String useable; // whether the role is enabled (Global.YES / no)
    @TableField(exist=false)
    private User user; // used to query the role list by user id
    @TableField(exist=false)
    private List<Menu> menuList = Lists.newArrayList(); // assigned menus
    @TableField(exist=false)
    private List<Office> officeList = Lists.newArrayList(); // offices for the custom data scope

    /**
     * Data scope values:
     * 1: all data; 2: own company and below; 3: own company only;
     * 4: own department and below; 5: own department only;
     * 8: own data only; 9: custom per-office selection.
     */
    public static final String DATA_SCOPE_ALL = "1";
    public static final String DATA_SCOPE_COMPANY_AND_CHILD = "2";
    public static final String DATA_SCOPE_COMPANY = "3";
    public static final String DATA_SCOPE_OFFICE_AND_CHILD = "4";
    public static final String DATA_SCOPE_OFFICE = "5";
    public static final String DATA_SCOPE_SELF = "8";
    public static final String DATA_SCOPE_CUSTOM = "9";

    /** Default role: enabled, with the most restrictive ("self only") data scope. */
    public Role() {
        super();
        this.dataScope = DATA_SCOPE_SELF;
        this.useable = Global.YES;
    }

    public Role(String id) {
        super(id);
    }

    /** Creates a role used as a query template for the given user. */
    public Role(User user) {
        this();
        this.user = user;
    }

    public String getUseable() {
        return useable;
    }

    public void setUseable(String useable) {
        this.useable = useable;
    }

    public Office getOffice() {
        return office;
    }

    public void setOffice(Office office) {
        this.office = office;
    }

    @Length(min=1, max=100)
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDataScope() {
        return dataScope;
    }

    public void setDataScope(String dataScope) {
        this.dataScope = dataScope;
    }

    public String getOldName() {
        return oldName;
    }

    public void setOldName(String oldName) {
        this.oldName = oldName;
    }

    public List<Menu> getMenuList() {
        return menuList;
    }

    public void setMenuList(List<Menu> menuList) {
        this.menuList = menuList;
    }

    /** @return the ids of the assigned menus, in list order */
    public List<String> getMenuIdList() {
        List<String> menuIdList = Lists.newArrayList();
        for (Menu menu : menuList) {
            menuIdList.add(menu.getId());
        }
        return menuIdList;
    }

    /** Replaces the menu list with stub Menu objects carrying only ids. */
    public void setMenuIdList(List<String> menuIdList) {
        menuList = Lists.newArrayList();
        for (String menuId : menuIdList) {
            Menu menu = new Menu();
            menu.setId(menuId);
            menuList.add(menu);
        }
    }

    /** @return the assigned menu ids as a comma-separated string */
    public String getMenuIds() {
        return StringUtils.join(getMenuIdList(), ",");
    }

    /** Parses a comma-separated id string into the menu list. */
    public void setMenuIds(String menuIds) {
        menuList = Lists.newArrayList();
        if (menuIds != null) {
            String[] ids = StringUtils.split(menuIds, ",");
            setMenuIdList(Lists.newArrayList(ids));
        }
    }

    public List<Office> getOfficeList() {
        return officeList;
    }

    public void setOfficeList(List<Office> officeList) {
        this.officeList = officeList;
    }

    /** @return the ids of the offices selected for the custom data scope */
    public List<String> getOfficeIdList() {
        List<String> officeIdList = Lists.newArrayList();
        for (Office e : officeList) {
            officeIdList.add(e.getId());
        }
        return officeIdList;
    }

    /** Replaces the office list with stub Office objects carrying only ids. */
    public void setOfficeIdList(List<String> officeIdList) {
        officeList = Lists.newArrayList();
        for (String officeId : officeIdList) {
            Office e = new Office();
            e.setId(officeId);
            officeList.add(e);
        }
    }

    /** @return the selected office ids as a comma-separated string */
    public String getOfficeIds() {
        return StringUtils.join(getOfficeIdList(), ",");
    }

    /** Parses a comma-separated id string into the office list. */
    public void setOfficeIds(String officeIds) {
        officeList = Lists.newArrayList();
        if (officeIds != null) {
            String[] ids = StringUtils.split(officeIds, ",");
            setOfficeIdList(Lists.newArrayList(ids));
        }
    }

    /**
     * Collects the non-empty permission strings of all assigned menus.
     */
    public List<String> getPermissions() {
        List<String> permissions = Lists.newArrayList();
        for (Menu menu : menuList) {
            // Idiom fix: isNotEmpty replaces the hand-rolled
            // "!= null && !"".equals(...)" check (same semantics).
            if (StringUtils.isNotEmpty(menu.getPermission())) {
                permissions.add(menu.getPermission());
            }
        }
        return permissions;
    }

    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }

    @Override
    public String toString() {
        return name;
    }
} | apache-2.0 |
nicktelford/dropwizard | dropwizard-core/src/test/java/io/dropwizard/setup/AdminEnvironmentTest.java | 1374 | package io.dropwizard.setup;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.google.common.collect.ImmutableMultimap;
import io.dropwizard.jetty.MutableServletContextHandler;
import io.dropwizard.servlets.tasks.Task;
import org.junit.Test;
import javax.servlet.ServletRegistration;
import java.io.PrintWriter;
import static org.fest.assertions.api.Assertions.assertThat;
/**
 * Verifies that adding a {@link Task} to the {@link AdminEnvironment}
 * registers the "tasks" servlet under the expected mapping.
 */
public class AdminEnvironmentTest {

    private final MutableServletContextHandler handler = new MutableServletContextHandler();
    private final HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();
    private final AdminEnvironment env = new AdminEnvironment(handler, healthCheckRegistry);

    @Test
    public void addsATaskServlet() throws Exception {
        // Minimal no-op task: only the servlet registration matters here.
        final Task task = new Task("thing") {
            @Override
            public void execute(ImmutableMultimap<String, String> parameters,
                                PrintWriter output) throws Exception {
                // intentionally empty
            }
        };

        env.addTask(task);
        handler.start();

        final ServletRegistration tasksServlet = handler.getServletHandler()
                .getServletContext()
                .getServletRegistration("tasks");
        assertThat(tasksServlet.getMappings()).containsOnly("/tasks/*");
    }
}
| apache-2.0 |
fabioCollini/DaggerMock | daggermockTests/src/test/java/it/cosenonjaviste/daggermock/simple/DecoratorTest.java | 1427 | /*
* Copyright 2016 Fabio Collini.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.cosenonjaviste.daggermock.simple;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.Mockito;
import it.cosenonjaviste.daggermock.DaggerMockRule;
import it.cosenonjaviste.daggermock.InjectFromComponent;
public class DecoratorTest {
@Rule public final DaggerMockRule<MyComponent> rule = new DaggerMockRule<>(MyComponent.class, new MyModule())
.decorates(MyService.class, new DaggerMockRule.ObjectDecorator<MyService>() {
@Override
public MyService decorate(MyService obj) {
return Mockito.spy(obj);
}
});
@InjectFromComponent MainService mainService;
@Test
public void testObjectIsDecorated() {
mainService.get();
Mockito.verify(mainService.getMyService()).get();
}
}
| apache-2.0 |
vincentpoon/hbase | hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java | 8830 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.procedure2;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
/**
 * Verifies that the procedure metric hooks ({@code updateMetricsOnSubmit} and
 * {@code updateMetricsOnFinish}) fire the expected number of times for
 * successful, failed, yielding, and parent/child procedures.
 *
 * NOTE(review): the outer static counters (beginCount/successCount/failedCount)
 * accumulate the EXPECTED totals across the test methods of this class, while
 * the identically named static counters on the inner {@code ProcedureMetrics}
 * class accumulate the ACTUAL totals incremented by the hooks. The instance
 * accesses like {@code proc.beginCount} resolve to those static fields.
 */
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureMetrics {
  private static final Log LOG = LogFactory.getLog(TestProcedureMetrics.class);

  // Single executor slot: procedures run one at a time.
  private static final int PROCEDURE_EXECUTOR_SLOTS = 1;
  private TestProcEnv procEnv;
  private static ProcedureExecutor<TestProcEnv> procExecutor;
  private ProcedureStore procStore;

  private HBaseCommonTestingUtility htu;
  private FileSystem fs;
  private Path testDir;
  private Path logDir;

  // Expected totals, accumulated across the test methods (see class note).
  private static int beginCount = 0;
  private static int successCount = 0;
  private static int failedCount = 0;

  /** Creates a fresh procedure store and executor backed by a temp directory. */
  @Before
  public void setUp() throws IOException {
    htu = new HBaseCommonTestingUtility();
    testDir = htu.getDataTestDir();
    fs = testDir.getFileSystem(htu.getConfiguration());
    assertTrue(testDir.depth() > 1);

    logDir = new Path(testDir, "proc-logs");
    procEnv = new TestProcEnv();
    procStore = ProcedureTestingUtility.createStore(htu.getConfiguration(), logDir);
    procExecutor = new ProcedureExecutor<TestProcEnv>(htu.getConfiguration(), procEnv, procStore);
    procExecutor.testing = new ProcedureExecutor.Testing();
    procStore.start(PROCEDURE_EXECUTOR_SLOTS);
    procExecutor.start(PROCEDURE_EXECUTOR_SLOTS, true);
  }

  @After
  public void tearDown() throws IOException {
    procExecutor.stop();
    procStore.stop(false);
    fs.delete(logDir, true);
  }

  @Test
  public void testMetricForSimpleProcedure() throws Exception {
    // Procedure that executes successfully: one submit, one success.
    ProcedureMetrics proc = new ProcedureMetrics(true);
    long id = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
    assertNotEquals("ProcId zero!", 0, id);
    beginCount++;
    successCount++;
    ProcedureTestingUtility.waitProcedure(procExecutor, proc);
    assertEquals("beginCount doesn't match!", beginCount, proc.beginCount);
    assertEquals("successCount doesn't match!", successCount, proc.successCount);
    assertEquals("failedCont doesn't match!", failedCount, proc.failedCount);
  }

  @Test
  public void testMetricsForFailedProcedure() throws Exception {
    // Procedure that fails: one submit, one failure.
    ProcedureMetrics proc = new ProcedureMetrics(false);
    long id = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
    assertNotEquals("ProcId zero!", 0, id);
    beginCount++;
    failedCount++;
    ProcedureTestingUtility.waitProcedure(procExecutor, proc);
    assertEquals("beginCount doesn't match!", beginCount, proc.beginCount);
    assertEquals("successCount doesn't match!", successCount, proc.successCount);
    assertEquals("failedCont doesn't match!", failedCount, proc.failedCount);
  }

  @Test
  public void testMetricForYieldProcedure() throws Exception {
    // Procedure that yields once before succeeding; yields must not be
    // counted as extra submits or finishes.
    ProcedureMetrics proc = new ProcedureMetrics(true, true);
    long id = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
    assertNotEquals("ProcId zero!", 0, id);
    beginCount++;
    successCount++;
    ProcedureTestingUtility.waitProcedure(procExecutor, proc);
    assertEquals("beginCount doesn't match!", beginCount, proc.beginCount);
    assertEquals("successCount doesn't match!", successCount, proc.successCount);
    assertEquals("failedCont doesn't match!", failedCount, proc.failedCount);
  }

  @Test
  public void testMetricForFailedYiledProcedure() {
    // Procedure that yields and then fails. (Method name typo "Yiled" left
    // as-is; renaming would change the test's public identity.)
    ProcedureMetrics proc = new ProcedureMetrics(false, true);
    long id = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
    assertNotEquals("ProcId zero!", 0, id);
    beginCount++;
    failedCount++;
    ProcedureTestingUtility.waitProcedure(procExecutor, proc);
    assertEquals("beginCount doesn't match!", beginCount, proc.beginCount);
    assertEquals("successCount doesn't match!", successCount, proc.successCount);
    assertEquals("failedCont doesn't match!", failedCount, proc.failedCount);
  }

  @Test
  public void testMetricForProcedureWithChildren() throws Exception {
    // Yielding parent with ten children, one of which (index 2) fails;
    // no child yields (yiledChildIndex == -1).
    int subProcCount = 10;
    int failChildIndex = 2;
    int yiledChildIndex = -1;
    ProcedureMetrics[] subprocs = new ProcedureMetrics[subProcCount];
    for (int i = 0; i < subProcCount; ++i) {
      subprocs[i] = new ProcedureMetrics(failChildIndex != i, yiledChildIndex == i, 3);
    }

    ProcedureMetrics proc = new ProcedureMetrics(true, true, 3, subprocs);
    long id = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
    assertNotEquals("ProcId zero!", 0, id);
    // All children plus the parent are submitted.
    beginCount += subProcCount + 1;
    // Children before (and excluding) the failing one count as successes.
    successCount += subProcCount - (failChildIndex + 1);
    if (failChildIndex >= 0) {
      // With a failing child, all subProcCount + 1 procedures are expected
      // to be counted as failed.
      failedCount += subProcCount + 1;
    } else {
      successCount++;
    }
    ProcedureTestingUtility.waitProcedure(procExecutor, proc);
    assertEquals("beginCount doesn't match!", beginCount, proc.beginCount);
    assertEquals("successCount doesn't match!", successCount, proc.successCount);
    assertEquals("failedCont doesn't match!", failedCount, proc.failedCount);
  }

  /** Shared environment; neither flag is read by ProcedureMetrics in this file. */
  private static class TestProcEnv {
    public boolean toggleKillBeforeStoreUpdate = false;
    public boolean triggerRollbackOnChild = false;
  }

  /**
   * Test procedure that can be configured to succeed or fail, to yield a
   * number of times, and to spawn child procedures. The metric hooks below
   * increment the shared static ACTUAL counters.
   */
  public static class ProcedureMetrics extends SequentialProcedure<TestProcEnv> {
    // Actual metric totals, shared across all instances (see class note).
    public static long beginCount = 0;
    public static long successCount = 0;
    public static long failedCount = 0;

    private boolean success;   // whether execute() completes successfully
    private boolean yield;     // whether execute() yields before completing
    private int yieldCount;    // total yields to perform
    private int yieldNum;      // yields performed so far
    private ProcedureMetrics[] subprocs = null; // children to spawn, or null

    public ProcedureMetrics() {
      this(true);
    }

    public ProcedureMetrics(boolean success) {
      this(success, true);
    }

    public ProcedureMetrics(boolean success, boolean yield) {
      this(success, yield, 1);
    }

    public ProcedureMetrics(boolean success, boolean yield, int yieldCount) {
      this(success, yield, yieldCount, null);
    }

    public ProcedureMetrics(boolean success, ProcedureMetrics[] subprocs) {
      this(success, false, 1, subprocs);
    }

    public ProcedureMetrics(boolean success, boolean yield, int yieldCount,
        ProcedureMetrics[] subprocs) {
      this.success = success;
      this.yield = yield;
      this.yieldCount = yieldCount;
      this.subprocs = subprocs;
      yieldNum = 0;
    }

    /** Metric hook: counts every submitted procedure. */
    @Override
    protected void updateMetricsOnSubmit(TestProcEnv env) {
      beginCount++;
    }

    /**
     * Yields {@code yieldCount} times when configured to, then either fails
     * (when {@code success} is false) or returns the child procedures.
     */
    @Override
    protected Procedure[] execute(TestProcEnv env) throws ProcedureYieldException,
        ProcedureSuspendedException, InterruptedException {
      if (this.yield) {
        if (yieldNum < yieldCount) {
          yieldNum++;
          throw new ProcedureYieldException();
        }
      }

      if (!this.success) {
        setFailure("Failed", new InterruptedException("Failed"));
        return null;
      }
      return subprocs;
    }

    @Override
    protected void rollback(TestProcEnv env) throws IOException, InterruptedException {
      // no-op: rollback behavior is not under test here
    }

    @Override
    protected boolean abort(TestProcEnv env) {
      return false;
    }

    /** Metric hook: counts every finished procedure as success or failure. */
    @Override
    protected void updateMetricsOnFinish(final TestProcEnv env, final long time,
        boolean success) {
      if (success) {
        successCount++;
      } else {
        failedCount++;
      }
    }
  }
}
| apache-2.0 |
SowaLabs/OpenNLP | opennlp-tools/src/main/java/opennlp/tools/namefind/NameSampleDataStream.java | 2183 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.namefind;
import java.io.IOException;
import opennlp.maxent.DataStream;
import opennlp.tools.util.FilterObjectStream;
import opennlp.tools.util.ObjectStream;
/**
 * The {@link NameSampleDataStream} class converts tagged {@link String}s
 * provided by a {@link DataStream} to {@link NameSample} objects.
 * It expects text that is one sentence per line and tokenized,
 * with names identified by <code>&lt;START&gt;</code> and <code>&lt;END&gt;</code> tags.
 */
public class NameSampleDataStream extends FilterObjectStream<String, NameSample> {

    public static final String START_TAG_PREFIX = "<START:";
    public static final String START_TAG = "<START>";
    public static final String END_TAG = "<END>";

    public NameSampleDataStream(ObjectStream<String> in) {
        super(in);
    }

    /**
     * Reads the next tagged line and parses it into a {@link NameSample}.
     *
     * @return the next sample, or null when the underlying stream is exhausted
     * @throws IOException if reading from the underlying stream fails
     */
    public NameSample read() throws IOException {
        boolean clearAdaptiveData = false;

        String line = samples.read();

        // An empty line indicates the begin of a new article, for which the
        // adaptive data in the feature generators must be cleared.
        while (line != null && line.trim().length() == 0) {
            clearAdaptiveData = true;
            line = samples.read();
        }

        if (line == null) {
            return null;
        }

        return NameSample.parse(line, clearAdaptiveData);
    }
}
| apache-2.0 |
pleacu/jbpm | jbpm-case-mgmt/jbpm-case-mgmt-impl/src/test/java/org/jbpm/casemgmt/impl/CarInsuranceClaimCaseTest.java | 31105 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.casemgmt.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.drools.core.command.runtime.rule.FireAllRulesCommand;
import org.jbpm.casemgmt.api.model.instance.CaseFileInstance;
import org.jbpm.casemgmt.api.model.instance.CaseInstance;
import org.jbpm.casemgmt.api.model.instance.CaseStageInstance;
import org.jbpm.casemgmt.api.model.instance.CommentInstance;
import org.jbpm.casemgmt.demo.insurance.ClaimReport;
import org.jbpm.casemgmt.demo.insurance.PropertyDamageReport;
import org.jbpm.casemgmt.impl.util.AbstractCaseServicesBaseTest;
import org.jbpm.casemgmt.impl.util.CountDownListenerFactory;
import org.jbpm.document.Document;
import org.jbpm.document.service.impl.DocumentImpl;
import org.jbpm.services.api.model.ProcessInstanceDesc;
import org.jbpm.services.task.impl.model.UserImpl;
import org.junit.After;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.query.QueryContext;
import org.kie.api.task.model.OrganizationalEntity;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.query.QueryFilter;
import org.kie.internal.runtime.conf.ObjectModel;
import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
/**
 * End-to-end tests for the car insurance claim case definition
 * ({@code insurance-claims.CarInsuranceClaimCase}). Each test starts a case
 * with an insured ("john") and an insurance representative ("mary"), drives it
 * through the "Build claim report" and "Claim assesment" stages via user tasks
 * and ad hoc fragments, and asserts the case ends with no active process
 * instances. Tests run in name order ({@link MethodSorters#NAME_ASCENDING})
 * because the generated case id ({@code CAR_INS-0000000001}) is fixed.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class CarInsuranceClaimCaseTest extends AbstractCaseServicesBaseTest {

    private static final Logger logger = LoggerFactory.getLogger(CarInsuranceClaimCaseTest.class);

    // Case definition id deployed from the BPMN2 file listed below.
    private static final String CAR_INSURANCE_CLAIM_PROC_ID = "insurance-claims.CarInsuranceClaimCase";
    // First (and only) case id generated per test — relies on a fresh sequence per test run.
    protected static final String CAR_INS_CASE_ID = "CAR_INS-0000000001";

    /** Assets deployed for these tests: the case definition plus its business rules. */
    @Override
    protected List<String> getProcessDefinitionFiles() {
        List<String> processes = new ArrayList<String>();
        processes.add("org/jbpm/casemgmt/demo/insurance/CarInsuranceClaimCase.bpmn2");
        processes.add("org/jbpm/casemgmt/demo/insurance/insurance-rules.drl");
        return processes;
    }

    /** Clears the countdown-listener registry in addition to the base teardown. */
    @After
    public void tearDown() {
        super.tearDown();
        CountDownListenerFactory.clear();
    }

    /** Happy path: claim reported, assessed and the calculated offer accepted. */
    @Test
    public void testCarInsuranceClaimCase() {
        // let's assign users to roles so they can be participants in the case
        String caseId = startAndAssertCaseInstance(deploymentUnit.getIdentifier(), "john", "mary");
        try {
            // let's verify case is created
            assertCaseInstance(deploymentUnit.getIdentifier(), CAR_INS_CASE_ID);
            // let's look at what stages are active
            assertBuildClaimReportStage();
            // since the first task assigned to insured is with auto start it should be already active
            // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
            long taskId = assertBuildClaimReportAvailableForBothRoles();
            // let's provide claim report with initial data
            // claim report should be stored in case file data
            provideAndAssertClaimReport(taskId);
            // now we have another task for insured to provide property damage report
            taskId = assertPropertyDamageReportAvailableForBothRoles();
            // let's provide the property damage report
            provideAndAssertPropertyDamageReport(taskId);
            // let's complete the stage by explicitly stating that claimReport is done
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "claimReportDone", true);
            // we should be in another stage - Claim assessment
            assertClaimAssesmentStage();
            // let's trigger claim offer calculation
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Calculate claim", null);
            // now we have another task for insured as claim was calculated
            // let's accept the calculated claim
            assertAndAcceptClaimOffer();
            // there should be no process instances for the case
            Collection<ProcessInstanceDesc> caseProcesInstances = caseRuntimeDataService.getProcessInstancesForCase(CAR_INS_CASE_ID, Arrays.asList(ProcessInstance.STATE_ACTIVE), new QueryContext());
            assertEquals(0, caseProcesInstances.size());
            caseId = null;
        } catch (Exception e) {
            logger.error("Unexpected error {}", e.getMessage(), e);
            fail("Unexpected exception " + e.getMessage());
        } finally {
            if (caseId != null) {
                caseService.cancelCase(caseId);
            }
        }
    }

    /** Same as the happy path but with a police report document attached mid-stage. */
    @Test
    public void testCarInsuranceClaimCaseWithPoliceReport() {
        // let's assign users to roles so they can be participants in the case and start it
        String caseId = startAndAssertCaseInstance(deploymentUnit.getIdentifier(), "john", "mary");
        try {
            // let's verify case is created
            assertCaseInstance(deploymentUnit.getIdentifier(), CAR_INS_CASE_ID);
            // let's look at what stages are active
            assertBuildClaimReportStage();
            // since the first task assigned to insured is with auto start it should be already active
            // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
            long taskId = assertBuildClaimReportAvailableForBothRoles();
            // let's provide claim report with initial data
            // claim report should be stored in case file data
            provideAndAssertClaimReport(taskId);
            // now we have another task for insured to provide property damage report
            taskId = assertPropertyDamageReportAvailableForBothRoles();
            // let's attach police report as document
            attachAndAssertPoliceReport();
            // let's provide the property damage report
            provideAndAssertPropertyDamageReport(taskId);
            // let's complete the stage by explicitly stating that claimReport is done
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "claimReportDone", true);
            // we should be in another stage - Claim assessment
            assertClaimAssesmentStage();
            // let's trigger claim offer calculation
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Calculate claim", null);
            // now we have another task for insured as claim was calculated
            // let's accept the calculated claim
            assertAndAcceptClaimOffer();
            // there should be no process instances for the case
            Collection<ProcessInstanceDesc> caseProcesInstances = caseRuntimeDataService.getProcessInstancesForCase(CAR_INS_CASE_ID, Arrays.asList(ProcessInstance.STATE_ACTIVE), new QueryContext());
            assertEquals(0, caseProcesInstances.size());
            caseId = null;
        } catch (Exception e) {
            logger.error("Unexpected error {}", e.getMessage(), e);
            fail("Unexpected exception " + e.getMessage());
        } finally {
            if (caseId != null) {
                caseService.cancelCase(caseId);
            }
        }
    }

    /**
     * Insured contacts the insurance company twice; the second call requests a
     * callback after 2 seconds, verified via a countdown listener (hence the
     * test timeout).
     */
    @Test(timeout = 10000)
    public void testCarInsuranceClaimCaseWithContactByInsured() {
        // let's assign users to roles so they can be participants in the case
        String caseId = startAndAssertCaseInstance(deploymentUnit.getIdentifier(), "john", "mary");
        try {
            // let's verify case is created
            assertCaseInstance(deploymentUnit.getIdentifier(), CAR_INS_CASE_ID);
            // let's look at what stages are active
            assertBuildClaimReportStage();
            // since the first task assigned to insured is with auto start it should be already active
            // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
            long taskId = assertBuildClaimReportAvailableForBothRoles();
            // let's provide claim report with initial data
            // claim report should be stored in case file data
            provideAndAssertClaimReport(taskId);
            // now we have another task for insured to provide property damage report
            taskId = assertPropertyDamageReportAvailableForBothRoles();
            // let's provide the property damage report
            provideAndAssertPropertyDamageReport(taskId);
            // before completing claim report, let's call insurance company with some questions
            // when call is answered insurance representative gets a task
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Contacted by insured", null);
            attachAndAssertPoliceReport(false, null);
            // still not satisfied, let's call insurance company with these questions again and ask for callback in 2 sec
            // when call is answered insurance representative gets a task
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Contacted by insured", null);
            attachAndAssertPoliceReport(true, "2s");
            // let's complete the stage by explicitly stating that claimReport is done
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "claimReportDone", true);
            // we should be in another stage - Claim assessment
            assertClaimAssesmentStage();
            // let's trigger claim offer calculation
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Calculate claim", null);
            // now we have another task for insured as claim was calculated
            // let's accept the calculated claim
            assertAndAcceptClaimOffer();
            // there should be no process instances for the case
            Collection<ProcessInstanceDesc> caseProcesInstances = caseRuntimeDataService.getProcessInstancesForCase(CAR_INS_CASE_ID, Arrays.asList(ProcessInstance.STATE_ACTIVE), new QueryContext());
            assertEquals(0, caseProcesInstances.size());
            caseId = null;
        } catch (Exception e) {
            logger.error("Unexpected error {}", e.getMessage(), e);
            fail("Unexpected exception " + e.getMessage());
        } finally {
            if (caseId != null) {
                caseService.cancelCase(caseId);
            }
        }
    }

    /** The insured rejects the first offer; three negotiation rounds follow. */
    @Test
    public void testCarInsuranceClaimCaseWithNegotiations() {
        // let's assign users to roles so they can be participants in the case
        String caseId = startAndAssertCaseInstance(deploymentUnit.getIdentifier(), "john", "mary");
        try {
            // let's verify case is created
            assertCaseInstance(deploymentUnit.getIdentifier(), CAR_INS_CASE_ID);
            // let's look at what stages are active
            assertBuildClaimReportStage();
            // since the first task assigned to insured is with auto start it should be already active
            // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
            long taskId = assertBuildClaimReportAvailableForBothRoles();
            // let's provide claim report with initial data
            // claim report should be stored in case file data
            provideAndAssertClaimReport(taskId);
            // now we have another task for insured to provide property damage report
            taskId = assertPropertyDamageReportAvailableForBothRoles();
            // let's provide the property damage report
            provideAndAssertPropertyDamageReport(taskId);
            // let's complete the stage by explicitly stating that claimReport is done
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "claimReportDone", true);
            // we should be in another stage - Claim assessment
            assertClaimAssesmentStage();
            // let's trigger claim offer calculation
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Calculate claim", null);
            // now we have another task for insured as claim was calculated
            // let's negotiate few times the calculated claim offer
            assertAndNegotiateClaimOffer(3);
            // there should be no process instances for the case
            Collection<ProcessInstanceDesc> caseProcesInstances = caseRuntimeDataService.getProcessInstancesForCase(CAR_INS_CASE_ID, Arrays.asList(ProcessInstance.STATE_ACTIVE), new QueryContext());
            assertEquals(0, caseProcesInstances.size());
            caseId = null;
        } catch (Exception e) {
            logger.error("Unexpected error {}", e.getMessage(), e);
            fail("Unexpected exception " + e.getMessage());
        } finally {
            if (caseId != null) {
                caseService.cancelCase(caseId);
            }
        }
    }

    /** An assessor ("krisv") is added to the case and performs the claim evaluation. */
    @Test
    public void testCarInsuranceClaimCaseWithAssessorInvolved() {
        // let's assign users to roles so they can be participants in the case
        String caseId = startAndAssertCaseInstance(deploymentUnit.getIdentifier(), "john", "mary");
        try {
            // let's verify case is created
            assertCaseInstance(deploymentUnit.getIdentifier(), CAR_INS_CASE_ID);
            // let's look at what stages are active
            assertBuildClaimReportStage();
            // let's now add assessor to the case as we will need his/her opinion
            caseService.assignToCaseRole(CAR_INS_CASE_ID, "assessor", new UserImpl("krisv"));
            // since the first task assigned to insured is with auto start it should be already active
            // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
            long taskId = assertBuildClaimReportAvailableForBothRoles();
            // let's provide claim report with initial data
            // claim report should be stored in case file data
            provideAndAssertClaimReport(taskId);
            // now we have another task for insured to provide property damage report
            taskId = assertPropertyDamageReportAvailableForBothRoles();
            // let's provide the property damage report
            provideAndAssertPropertyDamageReport(taskId);
            // let's complete the stage by explicitly stating that claimReport is done
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "claimReportDone", true);
            // we should be in another stage - Claim assessment
            assertClaimAssesmentStage();
            // now krisv should have a task assigned as assessor
            assertAndRunClaimAssessment();
            // now we have another task for insured as claim was calculated
            // let's accept the calculated claim
            assertAndAcceptClaimOffer();
            // there should be no process instances for the case
            Collection<ProcessInstanceDesc> caseProcesInstances = caseRuntimeDataService.getProcessInstancesForCase(CAR_INS_CASE_ID, Arrays.asList(ProcessInstance.STATE_ACTIVE), new QueryContext());
            assertEquals(0, caseProcesInstances.size());
            caseId = null;
        } catch (Exception e) {
            logger.error("Unexpected error {}", e.getMessage(), e);
            fail("Unexpected exception " + e.getMessage());
        } finally {
            if (caseId != null) {
                caseService.cancelCase(caseId);
            }
        }
    }

    /**
     * A rule-driven decision ("AskForDetails" in the case file) creates an extra
     * task asking the insured for additional details before the offer is made.
     */
    @Test
    public void testCarInsuranceClaimCaseWithExtraTaskFromRules() {
        // let's assign users to roles so they can be participants in the case
        String caseId = startAndAssertCaseInstance(deploymentUnit.getIdentifier(), "john", "mary");
        try {
            // let's verify case is created
            assertCaseInstance(deploymentUnit.getIdentifier(), CAR_INS_CASE_ID);
            // let's look at what stages are active
            assertBuildClaimReportStage();
            // since the first task assigned to insured is with auto start it should be already active
            // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
            long taskId = assertBuildClaimReportAvailableForBothRoles();
            // let's provide claim report with initial data
            // claim report should be stored in case file data
            provideAndAssertClaimReport(taskId);
            // now we have another task for insured to provide property damage report
            taskId = assertPropertyDamageReportAvailableForBothRoles();
            // let's provide the property damage report
            provideAndAssertPropertyDamageReport(taskId);
            // let's complete the stage by explicitly stating that claimReport is done
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "claimReportDone", true);
            // we should be in another stage - Claim assessment
            assertClaimAssesmentStage();
            // ask for more details from insured
            caseService.addDataToCaseFile(CAR_INS_CASE_ID, "decision", "AskForDetails");
            assertAndProvideAdditionalDetails();
            // let's trigger claim offer calculation
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Calculate claim", null);
            // now we have another task for insured as claim was calculated
            // let's accept the calculated claim
            assertAndAcceptClaimOffer();
            // there should be no process instances for the case
            Collection<ProcessInstanceDesc> caseProcesInstances = caseRuntimeDataService.getProcessInstancesForCase(CAR_INS_CASE_ID, Arrays.asList(ProcessInstance.STATE_ACTIVE), new QueryContext());
            assertEquals(0, caseProcesInstances.size());
            caseId = null;
        } catch (Exception e) {
            logger.error("Unexpected error {}", e.getMessage(), e);
            fail("Unexpected exception " + e.getMessage());
        } finally {
            if (caseId != null) {
                caseService.cancelCase(caseId);
            }
        }
    }

    /*
     * Helper methods
     */

    // Asserts author and text of a case comment (not used by the tests above;
    // kept as a shared helper).
    protected void assertComment(CommentInstance comment, String author, String content) {
        assertNotNull(comment);
        assertEquals(author, comment.getAuthor());
        assertEquals(content, comment.getComment());
    }

    // Asserts name, actual owner (null when unclaimed) and status of a task.
    protected void assertTask(TaskSummary task, String actor, String name, Status status) {
        assertNotNull(task);
        assertEquals(name, task.getName());
        assertEquals(actor, task.getActualOwnerId());
        assertEquals(status, task.getStatus());
    }

    /**
     * Starts a new car insurance claim case with the given users assigned to the
     * insured and insuranceRepresentative roles and asserts the generated case id.
     */
    protected String startAndAssertCaseInstance(String deploymentId, String insured, String insuranceRepresentative) {
        Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
        roleAssignments.put("insured", new UserImpl(insured));
        roleAssignments.put("insuranceRepresentative", new UserImpl(insuranceRepresentative));
        // start new instance of a case with data and role assignment
        Map<String, Object> data = new HashMap<>();
        CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentId, CAR_INSURANCE_CLAIM_PROC_ID, data, roleAssignments);
        String caseId = caseService.startCase(deploymentId, CAR_INSURANCE_CLAIM_PROC_ID, caseFile);
        assertNotNull(caseId);
        assertEquals(CAR_INS_CASE_ID, caseId);
        return caseId;
    }

    // Asserts the case exists and belongs to the expected deployment.
    protected void assertCaseInstance(String deploymentId, String caseId) {
        CaseInstance cInstance = caseService.getCaseInstance(caseId);
        assertNotNull(cInstance);
        assertEquals(caseId, cInstance.getCaseId());
        assertEquals(deploymentId, cInstance.getDeploymentId());
    }

    // Asserts the single active stage is "Build claim report".
    protected void assertBuildClaimReportStage() {
        Collection<CaseStageInstance> activeStages = caseRuntimeDataService.getCaseInstanceStages(CAR_INS_CASE_ID, true, new QueryContext());
        assertEquals(1, activeStages.size());
        CaseStageInstance stage = activeStages.iterator().next();
        assertEquals("Build claim report", stage.getName());
    }

    // Asserts the single active stage is "Claim assesment" (name as modeled).
    protected void assertClaimAssesmentStage() {
        Collection<CaseStageInstance> activeStages = caseRuntimeDataService.getCaseInstanceStages(CAR_INS_CASE_ID, true, new QueryContext());
        assertEquals(1, activeStages.size());
        CaseStageInstance stage = activeStages.iterator().next();
        assertEquals("Claim assesment", stage.getName());
    }

    // The accident-information task must be claimable by both roles.
    protected long assertBuildClaimReportAvailableForBothRoles() {
        return assertTasksForBothRoles("Provide accident information", "john", "mary", Status.Ready);
    }

    // The property-damage task must be claimable by both roles.
    protected long assertPropertyDamageReportAvailableForBothRoles() {
        return assertTasksForBothRoles("File property damage claim", "john", "mary", Status.Ready);
    }

    /**
     * Asserts that both actors see exactly one potential-owner task with the
     * given name and status, and returns its id.
     */
    protected long assertTasksForBothRoles(String taskName, String actor1, String actor2, Status status) {
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner(actor1, new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), null, taskName, status);
        // the same task can be claimed by insuranceRepresentative in case claim is reported over phone
        tasks = runtimeDataService.getTasksAssignedAsPotentialOwner(actor2, new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), null, taskName, status);
        return tasks.get(0).getId();
    }

    // Completes the accident-information task as "john" and verifies the claim
    // report lands in the case file under the "claimReport" key.
    protected void provideAndAssertClaimReport(Long taskId) {
        ClaimReport claimReport = new ClaimReport();
        claimReport.setName("John Doe");
        claimReport.setAddress("Main street, NY");
        claimReport.setAccidentDescription("It happened so sudden...");
        claimReport.setAccidentDate(new Date());
        Map<String, Object> params = new HashMap<>();
        params.put("claimReport_", claimReport);
        userTaskService.completeAutoProgress(taskId, "john", params);
        // claim report should be stored in case file data
        CaseFileInstance caseFile = caseService.getCaseFileInstance(CAR_INS_CASE_ID);
        assertNotNull(caseFile);
        ClaimReport caseClaimReport = (ClaimReport) caseFile.getData("claimReport");
        assertNotNull(caseClaimReport);
    }

    // Completes the property-damage task as "john" and verifies the report lands
    // in the case file under the "propertyDamageReport" key.
    protected void provideAndAssertPropertyDamageReport(Long taskId) {
        PropertyDamageReport damageReport = new PropertyDamageReport("Car is completely destroyed", 1000.0);
        Map<String, Object> params = new HashMap<>();
        params.put("propertyDamageReport_", damageReport);
        userTaskService.completeAutoProgress(taskId, "john", params);
        // property damage report should be stored in case file data
        CaseFileInstance caseFile = caseService.getCaseFileInstance(CAR_INS_CASE_ID);
        assertNotNull(caseFile);
        PropertyDamageReport casePropertyDamageReport = (PropertyDamageReport) caseFile.getData("propertyDamageReport");
        assertNotNull(casePropertyDamageReport);
    }

    // Accepts the "Present calculated claim" task as "john" (accepted = true).
    protected void assertAndAcceptClaimOffer() {
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), "john", "Present calculated claim", Status.Reserved);
        // let's accept the calculated claim
        Map<String, Object> params = new HashMap<>();
        params.put("accepted", true);
        userTaskService.completeAutoProgress(tasks.get(0).getId(), "john", params);
    }

    // Triggers the "Submit police report" fragment, completes it with an in-memory
    // document and verifies the document is stored under "policeReport".
    protected void attachAndAssertPoliceReport() {
        caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Submit police report", null);
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        assertNotNull(tasks);
        assertEquals(2, tasks.size());
        assertTask(tasks.get(0), null, "Submit police report", Status.Ready);
        assertTask(tasks.get(1), null, "File property damage claim", Status.Ready);
        byte[] docContent = "police report content".getBytes();
        DocumentImpl document = new DocumentImpl(UUID.randomUUID().toString(), "car-accident-police-report.txt", docContent.length, new Date());
        document.setContent(docContent);
        Map<String, Object> params = new HashMap<>();
        params.put("policeReport_", document);
        userTaskService.completeAutoProgress(tasks.get(0).getId(), "john", params);
        // police report should be stored in case file data
        CaseFileInstance caseFile = caseService.getCaseFileInstance(CAR_INS_CASE_ID);
        assertNotNull(caseFile);
        Document policeReport = (Document) caseFile.getData("policeReport");
        assertNotNull(policeReport);
        assertEquals("car-accident-police-report.txt", policeReport.getName());
    }

    /**
     * Completes mary's "Contacted by insured" task; when {@code callback} is
     * requested, waits for the timer (via the countdown listener) and completes
     * the resulting "Requested callback" task as well.
     */
    protected void attachAndAssertPoliceReport(boolean callback, String callbackAfter) {
        List<TaskSummary> tasks = runtimeDataService.getTasksOwned("mary", new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), "mary", "Contacted by insured", Status.Reserved);
        Map<String, Object> params = new HashMap<>();
        params.put("callback_", callback);
        if (callback) {
            params.put("callbackAfter_", callbackAfter);
        }
        userTaskService.completeAutoProgress(tasks.get(0).getId(), "mary", params);
        if (callback) {
            CountDownListenerFactory.getExisting("carInsuranceCase").waitTillCompleted();
            tasks = runtimeDataService.getTasksOwned("mary", new QueryFilter());
            assertNotNull(tasks);
            assertEquals(1, tasks.size());
            assertTask(tasks.get(0), "mary", "Requested callback", Status.Reserved);
            userTaskService.completeAutoProgress(tasks.get(0).getId(), "mary", null);
        }
    }

    /**
     * Rejects the initial offer (moving the case to "Escalate rejected claim"),
     * then runs the given number of "Negotiation meeting" rounds, accepting only
     * on the last one.
     */
    protected void assertAndNegotiateClaimOffer(int numberOfNegotiations) {
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), "john", "Present calculated claim", Status.Reserved);
        // let's accept the calculated claim
        Map<String, Object> params = new HashMap<>();
        params.put("accepted", false);
        userTaskService.completeAutoProgress(tasks.get(0).getId(), "john", params);
        Collection<CaseStageInstance> activeStages = caseRuntimeDataService.getCaseInstanceStages(CAR_INS_CASE_ID, true, new QueryContext());
        assertEquals(1, activeStages.size());
        CaseStageInstance stage = activeStages.iterator().next();
        assertEquals("Escalate rejected claim", stage.getName());
        while (numberOfNegotiations > 0) {
            params.clear();
            params.put("Offer", 1000);
            caseService.triggerAdHocFragment(CAR_INS_CASE_ID, "Negotiation meeting", params);
            tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
            assertNotNull(tasks);
            assertEquals(1, tasks.size());
            assertTask(tasks.get(0), null, "Negotiation meeting", Status.Ready);
            boolean accepted = false;
            if (numberOfNegotiations == 1) {
                accepted = true;
            }
            params.put("accepted", accepted);
            userTaskService.completeAutoProgress(tasks.get(0).getId(), "john", params);
            numberOfNegotiations--;
        }
    }

    // Completes krisv's "Assessor evaluation" task, marking the claim report as
    // calculated with an amount of 20000.0.
    private void assertAndRunClaimAssessment() {
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("krisv", new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), "krisv", "Assessor evaluation", Status.Reserved);
        long taskId = tasks.get(0).getId();
        Map<String, Object> taskInput = userTaskService.getTaskInputContentByTaskId(taskId);
        assertNotNull(taskInput);
        assertTrue(taskInput.containsKey("_claimReport"));
        ClaimReport claimReport = (ClaimReport) taskInput.get("_claimReport");
        claimReport.setAmount(20000.0);
        claimReport.setCalculated(Boolean.TRUE);
        Map<String, Object> params = new HashMap<>();
        params.put("claimReport_", claimReport);
        userTaskService.completeAutoProgress(taskId, "krisv", params);
    }

    // Completes the rule-created "Please provide additional details" task and
    // verifies the answer is written back to the case file under "answer".
    protected void assertAndProvideAdditionalDetails() {
        List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        assertTask(tasks.get(0), "john", "Please provide additional details", Status.Reserved);
        long taskId = tasks.get(0).getId();
        Map<String, Object> inputs = userTaskService.getTaskInputContentByTaskId(taskId);
        assertNotNull(inputs);
        assertEquals("How did it happen?", inputs.get("reason"));
        Map<String, Object> params = new HashMap<>();
        params.put("caseFile_answer", "It just happened in a split second, don't remember anything else");
        userTaskService.completeAutoProgress(taskId, "john", params);
        CaseFileInstance caseFile = caseService.getCaseFileInstance(CAR_INS_CASE_ID);
        assertNotNull(caseFile);
        String answer = (String) caseFile.getData("answer");
        assertNotNull(answer);
        assertEquals("It just happened in a split second, don't remember anything else", answer);
    }

    // Registers the countdown listener used by the callback test to wait for the
    // timer-driven "Requested callback" task.
    @Override
    protected List<ObjectModel> getProcessListeners() {
        List<ObjectModel> listeners = super.getProcessListeners();
        listeners.add(new ObjectModel("mvel", "org.jbpm.casemgmt.impl.util.CountDownListenerFactory.get(\"carInsuranceCase\", \"wait before callback\", 1)"));
        return listeners;
    }
}
palessandro/activejdbc | activejdbc/src/main/java/org/javalite/activejdbc/conversion/StringToTimestampConverter.java | 2292 | /*
Copyright 2009-2016 Igor Polevoy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.javalite.activejdbc.conversion;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import static org.javalite.common.Util.*;
/**
 * Converts instances of {@link String} to {@link java.sql.Timestamp}.
 * This class is thread-safe: because {@link DateFormat} itself is not,
 * every thread parses with its own private clone of the configured format.
 *
 * @author Eric Nielsen
 */
public class StringToTimestampConverter extends ConverterAdapter<String, java.sql.Timestamp> {

    private final DateFormat format;

    // Calendar and DateFormat are not thread safe:
    // http://www.javacodegeeks.com/2010/07/java-best-practices-dateformat-in.html
    // so each thread lazily receives its own clone of the prototype above.
    private final ThreadLocal<DateFormat> threadLocalFormat = new ThreadLocal<DateFormat>() {
        @Override
        protected DateFormat initialValue() {
            return (DateFormat) format.clone();
        }
    };

    /**
     * @param pattern pattern to use for conversion (interpreted by {@link SimpleDateFormat})
     */
    public StringToTimestampConverter(String pattern) {
        this(new SimpleDateFormat(pattern));
    }

    /**
     * @param format DateFormat to use for conversion; cloned per thread
     */
    public StringToTimestampConverter(DateFormat format) {
        this.format = format;
    }

    @Override
    protected Class<String> sourceClass() {
        return String.class;
    }

    @Override
    protected Class<java.sql.Timestamp> destinationClass() {
        return java.sql.Timestamp.class;
    }

    /**
     * @param source instance of String or null
     * @return source converted to java.sql.Timestamp, or null if source is blank
     * @throws ParseException if conversion failed
     */
    @Override
    public java.sql.Timestamp doConvert(String source) throws ParseException {
        if (blank(source)) {
            return null;
        }
        long millis = threadLocalFormat.get().parse(source).getTime();
        return new java.sql.Timestamp(millis);
    }
}
| apache-2.0 |
searchbox-io/Jest | jest-common/src/main/java/io/searchbox/core/Explain.java | 1089 | package io.searchbox.core;
import io.searchbox.action.SingleResultAbstractDocumentTargetedAction;
import io.searchbox.client.config.ElasticsearchVersion;
/**
 * Action targeting the Elasticsearch <code>_explain</code> endpoint: explains
 * how a given query scores a specific document identified by index, type and id.
 *
 * @author Dogukan Sonmez
 * @author cihat keser
 */
public class Explain extends SingleResultAbstractDocumentTargetedAction {

    protected Explain(Builder builder) {
        super(builder);
        // The query object becomes the request body sent to _explain.
        this.payload = builder.query;
    }

    @Override
    public String getRestMethodName() {
        return "POST";
    }

    @Override
    protected String buildURI(ElasticsearchVersion elasticsearchVersion) {
        // Appends the endpoint to the /{index}/{type}/{id} URI built by the parent.
        return super.buildURI(elasticsearchVersion) + "/_explain";
    }

    /** Builder for {@link Explain}; index, type, id and the query are all required. */
    public static class Builder extends SingleResultAbstractDocumentTargetedAction.Builder<Explain, Builder> {
        private final Object query;

        public Builder(String index, String type, String id, Object query) {
            this.index(index);
            this.type(type);
            this.id(id);
            this.query = query;
        }

        public Explain build() {
            return new Explain(this);
        }
    }
}
| apache-2.0 |
mlucero88/Tp2-Framework.Tests-Tecnicas | FrameworkTests/src/test/java/grupo11/frameworktests/entrega1/UnitTestNivel2Bis.java | 716 | package grupo11.frameworktests.entrega1;
import grupo11.frameworktests.Fixture;
import grupo11.frameworktests.UnitTest;
// Padre: TestCollectionNivel1
// Hijos: -
public class UnitTestNivel2Bis extends UnitTest {
public UnitTestNivel2Bis(String testName) {
super(testName);
}
@Override
protected void test() {
String str = "testUnitTestNivel2Bis";
Fixture.getInstance().addVariable("VarTestUT2Bis", str);
}
@Override
protected void setUp() {
String str = "setupUnitTestNivel2Bis";
Fixture.getInstance().addVariable("VarSetupUT2Bis", str);
}
@Override
protected void tearDown() {
String str = "teardownUnitTestNivel2Bis";
Fixture.getInstance().addVariable("VarTeardownUT2Bis", str);
}
} | apache-2.0 |
Ariah-Group/Finance | af_webapp/src/main/java/org/kuali/kfs/module/endow/document/web/struts/CashIncreaseDocumentForm.java | 1230 | /*
* Copyright 2010 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.endow.document.web.struts;
import org.kuali.kfs.module.endow.document.CashIncreaseDocument;
public class CashIncreaseDocumentForm extends EndowmentTransactionLinesDocumentFormBase {

    /** Workflow document type name backing the Cash Increase (ECI) document. */
    private static final String CASH_INCREASE_DOC_TYPE_NAME = "ECI";

    public CashIncreaseDocumentForm() {
        super();
    }

    @Override
    protected String getDefaultDocumentTypeName() {
        return CASH_INCREASE_DOC_TYPE_NAME;
    }

    /**
     * Returns the form's backing document cast to a CashIncreaseDocument.
     *
     * @return the CashIncreaseDocument
     */
    public CashIncreaseDocument getCashIncreaseDocument() {
        return (CashIncreaseDocument) getDocument();
    }
}
| apache-2.0 |
fx19880617/pinot-1 | pinot-controller/src/main/java/com/linkedin/pinot/controller/helix/core/periodictask/ControllerPeriodicTask.java | 4009 | /**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.controller.helix.core.periodictask;
import com.linkedin.pinot.controller.helix.core.PinotHelixResourceManager;
import com.linkedin.pinot.core.periodictask.BasePeriodicTask;
import java.util.List;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The base periodic task for pinot controller only. It uses <code>PinotHelixResourceManager</code> to determine
 * which table resources should be managed by this Pinot controller.
 *
 * <p>Only the Helix leader controller actually processes tables; non-leaders skip each run.
 * Leadership transitions are detected across runs via the {@code _isLeader} flag and surfaced
 * through {@link #onBecomeLeader()} / {@link #onBecomeNotLeader()} hooks.
 */
public abstract class ControllerPeriodicTask extends BasePeriodicTask {
  private static final Logger LOGGER = LoggerFactory.getLogger(ControllerPeriodicTask.class);
  private static final Random RANDOM = new Random();

  // Bounds (in seconds) for the randomized initial delay, used to stagger tasks across restarts.
  public static final int MIN_INITIAL_DELAY_IN_SECONDS = 120;
  public static final int MAX_INITIAL_DELAY_IN_SECONDS = 300;

  protected final PinotHelixResourceManager _pinotHelixResourceManager;
  // Tracks whether the previous run observed this controller as leader; guards transition hooks.
  private boolean _isLeader = false;

  public ControllerPeriodicTask(String taskName, long runFrequencyInSeconds, long initialDelayInSeconds,
      PinotHelixResourceManager pinotHelixResourceManager) {
    super(taskName, runFrequencyInSeconds, initialDelayInSeconds);
    _pinotHelixResourceManager = pinotHelixResourceManager;
  }

  // Convenience constructor: picks a random initial delay within the configured bounds.
  public ControllerPeriodicTask(String taskName, long runFrequencyInSeconds,
      PinotHelixResourceManager pinotHelixResourceManager) {
    this(taskName, runFrequencyInSeconds, getRandomInitialDelayInSeconds(), pinotHelixResourceManager);
  }

  // Returns a delay uniformly distributed in [MIN_INITIAL_DELAY_IN_SECONDS, MAX_INITIAL_DELAY_IN_SECONDS).
  private static long getRandomInitialDelayInSeconds() {
    return MIN_INITIAL_DELAY_IN_SECONDS + RANDOM.nextInt(MAX_INITIAL_DELAY_IN_SECONDS - MIN_INITIAL_DELAY_IN_SECONDS);
  }

  @Override
  public void init() {
  }

  @Override
  public void run() {
    // Only the leader controller processes tables; others just log-and-skip.
    if (!_pinotHelixResourceManager.isLeader()) {
      skipLeaderTask();
    } else {
      List<String> allTableNames = _pinotHelixResourceManager.getAllTables();
      processLeaderTask(allTableNames);
    }
  }

  // Called on non-leader runs; fires onBecomeNotLeader() once when leadership was just lost.
  private void skipLeaderTask() {
    if (_isLeader) {
      LOGGER.info("Current pinot controller lost leadership.");
      _isLeader = false;
      onBecomeNotLeader();
    }
    LOGGER.info("Skip running periodic task: {} on non-leader controller", _taskName);
  }

  // Called on leader runs; fires onBecomeLeader() once when leadership was just gained,
  // then delegates the per-run work to process(tables).
  private void processLeaderTask(List<String> tables) {
    if (!_isLeader) {
      LOGGER.info("Current pinot controller became leader. Starting {} with running frequency of {} seconds.",
          _taskName, _intervalInSeconds);
      _isLeader = true;
      onBecomeLeader();
    }
    long startTime = System.currentTimeMillis();
    int numTables = tables.size();
    LOGGER.info("Start processing {} tables in periodic task: {}", numTables, _taskName);
    process(tables);
    LOGGER.info("Finish processing {} tables in periodic task: {} in {}ms", numTables, _taskName,
        (System.currentTimeMillis() - startTime));
  }

  /**
   * Does the following logic when losing the leadership. This should be done only once during leadership transition.
   */
  public void onBecomeNotLeader() {
  }

  /**
   * Does the following logic when becoming lead controller. This should be done only once during leadership transition.
   */
  public void onBecomeLeader() {
  }

  /**
   * Processes the task on the given tables.
   *
   * @param tables List of table names
   */
  public abstract void process(List<String> tables);
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-cloudtrail/src/main/java/com/amazonaws/services/cloudtrail/package-info.java | 2044 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* <fullname>AWS CloudTrail</fullname>
* <p>
* This is the CloudTrail API Reference. It provides descriptions of actions, data types, common parameters, and common
* errors for CloudTrail.
* </p>
* <p>
* CloudTrail is a web service that records AWS API calls for your AWS account and delivers log files to an Amazon S3
* bucket. The recorded information includes the identity of the user, the start time of the AWS API call, the source IP
* address, the request parameters, and the response elements returned by the service.
* </p>
* <note>
* <p>
* As an alternative to the API, you can use one of the AWS SDKs, which consist of libraries and sample code for various
* programming languages and platforms (Java, Ruby, .NET, iOS, Android, etc.). The SDKs provide a convenient way to
* create programmatic access to AWSCloudTrail. For example, the SDKs take care of cryptographically signing requests,
* managing errors, and retrying requests automatically. For information about the AWS SDKs, including how to download
* and install them, see the <a href="http://aws.amazon.com/tools/">Tools for Amazon Web Services page</a>.
* </p>
* </note>
* <p>
* See the <a href="http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-user-guide.html">AWS CloudTrail
* User Guide</a> for information about the data that is included with each AWS API call listed in the log files.
* </p>
*/
package com.amazonaws.services.cloudtrail;
| apache-2.0 |
JoelMarcey/buck | src/com/facebook/buck/features/rust/ConfigBasedUnresolvedRustPlatform.java | 11605 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.features.rust;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.InternalFlavor;
import com.facebook.buck.core.model.TargetConfiguration;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.toolchain.tool.impl.HashedFileTool;
import com.facebook.buck.core.toolchain.toolprovider.ToolProvider;
import com.facebook.buck.core.toolchain.toolprovider.impl.ConstantToolProvider;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.cxx.config.CxxBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatformsProvider;
import com.facebook.buck.cxx.toolchain.UnresolvedCxxPlatform;
import com.facebook.buck.cxx.toolchain.linker.LinkerProvider;
import com.facebook.buck.cxx.toolchain.linker.impl.DefaultLinkerProvider;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutor.Option;
import com.facebook.buck.util.ProcessExecutor.Result;
import com.facebook.buck.util.ProcessExecutorParams;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.EnumSet;
import java.util.Optional;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * An {@link UnresolvedRustPlatform} based on .buckconfig values.
 *
 * <p>Reads rustc/rustdoc/linker settings from the Rust and C++ sections of the buckconfig,
 * falling back to finding "rustc"/"rustdoc" on the PATH when not configured, and resolves
 * everything into a concrete {@link RustPlatform} on demand.
 */
public class ConfigBasedUnresolvedRustPlatform implements UnresolvedRustPlatform {
  private static final Path DEFAULT_RUSTC_COMPILER = Paths.get("rustc");
  private static final Path RUSTDOC = Paths.get("rustdoc");
  private final RustBuckConfig rustBuckConfig;
  private final CxxBuckConfig cxxBuckConfig;
  private final String platformName;
  private final ToolProvider rustCompiler;
  private final ToolProvider rustdoc;
  // Linker configured explicitly for Rust; when absent, the C++ platform's linker is reused.
  private final Optional<ToolProvider> linkerOverride;
  private final UnresolvedCxxPlatform unresolvedCxxPlatform;
  // May be null; only needed to shell out to xcrun for Apple SDK path discovery.
  private final @Nullable ProcessExecutor processExecutor;
  private final Optional<ConfigBasedUnresolvedRustPlatform> unresolvedPluginPlatform;

  private static final Logger LOG = Logger.get(RustBuckConfig.class);

  ConfigBasedUnresolvedRustPlatform(
      String platformName,
      BuckConfig buckConfig,
      ExecutableFinder executableFinder,
      CxxPlatformsProvider cxxPlatformsProvider,
      RustPlatformFactory platformFactory,
      @Nullable ProcessExecutor processExecutor) {
    this.rustBuckConfig = new RustBuckConfig(buckConfig);
    this.cxxBuckConfig = new CxxBuckConfig(buckConfig);
    this.platformName = platformName;
    this.unresolvedCxxPlatform =
        cxxPlatformsProvider.getUnresolvedCxxPlatforms().getValue(InternalFlavor.of(platformName));
    // Prefer the configured compiler; otherwise locate "rustc" on the PATH lazily.
    this.rustCompiler =
        rustBuckConfig
            .getRustCompiler(platformName)
            .orElseGet(
                () -> {
                  HashedFileTool tool =
                      new HashedFileTool(
                          () ->
                              buckConfig.getPathSourcePath(
                                  executableFinder.getExecutable(
                                      DEFAULT_RUSTC_COMPILER, buckConfig.getEnvironment())));
                  return new ConstantToolProvider(tool);
                });

    // Same fallback strategy for rustdoc.
    this.rustdoc =
        rustBuckConfig
            .getRustdoc(platformName)
            .orElseGet(
                () -> {
                  HashedFileTool tool =
                      new HashedFileTool(
                          () ->
                              buckConfig.getPathSourcePath(
                                  executableFinder.getExecutable(
                                      RUSTDOC, buckConfig.getEnvironment())));
                  return new ConstantToolProvider(tool);
                });

    this.linkerOverride = rustBuckConfig.getRustLinker(platformName);
    this.processExecutor = processExecutor;
    // A separate platform may be configured for building rustc plugins (proc-macros etc.).
    this.unresolvedPluginPlatform =
        rustBuckConfig
            .getRustcPluginPlatform(platformName)
            .map(
                plugPlat ->
                    platformFactory.getPlatform(plugPlat, cxxPlatformsProvider, processExecutor));
  }

  /**
   * Resolves the underlying C++ platform (and optional plugin platform) and assembles the
   * concrete {@link RustPlatform}, including all configured rustc/rustdoc/linker flags.
   */
  @Override
  public RustPlatform resolve(BuildRuleResolver resolver, TargetConfiguration targetConfiguration) {
    CxxPlatform cxxPlatform = unresolvedCxxPlatform.resolve(resolver, targetConfiguration);
    Optional<RustPlatform> pluginPlatform =
        unresolvedPluginPlatform.map(plugPlat -> plugPlat.resolve(resolver, targetConfiguration));
    // Use the Rust-specific linker when one is configured; else fall back to the C++ linker.
    LinkerProvider linkerProvider =
        linkerOverride
            .map(
                tp ->
                    (LinkerProvider)
                        new DefaultLinkerProvider(
                            rustBuckConfig
                                .getLinkerPlatform(platformName)
                                .orElse(cxxPlatform.getLd().getType()),
                            tp,
                            true,
                            true,
                            false,
                            cxxBuckConfig.getLinkPathNormalizationArgsEnabled()))
            .orElseGet(cxxPlatform::getLd);
    ImmutableRustPlatform.Builder builder =
        ImmutableRustPlatform.builder()
            .setRustCompiler(rustCompiler)
            .setRustdoc(rustdoc)
            .addAllRustLibraryFlags(
                rustBuckConfig.getRustcLibraryFlags(platformName).stream()
                    .map(StringArg::of)
                    .collect(ImmutableList.toImmutableList()))
            .addAllRustBinaryFlags(
                rustBuckConfig.getRustcBinaryFlags(platformName).stream()
                    .map(StringArg::of)
                    .collect(ImmutableList.toImmutableList()))
            .addAllRustTestFlags(
                rustBuckConfig.getRustcTestFlags(platformName).stream()
                    .map(StringArg::of)
                    .collect(ImmutableList.toImmutableList()))
            .addAllRustCheckFlags(
                rustBuckConfig.getRustcCheckFlags(platformName).stream()
                    .map(StringArg::of)
                    .collect(ImmutableList.toImmutableList()))
            .addAllRustDocFlags(
                rustBuckConfig.getRustDocFlags(platformName).stream()
                    .map(StringArg::of)
                    .collect(ImmutableList.toImmutableList()))
            .setLinker(linkerOverride)
            .setLinkerProvider(linkerProvider)
            .addAllLinkerArgs(
                rustBuckConfig.getLinkerFlags(platformName).stream()
                    .map(StringArg::of)
                    .collect(ImmutableList.toImmutableList()))
            .setCxxPlatform(cxxPlatform)
            .setXcrunSdkPath(computeXcrunSdkPath(cxxPlatform.getFlavor()))
            .setRustcPluginPlatform(pluginPlatform);
    // Without an explicit Rust linker, inherit the C++ platform's default ldflags too.
    if (!linkerOverride.isPresent()) {
      builder.addAllLinkerArgs(cxxPlatform.getLdflags());
    }
    return builder.build();
  }

  @Override
  public Flavor getFlavor() {
    return unresolvedCxxPlatform.getFlavor();
  }

  // Collects parse-time deps for this platform only (not the plugin platform).
  private void addParseTimeDeps(
      Builder<BuildTarget> deps, TargetConfiguration targetConfiguration) {
    deps.addAll(unresolvedCxxPlatform.getParseTimeDeps(targetConfiguration));
    deps.addAll(rustCompiler.getParseTimeDeps(targetConfiguration));
    deps.addAll(rustdoc.getParseTimeDeps(targetConfiguration));
    linkerOverride.ifPresent(l -> deps.addAll(l.getParseTimeDeps(targetConfiguration)));
  }

  @Override
  public Iterable<BuildTarget> getParseTimeDeps(TargetConfiguration targetConfiguration) {
    Builder<BuildTarget> deps = ImmutableList.<BuildTarget>builder();
    addParseTimeDeps(deps, targetConfiguration);
    // Include the plugin platform's deps as well, when one is configured.
    unresolvedPluginPlatform.ifPresent(
        plugPlat -> plugPlat.addParseTimeDeps(deps, targetConfiguration));
    return deps.build();
  }

  /**
   * Shells out to "xcrun --sdk &lt;sdk&gt; --show-sdk-path" to locate the Apple SDK matching the
   * platform flavor. Returns empty for non-Apple flavors or when no ProcessExecutor is available.
   */
  private Optional<Path> computeXcrunSdkPath(Flavor flavor) {
    Optional<String> xcrunSdk = getXcrunSdk(flavor);
    if (processExecutor == null) {
      if (xcrunSdk.isPresent()) {
        LOG.warn(
            "No processExecutor while trying to get Apple SDK path for rustc. This is unlikely to work.");
      }
      return Optional.empty();
    }

    return xcrunSdk.flatMap(
        (sdk) -> {
          Optional<Path> developerDir = rustBuckConfig.getAppleDeveloperDirIfSet();
          ImmutableMap<String, String> environment;
          if (developerDir.isPresent()) {
            // Honor an explicitly configured Xcode location via DEVELOPER_DIR.
            environment = ImmutableMap.of("DEVELOPER_DIR", developerDir.get().toString());
          } else {
            environment = ImmutableMap.of();
          }
          ProcessExecutorParams processExecutorParams =
              ProcessExecutorParams.builder()
                  .setCommand(
                      ImmutableList.of(
                          rustBuckConfig
                              .getAppleXcrunPath()
                              .map((path) -> path.toString())
                              .orElse("xcrun"),
                          "--sdk",
                          sdk,
                          "--show-sdk-path"))
                  .setEnvironment(environment)
                  .build();
          // Must specify that stdout is expected or else output may be wrapped in Ansi escape
          // chars.
          Set<Option> options = EnumSet.of(Option.EXPECTING_STD_OUT);
          Result result;
          try {
            result =
                processExecutor.launchAndExecute(
                    processExecutorParams,
                    options,
                    /* stdin */ Optional.empty(),
                    /* timeOutMs */ Optional.empty(),
                    /* timeOutHandler */ Optional.empty());
          } catch (InterruptedException | IOException e) {
            LOG.warn("Could not execute xcrun, continuing without sdk path.");
            return Optional.empty();
          }

          if (result.getExitCode() != 0) {
            throw new RuntimeException(
                result.getMessageForUnexpectedResult("xcrun --print-sdk-path"));
          }

          return Optional.of(Paths.get(result.getStdout().get().trim()));
        });
  }

  // Maps an Apple platform flavor prefix to its xcrun SDK name; empty for non-Apple flavors.
  // NOTE(review): "iphoneos-"/"iphonesimulator-" match with a trailing dash but
  // "appletvos"/"watchos" match without one — confirm this asymmetry is intended.
  private static Optional<String> getXcrunSdk(Flavor platformFlavor) {
    String platformFlavorName = platformFlavor.getName();
    if (platformFlavorName.startsWith("iphoneos-")) {
      return Optional.of("iphoneos");
    }
    if (platformFlavorName.startsWith("iphonesimulator-")) {
      return Optional.of("iphonesimulator");
    }
    if (platformFlavorName.startsWith("appletvos")) {
      return Optional.of("appletvos");
    }
    if (platformFlavorName.startsWith("watchos")) {
      return Optional.of("watchos");
    }
    return Optional.empty();
  }
}
| apache-2.0 |
slyak/slyak-commons | slyak-impl-parent/slyak-widget-impl/src/main/java/com/slyak/services/services/widget/Application.java | 1099 | /*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.slyak.services.services.widget;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
/**
* .
*
* @author stormning 2017/4/7
* @since 1.3.0
*/
@SpringBootApplication
@EnableDiscoveryClient
public class Application {

    // Boots the Spring application context; @EnableDiscoveryClient registers this
    // service with the configured discovery server on startup.
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
| apache-2.0 |
ScaleUnlimited/tenaya | src/main/java/com/scaleunlimited/tenaya/data/KmerGenerator.java | 1143 | package com.scaleunlimited.tenaya.data;
import java.util.Iterator;
import java.util.NoSuchElementException;

import com.scaleunlimited.tenaya.sample.Sample;
/**
 * Iterates over all k-mers (length-{@code ksize} substrings) of the sequences provided by a
 * {@link Sample}, pulling the next sequence from the sample once the current one is exhausted.
 *
 * <p>Fixes over the naive implementation: a sample that yields no sequence no longer throws a
 * NullPointerException, sequences shorter than {@code ksize} are skipped instead of causing a
 * StringIndexOutOfBoundsException, and {@link #next()} honors the Iterator contract by throwing
 * {@link NoSuchElementException} when exhausted.
 */
public class KmerGenerator implements Iterator<String> {

	private Sample sample;
	private boolean more;           // true while at least one full k-mer remains
	private String currentSequence; // sequence currently being scanned, or null when exhausted
	private int ksize;              // k-mer length
	private int currentIndex;       // start offset of the next k-mer within currentSequence
	private int len;                // cached currentSequence.length()

	public KmerGenerator(int ksize, Sample sample) {
		this.ksize = ksize;
		setSample(sample);
	}

	/**
	 * Starts (or restarts) iteration over the given sample. Sequences shorter than
	 * {@code ksize} — including a missing first sequence — are skipped, so
	 * {@link #hasNext()} is only true when a full k-mer is actually available.
	 */
	public void setSample(Sample sample) {
		this.sample = sample;
		currentIndex = 0;
		currentSequence = readUsableSequence();
		if (currentSequence == null) {
			more = false;
			len = 0;
		} else {
			more = true;
			len = currentSequence.length();
		}
	}

	// Reads sequences until one long enough to hold a k-mer is found,
	// or returns null when the sample is exhausted.
	private String readUsableSequence() {
		String sequence = sample.readSequence();
		while (sequence != null && sequence.length() < ksize) {
			sequence = sample.readSequence();
		}
		return sequence;
	}

	@Override
	public boolean hasNext() {
		return more;
	}

	@Override
	public String next() {
		if (!more) {
			// Iterator contract: signal exhaustion explicitly rather than failing with
			// a NullPointerException or StringIndexOutOfBoundsException.
			throw new NoSuchElementException("no more k-mers in sample");
		}
		String kmer = currentSequence.substring(currentIndex, currentIndex + ksize);
		currentIndex++;
		// Advance to the next sequence once the current one cannot yield another full k-mer.
		if ((currentIndex + ksize) > len) {
			currentSequence = readUsableSequence();
			if (currentSequence == null) {
				more = false;
			} else {
				currentIndex = 0;
				len = currentSequence.length();
			}
		}
		return kmer;
	}

	@Override
	public void remove() {
		throw new UnsupportedOperationException("KmerGenerator doesn't support remove");
	}
}
| apache-2.0 |
u2ware/springfield | src/main/java/com/u2ware/springfield/controller/AbstractEntityController.java | 11149 | package com.u2ware.springfield.controller;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpStatus;
import org.springframework.ui.Model;
import org.springframework.util.StringUtils;
import org.springframework.validation.BindingResult;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import com.u2ware.springfield.domain.EntityInformation;
import com.u2ware.springfield.domain.Pagination;
import com.u2ware.springfield.service.EntityService;
import com.u2ware.springfield.validation.EntityValidator;
import com.u2ware.springfield.validation.RejectableException;
/**
* /{topLevelMapping}/{methodLevelMapping}
* Home: GET /{path}/
* List : GET /{path}
* Create Form : GET /{path}/new
* Create : POST /{path}/new
* Read : GET /{path}/{id}
* Update Form : GET /{path}/{id}/edit
* Update : PUT /{path}/{id}/edit
* Delete : DELETE /{path}/{id}/edit
*
* @author admin
*
* @param <T>
* @param <Q>
*/
public abstract class AbstractEntityController<T,Q> implements EntityController<T,Q> {

	protected final Logger logger = LoggerFactory.getLogger(getClass());

	// Concrete subclasses supply the metadata, service and validator for the entity type.
	protected abstract EntityInformation<T, Q> getInformation();
	protected abstract EntityService<T,Q> getService();
	protected abstract EntityValidator<T,Q> getValidator();

	/////////////////////////////////////////
	//
	////////////////////////////////////////
	// Binds a fresh entity instance into the model for every request.
	// NOTE(review): instantiation failures are swallowed and null is returned,
	// which hides misconfiguration — consider failing fast instead.
	@ModelAttribute(MODEL_ENTITY)
	public T createEntityObject(){
		try {
			T command = getInformation().getEntityClass().newInstance();
			return command;
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	// Binds a fresh query instance into the model for every request (same caveat as above).
	@ModelAttribute(MODEL_QUERY)
	public Q createQueryObject(){
		try {
			Q command = getInformation().getQueryClass().newInstance();
			return command;
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/////////////////////////////////////////
	//
	////////////////////////////////////////
	// Translates a RejectableException thrown by the service layer into a Spring
	// binding error: a field-level error when a field is named, a global error otherwise.
	protected void validate(BindingResult errors, RejectableException e){
		String field = e.getField();
		String errorCode = e.getErrorCode();
		Object[] errorArgs = e.getErrorArgs();
		String defaultMessage = e.getDefaultMessage();
		if(field == null){
			errors.reject(errorCode, errorArgs, defaultMessage);
		}else{
			errors.rejectValue(field, errorCode, errorArgs, defaultMessage);
		}
	}

	// Populates the model with the command results and derives the logical view name:
	// {topLevelMapping}/{commandMethod}[.extension][?attributesCSV].
	// Throws 404 when neither an entity nor a query object is available.
	protected String resolveViewName(Model model, BindingResult errors, String commandMethod, Object entity, Object query, Pageable pageable, Object queryResult)throws Exception{

		if(errors.hasErrors()){
			for(ObjectError objectError : errors.getAllErrors()){
				logger.warn("validator error: \n"+objectError.toString());
			}
		}
		if(entity == null && query == null)
			throw new HttpClientErrorException(HttpStatus.NOT_FOUND);

		model.addAttribute(MODEL_INFORMATION, getInformation());
		model.addAttribute(MODEL_ENTITY, entity);
		model.addAttribute(MODEL_QUERY, query);
		model.addAttribute(MODEL_QUERY_PAGEABLE, pageable);
		model.addAttribute(MODEL_QUERY_RESULT, queryResult);

		/*
		if(queryResult != null && ClassUtils.isAssignableValue(PageImpl.class, queryResult)){
			PageImpl<?> p = (PageImpl<?>)queryResult;
			logger.warn("getNumber : "+p.getNumber());
			logger.warn("getSize : "+p.getSize());
			logger.warn("getTotalElements : "+p.getTotalElements());
			logger.warn("getTotalPages "+p.getTotalPages());
			logger.warn("getCurrentIndex "+p.getCurrentIndex());
			logger.warn("getBeginIndex "+p.getBeginIndex());
			logger.warn("getEndIndex "+p.getEndIndex());
		}
		*/

		// Current request is fetched from the thread-bound context (this code runs on
		// the request thread, where RequestContextHolder is populated by Spring MVC).
		ServletRequestAttributes attrs = (ServletRequestAttributes)RequestContextHolder.getRequestAttributes();
		HttpServletRequest request = attrs.getRequest();

		String commandMethodAppend = "";
		/// //change view name by User agent
		//// String userAgent = request.getHeader("User-Agent");
		//// logger.debug(userAgent);
		//// if(userAgent == null){
		//// 	commandMethodAppend = "-mobile";
		//// }

		String identityPath = entity != null ? getInformation().getEntityPath(entity) : "";
		String requestUri = request.getRequestURI();
		String extension = StringUtils.getFilenameExtension(requestUri);

		//String path = ClassUtils.convertClassNameToResourcePath(className)
		String viewName = //getInformation().getBasePackage().replace('.', '/')+
				getInformation().getTopLevelMapping()+"/"
				+commandMethod+commandMethodAppend
				+(StringUtils.hasText(extension) ? "."+extension : "");

		if(StringUtils.hasText(getInformation().getAttributesCSV())){
			viewName = viewName + "?" + getInformation().getAttributesCSV();
		}

		logger.warn("response model: "+COMMAND_ID_PATH+"="+identityPath);
		logger.warn("response model: "+COMMAND_METHOD+"="+commandMethod);
		logger.warn("response model: "+COMMAND_EXTENSION+"="+(extension == null ? "" : "."+extension));
		logger.warn("response model: "+COMMAND_VIEW+"="+viewName);

		model.addAttribute(COMMAND_ID_PATH , identityPath);
		model.addAttribute(COMMAND_METHOD , commandMethod);
		model.addAttribute(COMMAND_EXTENSION , extension == null ? "" : "."+extension);
		model.addAttribute(COMMAND_VIEW , viewName);
		return viewName;
	}

	// List: GET/POST /{path}. Paging can be disabled via the request parameter
	// named ENABLE_PARAMETER_NAME (defaults to true → pageable applied).
	@RequestMapping(method={RequestMethod.GET, RequestMethod.POST}, value="")
	public String find(
			@RequestParam(required=false,value=ENABLE_PARAMETER_NAME,defaultValue="true")Boolean pageEnable,
			Model model, Pageable pageable, @ModelAttribute(MODEL_QUERY)Q query, BindingResult errors) throws Exception{

		logger.warn("request method: find()");
		logger.warn("request model : "+query);
		Pageable p = pageEnable ? pageable : null;
		logger.warn("request model : pageable="+p);

		getValidator().find(query, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "find", null, query, p, new Pagination<T>());
		}
		try{
			Iterable<?> result = getService().find(query, p);
			if(result == null)
				result = new Pagination<T>();
			return resolveViewName(model, errors, "find", null, query, p, result);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "find", null, query, p, new Pagination<T>());
		}
	}

	// Read: GET /{path}/{id}. 404s when the entity does not exist.
	@RequestMapping(method=RequestMethod.GET, value="/"+COMMAND_ID_PATH+"")
	public String read(Model model, @ModelAttribute(MODEL_ENTITY)T entity,BindingResult errors) throws Exception{

		logger.warn("request method: read()");
		logger.warn("request model : "+entity);

		getValidator().read(entity, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "read", entity, null, null, null);
		}
		try{
			T newEntity = getService().read(entity);
			if(newEntity == null)
				throw new HttpClientErrorException(HttpStatus.NOT_FOUND);
			return resolveViewName(model, errors, "read", newEntity, null, null, null);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "read", entity, null, null, null);
		}
	}

	// Create form: GET /{path}/new.
	@RequestMapping(method=RequestMethod.GET, value="/new")
	public String createForm(Model model, @ModelAttribute(MODEL_ENTITY)T entity, BindingResult errors) throws Exception{

		logger.warn("request method: createForm()");
		logger.warn("request model : "+entity);

		getValidator().createForm(entity, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "createForm", entity, null, null, null);
		}
		try{
			T newEntity = getService().createForm(entity);
			return resolveViewName(model, errors, "createForm", newEntity, null, null, null);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "createForm", entity, null, null, null);
		}
	}

	// Create: POST /{path}/new. On validation/service failure, re-renders the create form.
	@RequestMapping(method=RequestMethod.POST, value="/new")
	public String create(Model model, @ModelAttribute(MODEL_ENTITY) T entity, BindingResult errors) throws Exception{

		logger.warn("request method: create()");
		logger.warn("request model : "+entity);

		getValidator().create(entity, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "createForm", entity, null, null, null);
		}
		try{
			T newEntity = getService().create(entity);
			return resolveViewName(model, errors,"create", newEntity, null, null, null);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "createForm", entity, null, null, null);
		}
	}

	// Update form: GET /{path}/{id}/edit.
	@RequestMapping(method=RequestMethod.GET, value="/"+COMMAND_ID_PATH+"/edit")
	public String updateForm(Model model, @ModelAttribute(MODEL_ENTITY)T entity, BindingResult errors) throws Exception{

		logger.warn("request method: updateForm()");
		logger.warn("request model : "+entity);

		getValidator().updateForm(entity, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "updateForm", entity, null, null, null);
		}
		try{
			T newEntity = getService().updateForm(entity);
			return resolveViewName(model, errors, "updateForm", newEntity, null, null, null);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "updateForm", entity, null, null, null);
		}
	}

	// Update: PUT /{path}/{id}/edit. On failure, re-renders the update form.
	@RequestMapping(method=RequestMethod.PUT, value="/"+COMMAND_ID_PATH+"/edit")
	public String update(Model model, @ModelAttribute(MODEL_ENTITY) T entity,BindingResult errors) throws Exception{

		logger.warn("request method: update()");
		logger.warn("request model : "+entity);

		getValidator().update(entity, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "updateForm", entity, null, null, null);
		}
		try{
			T newEntity = getService().update(entity);
			return resolveViewName(model, errors, "update", newEntity, null, null, null);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "updateForm", entity, null, null, null);
		}
	}

	// Delete: DELETE /{path}/{id}/edit. On failure, falls back to the read view.
	@RequestMapping(method=RequestMethod.DELETE, value="/"+COMMAND_ID_PATH+"/edit")
	public String delete(Model model, @ModelAttribute(MODEL_ENTITY)T entity, BindingResult errors) throws Exception{

		logger.warn("request method: delete()");
		logger.warn("request model : "+entity);

		getValidator().delete(entity, errors);
		if(errors.hasErrors()){
			return resolveViewName(model, errors, "read", entity, null, null, null);
		}
		try{
			T newEntity = getService().delete(entity);
			return resolveViewName(model, errors, "delete", newEntity, null, null, null);
		}catch(RejectableException e){
			validate(errors, e);
			return resolveViewName(model, errors, "read", entity, null, null, null);
		}
	}
}
| apache-2.0 |
alancnet/artifactory | web/common/src/main/java/org/artifactory/common/wicket/component/links/TitledSubmitLink.java | 3794 | /*
* Artifactory is a binaries repository manager.
* Copyright (C) 2012 JFrog Ltd.
*
* Artifactory is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Artifactory is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Artifactory. If not, see <http://www.gnu.org/licenses/>.
*/
package org.artifactory.common.wicket.component.links;
import org.apache.wicket.Component;
import org.apache.wicket.Page;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.IFormSubmittingComponent;
import org.apache.wicket.model.IModel;
import org.apache.wicket.util.string.PrependingStringBuffer;
/**
* @author Yoav Aharoni
*/
public abstract class TitledSubmitLink extends BaseTitledLink implements IFormSubmittingComponent {
    // Form this link submits; may stay null until lazily resolved in getForm().
    protected Form<?> form;

    protected TitledSubmitLink(String id) {
        this(id, (Form<?>) null);
    }

    protected TitledSubmitLink(String id, String title) {
        this(id, title, null);
    }

    protected TitledSubmitLink(String id, Form<?> form) {
        super(id);
        this.form = form;
    }

    protected TitledSubmitLink(String id, IModel titleModel, Form<?> form) {
        super(id, titleModel);
        this.form = form;
    }

    protected TitledSubmitLink(String id, String title, Form<?> form) {
        super(id, title);
        this.form = form;
    }

    // When attached to an <input> or <button> tag, mark it as a submit control and
    // give it the form-scoped input name so the server can identify the submitter.
    @Override
    protected void onComponentTag(ComponentTag tag) {
        super.onComponentTag(tag);
        if ("input".equalsIgnoreCase(tag.getName()) || "button".equalsIgnoreCase(tag.getName())) {
            tag.put("type", "submit");
            tag.put("name", getInputName());
        }
    }

    // Form validation/model-update always runs before onSubmit for this component.
    @Override
    public boolean getDefaultFormProcessing() {
        return true;
    }

    // Intentionally a no-op: default form processing cannot be switched off here.
    @Override
    public Component setDefaultFormProcessing(boolean defaultFormProcessing) {
        return null;
    }

    @Override
    public void onError() {
    }

    // Returns the associated form, lazily resolving it from the component hierarchy
    // when none was supplied via a constructor; fails fast if none can be found.
    @Override
    public final Form<?> getForm() {
        if (form == null) {
            // try to find form in the hierarchy of owning component
            form = findParent(Form.class);
            if (form == null) {
                throw new IllegalStateException(
                        "form was not specified in the constructor and cannot be found in the hierarchy of the TitledSubmitLink");
            }
        }
        return form;
    }

    // Builds the component's form-relative input name by walking up the hierarchy,
    // prepending each ancestor id until the root form (or page) is reached.
    @Override
    public String getInputName() {
        // TODO: This is a copy & paste from the FormComponent class.
        String id = getId();
        final PrependingStringBuffer inputName = new PrependingStringBuffer(id.length());
        Component c = this;
        while (true) {
            inputName.prepend(id);
            c = c.getParent();
            if (c == null || (c instanceof Form && ((Form) c).isRootForm()) || c instanceof Page) {
                break;
            }
            inputName.prepend(Component.PATH_SEPARATOR);
            id = c.getId();
        }

        // having input name "submit" causes problems with javascript, so we
        // create a unique string to replace it by prepending a path separator
        if ("submit".equals(inputName.toString())) {
            inputName.prepend(Component.PATH_SEPARATOR);
        }
        return inputName.toString();
    }
}
| apache-2.0 |
JetBrains/xodus | entity-store/src/test/java/jetbrains/exodus/entitystore/PropertyCustomTypePersistenceTest.java | 4294 | /**
* Copyright 2010 - 2022 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.exodus.entitystore;
import jetbrains.exodus.TestFor;
import jetbrains.exodus.bindings.BindingUtils;
import jetbrains.exodus.bindings.ComparableBinding;
import jetbrains.exodus.bindings.ComparableSet;
import jetbrains.exodus.util.LightOutputStream;
import org.jetbrains.annotations.NotNull;
import java.io.ByteArrayInputStream;
public class PropertyCustomTypePersistenceTest extends EntityStoreTestBase {
@Override
protected String[] casesThatDontNeedExplicitTxn() {
return new String[]{"testPersistentCustomPropertyType", "testPersistentCustomPropertyTypeSet"};
}
@TestFor(issue = "XD-555")
public void testPersistentCustomPropertyType() {
PersistentEntityStoreImpl store = getEntityStore();
registerDatatype(store);
store.executeInTransaction(txn -> {
Entity testEntity = txn.newEntity("Entity");
testEntity.setProperty("property", new MockData(42));
txn.saveEntity(testEntity);
});
store.close();
store = openStore();
registerDatatype(store);
store.executeInReadonlyTransaction(txn -> {
final Entity entity = txn.getAll("Entity").getFirst();
assertNotNull(entity);
assertEquals(new MockData(42), entity.getProperty("property"));
});
}
@TestFor(issue = "XD-833")
public void testPersistentCustomPropertyTypeSet() {
PersistentEntityStoreImpl store = getEntityStore();
registerDatatype(store);
store.executeInTransaction(txn -> {
Entity testEntity = txn.newEntity("Entity");
final ComparableSet<MockData> set = new ComparableSet<>();
set.addItem(new MockData(42));
testEntity.setProperty("properties", set);
txn.saveEntity(testEntity);
});
store.executeInReadonlyTransaction(txn -> {
final Entity entity = txn.getAll("Entity").getFirst();
assertNotNull(entity);
final Comparable set = entity.getProperty("properties");
assertTrue(set instanceof ComparableSet);
assertEquals(new MockData(42), ((ComparableSet) set).getMaximum());
});
}
private void registerDatatype(PersistentEntityStore store) {
StoreTransaction txn = store.beginTransaction();
store.registerCustomPropertyType(txn, MockData.class, new MockBinding());
if (!txn.commit()) {
throw new IllegalStateException("Couldn't register MockData property type.");
}
}
private static class MockData implements Comparable<MockData> {
private final int value;
private MockData(int value) {
this.value = value;
}
@Override
public int compareTo(@NotNull MockData o) {
return value - o.value;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MockData mockData = (MockData) o;
return value == mockData.value;
}
@Override
public int hashCode() {
return value;
}
}
private static class MockBinding extends ComparableBinding {
@Override
public MockData readObject(@NotNull final ByteArrayInputStream stream) {
return new MockData(BindingUtils.readInt(stream));
}
@Override
public void writeObject(@NotNull final LightOutputStream output, @NotNull final Comparable object) {
output.writeUnsignedInt(((MockData) object).value ^ 0x80000000);
}
}
} | apache-2.0 |
PATRIC3/p3_solr | solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java | 5490 | package org.apache.solr.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ZkConfigManager;
import org.apache.solr.common.cloud.ZooKeeperException;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.core.SolrResourceNotFoundException;
import org.apache.solr.schema.ZkIndexSchemaReader;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
/**
* ResourceLoader that works with ZooKeeper.
*
*/
public class ZkSolrResourceLoader extends SolrResourceLoader {

  // ZooKeeper path of the config set backing this loader: "/configs/<configSet>".
  private final String configSetZkPath;
  private ZkController zkController;
  // Optional schema-watching reader attached after construction; may remain null.
  private ZkIndexSchemaReader zkIndexSchemaReader;

  public ZkSolrResourceLoader(Path instanceDir, String configSet, ZkController zooKeeperController) {
    super(instanceDir);
    this.zkController = zooKeeperController;
    configSetZkPath = ZkConfigManager.CONFIGS_ZKNODE + "/" + configSet;
  }

  /**
   * <p>
   * This loader will first attempt to load resources from ZooKeeper, but if not found
   * will delegate to the context classloader when possible,
   * otherwise it will attempt to resolve resources using any jar files found in
   * the "lib/" directory in the specified instance directory.
   */
  public ZkSolrResourceLoader(Path instanceDir, String configSet, ClassLoader parent,
      Properties coreProperties, ZkController zooKeeperController) {
    super(instanceDir, parent, coreProperties);
    this.zkController = zooKeeperController;
    configSetZkPath = ZkConfigManager.CONFIGS_ZKNODE + "/" + configSet;
  }

  /**
   * Opens any resource by its name. By default, this will look in multiple
   * locations to load the resource: $configDir/$resource from ZooKeeper.
   * It will look for it in any jar
   * accessible through the class loader if it cannot be found in ZooKeeper.
   * Override this method to customize loading resources.
   *
   * @return the stream for the named resource
   */
  @Override
  public InputStream openResource(String resource) throws IOException {
    InputStream is = null;
    String file = configSetZkPath + "/" + resource;
    try {
      if (zkController.pathExists(file)) {
        // Found in ZooKeeper: return the znode's bytes wrapped together with its
        // Stat so callers can inspect the version of the config they just read.
        Stat stat = new Stat();
        byte[] bytes = zkController.getZkClient().getData(file, null, stat, true);
        return new ZkByteArrayInputStream(bytes, stat);
      }
    } catch (Exception e) {
      // Any ZK failure (connection loss, auth, interrupt) is surfaced as an IOException.
      throw new IOException("Error opening " + file, e);
    }
    try {
      // delegate to the class loader (looking into $INSTANCE_DIR/lib jars)
      is = classLoader.getResourceAsStream(resource.replace(File.separatorChar, '/'));
    } catch (Exception e) {
      throw new IOException("Error opening " + resource, e);
    }
    if (is == null) {
      throw new SolrResourceNotFoundException("Can't find resource '" + resource
          + "' in classpath or '" + configSetZkPath + "', cwd="
          + System.getProperty("user.dir"));
    }
    return is;
  }

  /**
   * A ByteArrayInputStream that also carries the ZooKeeper {@link Stat} of the
   * znode the bytes were read from.
   */
  public static class ZkByteArrayInputStream extends ByteArrayInputStream{

    private final Stat stat;
    public ZkByteArrayInputStream(byte[] buf, Stat stat) {
      super(buf);
      this.stat = stat;
    }

    /** @return znode metadata captured at read time (version, mtime, ...) */
    public Stat getStat(){
      return stat;
    }
  }

  // There is no local config directory in ZooKeeper mode; calling this is a
  // programming error, hence the hard failure.
  @Override
  public String getConfigDir() {
    throw new ZooKeeperException(
        ErrorCode.SERVER_ERROR,
        "ZkSolrResourceLoader does not support getConfigDir() - likely, what you are trying to do is not supported in ZooKeeper mode");
  }

  /**
   * Lists the immediate children of the config set znode.
   *
   * @return the child node names, never null
   */
  @Override
  public String[] listConfigDir() {
    List<String> list;
    try {
      list = zkController.getZkClient().getChildren(configSetZkPath, null, true);
    } catch (InterruptedException e) {
      // Restore the interrupted status
      Thread.currentThread().interrupt();
      log.error("", e);
      throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
          "", e);
    } catch (KeeperException e) {
      log.error("", e);
      throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
          "", e);
    }
    return list.toArray(new String[0]);
  }

  public String getConfigSetZkPath() {
    return configSetZkPath;
  }

  public ZkController getZkController() {
    return zkController;
  }

  public void setZkIndexSchemaReader(ZkIndexSchemaReader zkIndexSchemaReader) {
    this.zkIndexSchemaReader = zkIndexSchemaReader;
  }

  public ZkIndexSchemaReader getZkIndexSchemaReader() { return zkIndexSchemaReader; }
}
| apache-2.0 |
blerer/horizondb-io | src/main/java/io/horizondb/io/files/FileUtils.java | 9667 | /**
* Copyright 2013 Benjamin Lerer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.horizondb.io.files;
import io.netty.util.internal.PlatformDependent;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Locale;
/**
* Utility methods to work with files.
*/
/**
 * Utility methods to work with files.
 */
public final class FileUtils {

    /**
     * The number of bytes in a kilobyte.
     */
    public static final int ONE_KB = 1024;

    /**
     * The number of bytes in a megabyte.
     */
    public static final int ONE_MB = ONE_KB * ONE_KB;

    /**
     * The number of bytes in a gigabyte.
     */
    public static final int ONE_GB = ONE_KB * ONE_MB;

    /**
     * <code>FileVisitor</code> used to delete directory content.
     * FIX: declared {@code final} - this shared constant must never be reassigned.
     */
    private static final SimpleFileVisitor<Path> DELETER = new SimpleFileVisitor<Path>() {

        /**
         * {@inheritDoc}
         */
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            // NOTE(review): delete() failures are silently ignored here (best-effort
            // deletion); switch to Files.delete(file) if failures should propagate.
            file.toFile().delete();
            return FileVisitResult.CONTINUE;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
            // Directories are deleted after their content has been visited.
            dir.toFile().delete();
            return FileVisitResult.CONTINUE;
        }
    };

    /**
     * Opens the specified random access file in read/write mode (creating it if it
     * does not exist, per {@link RandomAccessFile} "rw" semantics).
     *
     * @param path the path to the file
     * @return the random access file
     * @throws FileNotFoundException if the file cannot be opened or created.
     */
    public static RandomAccessFile openRandomAccessFile(Path path) throws FileNotFoundException {
        return new RandomAccessFile(path.toFile(), "rw");
    }

    /**
     * Extends or truncates to the specified length the specified file.
     *
     * @param path the path to the file to extends or truncate
     * @param length the expected length of the file after the extension or the truncation
     * @throws IOException if a problem occurs during the operation.
     */
    public static void extendsOrTruncate(Path path, long length) throws IOException {

        // Nothing to do: a non-existing file already has the requested zero length.
        if (!Files.exists(path) && length == 0) {
            return;
        }

        try (RandomAccessFile file = openRandomAccessFile(path)) {
            extendsOrTruncate(file, length);
        }
    }

    /**
     * Extends or truncates to the specified length the specified file.
     *
     * @param file the file to extends or truncate
     * @param length the expected length of the file after the extension or the truncation
     * @throws IOException if a problem occurs during the operation.
     */
    public static void extendsOrTruncate(RandomAccessFile file, long length) throws IOException {
        file.setLength(length);
    }

    /**
     * Memory map the specified portion of the specified file.
     *
     * @param path the file path.
     * @param position the position within the file at which the mapped region is to start; must be non-negative
     * @param size the size of the region to be mapped; must be non-negative and no greater than
     * {@link java.lang.Integer#MAX_VALUE}
     * @return the mapped byte buffer
     * @throws IOException if a problem occurs while mapping the file.
     */
    public static MappedByteBuffer mmap(Path path, long position, long size) throws IOException {

        // Per FileChannel.map, the mapping remains valid after the channel is closed.
        try (FileChannel channel = openChannel(path)) {

            return mmap(channel, position, size);
        }
    }

    /**
     * Memory map the specified file in its entirety.
     *
     * @param path the file path.
     * @return the mapped byte buffer
     * @throws IOException if a problem occurs while mapping the file.
     */
    public static MappedByteBuffer mmap(Path path) throws IOException {

        try (FileChannel channel = openChannel(path)) {

            return mmap(channel, 0, channel.size());
        }
    }

    /**
     * Memory map the specified portion of the file corresponding to the specified channel.
     *
     * @param channel the file channel.
     * @param position the position within the file at which the mapped region is to start; must be non-negative
     * @param size the size of the region to be mapped; must be non-negative and no greater than
     * {@link java.lang.Integer#MAX_VALUE}
     * @return the mapped byte buffer
     * @throws IOException if a problem occurs while mapping the file.
     */
    public static MappedByteBuffer mmap(FileChannel channel, long position, long size) throws IOException {
        return channel.map(MapMode.READ_WRITE, position, size);
    }

    /**
     * Memory map the specified portion of the specified file.
     *
     * @param file the random access file to memory map.
     * @param position the position within the file at which the mapped region is to start; must be non-negative
     * @param size the size of the region to be mapped; must be non-negative and no greater than
     * {@link java.lang.Integer#MAX_VALUE}
     * @return the mapped byte buffer
     * @throws IOException if a problem occurs while mapping the file.
     */
    public static MappedByteBuffer mmap(RandomAccessFile file, long position, long size) throws IOException {
        return mmap(file.getChannel(), position, size);
    }

    /**
     * Unmap the specified memory mapping.
     *
     * @param buffer the buffer representing the memory mapping.
     */
    public static void munmap(MappedByteBuffer buffer) {
        // Delegates to Netty's cleaner utility to release the mapping eagerly
        // instead of waiting for GC.
        PlatformDependent.freeDirectBuffer(buffer);
    }

    /**
     * Forces the deletion of the file or directory corresponding to the specified path.
     * Deletion is best-effort: individual delete failures are ignored (see DELETER).
     *
     * @param path the path of the file or directory to delete.
     * @throws IOException if a problem occurs while walking the file tree.
     */
    public static void forceDelete(Path path) throws IOException {
        Files.walkFileTree(path, DELETER);
    }

    /**
     * Converts the specified number of bytes in KB.
     *
     * @param numberOfBytes the number of bytes to convert
     * @return the number of kilobytes
     */
    public static double toKiloBytes(long numberOfBytes) {
        return ((double) numberOfBytes / ONE_KB);
    }

    /**
     * Converts the specified number of bytes in MB.
     *
     * @param numberOfBytes the number of bytes to convert
     * @return the number of megabytes
     */
    public static double toMegaBytes(long numberOfBytes) {
        return ((double) numberOfBytes / ONE_MB);
    }

    /**
     * Returns the specified number of bytes in a readable format
     * (bytes below 1 KB, then KB below 1 MB, then MB; "%.2g" keeps two
     * significant digits).
     *
     * @param numberOfBytes the number of bytes to display
     * @return the specified number of bytes in a readable format.
     */
    public static String printNumberOfBytes(long numberOfBytes) {

        if (numberOfBytes < ONE_KB) {

            return String.format(Locale.US, "%d bytes", Long.valueOf(numberOfBytes));

        } else if (numberOfBytes < ONE_MB) {

            return String.format(Locale.US, "%.2g KB", Double.valueOf(toKiloBytes(numberOfBytes)));
        }

        return String.format(Locale.US, "%.2g MB", Double.valueOf(toMegaBytes(numberOfBytes)));
    }

    /**
     * Creates a file with the specified content.
     *
     * @param filePath the file path.
     * @param content the file content.
     * @throws java.io.IOException if the file cannot be created.
     */
    public static void createFile(Path filePath, byte[] content) throws IOException {

        try (OutputStream output = Files.newOutputStream(filePath)) {

            output.write(content);
            output.flush();
        }
    }

    /**
     * Creates the specified directory (including missing parents) if it does not exists.
     *
     * @param directory the directory to create
     * @throws IOException if an I/O problem occurs.
     */
    public static void createDirectoriesIfNeeded(Path directory) throws IOException {

        if (!Files.exists(directory)) {
            Files.createDirectories(directory);
        }
    }

    /**
     * Opens a file channel to the specified path, creating the file if needed.
     *
     * @param path the file path.
     * @return a file channel to the specified path.
     * @throws IOException if the channel cannot be opened.
     */
    private static FileChannel openChannel(Path path) throws IOException {

        return (FileChannel) Files.newByteChannel(path,
                                                  StandardOpenOption.CREATE,
                                                  StandardOpenOption.READ,
                                                  StandardOpenOption.WRITE);
    }

    /**
     * Must not be instantiated.
     */
    private FileUtils() {
    }
}
| apache-2.0 |
dimone-kun/cuba | modules/web-toolkit/src/com/haulmont/cuba/web/widgets/client/verticalactionslayout/CubaVerticalActionsLayoutWidget.java | 1009 | /*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.web.widgets.client.verticalactionslayout;
import com.haulmont.cuba.web.widgets.client.orderedactionslayout.CubaOrderedActionsLayoutWidget;
/**
 * Client-side widget for the vertical actions layout: a
 * {@link CubaOrderedActionsLayoutWidget} configured as vertical.
 */
public class CubaVerticalActionsLayoutWidget extends CubaOrderedActionsLayoutWidget {

    // Primary CSS class applied to the widget's root element.
    public static final String CLASSNAME = "v-verticallayout";

    public CubaVerticalActionsLayoutWidget(){
        // 'true' selects the vertical orientation of the ordered layout.
        super(CLASSNAME, true);
    }
} | apache-2.0 |
R0g3r10LL31t3/LFMS-ROLLSoftware | test/com/rollsoftware/br/common/db/entity/ObjectDataTest.java | 6796 | /*
* Copyright 2016-2026 Rogério Lecarião Leite
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* CEO 2016: Rogério Lecarião Leite; ROLL Software
*/
package com.rollsoftware.br.common.db.entity;
import com.rollsoftware.br.common.properties.Resource;
import java.sql.DriverManager;
import java.util.Objects;
import java.util.Properties;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.eclipse.persistence.jaxb.MarshallerProperties;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author Rogério
* @date October, 2016
*/
public class ObjectDataTest {
protected static final String PU
= Resource.getProperty("roll.software.br.application.database.PU");
protected static final Properties DB_PROPS
= Resource.getDatabaseProperties();
protected static EntityManagerFactory EMF;
protected static EntityManager EM;
private ObjectData.ObjectDataPK objectDataPK;
public ObjectDataTest() {
}
protected <T extends ObjectData.ObjectDataPK>
T getObjectDataPK() {
return (T) objectDataPK;
}
protected <T extends ObjectData>
Class<T> getObjectDataClass() {
return (Class<T>) ObjectData.class;
}
protected <T extends ObjectData>
T createObjectData() {
ObjectData objectData = new ObjectData();
objectData.setUUID("uuid" + Math.random());
objectData.generateUUID();
return (T) objectData;
}
public <T extends ObjectData>
void save(T objectData) {
EM.getTransaction().begin();
EM.createNativeQuery("set schema LFMS_DB");
EM.persist(objectData);
EM.flush();
EM.refresh(objectData);
EM.getTransaction().commit();
}
public <T extends ObjectData>
T load() {
return load(getObjectDataClass(), getObjectDataPK());
}
public <T extends ObjectData>
T load(Class<T> clazz, Object id) {
ObjectData objectData
= EM.find(clazz, id);
if (objectData != null) {
EM.refresh(objectData);
}
return (T) objectData;
}
@BeforeClass
public static void setUpClass() {
DriverManager.setLogWriter(new java.io.PrintWriter(System.out));
EMF = Persistence.createEntityManagerFactory(PU);
EM = EMF.createEntityManager(DB_PROPS);
}
@AfterClass
public static void tearDownClass() {
EM.close();
EMF.close();
DriverManager.setLogWriter(null);
}
@Before
public void setUp() throws Exception {
try {
ObjectData objectData = createObjectData();
save(objectData);
objectDataPK = objectData.getODPK();
} catch (Throwable ex) {
ex.printStackTrace(System.out);
throw ex;
}
}
@After
public void tearDown() throws Exception {
if (EM.getTransaction().isActive()) {
EM.getTransaction().rollback();
}
}
@Test
public void testBasic() {
System.out.println("testBasic");
ObjectData objectData = load();
System.out.println("Object Data: " + objectData);
System.out.println("Object Data UUID: " + objectData.getUUID());
System.out.println("Object Data ODPK: " + objectData.getODPK());
assertEquals(getObjectDataPK(), objectData.getODPK());
}
@Test
public void testObjectDataToXML() throws JAXBException {
System.out.println("testObjectDataToXML");
JAXBContext jc = JAXBContext.newInstance(
ObjectDataTest.this.getObjectDataClass());
ObjectData objectData = createObjectData();
Marshaller marshaller = jc.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
System.out.println("XML Output:");
marshaller.marshal(objectData, System.out);
System.out.println();
}
@Test
public void testObjectData2ToXML() throws JAXBException {
System.out.println("testObjectData2ToXML");
JAXBContext jc = JAXBContext.newInstance(ObjectData.class);
ObjectData objectData = createObjectData();
Marshaller marshaller = jc.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
System.out.println("XML Output:");
marshaller.marshal(objectData, System.out);
System.out.println();
}
@Test
public void testObjectDataToJSON() throws JAXBException {
System.out.println("testObjectDataToJSON");
JAXBContext jc = JAXBContext.newInstance(
ObjectDataTest.this.getObjectDataClass());
ObjectData objectData = createObjectData();
Marshaller marshaller = jc.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
marshaller.setProperty(
MarshallerProperties.MEDIA_TYPE, "application/json");
marshaller.setProperty(MarshallerProperties.JSON_INCLUDE_ROOT, false);
System.out.println("JSON Output:");
marshaller.marshal(objectData, System.out);
System.out.println();
}
@Test
public void testEquals() {
ObjectData objectData1 = createObjectData();
save(objectData1);
ObjectData objectData2 = load(getObjectDataClass(),
objectData1.getODPK());
assertEquals(objectData1, objectData2);
}
@Test
public void testNotEquals() {
ObjectData objectData1 = createObjectData();
ObjectData objectData2 = createObjectData();
assertNotEquals(objectData1, objectData2);
}
@Test
public void testType() {
ObjectData objectData = load();
String className = objectData.getClass().getSimpleName();
String type = objectData.getType();
assertTrue(Objects.equals(className, type));
}
}
| apache-2.0 |
m-m-m/client | ui/core/api/src/main/java/net/sf/mmm/client/ui/api/common/ButtonContainer.java | 1067 | /* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.client.ui.api.common;
/**
* TODO: this class ...
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
* @since 1.0.0
*/
public abstract interface ButtonContainer {
/**
* This method starts a new button-group and adds it to this container. All buttons (or widgets) added to
* this {@link ButtonContainer container} until the button-group was {@link #endGroup() ended} will be added
* to the current button-group.
*
* @throws IllegalStateException if the previous button-group has NOT been {@link #endGroup() ended}.
*
* @see #endGroup()
*/
void startGroup() throws IllegalStateException;
/**
* This method ends a {@link #startGroup() previously started group}.
*
* @return <code>true</code> if the current button-group has successfully been ended, <code>false</code>
* otherwise (if there was no such button-group).
*/
boolean endGroup();
}
| apache-2.0 |
NationalSecurityAgency/ghidra | Ghidra/Framework/Graph/src/test.slow/java/ghidra/graph/VisualGraphComponentProviderTest.java | 7084 | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.graph;
import static org.junit.Assert.*;
import javax.swing.JComponent;
import javax.swing.JPanel;
import org.junit.Test;
import docking.*;
import docking.action.DockingActionIf;
import docking.action.ToggleDockingAction;
import ghidra.graph.graphs.*;
import ghidra.graph.support.TestVisualGraph;
import ghidra.graph.support.TextAreaTestVertex;
import ghidra.graph.viewer.AbstractVisualGraphTest;
import ghidra.graph.viewer.VisualGraphView;
public class VisualGraphComponentProviderTest extends AbstractVisualGraphTest {
private VisualGraphComponentProvider<AbstractTestVertex, TestEdge, TestVisualGraph> provider;
private VisualGraphView<AbstractTestVertex, TestEdge, TestVisualGraph> viewer;
@Override
public void setUp() throws Exception {
FakeDockingTool tool = createTool();
DockingWindowManager dwm = tool.getWindowManager();
runSwing(() -> dwm.setVisible(true), false);
buildAndLayoutGraph();
viewer = new VisualGraphView<>();
viewer.setGraph(graph);
provider = new TestProvider(tool);
provider.setVisible(true);
}
@Override
public void tearDown() {
closeAllWindows();
}
@Override
protected TestVisualGraph buildGraph() {
TestVisualGraph g = new TestVisualGraph();
AbstractTestVertex v1 = new LabelTestVertex("1");
AbstractTestVertex v2 = new LabelTestVertex("2");
AbstractTestVertex v3 = new LabelTestVertex("3");
TextAreaTestVertex textAreaVertex = new TextAreaTestVertex("Text Area vertex...");
TestEdge e1 = new TestEdge(v1, v2);
TestEdge e2 = new TestEdge(v2, v3);
TestEdge e3 = new TestEdge(v1, textAreaVertex);
g.addVertex(v1);
g.addVertex(v2);
g.addVertex(v3);
g.addVertex(textAreaVertex);
g.addEdge(e1);
g.addEdge(e2);
g.addEdge(e3);
return g;
}
@Test
public void testOpenSatelliteWindowReopensWhenMainGraphWindowIsReopened() {
//
// If the user closes the main graph provider while the satellite is undocked and showing,
// then verify the satellite window will re-open when the main graph provider is re-opened
//
assertTrue(provider.isSatelliteShowing());
assertTrue(provider.isSatelliteDocked());
setSatelliteDocked(false);
assertTrue(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
assertUndockedProviderVisible();
closeMainGraphProvider();
assertUndockedProviderHidden();
showMainGraphProvider();
assertUndockedProviderVisible();
assertTrue(provider.isSatelliteShowing());
}
@Test
public void testClosedSatelliteWindowDoesNotReopenWhenMainGraphWindowIsReopened() {
//
// If the user closes an undocked satellite window, then closes and re-opens the
// main graph provider, the satellite window should *not* re-open.
//
assertTrue(provider.isSatelliteShowing());
assertTrue(provider.isSatelliteDocked());
setSatelliteDocked(false);
assertTrue(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
assertUndockedProviderVisible();
setSatelliteVisible(false);
assertFalse(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
closeMainGraphProvider();
assertUndockedProviderHidden();
showMainGraphProvider();
assertUndockedProviderHidden();
assertFalse(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
}
@Test
public void testHideUndockedSatellite_ByClosingSatelliteProvider() {
//
// If the user closes the satellite window, verify that can re-open it via the action
//
assertTrue(provider.isSatelliteShowing());
assertTrue(provider.isSatelliteDocked());
setSatelliteDocked(false);
assertTrue(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
assertUndockedProviderVisible();
setSatelliteVisible(false);
assertFalse(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
setSatelliteVisible(true);
assertTrue(provider.isSatelliteShowing());
assertFalse(provider.isSatelliteDocked());
}
//==================================================================================================
// Private Methods
//==================================================================================================
/**
 * Asserts that the undocked satellite provider is not visible. A null provider
 * also passes: the satellite was never undocked, so nothing can be showing.
 */
private void assertUndockedProviderHidden() {
    ComponentProvider p = provider.getSatelliteProvider();
    if (p == null) {
        return; // never undocked -- trivially hidden
    }
    assertFalse("Undocked provider is not hidden", p.isVisible());
}

/** Asserts that the undocked satellite provider exists and is visible. */
private void assertUndockedProviderVisible() {
    ComponentProvider p = provider.getSatelliteProvider();
    assertNotNull("Undocked provider does not exist", p);
    assertTrue("Undocked provider is not visible", p.isVisible());
}

/** Re-opens the main graph provider on the Swing thread. */
private void showMainGraphProvider() {
    runSwing(() -> provider.setVisible(true));
}

/** Closes the main graph provider on the Swing thread. */
private void closeMainGraphProvider() {
    runSwing(() -> provider.closeComponent());
}

/**
 * Drives the "Display Satellite View" toggle action to the given state and
 * waits for the Swing queue to drain.
 */
private void setSatelliteVisible(boolean visible) {
    Tool tool = provider.getTool();
    String name = "Display Satellite View";
    DockingActionIf action = getAction(tool, name);
    assertNotNull(name + " not in tool", action);
    ToggleDockingAction showSatelliteAction = (ToggleDockingAction) action;
    setToggleActionSelected(showSatelliteAction, provider.getActionContext(null), visible);
    waitForSwing();
}

/**
 * Drives the "Dock Satellite View" toggle action to the given state and
 * waits for the Swing queue to drain.
 */
private void setSatelliteDocked(boolean docked) {
    Tool tool = provider.getTool();
    String name = "Dock Satellite View";
    DockingActionIf action = getAction(tool, name);
    assertNotNull(name + " not in tool", action);
    ToggleDockingAction dockSatelliteAction = (ToggleDockingAction) action;
    setToggleActionSelected(dockSatelliteAction, provider.getActionContext(null), docked);
    waitForSwing();
}

/** Creates the test tool on the Swing thread. */
private FakeDockingTool createTool() {
    return runSwing(() -> new FakeDockingTool());
}
//==================================================================================================
// Inner Classes
//==================================================================================================
/**
 * Minimal graph component provider used by these tests: wraps the test viewer
 * and enables the satellite feature whose show/dock actions are under test.
 */
private class TestProvider
        extends VisualGraphComponentProvider<AbstractTestVertex, TestEdge, TestVisualGraph> {

    // Panel hosting the shared test viewer's view component.
    private JComponent component;

    protected TestProvider(Tool tool) {
        super(tool, "Test VG Provider", "Test Owner");

        component = new JPanel();
        component.add(viewer.getViewComponent());
        addToTool();
        addSatelliteFeature(); // installs the satellite show/dock toggle actions
    }

    @Override
    public VisualGraphView<AbstractTestVertex, TestEdge, TestVisualGraph> getView() {
        return viewer;
    }

    @Override
    public JComponent getComponent() {
        return component;
    }
}
}
| apache-2.0 |
ThilinaManamgoda/incubator-taverna-workbench | taverna-edits-api/src/main/java/org/apache/taverna/workflow/edits/ReorderMergePositionsEdit.java | 1941 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.taverna.workflow.edits;
import java.util.List;
import org.apache.taverna.workbench.edits.EditException;
import org.apache.taverna.scufl2.api.core.DataLink;
import org.apache.taverna.scufl2.api.port.ReceiverPort;
/**
* Change datalink merge positions based on ordered list of data links.
*
* @author David Withers
* @author Stian Soiland-Reyes
*/
public class ReorderMergePositionsEdit extends AbstractEdit<ReceiverPort> {

	/** The links in the desired (new) merge order. */
	private final List<DataLink> reorderedLinks;

	/** The links in the order they had before this edit was applied. */
	private final List<DataLink> originalLinks;

	/**
	 * Creates an edit that renumbers the merge positions of the given data
	 * links to match {@code newMergePositions}.
	 *
	 * @param dataLinks the links in their current merge order; all must share
	 *        the same receiver port
	 * @param newMergePositions the same links in the desired merge order
	 */
	public ReorderMergePositionsEdit(List<DataLink> dataLinks,
			List<DataLink> newMergePositions) {
		super(dataLinks.get(0).getSendsTo());
		this.originalLinks = dataLinks;
		this.reorderedLinks = newMergePositions;
	}

	@Override
	protected void doEditAction(ReceiverPort subject) throws EditException {
		renumber(reorderedLinks);
	}

	@Override
	protected void undoEditAction(ReceiverPort subject) {
		renumber(originalLinks);
	}

	/** Sets each link's merge position to its index within the given list. */
	private static void renumber(List<DataLink> links) {
		int position = 0;
		for (DataLink link : links) {
			link.setMergePosition(position++);
		}
	}
}
| apache-2.0 |
cbaenziger/oozie | core/src/test/java/org/apache/oozie/client/TestOozieCLI.java | 81596 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.client;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.StringReader;
import java.util.Properties;
import java.util.concurrent.Callable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.BuildInfo;
import org.apache.oozie.cli.CLIParser;
import org.apache.oozie.cli.OozieCLI;
import org.apache.oozie.client.rest.RestConstants;
import org.apache.oozie.service.InstrumentationService;
import org.apache.oozie.service.MetricsInstrumentationService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.ShareLibService;
import org.apache.oozie.servlet.DagServletTestCase;
import org.apache.oozie.servlet.MockCoordinatorEngineService;
import org.apache.oozie.servlet.MockDagEngineService;
import org.apache.oozie.servlet.SLAServlet;
import org.apache.oozie.servlet.V1AdminServlet;
import org.apache.oozie.servlet.V1JobServlet;
import org.apache.oozie.servlet.V1JobsServlet;
import org.apache.oozie.servlet.V2AdminServlet;
import org.apache.oozie.servlet.V2JobServlet;
import org.apache.oozie.servlet.V2ValidateServlet;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.XConfiguration;
import org.json.simple.JSONValue;
//hardcoding options instead using constants on purpose, to detect changes to option names if any and correct docs.
public class TestOozieCLI extends DagServletTestCase {

    // Instantiating each servlet once triggers its static registration so the
    // embedded test container below can serve it.
    static {
        new HeaderTestingVersionServlet();
        new V1JobServlet();
        new V1JobsServlet();
        new V1AdminServlet();
        new V2AdminServlet();
        new V2JobServlet();
        new V2ValidateServlet();
        new SLAServlet();
    }

    // All CLI tests in this class run with authentication disabled.
    static final boolean IS_SECURITY_ENABLED = false;
    // Current REST API version prefix, e.g. "/v2".
    static final String VERSION = "/v" + OozieClient.WS_PROTOCOL_VERSION;
    // URL patterns served by the embedded container, paired 1:1 (after the
    // versions endpoint) with SERVLET_CLASSES below.
    static final String[] END_POINTS = {"/versions", VERSION + "/jobs", VERSION + "/job/*", VERSION + "/admin/*",
            VERSION + "/validate/*", "/v1/sla"};
    static final Class<?>[] SERVLET_CLASSES = { HeaderTestingVersionServlet.class, V1JobsServlet.class,
            V2JobServlet.class, V2AdminServlet.class, V2ValidateServlet.class, SLAServlet.class};
@Override
protected void setUp() throws Exception {
    super.setUp();
    // Start each test from a clean mock-engine state (call history and job tables).
    MockDagEngineService.reset();
    MockCoordinatorEngineService.reset();
}
/**
 * Writes a Hadoop-XML job configuration (workflow app path + rerun skip-nodes)
 * into the test case directory and returns its path. Used as the argument for
 * the CLI's {@code -config} option.
 *
 * @param appPath HDFS path of the workflow application
 * @return path of the generated XML config file
 */
private String createConfigFile(String appPath) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".xml";
    Configuration conf = new Configuration(false);
    conf.set(OozieClient.APP_PATH, appPath);
    conf.set(OozieClient.RERUN_SKIP_NODES, "node");
    OutputStream os = new FileOutputStream(path);
    conf.writeXml(os);
    os.close();
    return path;
}

/**
 * Like {@link #createConfigFile(String)} but sets the coordinator app path
 * instead of the workflow app path. (The "Coodr" typo in the name is kept for
 * compatibility with existing callers.)
 *
 * @param appPath HDFS path of the coordinator application
 * @return path of the generated XML config file
 */
private String createCoodrConfigFile(String appPath) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".xml";
    Configuration conf = new Configuration(false);
    conf.set(OozieClient.COORDINATOR_APP_PATH, appPath);
    OutputStream os = new FileOutputStream(path);
    conf.writeXml(os);
    os.close();
    return path;
}
/**
 * Writes a java.util.Properties job configuration (user/group, workflow app
 * path, rerun skip-nodes, plus a marker property {@code a=A}) into the test
 * case directory and returns its path.
 *
 * @param appPath HDFS path of the workflow application
 * @return path of the generated .properties file
 */
private String createPropertiesFile(String appPath) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".properties";
    Properties props = new Properties();
    props.setProperty(OozieClient.USER_NAME, getTestUser());
    props.setProperty(OozieClient.GROUP_NAME, getTestGroup());
    props.setProperty(OozieClient.APP_PATH, appPath);
    props.setProperty(OozieClient.RERUN_SKIP_NODES, "node");
    props.setProperty("a", "A");
    OutputStream os = new FileOutputStream(path);
    props.store(os, "");
    os.close();
    return path;
}

/**
 * Like {@link #createPropertiesFile(String)} but deliberately surrounds one
 * property key and value with whitespace, so tests can verify that the CLI
 * trims configuration entries before submitting them.
 *
 * @param appPath HDFS path of the workflow application
 * @return path of the generated .properties file
 */
private String createPropertiesFileWithTrailingSpaces(String appPath) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".properties";
    Properties props = new Properties();
    props.setProperty(OozieClient.USER_NAME, getTestUser());
    props.setProperty(OozieClient.GROUP_NAME, getTestGroup());
    props.setProperty(OozieClient.APP_PATH, appPath);
    //add spaces to string
    props.setProperty(OozieClient.RERUN_SKIP_NODES + " ", " node ");
    OutputStream os = new FileOutputStream(path);
    props.store(os, "");
    os.close();
    return path;
}
/**
 * Writes the properties needed by the {@code oozie pig} submission path
 * (user, NameNode/ResourceManager addresses, libpath, output dir) and returns
 * the file's path.
 *
 * @param appPath path used for both the Oozie libpath and the MR output dir
 * @return path of the generated .properties file
 */
private String createPigPropertiesFile(String appPath) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".properties";
    Properties props = new Properties();
    props.setProperty(OozieClient.USER_NAME, getTestUser());
    props.setProperty(XOozieClient.NN, "localhost:8020");
    props.setProperty(XOozieClient.RM, "localhost:8032");
    props.setProperty("oozie.libpath", appPath);
    props.setProperty("mapred.output.dir", appPath);
    props.setProperty("a", "A");
    OutputStream os = new FileOutputStream(path);
    props.store(os, "");
    os.close();
    return path;
}

/**
 * Writes the properties needed by the {@code oozie mapreduce} submission path
 * and returns the file's path.
 *
 * @param appPath   HDFS path used for the app path and the Oozie libpath
 * @param useNewAPI when true, sets the new-API class properties
 *                  ({@code mapreduce.map.class}/{@code mapreduce.reduce.class});
 *                  otherwise the old-API ones
 *                  ({@code mapred.mapper.class}/{@code mapred.reducer.class})
 * @return path of the generated .properties file
 */
private String createMRProperties(String appPath, boolean useNewAPI) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".properties";
    Properties props = new Properties();
    props.setProperty(OozieClient.USER_NAME, getTestUser());
    props.setProperty(OozieClient.GROUP_NAME, getTestGroup());
    props.setProperty(OozieClient.APP_PATH, appPath);
    props.setProperty(OozieClient.RERUN_SKIP_NODES, "node");
    props.setProperty(XOozieClient.NN, "localhost:9000");
    props.setProperty(XOozieClient.RM, "localhost:9001");
    if (useNewAPI) {
        props.setProperty("mapreduce.map.class", "mapper.class");
        props.setProperty("mapreduce.reduce.class", "reducer.class");
    }
    else {
        props.setProperty("mapred.mapper.class", "mapper.class");
        props.setProperty("mapred.reducer.class", "reducer.class");
    }
    props.setProperty("mapred.input.dir", "input");
    props.setProperty("mapred.output.dir", "output");
    props.setProperty("oozie.libpath", appPath);
    props.setProperty("a", "A");
    OutputStream os = new FileOutputStream(path);
    props.store(os, "");
    os.close();
    return path;
}
/**
 * Writes a small, syntactically valid Pig Latin script into the test case
 * directory and returns its path. Used as the {@code -file} argument of the
 * {@code oozie pig} CLI command.
 *
 * @param appPath unused; kept for signature compatibility with the other
 *                {@code create*} fixture helpers in this class
 * @return path of the generated script file
 * @throws Exception if the file cannot be written
 */
private String createPigScript(String appPath) throws Exception {
    String path = getTestCaseDir() + "/" + getName() + ".properties";
    DataOutputStream dos = new DataOutputStream(new FileOutputStream(path));
    try {
        // Fixed: the second statement was not terminated and the final statement
        // was the typo "dumb", so the concatenation produced the invalid line
        // "B = foreach A generate $0dumb B;". Each statement now ends with ';'
        // on its own line and the last one is the Pig DUMP operator.
        String pigScript = "A = load \'/user/data\' using PigStorage(:);\n" +
                "B = foreach A generate $0;\n" +
                "dump B;";
        dos.writeBytes(pigScript);
    }
    finally {
        // Always release the file handle, even if the write fails.
        dos.close();
    }
    return path;
}
/**
 * Tests {@code oozie job -submit} with an XML config file and a .properties
 * config file (job created but not started), and the failure path when the
 * config file does not exist (no job created).
 */
public void testSubmit() throws Exception {
    runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            String oozieUrl = getContextURL();
            int wfCount = MockDagEngineService.INIT_WF_COUNT;

            // Fixture app dir with an (empty) workflow.xml on the test FS.
            Path appPath = new Path(getFsTestCaseDir(), "app");
            getFileSystem().mkdirs(appPath);
            getFileSystem().create(new Path(appPath, "workflow.xml")).close();

            // Submit with a Hadoop-XML config: job is recorded but not started.
            String[] args = new String[]{"job", "-submit", "-oozie", oozieUrl, "-config",
                    createConfigFile(appPath.toString())};
            assertEquals(0, new OozieCLI().run(args));
            assertEquals("submit", MockDagEngineService.did);
            assertFalse(MockDagEngineService.started.get(wfCount));
            wfCount++;

            // Submit with a .properties config: same behavior.
            args = new String[]{"job", "-submit", "-oozie", oozieUrl, "-config",
                    createPropertiesFile(appPath.toString())};
            assertEquals(0, new OozieCLI().run(args));
            assertEquals("submit", MockDagEngineService.did);
            assertFalse(MockDagEngineService.started.get(wfCount));

            MockDagEngineService.reset();
            wfCount = MockDagEngineService.INIT_WF_COUNT;

            // Negative: non-existent config file ("...x") -> CLI fails, no job created.
            args = new String[]{"job", "-submit", "-oozie", oozieUrl, "-config",
                    createPropertiesFile(appPath.toString()) + "x"};
            assertEquals(-1, new OozieCLI().run(args));
            assertEquals(null, MockDagEngineService.did);
            try {
                MockDagEngineService.started.get(wfCount);
                //job was not created, then how did this extra job come after reset? fail!!
                fail();
            }
            catch (Exception e) {
                //job was not submitted, so its fine
            }
            return null;
        }
    });
}
public void testSubmitPig() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
int wfCount = MockDagEngineService.INIT_WF_COUNT;
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
String[] args = new String[]{"pig", "-oozie", oozieUrl, "-file", createPigScript(appPath.toString()), "-config",
createPigPropertiesFile(appPath.toString())};
assertEquals(0, new OozieCLI().run(args));
assertEquals("submitPig", MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(wfCount));
return null;
}
});
}
public void testSubmitMapReduce() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
int wfCount = MockDagEngineService.INIT_WF_COUNT;
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
String[] args = new String[]{"mapreduce", "-oozie", oozieUrl, "-config",
createMRProperties(appPath.toString(), false)};
assertEquals(0, new OozieCLI().run(args));
assertEquals("submitMR", MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(wfCount));
return null;
}
});
}
public void testSubmitMapReduce2() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
int wfCount = MockDagEngineService.INIT_WF_COUNT;
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
String[] args = new String[]{"mapreduce", "-oozie", oozieUrl, "-config",
createMRProperties(appPath.toString(), true)};
assertEquals(0, new OozieCLI().run(args));
assertEquals("submitMR", MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(wfCount));
return null;
}
});
}
public void testSubmitDoAs() throws Exception {
setSystemProperty("oozie.authentication.simple.anonymous.allowed", "false");
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "workflow.xml")).close();
String[] args = new String[]{"job", "-submit", "-oozie", oozieUrl, "-config",
createConfigFile(appPath.toString()), "-doas", getTestUser2() };
assertEquals(0, new OozieCLI().run(args));
assertEquals("submit", MockDagEngineService.did);
assertEquals(getTestUser2(), MockDagEngineService.user);
return null;
}
});
}
public void testSubmitWithPropertyArguments() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
int wfCount = MockDagEngineService.INIT_WF_COUNT;
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "workflow.xml")).close();
String[] args = new String[]{"job", "-submit", "-oozie", oozieUrl, "-config",
createConfigFile(appPath.toString()), "-Da=X", "-Db=B"};
assertEquals(0, new OozieCLI().run(args));
assertEquals("submit", MockDagEngineService.did);
assertFalse(MockDagEngineService.started.get(wfCount));
assertEquals("X", MockDagEngineService.submittedConf.get("a"));
assertEquals("B", MockDagEngineService.submittedConf.get("b"));
return null;
}
});
}
public void testRun() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "workflow.xml")).close();
String oozieUrl = getContextURL();
int wfCount = MockDagEngineService.INIT_WF_COUNT;
String[] args = new String[]{"job", "-run", "-oozie", oozieUrl, "-config",
createConfigFile(appPath.toString())};
assertEquals(0, new OozieCLI().run(args));
assertEquals("submit", MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(wfCount));
return null;
}
});
}
/**
* Check if "-debug" option is accepted at CLI with job run command
*
* @throws Exception
*/
public void testRunWithDebug() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "workflow.xml")).close();
String oozieUrl = getContextURL();
int wfCount = MockDagEngineService.INIT_WF_COUNT;
String[] args = new String[]{"job", "-run", "-oozie", oozieUrl, "-config",
createConfigFile(appPath.toString()), "-debug"};
assertEquals(0, new OozieCLI().run(args));
assertEquals("submit", MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(wfCount));
return null;
}
});
}
public void testStart() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"job", "-oozie", oozieUrl, "-start", MockDagEngineService.JOB_ID + "1" +
MockDagEngineService.JOB_ID_END};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_START, MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(1));
args = new String[]{"job", "-oozie", oozieUrl, "-start",
MockDagEngineService.JOB_ID + (MockDagEngineService.workflows.size() + 1)};
assertEquals(-1, new OozieCLI().run(args));
return null;
}
});
}
public void testSuspend() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"job", "-oozie", oozieUrl, "-suspend", MockDagEngineService.JOB_ID + "1" +
MockDagEngineService.JOB_ID_END};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_SUSPEND, MockDagEngineService.did);
args = new String[]{"job", "-oozie", oozieUrl, "-suspend",
MockDagEngineService.JOB_ID + (MockDagEngineService.workflows.size() + 1)};
assertEquals(-1, new OozieCLI().run(args));
return null;
}
});
}
public void testResume() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"job", "-oozie", oozieUrl, "-resume", MockDagEngineService.JOB_ID + "1" +
MockDagEngineService.JOB_ID_END};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_RESUME, MockDagEngineService.did);
args = new String[]{"job", "-oozie", oozieUrl, "-resume",
MockDagEngineService.JOB_ID + (MockDagEngineService.workflows.size() + 1)};
assertEquals(-1, new OozieCLI().run(args));
return null;
}
});
}
public void testKill() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"job", "-oozie", oozieUrl, "-kill", MockDagEngineService.JOB_ID + "1" +
MockDagEngineService.JOB_ID_END};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_KILL, MockDagEngineService.did);
args = new String[]{"job", "-oozie", oozieUrl, "-kill",
MockDagEngineService.JOB_ID + (MockDagEngineService.workflows.size() + 1)};
assertEquals(-1, new OozieCLI().run(args));
return null;
}
});
}
public void testBulkSuspendResumeKill1() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"jobs", "-oozie", oozieUrl, "-suspend", "-filter",
"name=workflow-1"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOBS, MockDagEngineService.did);
args = new String[]{"jobs", "-oozie", oozieUrl, "-resume", "-filter",
"name=workflow-1"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOBS, MockDagEngineService.did);
args = new String[]{"jobs", "-oozie", oozieUrl, "-kill", "-filter",
"name=workflow-1"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOBS, MockDagEngineService.did);
return null;
}
});
}
public void testBulkSuspendResumeKill2() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"jobs", "-oozie", oozieUrl, "-suspend", "-filter",
"name=coordinator", "-jobtype", "coordinator"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOBS, MockCoordinatorEngineService.did);
args = new String[]{"jobs", "-oozie", oozieUrl, "-resume", "-filter",
"name=coordinator", "-jobtype", "coordinator"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOBS, MockCoordinatorEngineService.did);
args = new String[]{"jobs", "-oozie", oozieUrl, "-kill", "-filter",
"name=coordinator", "-jobtype", "coordinator"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOBS, MockCoordinatorEngineService.did);
return null;
}
});
}
public void testBulkCommandWithoutFilterNegative() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"jobs", "-oozie", oozieUrl, "-suspend", "-jobtype", "coordinator"};
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
args = new String[]{"jobs", "-oozie", oozieUrl, "-resume", "-jobtype", "coordinator"};
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
args = new String[]{"jobs", "-oozie", oozieUrl, "-kill", "-jobtype", "coordinator"};
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
return null;
}
});
}
/**
* Test the working of coord action kill from Client with action numbers
*
* @throws Exception
*/
public void testCoordActionKill1() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-kill",
MockCoordinatorEngineService.JOB_ID + "1", "-action", "1" };
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_KILL, MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
* Test the working of coord action kill from Client with action nominal
* date ranges
*
* @throws Exception
*/
public void testCoordActionKill2() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-kill",
MockCoordinatorEngineService.JOB_ID + "1", "-date", "2009-12-15T01:00Z::2009-12-16T01:00Z" };
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_KILL, MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
public void testReRun() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "workflow.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[]{"job", "-oozie", oozieUrl, "-config", createConfigFile(appPath.toString()),
"-rerun", MockDagEngineService.JOB_ID + "1" + MockDagEngineService.JOB_ID_END};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_RERUN, MockDagEngineService.did);
assertTrue(MockDagEngineService.started.get(1));
return null;
}
});
}
/**
* Test: oozie -rerun coord_job_id -action 1
*
* @throws Exception
*/
public void testCoordReRun1() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-rerun",
MockCoordinatorEngineService.JOB_ID + "1",
"-action", "1" };
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_COORD_ACTION_RERUN, MockCoordinatorEngineService.did);
assertTrue(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
* Test: oozie -rerun coord_job_id -date 2009-12-15T01:00Z::2009-12-16T01:00Z
*
* @throws Exception
*/
public void testCoordReRun2() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-rerun",
MockCoordinatorEngineService.JOB_ID + "1",
"-date", "2009-12-15T01:00Z::2009-12-16T01:00Z" };
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_COORD_ACTION_RERUN, MockCoordinatorEngineService.did);
assertTrue(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
*
* Test: oozie -rerun coord_job_id -action 0 -refresh
*
*/
public void testCoordReRun3() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-rerun",
MockCoordinatorEngineService.JOB_ID + "0",
"-action", "0", "-refresh" };
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_COORD_ACTION_RERUN, MockCoordinatorEngineService.did);
assertTrue(MockCoordinatorEngineService.startedCoordJobs.get(0));
return null;
}
});
}
/**
*
* Test: oozie -rerun coord_job_id -action 0 -nocleanup
*
*/
public void testCoordReRun4() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-rerun",
MockCoordinatorEngineService.JOB_ID + "0",
"-action", "0", "-nocleanup" };
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_COORD_ACTION_RERUN, MockCoordinatorEngineService.did);
assertTrue(MockCoordinatorEngineService.startedCoordJobs.get(0));
return null;
}
});
}
/**
* Negative Test: oozie -rerun coord_job_id -date 2009-12-15T01:00Z -action 1
*
* @throws Exception
*/
public void testCoordReRunNeg1() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-rerun",
MockCoordinatorEngineService.JOB_ID + "1" + MockDagEngineService.JOB_ID_END,
"-date", "2009-12-15T01:00Z", "-action", "1" };
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
* Negative Test: oozie -rerun coord_job_id
*
* @throws Exception
*/
public void testCoordReRunNeg2() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-oozie", oozieUrl, "-rerun",
MockCoordinatorEngineService.JOB_ID + "1" + MockDagEngineService.JOB_ID_END};
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
*
* Negative Test: date or action option expected
* @throws Exception
*
*/
public void testCoordReRunNeg3() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] {"job", "-oozie", oozieUrl, "-config", createConfigFile(appPath.toString()),
"-rerun", MockCoordinatorEngineService.JOB_ID + "0" };
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
* Negative Test: Invalid options provided for rerun: eitherdate or action expected. Don't use both at the same time
* @throws Exception
*/
public void testCoordReRunNeg4() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
Path appPath = new Path(getFsTestCaseDir(), "app");
getFileSystem().mkdirs(appPath);
getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
String oozieUrl = getContextURL();
String[] args = new String[] {"job", "-oozie", oozieUrl, "-config", createConfigFile(appPath.toString()),
"-rerun", MockCoordinatorEngineService.JOB_ID + "0",
"-date", "2009-12-15T01:00Z", "-action", "1"};
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
/**
 * Tests {@code oozie job -ignore <coord-job-id>}: a valid coordinator job id
 * results in a change request against the coordinator engine, while a
 * non-existent job id makes the CLI fail without touching any job.
 */
public void testCoordJobIgnore() throws Exception {
    runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            String oozieUrl = getContextURL();

            // Positive: ignore an existing coordinator job.
            String[] args = new String[]{"job", "-oozie", oozieUrl, "-ignore",
                    MockCoordinatorEngineService.JOB_ID + "1"};
            assertEquals(0, new OozieCLI().run(args));
            assertEquals(RestConstants.JOB_ACTION_CHANGE, MockCoordinatorEngineService.did);
            assertTrue(MockCoordinatorEngineService.startedCoordJobs.get(1));

            // negative test for "oozie job -ignore <non-existent coord>"
            MockCoordinatorEngineService.reset();
            // Fixed: the option was passed as "ignore" (missing leading dash), so
            // the CLI failed on argument parsing instead of exercising the
            // intended unknown-job error path, as the other negative tests do.
            args = new String[]{
                    "job", "-oozie", oozieUrl, "-ignore",
                    MockDagEngineService.JOB_ID + (MockCoordinatorEngineService.coordJobs.size() + 1)};
            assertEquals(-1, new OozieCLI().run(args));
            assertNull(MockCoordinatorEngineService.did);
            assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
            return null;
        }
    });
}
public void testCoordActionsIgnore() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
@Override
public Void call() throws Exception {
String oozieUrl = getContextURL();
String[] args = new String[]{"job", "-oozie", oozieUrl, "-ignore",
MockCoordinatorEngineService.JOB_ID + "1", "-action", "1"};
assertEquals(0, new OozieCLI().run(args));
assertEquals(RestConstants.JOB_ACTION_IGNORE, MockCoordinatorEngineService.did);
assertTrue(MockCoordinatorEngineService.startedCoordJobs.get(1));
// negative test for "oozie job -ignore <non-existent coord> -action 1"
MockCoordinatorEngineService.reset();
args = new String[]{"job", "-oozie", oozieUrl, "ignore",
MockDagEngineService.JOB_ID + (MockCoordinatorEngineService.coordJobs.size() + 1), "-action", "1" };
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
// negative test for "oozie job -ignore <id> -action (action is empty)"
MockCoordinatorEngineService.reset();
args = new String[]{"job", "-oozie", oozieUrl, "-ignore",
MockCoordinatorEngineService.JOB_ID, "-action", ""};
assertEquals(-1, new OozieCLI().run(args));
assertNull(MockCoordinatorEngineService.did);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
    /**
     * Verifies {@code job -info <wf-id>} against the mock DAG engine, including the
     * {@code -localtime} and {@code -timezone} display variants, and that an unknown
     * job id makes the CLI exit with -1.
     */
    public void testJobStatus() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                MockDagEngineService.reset();
                String[] args = new String[]{"job", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID + "0" +
                        MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                args = new String[]{"job", "-localtime", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID +
                        "1" + MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                args = new String[]{"job", "-timezone", "PST", "-oozie", oozieUrl, "-info",
                        MockDagEngineService.JOB_ID + "1" + MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID + "2" +
                        MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                // out-of-range workflow index: the CLI must fail with -1
                args = new String[]{"job", "-oozie", oozieUrl, "-info",
                        MockDagEngineService.JOB_ID + (MockDagEngineService.workflows.size() + 1)};
                assertEquals(-1, new OozieCLI().run(args));
                return null;
            }
        });
    }
    /**
     * Verifies the {@code jobs} listing sub-command with paging ({@code -len}/{@code -offset}),
     * time display options, the wf/coord/bundle job types, and the various {@code -filter}
     * expressions (name, status, created-time ranges — absolute and relative — and sortby).
     */
    public void testJobsStatus() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                String[] args = new String[]{"jobs", "-len", "3", "-offset", "2", "-oozie", oozieUrl, "-filter",
                        "name=x"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[]{"jobs", "-localtime", "-len", "3", "-offset", "2", "-oozie", oozieUrl, "-filter",
                        "name=x"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[]{"jobs", "-timezone", "PST", "-len", "3", "-offset", "2", "-oozie", oozieUrl,
                        "-filter", "name=x"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[]{"jobs", "-jobtype", "coord", "-filter", "status=FAILED", "-oozie", oozieUrl};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "startcreatedtime=2014-04-01T00:00Z;endcreatedtime=2014-05-01T00:00Z", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "startcreatedtime=-10d;endcreatedtime=-20m", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "sortby=lastmodifiedtime", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "sortby=lastmodifiedtime", "-jobtype", "coord", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "sortby=lastmodifiedtime", "-jobtype", "bundle", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "startcreatedtime=-10d;endcreatedtime=-20m", "-jobtype", "coord", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                args = new String[] { "jobs", "-filter",
                        "startcreatedtime=-10d;endcreatedtime=-20m", "-jobtype", "bundle", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOBS_FILTER_PARAM, MockDagEngineService.did);
                return null;
            }
        });
    }
    /**
     * Verifies that a custom header configured via the {@code OozieCLI.WS_HEADER_PREFIX}
     * system property is propagated on CLI web-service requests and observed by the server.
     */
    public void testHeaderPropagation() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                setSystemProperty(OozieCLI.WS_HEADER_PREFIX + "header", "test");
                String oozieUrl = getContextURL();
                String[] args = new String[]{"job", "-oozie", oozieUrl, "-start", MockDagEngineService.JOB_ID + "1" +
                        MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_ACTION_START, MockDagEngineService.did);
                // the header set above must have been received by the test servlet
                assertTrue(HeaderTestingVersionServlet.OOZIE_HEADERS.containsKey("header"));
                assertTrue(HeaderTestingVersionServlet.OOZIE_HEADERS.containsValue("test"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -status} and {@code admin -systemmode NORMAL} both report
     * the NORMAL system mode on stdout.
     */
    public void testOozieStatus() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-status", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertEquals("System mode: NORMAL" + SYSTEM_LINE_SEPARATOR, out);
                args = new String[]{"admin", "-oozie", oozieUrl, "-systemmode", "NORMAL"};
                out = runOozieCLIAndGetStdout(args);
                assertEquals("System mode: NORMAL" + SYSTEM_LINE_SEPARATOR, out);
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -version} prints the server build metadata block containing
     * build.time, build.version, build.user, vc.url and vc.revision.
     */
    public void testServerBuildVersion() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "admin", "-version", "-oozie", oozieUrl };
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out, out.startsWith("Oozie server build version: {"));
                assertTrue(out, out.endsWith(SYSTEM_LINE_SEPARATOR));
                assertTrue(out, out.contains("build.time"));
                assertTrue(out, out.contains("build.version"));
                assertTrue(out, out.contains("build.user"));
                assertTrue(out, out.contains("vc.url"));
                assertTrue(out, out.contains("vc.revision"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -purge} with wf/coord/bundle/limit/oldCoordAction options
     * reports success, including the case where coord and bundle values are zero.
     */
    public void testAdminPurgeCommand() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-purge", "wf=1;coord=2;bundle=3;limit=10;oldCoordAction=true", "-oozie",
                        oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertEquals("Purge command executed successfully" + SYSTEM_LINE_SEPARATOR, out);
                return null;
            }
        });

        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                // zero values for coord and bundle are valid purge options
                String[] args = new String[]{"admin", "-purge", "wf=1;coord=0;bundle=0;limit=10;oldCoordAction=true", "-oozie",
                        oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertEquals("Purge command executed successfully" + SYSTEM_LINE_SEPARATOR, out);
                return null;
            }
        });
    }
    /**
     * Negative tests for {@code admin -purge}: a missing argument, an unknown option key,
     * a malformed key=value pair, a negative value and a non-numeric value must each
     * produce a descriptive error on stderr.
     */
    public void testAdminPurgeCommandNegative() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-purge", "-oozie", oozieUrl};
                String error = runOozieCLIAndGetStderr(args);
                assertTrue(error.contains("Missing argument for option: purge"));

                args = new String[]{"admin", "-purge", "invalid=1", "-oozie", oozieUrl};
                error = runOozieCLIAndGetStderr(args);
                assertTrue(error.contains("INVALID_INPUT : Invalid purge option [invalid] specified."));

                args = new String[]{"admin", "-purge", "wf=1;coord=", "-oozie", oozieUrl};
                error = runOozieCLIAndGetStderr(args);
                assertTrue(error.contains("INVALID_INPUT : Invalid purge option pair [coord=] specified."));

                args = new String[]{"admin", "-purge", "wf=1;coord=-1", "-oozie", oozieUrl};
                error = runOozieCLIAndGetStderr(args);
                assertTrue(error.contains("Input value should be a positive integer. Value: -1"));

                args = new String[]{"admin", "-purge", "wf=a", "-oozie", oozieUrl};
                error = runOozieCLIAndGetStderr(args);
                assertTrue(error.contains("For input string: \"a\""));
                return null;
            }
        });
    }
public void testClientBuildVersion() throws Exception {
String[] args = new String[]{"version"};
String out = runOozieCLIAndGetStdout(args);
StringBuilder sb = new StringBuilder();
sb.append("Oozie client build version: ")
.append(BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VERSION))
.append("\nSource code repository: ")
.append(BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VC_URL))
.append("\nCompiled by ")
.append(BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_USER_NAME))
.append(" on ")
.append(BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_TIME))
.append("\nFrom source with checksum: ")
.append(BuildInfo.getBuildInfo().getProperty(BuildInfo.BUILD_VC_REVISION));
assertEquals(sb.toString() + SYSTEM_LINE_SEPARATOR, out);
}
    /**
     * Verifies {@code job -info} for workflow jobs (plain and with {@code -len}/{@code -offset})
     * and for coordinator jobs, checking that offset, length, order and filter values are
     * passed through to the coordinator engine (with defaults offset=1, len=1000).
     */
    public void testJobInfo() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                MockDagEngineService.reset();
                String[] args = new String[]{"job", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID + "0" +
                        MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID + "1" +
                        MockDagEngineService.JOB_ID_END, "-len", "3", "-offset", "1"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID + "2" +
                        MockDagEngineService.JOB_ID_END, "-len", "2"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-info", MockDagEngineService.JOB_ID + "3" +
                        MockDagEngineService.JOB_ID_END, "-offset", "3"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockDagEngineService.did);
                // coordinator job info: defaults are offset=1, len=1000
                args = new String[] { "job", "-oozie", oozieUrl, "-info",
                        MockCoordinatorEngineService.JOB_ID + 1 + MockCoordinatorEngineService.JOB_ID_END };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockCoordinatorEngineService.did);
                assertEquals(MockCoordinatorEngineService.offset, new Integer(1));
                assertEquals(MockCoordinatorEngineService.length, new Integer(1000));
                MockCoordinatorEngineService.reset();
                args = new String[] { "job", "-oozie", oozieUrl, "-info",
                        MockCoordinatorEngineService.JOB_ID + 1 + MockCoordinatorEngineService.JOB_ID_END,
                        "-len", "10", "-offset", "5", "-order", "desc", "-filter", "status=FAILED"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockCoordinatorEngineService.did);
                assertEquals(MockCoordinatorEngineService.offset, new Integer(5));
                assertEquals(MockCoordinatorEngineService.length, new Integer(10));
                assertEquals(MockCoordinatorEngineService.order, "desc");
                assertEquals(MockCoordinatorEngineService.filter, "status=FAILED");
                MockCoordinatorEngineService.reset();
                // negated filter operator (!=) must also be passed through verbatim
                args = new String[] { "job", "-oozie", oozieUrl, "-info",
                        MockCoordinatorEngineService.JOB_ID + 1 + MockCoordinatorEngineService.JOB_ID_END,
                        "-len", "10", "-offset", "5", "-order", "desc", "-filter", "status!=FAILED"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_INFO, MockCoordinatorEngineService.did);
                assertEquals(MockCoordinatorEngineService.offset, new Integer(5));
                assertEquals(MockCoordinatorEngineService.length, new Integer(10));
                assertEquals(MockCoordinatorEngineService.order, "desc");
                assertEquals(MockCoordinatorEngineService.filter, "status!=FAILED");
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -poll} issues status requests, including the optional
     * {@code -interval} and {@code -timeout} arguments in all combinations.
     */
    public void testJobPoll() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                MockDagEngineService.reset();
                String[] args = new String[]{"job", "-oozie", oozieUrl, "-poll", MockDagEngineService.JOB_ID + "1" +
                        MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_STATUS, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-poll", MockDagEngineService.JOB_ID + "1" +
                        MockDagEngineService.JOB_ID_END, "-interval", "10"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_STATUS, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-poll", MockDagEngineService.JOB_ID + "1" +
                        MockDagEngineService.JOB_ID_END, "-timeout", "60"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_STATUS, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-poll", MockDagEngineService.JOB_ID + "1" +
                        MockDagEngineService.JOB_ID_END, "-interval", "10", "-timeout", "60"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_STATUS, MockDagEngineService.did);
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -log} for a workflow job and for a coordinator job, the latter
     * also scoped by {@code -action} and {@code -date}.
     */
    public void testJobLog() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                MockDagEngineService.reset();
                String[] args = new String[]{"job", "-oozie", oozieUrl, "-log", MockDagEngineService.JOB_ID + "0" +
                        MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_LOG, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-log", MockCoordinatorEngineService.JOB_ID + "0"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_LOG, MockDagEngineService.did);
                args = new String[]{"job", "-oozie", oozieUrl, "-log", MockCoordinatorEngineService.JOB_ID + "0",
                        "-action", "0", "-date", "2009-12-16T01:00Z"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_LOG, MockDagEngineService.did);
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -definition} issues a show-definition request for a workflow job.
     */
    public void testJobDefinition() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                MockDagEngineService.reset();
                String[] args = new String[]{"job", "-oozie", oozieUrl, "-definition", MockDagEngineService.JOB_ID +
                        "0" + MockDagEngineService.JOB_ID_END};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_DEFINITION, MockDagEngineService.did);
                return null;
            }
        });
    }
    /**
     * Verifies that a property value written with trailing spaces in the submitted config
     * file arrives at the server as the bare value ("node" for the rerun-skip-nodes key).
     */
    public void testPropertiesWithTrailingSpaces() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                MockDagEngineService.reset();
                String oozieUrl = getContextURL();
                Path appPath = new Path(getFsTestCaseDir(), "app");
                getFileSystem().mkdirs(appPath);
                getFileSystem().create(new Path(appPath, "workflow.xml")).close();
                String[] args = new String[]{"job", "-submit", "-oozie", oozieUrl, "-config",
                        createPropertiesFileWithTrailingSpaces(appPath.toString())};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals("submit", MockDagEngineService.did);
                // inspect the configuration the mock engine actually received
                String confStr = MockDagEngineService.workflows.get(MockDagEngineService.INIT_WF_COUNT).getConf();
                XConfiguration conf = new XConfiguration(new StringReader(confStr));
                assertNotNull(conf.get(OozieClient.RERUN_SKIP_NODES));
                assertEquals("node", conf.get(OozieClient.RERUN_SKIP_NODES));
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -retries <wf-id>@<action>} issues an action-retries request.
     */
    public void testWfActionRetries() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                MockDagEngineService.reset();
                String[] args = new String[] { "job", "-oozie", oozieUrl, "-retries",
                        MockDagEngineService.JOB_ID + "0" + MockDagEngineService.JOB_ID_END + "@a"};
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_SHOW_ACTION_RETRIES_PARAM, MockDagEngineService.did);
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -queuedump} prints both the callable queue dump and the
     * uniqueness map dump, each reported as empty in this test setup.
     */
    public void testAdminQueueDump() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-queuedump", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertTrue("Queue dump",
                        out.contains("Server Queue Dump"));
                assertTrue("Queue dump empty message",
                        out.contains("The queue dump is empty, nothing to display."));
                assertTrue("Uniqueness map dump",
                        out.contains("Server Uniqueness Map Dump"));
                assertTrue("Uniqueness dump empty message",
                        out.contains("The uniqueness map dump is empty, nothing to display."));
                return null;
            }
        });
    }
public void testInfo() throws Exception {
String[] args = new String[]{"info"};
assertEquals(0, new OozieCLI().run(args));
args = new String[]{"info", "-timezones"};
String out = runOozieCLIAndGetStdout(args);
assertTrue(out.contains("Available Time Zones"));
}
    /**
     * Verifies the {@code validate} sub-command: a schema-valid workflow file reports
     * "Valid" on stdout and an invalid one reports an XML schema error on stderr.
     */
    public void testValidateWorkFlowCommand() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String validFileName = "test-workflow-app.xml";
                String invalidFileName = "test-invalid-workflow-app.xml";
                String validContent = "<workflow-app xmlns=\"uri:oozie:workflow:0.2\" name=\"no-op-wf\"> "+
                        " <start to=\"end\"/> <end name=\"end\"/> </workflow-app>";
                String invalidContent = "<workflow-app xmlns=\"uri:oozie:workflow:0.2\" name=\"f\"> "+
                        " <tag=\"end\"/> <tag=\"end\"/> </workflow-app>";
                File validfile = new File(getTestCaseDir(), validFileName);
                File invalidfile = new File(getTestCaseDir(), invalidFileName);
                // make sure leftovers from a previous run do not interfere
                validfile.delete();
                invalidfile.delete();
                String oozieUrl = getContextURL();
                IOUtils.copyCharStream(new StringReader(validContent), new FileWriter(validfile));
                String [] args = new String[] { "validate", "-oozie", oozieUrl, validfile.getAbsolutePath() };
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("Valid"));

                IOUtils.copyCharStream(new StringReader(invalidContent), new FileWriter(invalidfile));
                args = new String[] { "validate", "-oozie", oozieUrl, invalidfile.getAbsolutePath() };
                out = runOozieCLIAndGetStderr(args);
                assertTrue(out.contains("XML schema error"));
                return null;
            }
        });
    }
    /**
     * Tests {@code oozie job -change <coord_job_id> -value concurrency=10}.
     */
    public void testChangeValue() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                Path appPath = new Path(getFsTestCaseDir(), "app");
                getFileSystem().mkdirs(appPath);
                getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
                String oozieUrl = getContextURL();
                // changing a coordinator property must be issued as a change action
                String[] args = new String[] {"job", "-oozie", oozieUrl, "-change",
                        MockCoordinatorEngineService.JOB_ID + "0", "-value", "concurrency=10" };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(RestConstants.JOB_ACTION_CHANGE, MockCoordinatorEngineService.did);
                return null;
            }
        });
    }
    /**
     * Tests that the {@code sla} sub-command returns an SLA event list response.
     */
    public void testSlaEvents() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                String oozieUrl = getContextURL();
                String[] args = new String[] {"sla", "-oozie", oozieUrl, "-len", "1" };
                String out = runOozieCLIAndGetStdout(args);
                // the response payload is XML containing sla-message elements
                assertTrue(out, out.contains("<sla-message>"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -sharelibupdate} reports the sharelib update status when
     * the {@code ShareLibService} is active.
     */
    public void testshareLibUpdate() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                Services.get().setService(ShareLibService.class);
                String oozieUrl = getContextURL();
                String[] args = new String[] { "admin", "-sharelibupdate", "-oozie", oozieUrl };
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("ShareLib update status"));
                return null;
            }
        });
    }
    /**
     * Verifies that with security enabled a non-admin user receives an E0503 error
     * from {@code admin -sharelibupdate}.
     */
    public void testshareLibUpdate_withSecurity() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, true, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                // Need to pass "-auth simple" instead of allowing fallback for Hadoop 2.3.0 - Hadoop 2.6.0 (see OOZIE-2315)
                String[] args = new String[] { "admin", "-sharelibupdate", "-oozie", oozieUrl, "-auth", "simple" };
                String out = runOozieCLIAndGetStderr(args);
                assertEquals("Error: E0503 : E0503: User [test] does not have admin privileges\n", out);
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -shareliblist} prints the available sharelib listing.
     */
    public void testGetShareLib() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                Services.get().setService(ShareLibService.class);
                String oozieUrl = getContextURL();
                String[] args = new String[] { "admin", "-shareliblist", "-oozie", oozieUrl };
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("Available ShareLib"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -shareliblist <key>} with an explicit lib key ({@code pig})
     * still prints the available sharelib listing.
     */
    public void testGetShareLib_withKey() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                Services.get().setService(ShareLibService.class);
                String oozieUrl = getContextURL();
                String[] args = new String[] { "admin", "-shareliblist", "pig", "-oozie", oozieUrl };
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("Available ShareLib"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -dryrun} of a coordinator config issues a dryrun action
     * without actually starting the coordinator job.
     */
    public void testJobDryrun() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                Path appPath = new Path(getFsTestCaseDir(), "app");
                getFileSystem().mkdirs(appPath);
                getFileSystem().create(new Path(appPath, "coordinator.xml")).close();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "job", "-dryrun", "-config", createCoodrConfigFile(appPath.toString()),
                        "-oozie", oozieUrl, "-Doozie.proxysubmission=true" };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(MockCoordinatorEngineService.did, RestConstants.JOB_ACTION_DRYRUN);
                assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -update} with a bogus job id: the update action is recorded
     * by the engine but the CLI exits with -1 and no coordinator job is started.
     */
    public void testUpdate() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "job", "-update", "aaa", "-oozie", oozieUrl };
                assertEquals(-1, new OozieCLI().run(args));
                assertEquals(MockCoordinatorEngineService.did, RestConstants.JOB_COORD_UPDATE );
                assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -update -dryrun} with a bogus job id: the combined
     * update+dryrun action is recorded and the CLI exits with -1.
     */
    public void testUpdateWithDryrun() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "job", "-update", "aaa", "-dryrun", "-oozie", oozieUrl };
                assertEquals(-1, new OozieCLI().run(args));
                assertEquals(MockCoordinatorEngineService.did, RestConstants.JOB_COORD_UPDATE + "&"
                        + RestConstants.JOB_ACTION_DRYRUN);
                assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
                return null;
            }
        });
    }
    /**
     * Verifies that {@code job} with no sub-command option fails with an
     * "Invalid sub-command" message on stderr.
     */
    public void testFailNoArg() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "job", "-oozie", oozieUrl };
                String out = runOozieCLIAndGetStderr(args);
                assertTrue(out.contains("Invalid sub-command"));
                return null;
            }
        });
    }
public void testRetryForTimeout() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
@Override
public Void call() throws Exception {
HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
String oozieUrl = "http://localhost:11/oozie";
String[] args = new String[] { "job", "-update", "aaa", "-dryrun", "-oozie", oozieUrl, "-debug" };
OozieCLI cli = new OozieCLI();
CLIParser parser = cli.getCLIParser();
try {
final CLIParser.Command command = parser.parse(args);
cli.processCommand(parser, command);
}
catch (Exception e) {
assertTrue(e.getMessage().contains(
"Error while connecting Oozie server. No of retries = 4. Exception = Connection refused"));
}
return null;
}
});
}
public void testNoRetryForError() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
@Override
public Void call() throws Exception {
HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-info", "aaa", "-oozie", oozieUrl, "-debug" };
OozieCLI cli = new OozieCLI();
CLIParser parser = cli.getCLIParser();
try {
final CLIParser.Command command = parser.parse(args);
cli.processCommand(parser, command);
}
catch (Exception e) {
//Create connection will be successful, no retry
assertFalse(e.getMessage().contains("Error while connecting Oozie server"));
assertTrue(e.getMessage().contains("invalid job id [aaa]"));
}
return null;
}
});
}
public void testRetryWithRetryCount() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
@Override
public Void call() throws Exception {
HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
String oozieUrl = "http://localhost:11/oozie";
String[] args = new String[] { "job", "-update", "aaa", "-dryrun", "-oozie", oozieUrl, "-debug" };
OozieCLI cli = new OozieCLI() {
protected void setRetryCount(OozieClient wc) {
wc.setRetryCount(2);
}
public CLIParser getCLIParser(){
return super.getCLIParser();
}
};
CLIParser parser = cli.getCLIParser();
try {
final CLIParser.Command command = parser.parse(args);
cli.processCommand(parser, command);
}
catch (Exception e) {
assertTrue(e.getMessage().contains(
"Error while connecting Oozie server. No of retries = 2. Exception = Connection refused"));
}
return null;
}
});
}
    /**
     * Verifies {@code admin -configuration} prints the server configuration
     * (must contain the oozie.base.url property).
     */
    public void testAdminConfiguration() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-configuration", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("oozie.base.url"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -osenv} prints the server OS environment
     * (must contain JAVA_HOME).
     */
    public void testAdminOsEnv() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-osenv", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("JAVA_HOME"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -javasysprops} prints the server JVM system properties
     * (must contain java.vendor).
     */
    public void testAdminJavaSystemProperties() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-javasysprops", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("java.vendor"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -instrumentation} output when {@code InstrumentationService}
     * is active, and that {@code admin -metrics} then reports metrics as unavailable.
     */
    public void testAdminInstrumentation() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                Services.get().setService(InstrumentationService.class);
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-instrumentation", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("webservices.version-GET"));
                // metrics and instrumentation are mutually exclusive services
                args = new String[]{"admin", "-metrics", "-oozie", oozieUrl};
                out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("Metrics are unavailable"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code admin -metrics} output when {@code MetricsInstrumentationService}
     * is active, and that {@code admin -instrumentation} then reports as unavailable.
     */
    public void testAdminMetrics() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, IS_SECURITY_ENABLED, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                Services.get().setService(MetricsInstrumentationService.class);
                String oozieUrl = getContextURL();
                String[] args = new String[]{"admin", "-metrics", "-oozie", oozieUrl};
                String out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("webservices.version-GET"));
                // metrics and instrumentation are mutually exclusive services
                args = new String[]{"admin", "-instrumentation", "-oozie", oozieUrl};
                out = runOozieCLIAndGetStdout(args);
                assertTrue(out.contains("Instrumentation is unavailable"));
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -slaenable} issues an SLA enable-alert action on a coordinator job.
     */
    public void testSlaEnable() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "job", "-slaenable", "aaa-C", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(MockCoordinatorEngineService.did, RestConstants.SLA_ENABLE_ALERT);
                return null;
            }
        });
    }
    /**
     * Verifies {@code job -sladisable} issues an SLA disable-alert action on a coordinator job.
     */
    public void testSlaDisable() throws Exception {
        runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
                String oozieUrl = getContextURL();
                String[] args = new String[] { "job", "-sladisable", "aaa-C", "-oozie", oozieUrl };
                assertEquals(0, new OozieCLI().run(args));
                assertEquals(MockCoordinatorEngineService.did, RestConstants.SLA_DISABLE_ALERT);
                return null;
            }
        });
    }
public void testSlaChange() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
@Override
public Void call() throws Exception {
HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-slachange", "aaa-C", "-oozie", oozieUrl };
assertEquals(0, new OozieCLI().run(args));
assertEquals(MockCoordinatorEngineService.did, RestConstants.SLA_CHANGE);
return null;
}
});
}
public void testCoordActionMissingdependencies() throws Exception {
runTest(END_POINTS, SERVLET_CLASSES, false, new Callable<Void>() {
@Override
public Void call() throws Exception {
HeaderTestingVersionServlet.OOZIE_HEADERS.clear();
String oozieUrl = getContextURL();
String[] args = new String[] { "job", "-missingdeps", "aaa-C", "-oozie", oozieUrl };
assertEquals(0, new OozieCLI().run(args));
assertEquals(MockCoordinatorEngineService.did, RestConstants.COORD_ACTION_MISSING_DEPENDENCIES);
assertFalse(MockCoordinatorEngineService.startedCoordJobs.get(1));
return null;
}
});
}
private String runOozieCLIAndGetStdout(String[] args) {
PrintStream original = System.out;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
String outStr = null;
System.out.flush();
try {
System.setOut(ps);
assertEquals(0, new OozieCLI().run(args));
System.out.flush();
outStr = baos.toString();
} finally {
System.setOut(original);
if (outStr != null) {
System.out.print(outStr);
}
System.out.flush();
}
return outStr;
}
private String runOozieCLIAndGetStderr(String[] args) {
PrintStream original = System.err;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
String outStr = null;
System.err.flush();
try {
System.setErr(ps);
assertEquals(-1, new OozieCLI().run(args));
System.err.flush();
outStr = baos.toString();
} finally {
System.setErr(original);
if (outStr != null) {
System.err.print(outStr);
}
System.err.flush();
}
return outStr;
}
}
| apache-2.0 |
asiaon123/hsweb-framework | hsweb-authorization/hsweb-authorization-api/src/main/java/org/hswebframework/web/authorization/define/AuthorizingContext.java | 1010 | package org.hswebframework.web.authorization.define;
import org.hswebframework.web.authorization.Authentication;
import org.hswebframework.web.boost.aop.context.MethodInterceptorContext;
/**
 * Access-control context: bundles the authorization definition, the caller's
 * authentication and the intercepted method invocation for use by
 * authorization handlers.
 */
public class AuthorizingContext {

    private AuthorizeDefinition definition;

    private Authentication authentication;

    private MethodInterceptorContext paramContext;

    public AuthorizeDefinition getDefinition() {
        return this.definition;
    }

    public void setDefinition(AuthorizeDefinition definition) {
        this.definition = definition;
    }

    public Authentication getAuthentication() {
        return this.authentication;
    }

    public void setAuthentication(Authentication authentication) {
        this.authentication = authentication;
    }

    public MethodInterceptorContext getParamContext() {
        return this.paramContext;
    }

    public void setParamContext(MethodInterceptorContext paramContext) {
        this.paramContext = paramContext;
    }
}
| apache-2.0 |
chRyNaN/Android-Guitar-Tuner | app/src/main/java/com/chrynan/android_guitar_tuner/ui/dialog/PermissionRationalDialogFragment.java | 2386 | package com.chrynan.android_guitar_tuner.ui.dialog;
import android.app.Dialog;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v7.app.AlertDialog;
import com.chrynan.android_guitar_tuner.R;
import com.chrynan.android_guitar_tuner.exception.MissingListenerException;
/**
* A {@link DialogFragment} that displays more information on why the AUDIO_RECORD permission is
* needed.
*/
public class PermissionRationalDialogFragment extends DialogFragment {

    /** Fragment-manager tag under which this dialog is shown. */
    public static final String TAG = "PermissionRationalDialogFragment";

    /** Callback target, resolved in {@link #onAttach(Context)}. */
    private DialogListener listener;

    public static PermissionRationalDialogFragment newInstance() {
        return new PermissionRationalDialogFragment();
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);

        // Prefer the parent fragment as the listener, then the host activity;
        // plain instanceof (null-safe) replaces the reflective isInstance calls.
        if (getParentFragment() instanceof DialogListener) {
            listener = (DialogListener) getParentFragment();
        } else if (getActivity() instanceof DialogListener) {
            listener = (DialogListener) getActivity();
        } else {
            // Fall back to the attach context so this path cannot NPE when
            // neither a parent fragment nor an activity is available.
            Class<?> parentClazz = getParentFragment() != null ? getParentFragment().getClass()
                    : getActivity() != null ? getActivity().getClass() : context.getClass();
            throw new MissingListenerException(DialogListener.class, parentClazz, PermissionRationalDialogFragment.class);
        }
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        // Every outcome (proceed, cancel, dismiss) is reported to the listener.
        return new AlertDialog.Builder(getActivity(), R.style.PermissionDialog)
                .setTitle(R.string.permission_dialog_rationale_title)
                .setMessage(R.string.permission_dialog_rationale_desc)
                .setNegativeButton(android.R.string.cancel, (dialog, which) -> {
                    dialog.dismiss();
                    listener.onDialogCanceled();
                })
                .setPositiveButton(android.R.string.ok, (dialog, which) -> {
                    dialog.dismiss();
                    listener.onDialogProceed();
                })
                .setOnDismissListener(dialog -> listener.onDialogDismissed())
                .create();
    }

    /** Implemented by the parent fragment or host activity to receive results. */
    public interface DialogListener {

        void onDialogProceed();

        void onDialogCanceled();

        void onDialogDismissed();
    }
}
| apache-2.0 |
EsupPortail/esup-papercut | src/main/java/org/esupportail/papercut/domain/izlypay/IzlyPayUser.java | 1261 | /**
* Licensed to EsupPortail under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* EsupPortail licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.esupportail.papercut.domain.izlypay;
/**
 * Plain data holder for an Izly-pay user: an id, an e-mail address and a
 * client code, each with a matching getter/setter pair.
 */
public class IzlyPayUser {

    // Fields keep their original package-private visibility.
    String id;
    String email;
    String clientCode;

    public String getId() {
        return this.id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getEmail() {
        return this.email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getClientCode() {
        return this.clientCode;
    }

    public void setClientCode(String clientCode) {
        this.clientCode = clientCode;
    }
}
| apache-2.0 |
wanggit/Access-Frequency | src/main/java/com/github/wanggit/access/frequency/autoconfigure/AccessFrequencyProperties.java | 398 | package com.github.wanggit.access.frequency.autoconfigure;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties("access.frequency")
public class AccessFrequencyProperties {
public boolean isEnable() {
return enable;
}
public void setEnable(boolean enable) {
this.enable = enable;
}
private boolean enable;
}
| apache-2.0 |
formix/dsx | src/test/java/org/formix/dsx/serialization/entities/EntityBase.java | 2946 | /****************************************************************************
* Copyright 2009-2014 Jean-Philippe Gravel, P. Eng. CSDP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package org.formix.dsx.serialization.entities;
import java.util.Objects;

import javax.xml.bind.annotation.XmlTransient;
/**
 * Convenience base implementation of {@link Entity} that tracks the entity's
 * life-cycle state and lets subclasses report property changes through
 * {@link #notifyUpdate()}. State changes can be suspended via
 * {@link #stopListeningForChanges()}.
 */
public abstract class EntityBase implements Entity {

    private EntityState entityState;
    private boolean listeningForChanges;

    /** Creates a NEW entity that listens for changes. */
    public EntityBase() {
        this.entityState = EntityState.NEW;
        this.listeningForChanges = true;
    }

    @Override
    public EntityState getEntityState() {
        return this.entityState;
    }

    @Override
    public void setEntityState(EntityState value) {
        this.entityState = value;
    }

    /**
     * Marks the entity as deleted: a NEW entity becomes DEAD, any other state
     * becomes DELETED. No-op while change listening is suspended.
     */
    @Override
    public void delete() {
        if (!this.listeningForChanges)
            return;
        if (this.entityState == EntityState.NEW)
            this.entityState = EntityState.DEAD;
        else
            this.entityState = EntityState.DELETED;
    }

    /**
     * Applies the pending state: DELETED becomes DEAD, anything else becomes
     * UNCHANGED. No-op while change listening is suspended.
     */
    @Override
    public void apply() {
        if (!this.listeningForChanges)
            return;
        if (this.entityState == EntityState.DELETED)
            this.entityState = EntityState.DEAD;
        else
            this.entityState = EntityState.UNCHANGED;
    }

    /**
     * Flags the entity as updated when the two values differ.
     *
     * @param oldValue the previous property value, may be null
     * @param newValue the new property value, may be null
     */
    protected void notifyUpdate(Object oldValue, Object newValue) {
        if (!this.listeningForChanges)
            return;
        // Objects.equals is null-safe and collapses the original four
        // null/equality branches into one check with identical behavior.
        if (!Objects.equals(oldValue, newValue))
            this.notifyUpdate();
    }

    /** Flags the entity as UPDATED, but only from the UNCHANGED state. */
    protected void notifyUpdate() {
        if (this.entityState == EntityState.UNCHANGED) {
            this.entityState = EntityState.UPDATED;
        }
    }

    /**
     * Tells if the current entity is listening for changes. If the entity is
     * not listening for changes, nothing is going to affect the entityState
     * outside of setEntityState.
     *
     * @return True if the current entity is listening for changes. False
     *         otherwise.
     */
    @XmlTransient
    public boolean isListenForChanges() {
        return listeningForChanges;
    }

    /**
     * Tells the current entity to start listening for changes.
     */
    public void startListeningForChanges() {
        this.listeningForChanges = true;
    }

    /**
     * Tells the current entity to stop listening for changes.
     */
    public void stopListeningForChanges() {
        this.listeningForChanges = false;
    }
}
| apache-2.0 |
riengcs/zk-tutorial | src/main/java/com/zk/tutorial/model/SidebarPageDto.java | 610 | package com.zk.tutorial.model;
import java.io.Serializable;
/**
* @author csrieng
*
*/
/**
 * Immutable value object describing one sidebar entry: all four fields are
 * fixed at construction time and only exposed through getters.
 *
 * @author csrieng
 */
public class SidebarPageDto implements Serializable {

    private static final long serialVersionUID = 1L;

    String name;
    String label;
    String iconUri;
    String uri;

    /**
     * Creates a sidebar entry.
     *
     * @param name    the page name
     * @param label   the display label
     * @param iconUri the icon URI
     * @param uri     the target URI
     */
    public SidebarPageDto(String name, String label, String iconUri, String uri) {
        this.name = name;
        this.label = label;
        this.iconUri = iconUri;
        this.uri = uri;
    }

    public String getName() {
        return this.name;
    }

    public String getLabel() {
        return this.label;
    }

    public String getIconUri() {
        return this.iconUri;
    }

    public String getUri() {
        return this.uri;
    }
}
opetrovski/development | oscm-saml2-api-unittests/javasrc/org/oscm/saml2/api/AssertionConsumerServiceTest.java | 3311 | /*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 05.06.2013
*
*******************************************************************************/
package org.oscm.saml2.api;
import java.util.Calendar;
import org.junit.Before;
import org.junit.Test;
import org.oscm.internal.types.exception.AssertionValidationException;
import org.oscm.string.Strings;
/**
* @author kulle
*
*/
public class AssertionConsumerServiceTest {

    private AssertionConsumerService acs;

    private final String FILE_OPENAM_RESPONSE = "javares/openamResponse.xml";
    private final String FILE_KEYSTORE_OPENAM = "javares/openam.jks";

    private final String acsUrl = "http://estkulle:8680/test/jsp/showPostResponse.jsp";
    private final String acsUrlHttps = "https://estkulle:8681/test/jsp/showPostResponse.jsp";

    public static final String tenantID = "8f96dede";

    @Before
    public void setup() {
        // Every test exercises the identical service configuration, so build
        // it once per test here instead of repeating it in each method.
        acs = new AssertionConsumerService(acsUrl, acsUrlHttps,
                FILE_KEYSTORE_OPENAM, "changeit");
    }

    /**
     * Loads the canned OpenAM response, moves its timestamp one year into the
     * future (so the assertion is always "fresh" at execution time) and
     * substitutes the given recipient URL.
     *
     * @param recipient value replacing the @RECIPIENT placeholder
     * @return the prepared SAML response
     * @throws Exception if the fixture file cannot be read
     */
    private String prepareResponse(String recipient) throws Exception {
        String response = Strings.textFileToString(FILE_OPENAM_RESPONSE);
        response = response.replace("2013-05-29T10:53:36Z", (Calendar
                .getInstance().get(Calendar.YEAR) + 1) + "-05-29T10:53:36Z");
        return response.replace("@RECIPIENT", recipient);
    }

    @Test
    public void validateResponse_Http() throws Exception {
        // given
        String response = prepareResponse(acsUrl);

        // when
        acs.validateResponse(response, "4040406c-1530-11e0-e869-0110283f4jj6", tenantID);

        // then no exception expected
    }

    @Test
    public void validateResponse_Https() throws Exception {
        // given
        String response = prepareResponse(acsUrlHttps);

        // when
        acs.validateResponse(response, "4040406c-1530-11e0-e869-0110283f4jj6", tenantID);

        // then no exception expected
    }

    @Test(expected = AssertionValidationException.class)
    public void validateResponse_wrongRecipient() throws Exception {
        // given a recipient that does not match either configured ACS URL
        String response = prepareResponse("https://something.else.de");

        // when
        acs.validateResponse(response, "4040406c-1530-11e0-e869-0110283f4jj6", tenantID);

        // then exception
    }
}
| apache-2.0 |
asakusafw/asakusafw-legacy | legacy-project/asakusa-model-generator/src/test/java/com/asakusafw/modelgen/emitter/EmitterTestRoot.java | 16093 | /**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.modelgen.emitter;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import javax.tools.Diagnostic;
import javax.tools.JavaFileObject;
import org.apache.hadoop.io.Writable;
import org.junit.After;
import org.junit.Before;
import com.asakusafw.runtime.io.ModelInput;
import com.asakusafw.runtime.io.ModelOutput;
import com.asakusafw.runtime.io.RecordEmitter;
import com.asakusafw.runtime.io.RecordParser;
import com.asakusafw.utils.java.jsr199.testing.VolatileCompiler;
import com.asakusafw.utils.java.jsr199.testing.VolatileJavaFile;
import com.asakusafw.utils.java.model.syntax.CompilationUnit;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.TypeDeclaration;
import com.asakusafw.utils.java.model.util.Emitter;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.vocabulary.model.DataModel;
import com.asakusafw.vocabulary.model.JoinedModel;
import com.asakusafw.vocabulary.model.SummarizedModel;
/**
* Test root for this package.
*/
public abstract class EmitterTestRoot {

    /**
     * Java DOM model factory used to build generated sources.
     */
    protected ModelFactory f;

    /**
     * The list of source files generated so far.
     */
    List<VolatileJavaFile> files;

    /**
     * The in-memory compiler available to each test.
     */
    VolatileCompiler compiler;

    /**
     * Initializes the test.
     * @throws Exception if occur
     */
    @Before
    public void setUp() throws Exception {
        f = Models.getModelFactory();
        files = new ArrayList<>();
        compiler = new VolatileCompiler();
    }

    /**
     * Disposes the test resources.
     * @throws Exception if an error occurs
     */
    @After
    public void tearDown() throws Exception {
        if (compiler != null) {
            compiler.close();
        }
    }

    /**
     * Opens a virtual (in-memory) output for the given source.
     * @param source the target source program
     * @return the opened output
     */
    protected PrintWriter createOutputFor(CompilationUnit source) {
        StringBuilder buf = new StringBuilder();
        TypeDeclaration type = Emitter.findPrimaryType(source);
        if (source.getPackageDeclaration() != null) {
            // Mirror the package structure in the virtual file path.
            buf.append(source.getPackageDeclaration().toString().replace('.', '/'));
            buf.append('/');
        }
        buf.append(type.getName().getToken());
        VolatileJavaFile file = new VolatileJavaFile(buf.toString());
        files.add(file);
        return new PrintWriter(file.openWriter());
    }

    /**
     * Compiles all generated sources.
     * @return the class loader holding the compilation results
     */
    protected ClassLoader compile() {
        if (files.isEmpty()) {
            throw new AssertionError();
        }
        for (JavaFileObject java : files) {
            compiler.addSource(java);
        }
        compiler.addArguments("-Xlint");
        List<Diagnostic<? extends JavaFileObject>> diagnostics = compiler.doCompile();
        boolean hasWrong = false;
        for (Diagnostic<? extends JavaFileObject> d : diagnostics) {
            if (d.getKind() == Diagnostic.Kind.ERROR || d.getKind() == Diagnostic.Kind.WARNING) {
                JavaFileObject java = d.getSource();
                if (java != null) {
                    try {
                        // Dump the offending source to ease debugging.
                        System.out.println("=== " + java.getName());
                        System.out.println(java.getCharContent(true));
                        System.out.println();
                        System.out.println();
                    } catch (IOException e) {
                        // ignored
                    }
                }
                System.out.println("--");
                System.out.println(d.getMessage(Locale.getDefault()));
                hasWrong = true;
            }
        }
        if (hasWrong) {
            throw new AssertionError(diagnostics);
        }
        return compiler.getClassLoader();
    }

    /**
     * Creates an instance of the class with the given simple name.
     * @param loader the target class loader
     * @param name the simple class name (resolved under {@code com.example})
     * @return an instance of the target class
     */
    protected Object create(ClassLoader loader, String name) {
        try {
            Class<?> klass = loader.loadClass("com.example." + name);
            return klass.newInstance();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Creates a {@link ModelInput} of the class with the given simple name.
     * @param loader the target class loader
     * @param parser the record parser to wrap
     * @param name the simple class name (resolved under {@code com.example})
     * @return an instance of the target class
     */
    @SuppressWarnings("unchecked")
    protected ModelInput<Object> createInput(
            ClassLoader loader,
            RecordParser parser,
            String name) {
        try {
            Class<?> klass = loader.loadClass("com.example." + name);
            Constructor<?> ctor = klass.getConstructor(RecordParser.class);
            return (ModelInput<Object>) ctor.newInstance(parser);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Creates a {@link ModelOutput} of the class with the given simple name.
     * @param loader the target class loader
     * @param emitter the record emitter to wrap
     * @param name the simple class name (resolved under {@code com.example})
     * @return an instance of the target class
     */
    @SuppressWarnings("unchecked")
    protected ModelOutput<Object> createOutput(
            ClassLoader loader,
            RecordEmitter emitter,
            String name) {
        try {
            Class<?> klass = loader.loadClass("com.example." + name);
            Constructor<?> ctor = klass.getConstructor(RecordEmitter.class);
            return (ModelOutput<Object>) ctor.newInstance(emitter);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Returns the value of the specified object's property.
     * @param object the target object
     * @param name the name of the getter corresponding to the property
     * @return the corresponding property value
     * @throws Throwable if an error occurs
     */
    public static Object get(Object object, String name) throws Throwable {
        try {
            return find(object, name).invoke(object);
        } catch (InvocationTargetException e) {
            // Unwrap so tests see the original failure.
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Changes the value of the specified object's property.
     * @param object the target object
     * @param name the name of the setter corresponding to the property
     * @param value the value to set
     * @throws Throwable if an error occurs
     */
    public static void set(Object object, String name, Object value) throws Throwable {
        try {
            Method method = find(object, name);
            method.invoke(object, value);
        } catch (InvocationTargetException e) {
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Copies the contents of {@code argument} into {@code object}.
     * @param object the copy destination
     * @param argument the copy source
     * @throws Throwable if an error occurs
     */
    public static void copyFrom(Object object, Object argument) throws Throwable {
        try {
            Method method = find(object, DataModel.Interface.METHOD_NAME_COPY_FROM);
            method.invoke(object, argument);
        } catch (InvocationTargetException e) {
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Joins the contents of the two given objects and writes the result into
     * the single target object.
     * @param object the copy destination
     * @param left the object being joined
     * @param right the object joining the other
     * @throws Throwable if an error occurs
     */
    public static void joinFrom(Object object, Object left, Object right) throws Throwable {
        try {
            Method method = find(object, JoinedModel.Interface.METHOD_NAME_JOIN_FROM);
            method.invoke(object, left, right);
        } catch (InvocationTargetException e) {
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Splits the contents of the joined object and writes them out into the
     * two given target objects.
     * @param object the copy source (the joined object)
     * @param left the object that was joined
     * @param right the object that joined the other
     * @throws Throwable if an error occurs
     */
    public static void split(Object object, Object left, Object right) throws Throwable {
        try {
            Method method = find(object, JoinedModel.Interface.METHOD_NAME_SPLIT_INTO);
            method.invoke(object, left, right);
        } catch (InvocationTargetException e) {
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Writes the contents of the given object into the summarization object,
     * starting a new summarization.
     * @param object the target summarization object
     * @param argument the object to write out
     * @throws Throwable if an error occurs
     */
    public static void startSummarize(Object object, Object argument) throws Throwable {
        try {
            Method method = find(object, SummarizedModel.Interface.METHOD_NAME_START_SUMMARIZATION);
            method.invoke(object, argument);
        } catch (InvocationTargetException e) {
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Adds the contents of the given object into the summarization object.
     * @param object the target summarization object
     * @param argument the object to add
     * @throws Throwable if an error occurs
     */
    public static void combineSummarize(Object object, Object argument) throws Throwable {
        try {
            Method method = find(object, SummarizedModel.Interface.METHOD_NAME_COMBINE_SUMMARIZATION);
            method.invoke(object, argument);
        } catch (InvocationTargetException e) {
            throw e.getCause();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    // Resolves the single public method with the given name; fails if the
    // name is missing or overloaded.
    private static Method find(Object object, String name) {
        List<Method> found = new ArrayList<>();
        for (Method method : object.getClass().getMethods()) {
            if (method.getName().equals(name)) {
                found.add(method);
            }
        }
        if (found.size() != 1) {
            throw new AssertionError(name + found);
        }
        return found.get(0);
    }

    /**
     * Serializes the value as a {@link Writable} and restores it, verifying
     * round-trip equality.
     * @param <T> the data type
     * @param value the target data
     * @return the restored data
     */
    @SuppressWarnings("unchecked")
    protected <T> T restore(T value) {
        assertThat(value, instanceOf(Writable.class));
        Writable writable = (Writable) value;
        try {
            ByteArrayOutputStream write = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(write);
            writable.write(out);
            out.flush();
            ByteArrayInputStream read = new ByteArrayInputStream(write.toByteArray());
            ObjectInputStream in = new ObjectInputStream(read);
            Writable copy = writable.getClass().newInstance();
            copy.readFields(in);
            // The stream must be fully consumed and the copy must be equal.
            assertThat(in.read(), is(-1));
            assertThat(copy, is((Writable) value));
            assertThat(copy.hashCode(), is(value.hashCode()));
            return (T) copy;
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Mock of {@link TableModelEntityEmitter}.
     */
    protected class Table extends TableModelEntityEmitter {
        Table() {
            super(
                    Models.getModelFactory(),
                    new File("."),
                    "com.example",
                    Collections.singletonList("Table Model Entity Emitter"));
        }
        @Override
        protected PrintWriter openOutputFor(CompilationUnit source) throws IOException {
            // Redirect output into the in-memory files instead of the disk.
            return createOutputFor(source);
        }
    }

    /**
     * Mock of {@link JoinedModelEntityEmitter}.
     */
    protected class Joined extends JoinedModelEntityEmitter {
        Joined() {
            super(
                    Models.getModelFactory(),
                    new File("."),
                    "com.example",
                    Collections.singletonList("Joined Model Entity Emitter"));
        }
        @Override
        protected PrintWriter openOutputFor(CompilationUnit source) throws IOException {
            return createOutputFor(source);
        }
    }

    /**
     * Mock of {@link SummarizedModelEntityEmitter}.
     */
    protected class Summarized extends SummarizedModelEntityEmitter {
        Summarized() {
            super(
                    Models.getModelFactory(),
                    new File("."),
                    "com.example",
                    Collections.singletonList("Summarized Model Entity Emitter"));
        }
        @Override
        protected PrintWriter openOutputFor(CompilationUnit source) throws IOException {
            return createOutputFor(source);
        }
    }

    /**
     * Mock of {@link ModelInputEmitter}.
     */
    protected class TsvIn extends ModelInputEmitter {
        TsvIn() {
            super(
                    Models.getModelFactory(),
                    new File("."),
                    "com.example",
                    Collections.singletonList("TSV Input Emitter"));
        }
        @Override
        protected PrintWriter openOutputFor(CompilationUnit source) throws IOException {
            return createOutputFor(source);
        }
    }

    /**
     * Mock of {@link ModelOutputEmitter}.
     */
    protected class TsvOut extends ModelOutputEmitter {
        TsvOut() {
            super(
                    Models.getModelFactory(),
                    new File("."),
                    "com.example",
                    Collections.singletonList("TSV Output Emitter"));
        }
        @Override
        protected PrintWriter openOutputFor(CompilationUnit source) throws IOException {
            return createOutputFor(source);
        }
    }
}
| apache-2.0 |
everttigchelaar/camel-svn | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/FromFtpThirdPoolOkTest.java | 3371 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote;
import java.io.File;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Before;
import org.junit.Test;
/**
* @version
*/
public class FromFtpThirdPoolOkTest extends FtpServerTestSupport {

    // Shared across the route processor and the test method; counts polls.
    private static int counter;
    private String body = "Hello World this file will be deleted";

    private String getFtpUrl() {
        return "ftp://admin@localhost:" + getPort() + "/thirdpool?password=admin&delete=true";
    }

    @Override
    @Before
    public void setUp() throws Exception {
        // NOTE(review): "thridpool" does not match the "thirdpool" directory
        // used everywhere else in this test — likely a typo that makes this
        // cleanup a no-op; confirm against FtpServerTestSupport's layout.
        deleteDirectory("target/thridpool");
        super.setUp();
    }

    /**
     * The consumer fails the first two polls (forced exceptions), succeeds on
     * the third, and only then may the file be deleted from the FTP area.
     */
    @Test
    public void testPollFileAndShouldBeDeletedAtThirdPoll() throws Exception {
        template.sendBodyAndHeader(getFtpUrl(), body, Exchange.FILE_NAME, "hello.txt");

        getMockEndpoint("mock:result").expectedBodiesReceived(body);
        // 2 first attempt should fail
        getMockEndpoint("mock:error").expectedMessageCount(2);

        assertMockEndpointsSatisfied();

        // give time to delete file
        Thread.sleep(200);

        assertEquals(3, counter);

        // assert the file is deleted
        // NOTE(review): hard-coded path — presumably the FTP root configured
        // by FtpServerTestSupport; verify if the server layout changes.
        File file = new File("./res/home/thirdpool/hello.txt");
        file = file.getAbsoluteFile();
        assertFalse("The file should have been deleted", file.exists());
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                // no redeliveries as we want the ftp consumer to try again
                errorHandler(deadLetterChannel("mock:error").maximumRedeliveries(0).logStackTrace(false).handled(false));

                from(getFtpUrl()).process(new Processor() {
                    public void process(Exchange exchange) throws Exception {
                        counter++;
                        if (counter < 3) {
                            // file should exists
                            File file = new File("./res/home/thirdpool/hello.txt");
                            file = file.getAbsoluteFile();
                            assertTrue("The file should NOT have been deleted", file.exists());
                            // Force the poll to fail so the consumer retries.
                            throw new IllegalArgumentException("Forced by unittest");
                        }
                    }
                }).to("mock:result");
            }
        };
    }
} | apache-2.0 |
uriio/beacons-android | beacons-android/src/main/java/com/uriio/beacons/ble/gatt/EddystoneGattServer.java | 8998 | package com.uriio.beacons.ble.gatt;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattServer;
import android.bluetooth.BluetoothGattServerCallback;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.content.Context;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresApi;
import com.uriio.beacons.Beacons;
import com.uriio.beacons.Loggable;
import com.uriio.beacons.Util;
import com.uriio.beacons.model.EddystoneBase;
import com.uriio.beacons.model.EddystoneURL;
import java.util.List;
import java.util.Locale;
/**
* Manages an Eddystone-GATT config service.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
public class EddystoneGattServer extends BluetoothGattServerCallback {
    /** Receives the outcome of a configuration session (see close()). */
    public interface Listener {
        void onGattFinished(EddystoneBase configuredBeacon);
    }

    private static final String TAG = "EddystoneGattServer";

    private EddystoneGattConfigurator mEddystoneConfigurator;

    private EddystoneGattService mEddystoneGattService = null;
    private BluetoothGattServer mGattServer;
    private Listener mListener;
    private EddystoneBase mBeacon = null;
    private Loggable mLogger;
    private BluetoothManager mBluetoothManager;
    // NOTE(review): in this portion of the class mStarted is never assigned
    // true, so the guards reading it appear ineffective — confirm against the
    // rest of the file.
    private boolean mStarted = false;

    public EddystoneGattServer(Listener listener) {
        mListener = listener;
    }

    public void setLogger(Loggable loggable) {
        mLogger = loggable;
    }

    /**
     * Attempts to add this GATT service to the device's GATT server.
     * @param beacon The initial beacon that will become connectable and be presented as configured currently.
     * @return True if the GATT service was successfully added to the device's Bluetooth GATT server.
     * Only one GATT service can run on the same device at the same time.
     */
    public boolean start(@NonNull EddystoneBase beacon) {
        if (mStarted) return false;

        mBeacon = beacon;

        Context context = Beacons.getContext();
        mBluetoothManager = (BluetoothManager) context.getSystemService(Context.BLUETOOTH_SERVICE);
        if (null == mBluetoothManager) {
            // this check mostly hides a NPE warning - this is not null on Pixel 2 API 27 emulator
            log("Could not obtain access to Bluetooth manager");
            return false;
        }

        // fix an inner NPE not handled by openGattServer
        if (null == mBluetoothManager.getAdapter()) {
            log("No Bluetooth adapter");
            return false;
        }

        mGattServer = mBluetoothManager.openGattServer(context, this);
        if (null == mGattServer) {
            log("Failed to open GATT server");
            return false;
        }

        List<BluetoothGattService> gattServices = mGattServer.getServices();
        for (BluetoothGattService service : gattServices) {
            // NOTE(review): reference comparison of UUID objects; unless the
            // stack guarantees the same instance, this probably should be
            // equals() — confirm before relying on this duplicate check.
            if (service.getUuid() == EddystoneGattService.UUID_EDDYSTONE_GATT_SERVICE) {
                log("Another Eddystone-GATT service is already being served by this device");
                close();
                return false;
            }
        }

        mEddystoneConfigurator = new EddystoneGattConfigurator(beacon);

        mEddystoneGattService = new EddystoneGattService(this, mEddystoneConfigurator);
        if (!mGattServer.addService(mEddystoneGattService.getService())) {
            log("Eddystone-GATT service registration failed");
            close();
            return false;
        }

        // advertise beacon as connectable
        if (!beacon.isConnectable()) {
            log("Setting beacon connectable");
            beacon.edit().setConnectable(true).apply();
        }

        // finally, make sure the provided beacon is started
        return beacon.start();
    }
/**
* Equivalent to start("http://cf.physical-web.org")
*/
public boolean start() {
return start("http://cf.physical-web.org");
}
/**
* Starts the GATT config service using an Eddystone-URL as the initial configurable beacon.
* @param url An URL to use as the initial Eddystone-URL configurable / connectable beacon
*/
public boolean start(String url) {
return !mStarted && start(new EddystoneURL(url));
}
    /**
     * Returns the beacon currently being served for configuration over GATT.
     *
     * @return The currently configured beacon. May be null if an authenticated user reset it.
     */
    public EddystoneBase getBeacon() {
        return mBeacon;
    }
public void close() {
if (null != mGattServer) {
mGattServer.close();
mGattServer = null;
}
// if BT was off when we tried to start, the configurator is null
if (null != mBeacon && null != mEddystoneConfigurator) {
EddystoneBase configuredBeacon = mEddystoneConfigurator.getConfiguredBeacon();
if (mBeacon == configuredBeacon) {
log("Setting beacon un-connectable");
configuredBeacon.edit().setConnectable(false).apply();
}
else {
// no beacon configured, or the configured beacon is not the initial one
// stop temporary or provided beacon (and delete it if it was also saved)
mBeacon.delete();
if (null != configuredBeacon) {
// save, if the original beacon was saved
if (mBeacon.getSavedId() > 0) {
configuredBeacon.save(true);
}
else configuredBeacon.start();
}
}
if (null != mListener) {
mListener.onGattFinished(null == mEddystoneConfigurator ? null : configuredBeacon);
}
mBeacon = null;
}
}
    @Override
    public void onCharacteristicReadRequest(BluetoothDevice device, int requestId, int offset,
                                            BluetoothGattCharacteristic characteristic) {
        super.onCharacteristicReadRequest(device, requestId, offset, characteristic);
        // Delegate all read handling (and the response) to the Eddystone GATT service.
        mEddystoneGattService.readCharacteristic(mGattServer, device, requestId, offset, characteristic);
    }
@Override
public void onCharacteristicWriteRequest(BluetoothDevice device, int requestId,
BluetoothGattCharacteristic characteristic,
boolean preparedWrite, boolean responseNeeded,
int offset, byte[] value) {
super.onCharacteristicWriteRequest(device, requestId, characteristic, preparedWrite, responseNeeded, offset, value);
int status = mEddystoneGattService.writeCharacteristic(device, characteristic, value);
if (responseNeeded) {
mGattServer.sendResponse(device, requestId, status, offset,
status == BluetoothGatt.GATT_SUCCESS ? characteristic.getValue() : null);
}
}
@Override
public void onConnectionStateChange(BluetoothDevice device, int status, int newState) {
super.onConnectionStateChange(device, status, newState);
if (newState == BluetoothGatt.STATE_DISCONNECTED) {
log(device + " has disconnected");
if (device.equals(mEddystoneGattService.getConnectedOwner())) {
log("Owner disconnected, stopping GATT server");
mEddystoneGattService.onOwnerDisconnected();
close();
}
}
else if (newState == BluetoothGatt.STATE_CONNECTED) {
log(device + " has connected");
if (mEddystoneGattService.getConnectedOwner() != null) {
// don't allow a second client to connect at the same time
log(device + " tried to connect, but owner is active. Disconnecting.");
mGattServer.cancelConnection(device);
}
}
}
    @Override
    public void onExecuteWrite(BluetoothDevice device, int requestId, boolean execute) {
        super.onExecuteWrite(device, requestId, execute);
        // Prepared (long) writes are not handled by this server, so any executeWrite
        // request is unexpected; log it for diagnosis.
        log(String.format(Locale.US, "%s Unexpected request %d: executeWrite(%s)",
                device, requestId, execute));
        // NOTE(review): the response below is deliberately disabled — confirm clients
        // don't stall waiting for an executeWrite acknowledgement.
        // mGattServer.sendResponse(device, requestId, BluetoothGatt.GATT_SUCCESS, 0, new byte[0]);
    }
void disconnectAll(BluetoothDevice allowedDevice) {
for (BluetoothDevice device : mBluetoothManager.getConnectedDevices(BluetoothProfile.GATT)) {
if (!device.equals(allowedDevice)) {
log(String.format("Disconnecting %s", device));
mGattServer.cancelConnection(device);
}
}
}
void log(String message) {
if (null != mLogger) {
mLogger.log(TAG, message);
}
else Util.log(TAG, message);
}
} | apache-2.0 |
pengzong1111/solr4 | lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestASCIIFoldingFilter.java | 92939 | package org.apache.lucene.analysis.miscellaneous;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;
public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
/**
* Pop one input token's worth of tokens off the filter and verify that they are as expected.
*/
void assertNextTerms(String expectedUnfolded, String expectedFolded, ASCIIFoldingFilter filter,
CharTermAttribute termAtt) throws Exception {
assertTrue(filter.incrementToken());
assertEquals(expectedFolded, termAtt.toString());
if (filter.isPreserveOriginal() && !expectedUnfolded.equals(expectedFolded)) {
assertTrue(filter.incrementToken());
assertEquals(expectedUnfolded, termAtt.toString());
}
}
// testLain1Accents() is a copy of TestLatin1AccentFilter.testU().
public void testLatin1Accents() throws Exception {
TokenStream stream = new MockTokenizer(new StringReader
("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ"
+" Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij"
+" ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"), MockTokenizer.WHITESPACE, false);
ASCIIFoldingFilter filter = new ASCIIFoldingFilter(stream, random().nextBoolean());
CharTermAttribute termAtt = filter.getAttribute(CharTermAttribute.class);
filter.reset();
assertNextTerms("Des", "Des", filter, termAtt);
assertNextTerms("mot", "mot", filter, termAtt);
assertNextTerms("clés", "cles", filter, termAtt);
assertNextTerms("À", "A", filter, termAtt);
assertNextTerms("LA", "LA", filter, termAtt);
assertNextTerms("CHAÎNE", "CHAINE", filter, termAtt);
assertNextTerms("À", "A", filter, termAtt);
assertNextTerms("Á", "A", filter, termAtt);
assertNextTerms("Â", "A", filter, termAtt);
assertNextTerms("Ã", "A", filter, termAtt);
assertNextTerms("Ä", "A", filter, termAtt);
assertNextTerms("Å", "A", filter, termAtt);
assertNextTerms("Æ", "AE", filter, termAtt);
assertNextTerms("Ç", "C", filter, termAtt);
assertNextTerms("È", "E", filter, termAtt);
assertNextTerms("É", "E", filter, termAtt);
assertNextTerms("Ê", "E", filter, termAtt);
assertNextTerms("Ë", "E", filter, termAtt);
assertNextTerms("Ì", "I", filter, termAtt);
assertNextTerms("Í", "I", filter, termAtt);
assertNextTerms("Î", "I", filter, termAtt);
assertNextTerms("Ï", "I", filter, termAtt);
assertNextTerms("IJ", "IJ", filter, termAtt);
assertNextTerms("Ð", "D", filter, termAtt);
assertNextTerms("Ñ", "N", filter, termAtt);
assertNextTerms("Ò", "O", filter, termAtt);
assertNextTerms("Ó", "O", filter, termAtt);
assertNextTerms("Ô", "O", filter, termAtt);
assertNextTerms("Õ", "O", filter, termAtt);
assertNextTerms("Ö", "O", filter, termAtt);
assertNextTerms("Ø", "O", filter, termAtt);
assertNextTerms("Œ", "OE", filter, termAtt);
assertNextTerms("Þ", "TH", filter, termAtt);
assertNextTerms("Ù", "U", filter, termAtt);
assertNextTerms("Ú", "U", filter, termAtt);
assertNextTerms("Û", "U", filter, termAtt);
assertNextTerms("Ü", "U", filter, termAtt);
assertNextTerms("Ý", "Y", filter, termAtt);
assertNextTerms("Ÿ", "Y", filter, termAtt);
assertNextTerms("à", "a", filter, termAtt);
assertNextTerms("á", "a", filter, termAtt);
assertNextTerms("â", "a", filter, termAtt);
assertNextTerms("ã", "a", filter, termAtt);
assertNextTerms("ä", "a", filter, termAtt);
assertNextTerms("å", "a", filter, termAtt);
assertNextTerms("æ", "ae", filter, termAtt);
assertNextTerms("ç", "c", filter, termAtt);
assertNextTerms("è", "e", filter, termAtt);
assertNextTerms("é", "e", filter, termAtt);
assertNextTerms("ê", "e", filter, termAtt);
assertNextTerms("ë", "e", filter, termAtt);
assertNextTerms("ì", "i", filter, termAtt);
assertNextTerms("í", "i", filter, termAtt);
assertNextTerms("î", "i", filter, termAtt);
assertNextTerms("ï", "i", filter, termAtt);
assertNextTerms("ij", "ij", filter, termAtt);
assertNextTerms("ð", "d", filter, termAtt);
assertNextTerms("ñ", "n", filter, termAtt);
assertNextTerms("ò", "o", filter, termAtt);
assertNextTerms("ó", "o", filter, termAtt);
assertNextTerms("ô", "o", filter, termAtt);
assertNextTerms("õ", "o", filter, termAtt);
assertNextTerms("ö", "o", filter, termAtt);
assertNextTerms("ø", "o", filter, termAtt);
assertNextTerms("œ", "oe", filter, termAtt);
assertNextTerms("ß", "ss", filter, termAtt);
assertNextTerms("þ", "th", filter, termAtt);
assertNextTerms("ù", "u", filter, termAtt);
assertNextTerms("ú", "u", filter, termAtt);
assertNextTerms("û", "u", filter, termAtt);
assertNextTerms("ü", "u", filter, termAtt);
assertNextTerms("ý", "y", filter, termAtt);
assertNextTerms("ÿ", "y", filter, termAtt);
assertNextTerms("fi", "fi", filter, termAtt);
assertNextTerms("fl", "fl", filter, termAtt);
assertFalse(filter.incrementToken());
}
// The following Perl script generated the foldings[] array automatically
// from ASCIIFoldingFilter.java:
//
// ============== begin get.test.cases.pl ==============
//
// use strict;
// use warnings;
//
// my $file = "ASCIIFoldingFilter.java";
// my $output = "testcases.txt";
// my %codes = ();
// my $folded = '';
//
// open IN, "<:utf8", $file || die "Error opening input file '$file': $!";
// open OUT, ">:utf8", $output || die "Error opening output file '$output': $!";
//
// while (my $line = <IN>) {
// chomp($line);
// # case '\u0133': // <char> <maybe URL> [ description ]
// if ($line =~ /case\s+'\\u(....)':.*\[([^\]]+)\]/) {
// my $code = $1;
// my $desc = $2;
// $codes{$code} = $desc;
// }
// # output[outputPos++] = 'A';
// elsif ($line =~ /output\[outputPos\+\+\] = '(.+)';/) {
// my $output_char = $1;
// $folded .= $output_char;
// }
// elsif ($line =~ /break;/ && length($folded) > 0) {
// my $first = 1;
// for my $code (sort { hex($a) <=> hex($b) } keys %codes) {
// my $desc = $codes{$code};
// print OUT ' ';
// print OUT '+ ' if (not $first);
// $first = 0;
// print OUT '"', chr(hex($code)), qq!" // U+$code: $desc\n!;
// }
// print OUT qq! ,"$folded", // Folded result\n\n!;
// %codes = ();
// $folded = '';
// }
// }
// close OUT;
//
// ============== end get.test.cases.pl ==============
//
public void testAllFoldings() throws Exception {
// Alternating strings of:
// 1. All non-ASCII characters to be folded, concatenated together as a
// single string.
// 2. The string of ASCII characters to which each of the above
// characters should be folded.
String[] foldings = {
"À" // U+00C0: LATIN CAPITAL LETTER A WITH GRAVE
+ "Á" // U+00C1: LATIN CAPITAL LETTER A WITH ACUTE
+ "Â" // U+00C2: LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+ "Ã" // U+00C3: LATIN CAPITAL LETTER A WITH TILDE
+ "Ä" // U+00C4: LATIN CAPITAL LETTER A WITH DIAERESIS
+ "Å" // U+00C5: LATIN CAPITAL LETTER A WITH RING ABOVE
+ "Ā" // U+0100: LATIN CAPITAL LETTER A WITH MACRON
+ "Ă" // U+0102: LATIN CAPITAL LETTER A WITH BREVE
+ "Ą" // U+0104: LATIN CAPITAL LETTER A WITH OGONEK
+ "Ə" // U+018F: LATIN CAPITAL LETTER SCHWA
+ "Ǎ" // U+01CD: LATIN CAPITAL LETTER A WITH CARON
+ "Ǟ" // U+01DE: LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON
+ "Ǡ" // U+01E0: LATIN CAPITAL LETTER A WITH DOT ABOVE AND MACRON
+ "Ǻ" // U+01FA: LATIN CAPITAL LETTER A WITH RING ABOVE AND ACUTE
+ "Ȁ" // U+0200: LATIN CAPITAL LETTER A WITH DOUBLE GRAVE
+ "Ȃ" // U+0202: LATIN CAPITAL LETTER A WITH INVERTED BREVE
+ "Ȧ" // U+0226: LATIN CAPITAL LETTER A WITH DOT ABOVE
+ "Ⱥ" // U+023A: LATIN CAPITAL LETTER A WITH STROKE
+ "ᴀ" // U+1D00: LATIN LETTER SMALL CAPITAL A
+ "Ḁ" // U+1E00: LATIN CAPITAL LETTER A WITH RING BELOW
+ "Ạ" // U+1EA0: LATIN CAPITAL LETTER A WITH DOT BELOW
+ "Ả" // U+1EA2: LATIN CAPITAL LETTER A WITH HOOK ABOVE
+ "Ấ" // U+1EA4: LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND ACUTE
+ "Ầ" // U+1EA6: LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND GRAVE
+ "Ẩ" // U+1EA8: LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE
+ "Ẫ" // U+1EAA: LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND TILDE
+ "Ậ" // U+1EAC: LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND DOT BELOW
+ "Ắ" // U+1EAE: LATIN CAPITAL LETTER A WITH BREVE AND ACUTE
+ "Ằ" // U+1EB0: LATIN CAPITAL LETTER A WITH BREVE AND GRAVE
+ "Ẳ" // U+1EB2: LATIN CAPITAL LETTER A WITH BREVE AND HOOK ABOVE
+ "Ẵ" // U+1EB4: LATIN CAPITAL LETTER A WITH BREVE AND TILDE
+ "Ặ" // U+1EB6: LATIN CAPITAL LETTER A WITH BREVE AND DOT BELOW
+ "Ⓐ" // U+24B6: CIRCLED LATIN CAPITAL LETTER A
+ "A" // U+FF21: FULLWIDTH LATIN CAPITAL LETTER A
,"A", // Folded result
"à" // U+00E0: LATIN SMALL LETTER A WITH GRAVE
+ "á" // U+00E1: LATIN SMALL LETTER A WITH ACUTE
+ "â" // U+00E2: LATIN SMALL LETTER A WITH CIRCUMFLEX
+ "ã" // U+00E3: LATIN SMALL LETTER A WITH TILDE
+ "ä" // U+00E4: LATIN SMALL LETTER A WITH DIAERESIS
+ "å" // U+00E5: LATIN SMALL LETTER A WITH RING ABOVE
+ "ā" // U+0101: LATIN SMALL LETTER A WITH MACRON
+ "ă" // U+0103: LATIN SMALL LETTER A WITH BREVE
+ "ą" // U+0105: LATIN SMALL LETTER A WITH OGONEK
+ "ǎ" // U+01CE: LATIN SMALL LETTER A WITH CARON
+ "ǟ" // U+01DF: LATIN SMALL LETTER A WITH DIAERESIS AND MACRON
+ "ǡ" // U+01E1: LATIN SMALL LETTER A WITH DOT ABOVE AND MACRON
+ "ǻ" // U+01FB: LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE
+ "ȁ" // U+0201: LATIN SMALL LETTER A WITH DOUBLE GRAVE
+ "ȃ" // U+0203: LATIN SMALL LETTER A WITH INVERTED BREVE
+ "ȧ" // U+0227: LATIN SMALL LETTER A WITH DOT ABOVE
+ "ɐ" // U+0250: LATIN SMALL LETTER TURNED A
+ "ə" // U+0259: LATIN SMALL LETTER SCHWA
+ "ɚ" // U+025A: LATIN SMALL LETTER SCHWA WITH HOOK
+ "ᶏ" // U+1D8F: LATIN SMALL LETTER A WITH RETROFLEX HOOK
+ "ḁ" // U+1E01: LATIN SMALL LETTER A WITH RING BELOW
+ "ᶕ" // U+1D95: LATIN SMALL LETTER SCHWA WITH RETROFLEX HOOK
+ "ẚ" // U+1E9A: LATIN SMALL LETTER A WITH RIGHT HALF RING
+ "ạ" // U+1EA1: LATIN SMALL LETTER A WITH DOT BELOW
+ "ả" // U+1EA3: LATIN SMALL LETTER A WITH HOOK ABOVE
+ "ấ" // U+1EA5: LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE
+ "ầ" // U+1EA7: LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE
+ "ẩ" // U+1EA9: LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE
+ "ẫ" // U+1EAB: LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE
+ "ậ" // U+1EAD: LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW
+ "ắ" // U+1EAF: LATIN SMALL LETTER A WITH BREVE AND ACUTE
+ "ằ" // U+1EB1: LATIN SMALL LETTER A WITH BREVE AND GRAVE
+ "ẳ" // U+1EB3: LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE
+ "ẵ" // U+1EB5: LATIN SMALL LETTER A WITH BREVE AND TILDE
+ "ặ" // U+1EB7: LATIN SMALL LETTER A WITH BREVE AND DOT BELOW
+ "ₐ" // U+2090: LATIN SUBSCRIPT SMALL LETTER A
+ "ₔ" // U+2094: LATIN SUBSCRIPT SMALL LETTER SCHWA
+ "ⓐ" // U+24D0: CIRCLED LATIN SMALL LETTER A
+ "ⱥ" // U+2C65: LATIN SMALL LETTER A WITH STROKE
+ "Ɐ" // U+2C6F: LATIN CAPITAL LETTER TURNED A
+ "a" // U+FF41: FULLWIDTH LATIN SMALL LETTER A
,"a", // Folded result
"Ꜳ" // U+A732: LATIN CAPITAL LETTER AA
,"AA", // Folded result
"Æ" // U+00C6: LATIN CAPITAL LETTER AE
+ "Ǣ" // U+01E2: LATIN CAPITAL LETTER AE WITH MACRON
+ "Ǽ" // U+01FC: LATIN CAPITAL LETTER AE WITH ACUTE
+ "ᴁ" // U+1D01: LATIN LETTER SMALL CAPITAL AE
,"AE", // Folded result
"Ꜵ" // U+A734: LATIN CAPITAL LETTER AO
,"AO", // Folded result
"Ꜷ" // U+A736: LATIN CAPITAL LETTER AU
,"AU", // Folded result
"Ꜹ" // U+A738: LATIN CAPITAL LETTER AV
+ "Ꜻ" // U+A73A: LATIN CAPITAL LETTER AV WITH HORIZONTAL BAR
,"AV", // Folded result
"Ꜽ" // U+A73C: LATIN CAPITAL LETTER AY
,"AY", // Folded result
"⒜" // U+249C: PARENTHESIZED LATIN SMALL LETTER A
,"(a)", // Folded result
"ꜳ" // U+A733: LATIN SMALL LETTER AA
,"aa", // Folded result
"æ" // U+00E6: LATIN SMALL LETTER AE
+ "ǣ" // U+01E3: LATIN SMALL LETTER AE WITH MACRON
+ "ǽ" // U+01FD: LATIN SMALL LETTER AE WITH ACUTE
+ "ᴂ" // U+1D02: LATIN SMALL LETTER TURNED AE
,"ae", // Folded result
"ꜵ" // U+A735: LATIN SMALL LETTER AO
,"ao", // Folded result
"ꜷ" // U+A737: LATIN SMALL LETTER AU
,"au", // Folded result
"ꜹ" // U+A739: LATIN SMALL LETTER AV
+ "ꜻ" // U+A73B: LATIN SMALL LETTER AV WITH HORIZONTAL BAR
,"av", // Folded result
"ꜽ" // U+A73D: LATIN SMALL LETTER AY
,"ay", // Folded result
"Ɓ" // U+0181: LATIN CAPITAL LETTER B WITH HOOK
+ "Ƃ" // U+0182: LATIN CAPITAL LETTER B WITH TOPBAR
+ "Ƀ" // U+0243: LATIN CAPITAL LETTER B WITH STROKE
+ "ʙ" // U+0299: LATIN LETTER SMALL CAPITAL B
+ "ᴃ" // U+1D03: LATIN LETTER SMALL CAPITAL BARRED B
+ "Ḃ" // U+1E02: LATIN CAPITAL LETTER B WITH DOT ABOVE
+ "Ḅ" // U+1E04: LATIN CAPITAL LETTER B WITH DOT BELOW
+ "Ḇ" // U+1E06: LATIN CAPITAL LETTER B WITH LINE BELOW
+ "Ⓑ" // U+24B7: CIRCLED LATIN CAPITAL LETTER B
+ "B" // U+FF22: FULLWIDTH LATIN CAPITAL LETTER B
,"B", // Folded result
"ƀ" // U+0180: LATIN SMALL LETTER B WITH STROKE
+ "ƃ" // U+0183: LATIN SMALL LETTER B WITH TOPBAR
+ "ɓ" // U+0253: LATIN SMALL LETTER B WITH HOOK
+ "ᵬ" // U+1D6C: LATIN SMALL LETTER B WITH MIDDLE TILDE
+ "ᶀ" // U+1D80: LATIN SMALL LETTER B WITH PALATAL HOOK
+ "ḃ" // U+1E03: LATIN SMALL LETTER B WITH DOT ABOVE
+ "ḅ" // U+1E05: LATIN SMALL LETTER B WITH DOT BELOW
+ "ḇ" // U+1E07: LATIN SMALL LETTER B WITH LINE BELOW
+ "ⓑ" // U+24D1: CIRCLED LATIN SMALL LETTER B
+ "b" // U+FF42: FULLWIDTH LATIN SMALL LETTER B
,"b", // Folded result
"⒝" // U+249D: PARENTHESIZED LATIN SMALL LETTER B
,"(b)", // Folded result
"Ç" // U+00C7: LATIN CAPITAL LETTER C WITH CEDILLA
+ "Ć" // U+0106: LATIN CAPITAL LETTER C WITH ACUTE
+ "Ĉ" // U+0108: LATIN CAPITAL LETTER C WITH CIRCUMFLEX
+ "Ċ" // U+010A: LATIN CAPITAL LETTER C WITH DOT ABOVE
+ "Č" // U+010C: LATIN CAPITAL LETTER C WITH CARON
+ "Ƈ" // U+0187: LATIN CAPITAL LETTER C WITH HOOK
+ "Ȼ" // U+023B: LATIN CAPITAL LETTER C WITH STROKE
+ "ʗ" // U+0297: LATIN LETTER STRETCHED C
+ "ᴄ" // U+1D04: LATIN LETTER SMALL CAPITAL C
+ "Ḉ" // U+1E08: LATIN CAPITAL LETTER C WITH CEDILLA AND ACUTE
+ "Ⓒ" // U+24B8: CIRCLED LATIN CAPITAL LETTER C
+ "C" // U+FF23: FULLWIDTH LATIN CAPITAL LETTER C
,"C", // Folded result
"ç" // U+00E7: LATIN SMALL LETTER C WITH CEDILLA
+ "ć" // U+0107: LATIN SMALL LETTER C WITH ACUTE
+ "ĉ" // U+0109: LATIN SMALL LETTER C WITH CIRCUMFLEX
+ "ċ" // U+010B: LATIN SMALL LETTER C WITH DOT ABOVE
+ "č" // U+010D: LATIN SMALL LETTER C WITH CARON
+ "ƈ" // U+0188: LATIN SMALL LETTER C WITH HOOK
+ "ȼ" // U+023C: LATIN SMALL LETTER C WITH STROKE
+ "ɕ" // U+0255: LATIN SMALL LETTER C WITH CURL
+ "ḉ" // U+1E09: LATIN SMALL LETTER C WITH CEDILLA AND ACUTE
+ "ↄ" // U+2184: LATIN SMALL LETTER REVERSED C
+ "ⓒ" // U+24D2: CIRCLED LATIN SMALL LETTER C
+ "Ꜿ" // U+A73E: LATIN CAPITAL LETTER REVERSED C WITH DOT
+ "ꜿ" // U+A73F: LATIN SMALL LETTER REVERSED C WITH DOT
+ "c" // U+FF43: FULLWIDTH LATIN SMALL LETTER C
,"c", // Folded result
"⒞" // U+249E: PARENTHESIZED LATIN SMALL LETTER C
,"(c)", // Folded result
"Ð" // U+00D0: LATIN CAPITAL LETTER ETH
+ "Ď" // U+010E: LATIN CAPITAL LETTER D WITH CARON
+ "Đ" // U+0110: LATIN CAPITAL LETTER D WITH STROKE
+ "Ɖ" // U+0189: LATIN CAPITAL LETTER AFRICAN D
+ "Ɗ" // U+018A: LATIN CAPITAL LETTER D WITH HOOK
+ "Ƌ" // U+018B: LATIN CAPITAL LETTER D WITH TOPBAR
+ "ᴅ" // U+1D05: LATIN LETTER SMALL CAPITAL D
+ "ᴆ" // U+1D06: LATIN LETTER SMALL CAPITAL ETH
+ "Ḋ" // U+1E0A: LATIN CAPITAL LETTER D WITH DOT ABOVE
+ "Ḍ" // U+1E0C: LATIN CAPITAL LETTER D WITH DOT BELOW
+ "Ḏ" // U+1E0E: LATIN CAPITAL LETTER D WITH LINE BELOW
+ "Ḑ" // U+1E10: LATIN CAPITAL LETTER D WITH CEDILLA
+ "Ḓ" // U+1E12: LATIN CAPITAL LETTER D WITH CIRCUMFLEX BELOW
+ "Ⓓ" // U+24B9: CIRCLED LATIN CAPITAL LETTER D
+ "Ꝺ" // U+A779: LATIN CAPITAL LETTER INSULAR D
+ "D" // U+FF24: FULLWIDTH LATIN CAPITAL LETTER D
,"D", // Folded result
"ð" // U+00F0: LATIN SMALL LETTER ETH
+ "ď" // U+010F: LATIN SMALL LETTER D WITH CARON
+ "đ" // U+0111: LATIN SMALL LETTER D WITH STROKE
+ "ƌ" // U+018C: LATIN SMALL LETTER D WITH TOPBAR
+ "ȡ" // U+0221: LATIN SMALL LETTER D WITH CURL
+ "ɖ" // U+0256: LATIN SMALL LETTER D WITH TAIL
+ "ɗ" // U+0257: LATIN SMALL LETTER D WITH HOOK
+ "ᵭ" // U+1D6D: LATIN SMALL LETTER D WITH MIDDLE TILDE
+ "ᶁ" // U+1D81: LATIN SMALL LETTER D WITH PALATAL HOOK
+ "ᶑ" // U+1D91: LATIN SMALL LETTER D WITH HOOK AND TAIL
+ "ḋ" // U+1E0B: LATIN SMALL LETTER D WITH DOT ABOVE
+ "ḍ" // U+1E0D: LATIN SMALL LETTER D WITH DOT BELOW
+ "ḏ" // U+1E0F: LATIN SMALL LETTER D WITH LINE BELOW
+ "ḑ" // U+1E11: LATIN SMALL LETTER D WITH CEDILLA
+ "ḓ" // U+1E13: LATIN SMALL LETTER D WITH CIRCUMFLEX BELOW
+ "ⓓ" // U+24D3: CIRCLED LATIN SMALL LETTER D
+ "ꝺ" // U+A77A: LATIN SMALL LETTER INSULAR D
+ "d" // U+FF44: FULLWIDTH LATIN SMALL LETTER D
,"d", // Folded result
"DŽ" // U+01C4: LATIN CAPITAL LETTER DZ WITH CARON
+ "DZ" // U+01F1: LATIN CAPITAL LETTER DZ
,"DZ", // Folded result
"Dž" // U+01C5: LATIN CAPITAL LETTER D WITH SMALL LETTER Z WITH CARON
+ "Dz" // U+01F2: LATIN CAPITAL LETTER D WITH SMALL LETTER Z
,"Dz", // Folded result
"⒟" // U+249F: PARENTHESIZED LATIN SMALL LETTER D
,"(d)", // Folded result
"ȸ" // U+0238: LATIN SMALL LETTER DB DIGRAPH
,"db", // Folded result
"dž" // U+01C6: LATIN SMALL LETTER DZ WITH CARON
+ "dz" // U+01F3: LATIN SMALL LETTER DZ
+ "ʣ" // U+02A3: LATIN SMALL LETTER DZ DIGRAPH
+ "ʥ" // U+02A5: LATIN SMALL LETTER DZ DIGRAPH WITH CURL
,"dz", // Folded result
"È" // U+00C8: LATIN CAPITAL LETTER E WITH GRAVE
+ "É" // U+00C9: LATIN CAPITAL LETTER E WITH ACUTE
+ "Ê" // U+00CA: LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+ "Ë" // U+00CB: LATIN CAPITAL LETTER E WITH DIAERESIS
+ "Ē" // U+0112: LATIN CAPITAL LETTER E WITH MACRON
+ "Ĕ" // U+0114: LATIN CAPITAL LETTER E WITH BREVE
+ "Ė" // U+0116: LATIN CAPITAL LETTER E WITH DOT ABOVE
+ "Ę" // U+0118: LATIN CAPITAL LETTER E WITH OGONEK
+ "Ě" // U+011A: LATIN CAPITAL LETTER E WITH CARON
+ "Ǝ" // U+018E: LATIN CAPITAL LETTER REVERSED E
+ "Ɛ" // U+0190: LATIN CAPITAL LETTER OPEN E
+ "Ȅ" // U+0204: LATIN CAPITAL LETTER E WITH DOUBLE GRAVE
+ "Ȇ" // U+0206: LATIN CAPITAL LETTER E WITH INVERTED BREVE
+ "Ȩ" // U+0228: LATIN CAPITAL LETTER E WITH CEDILLA
+ "Ɇ" // U+0246: LATIN CAPITAL LETTER E WITH STROKE
+ "ᴇ" // U+1D07: LATIN LETTER SMALL CAPITAL E
+ "Ḕ" // U+1E14: LATIN CAPITAL LETTER E WITH MACRON AND GRAVE
+ "Ḗ" // U+1E16: LATIN CAPITAL LETTER E WITH MACRON AND ACUTE
+ "Ḙ" // U+1E18: LATIN CAPITAL LETTER E WITH CIRCUMFLEX BELOW
+ "Ḛ" // U+1E1A: LATIN CAPITAL LETTER E WITH TILDE BELOW
+ "Ḝ" // U+1E1C: LATIN CAPITAL LETTER E WITH CEDILLA AND BREVE
+ "Ẹ" // U+1EB8: LATIN CAPITAL LETTER E WITH DOT BELOW
+ "Ẻ" // U+1EBA: LATIN CAPITAL LETTER E WITH HOOK ABOVE
+ "Ẽ" // U+1EBC: LATIN CAPITAL LETTER E WITH TILDE
+ "Ế" // U+1EBE: LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND ACUTE
+ "Ề" // U+1EC0: LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND GRAVE
+ "Ể" // U+1EC2: LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE
+ "Ễ" // U+1EC4: LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND TILDE
+ "Ệ" // U+1EC6: LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND DOT BELOW
+ "Ⓔ" // U+24BA: CIRCLED LATIN CAPITAL LETTER E
+ "ⱻ" // U+2C7B: LATIN LETTER SMALL CAPITAL TURNED E
+ "E" // U+FF25: FULLWIDTH LATIN CAPITAL LETTER E
,"E", // Folded result
"è" // U+00E8: LATIN SMALL LETTER E WITH GRAVE
+ "é" // U+00E9: LATIN SMALL LETTER E WITH ACUTE
+ "ê" // U+00EA: LATIN SMALL LETTER E WITH CIRCUMFLEX
+ "ë" // U+00EB: LATIN SMALL LETTER E WITH DIAERESIS
+ "ē" // U+0113: LATIN SMALL LETTER E WITH MACRON
+ "ĕ" // U+0115: LATIN SMALL LETTER E WITH BREVE
+ "ė" // U+0117: LATIN SMALL LETTER E WITH DOT ABOVE
+ "ę" // U+0119: LATIN SMALL LETTER E WITH OGONEK
+ "ě" // U+011B: LATIN SMALL LETTER E WITH CARON
+ "ǝ" // U+01DD: LATIN SMALL LETTER TURNED E
+ "ȅ" // U+0205: LATIN SMALL LETTER E WITH DOUBLE GRAVE
+ "ȇ" // U+0207: LATIN SMALL LETTER E WITH INVERTED BREVE
+ "ȩ" // U+0229: LATIN SMALL LETTER E WITH CEDILLA
+ "ɇ" // U+0247: LATIN SMALL LETTER E WITH STROKE
+ "ɘ" // U+0258: LATIN SMALL LETTER REVERSED E
+ "ɛ" // U+025B: LATIN SMALL LETTER OPEN E
+ "ɜ" // U+025C: LATIN SMALL LETTER REVERSED OPEN E
+ "ɝ" // U+025D: LATIN SMALL LETTER REVERSED OPEN E WITH HOOK
+ "ɞ" // U+025E: LATIN SMALL LETTER CLOSED REVERSED OPEN E
+ "ʚ" // U+029A: LATIN SMALL LETTER CLOSED OPEN E
+ "ᴈ" // U+1D08: LATIN SMALL LETTER TURNED OPEN E
+ "ᶒ" // U+1D92: LATIN SMALL LETTER E WITH RETROFLEX HOOK
+ "ᶓ" // U+1D93: LATIN SMALL LETTER OPEN E WITH RETROFLEX HOOK
+ "ᶔ" // U+1D94: LATIN SMALL LETTER REVERSED OPEN E WITH RETROFLEX HOOK
+ "ḕ" // U+1E15: LATIN SMALL LETTER E WITH MACRON AND GRAVE
+ "ḗ" // U+1E17: LATIN SMALL LETTER E WITH MACRON AND ACUTE
+ "ḙ" // U+1E19: LATIN SMALL LETTER E WITH CIRCUMFLEX BELOW
+ "ḛ" // U+1E1B: LATIN SMALL LETTER E WITH TILDE BELOW
+ "ḝ" // U+1E1D: LATIN SMALL LETTER E WITH CEDILLA AND BREVE
+ "ẹ" // U+1EB9: LATIN SMALL LETTER E WITH DOT BELOW
+ "ẻ" // U+1EBB: LATIN SMALL LETTER E WITH HOOK ABOVE
+ "ẽ" // U+1EBD: LATIN SMALL LETTER E WITH TILDE
+ "ế" // U+1EBF: LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE
+ "ề" // U+1EC1: LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE
+ "ể" // U+1EC3: LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE
+ "ễ" // U+1EC5: LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE
+ "ệ" // U+1EC7: LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW
+ "ₑ" // U+2091: LATIN SUBSCRIPT SMALL LETTER E
+ "ⓔ" // U+24D4: CIRCLED LATIN SMALL LETTER E
+ "ⱸ" // U+2C78: LATIN SMALL LETTER E WITH NOTCH
+ "e" // U+FF45: FULLWIDTH LATIN SMALL LETTER E
,"e", // Folded result
"⒠" // U+24A0: PARENTHESIZED LATIN SMALL LETTER E
,"(e)", // Folded result
"Ƒ" // U+0191: LATIN CAPITAL LETTER F WITH HOOK
+ "Ḟ" // U+1E1E: LATIN CAPITAL LETTER F WITH DOT ABOVE
+ "Ⓕ" // U+24BB: CIRCLED LATIN CAPITAL LETTER F
+ "ꜰ" // U+A730: LATIN LETTER SMALL CAPITAL F
+ "Ꝼ" // U+A77B: LATIN CAPITAL LETTER INSULAR F
+ "ꟻ" // U+A7FB: LATIN EPIGRAPHIC LETTER REVERSED F
+ "F" // U+FF26: FULLWIDTH LATIN CAPITAL LETTER F
,"F", // Folded result
"ƒ" // U+0192: LATIN SMALL LETTER F WITH HOOK
+ "ᵮ" // U+1D6E: LATIN SMALL LETTER F WITH MIDDLE TILDE
+ "ᶂ" // U+1D82: LATIN SMALL LETTER F WITH PALATAL HOOK
+ "ḟ" // U+1E1F: LATIN SMALL LETTER F WITH DOT ABOVE
+ "ẛ" // U+1E9B: LATIN SMALL LETTER LONG S WITH DOT ABOVE
+ "ⓕ" // U+24D5: CIRCLED LATIN SMALL LETTER F
+ "ꝼ" // U+A77C: LATIN SMALL LETTER INSULAR F
+ "f" // U+FF46: FULLWIDTH LATIN SMALL LETTER F
,"f", // Folded result
"⒡" // U+24A1: PARENTHESIZED LATIN SMALL LETTER F
,"(f)", // Folded result
"ff" // U+FB00: LATIN SMALL LIGATURE FF
,"ff", // Folded result
"ffi" // U+FB03: LATIN SMALL LIGATURE FFI
,"ffi", // Folded result
"ffl" // U+FB04: LATIN SMALL LIGATURE FFL
,"ffl", // Folded result
"fi" // U+FB01: LATIN SMALL LIGATURE FI
,"fi", // Folded result
"fl" // U+FB02: LATIN SMALL LIGATURE FL
,"fl", // Folded result
"Ĝ" // U+011C: LATIN CAPITAL LETTER G WITH CIRCUMFLEX
+ "Ğ" // U+011E: LATIN CAPITAL LETTER G WITH BREVE
+ "Ġ" // U+0120: LATIN CAPITAL LETTER G WITH DOT ABOVE
+ "Ģ" // U+0122: LATIN CAPITAL LETTER G WITH CEDILLA
+ "Ɠ" // U+0193: LATIN CAPITAL LETTER G WITH HOOK
+ "Ǥ" // U+01E4: LATIN CAPITAL LETTER G WITH STROKE
+ "ǥ" // U+01E5: LATIN SMALL LETTER G WITH STROKE
+ "Ǧ" // U+01E6: LATIN CAPITAL LETTER G WITH CARON
+ "ǧ" // U+01E7: LATIN SMALL LETTER G WITH CARON
+ "Ǵ" // U+01F4: LATIN CAPITAL LETTER G WITH ACUTE
+ "ɢ" // U+0262: LATIN LETTER SMALL CAPITAL G
+ "ʛ" // U+029B: LATIN LETTER SMALL CAPITAL G WITH HOOK
+ "Ḡ" // U+1E20: LATIN CAPITAL LETTER G WITH MACRON
+ "Ⓖ" // U+24BC: CIRCLED LATIN CAPITAL LETTER G
+ "Ᵹ" // U+A77D: LATIN CAPITAL LETTER INSULAR G
+ "Ꝿ" // U+A77E: LATIN CAPITAL LETTER TURNED INSULAR G
+ "G" // U+FF27: FULLWIDTH LATIN CAPITAL LETTER G
,"G", // Folded result
"ĝ" // U+011D: LATIN SMALL LETTER G WITH CIRCUMFLEX
+ "ğ" // U+011F: LATIN SMALL LETTER G WITH BREVE
+ "ġ" // U+0121: LATIN SMALL LETTER G WITH DOT ABOVE
+ "ģ" // U+0123: LATIN SMALL LETTER G WITH CEDILLA
+ "ǵ" // U+01F5: LATIN SMALL LETTER G WITH ACUTE
+ "ɠ" // U+0260: LATIN SMALL LETTER G WITH HOOK
+ "ɡ" // U+0261: LATIN SMALL LETTER SCRIPT G
+ "ᵷ" // U+1D77: LATIN SMALL LETTER TURNED G
+ "ᵹ" // U+1D79: LATIN SMALL LETTER INSULAR G
+ "ᶃ" // U+1D83: LATIN SMALL LETTER G WITH PALATAL HOOK
+ "ḡ" // U+1E21: LATIN SMALL LETTER G WITH MACRON
+ "ⓖ" // U+24D6: CIRCLED LATIN SMALL LETTER G
+ "ꝿ" // U+A77F: LATIN SMALL LETTER TURNED INSULAR G
+ "g" // U+FF47: FULLWIDTH LATIN SMALL LETTER G
,"g", // Folded result
"⒢" // U+24A2: PARENTHESIZED LATIN SMALL LETTER G
,"(g)", // Folded result
"Ĥ" // U+0124: LATIN CAPITAL LETTER H WITH CIRCUMFLEX
+ "Ħ" // U+0126: LATIN CAPITAL LETTER H WITH STROKE
+ "Ȟ" // U+021E: LATIN CAPITAL LETTER H WITH CARON
+ "ʜ" // U+029C: LATIN LETTER SMALL CAPITAL H
+ "Ḣ" // U+1E22: LATIN CAPITAL LETTER H WITH DOT ABOVE
+ "Ḥ" // U+1E24: LATIN CAPITAL LETTER H WITH DOT BELOW
+ "Ḧ" // U+1E26: LATIN CAPITAL LETTER H WITH DIAERESIS
+ "Ḩ" // U+1E28: LATIN CAPITAL LETTER H WITH CEDILLA
+ "Ḫ" // U+1E2A: LATIN CAPITAL LETTER H WITH BREVE BELOW
+ "Ⓗ" // U+24BD: CIRCLED LATIN CAPITAL LETTER H
+ "Ⱨ" // U+2C67: LATIN CAPITAL LETTER H WITH DESCENDER
+ "Ⱶ" // U+2C75: LATIN CAPITAL LETTER HALF H
+ "H" // U+FF28: FULLWIDTH LATIN CAPITAL LETTER H
,"H", // Folded result
"ĥ" // U+0125: LATIN SMALL LETTER H WITH CIRCUMFLEX
+ "ħ" // U+0127: LATIN SMALL LETTER H WITH STROKE
+ "ȟ" // U+021F: LATIN SMALL LETTER H WITH CARON
+ "ɥ" // U+0265: LATIN SMALL LETTER TURNED H
+ "ɦ" // U+0266: LATIN SMALL LETTER H WITH HOOK
+ "ʮ" // U+02AE: LATIN SMALL LETTER TURNED H WITH FISHHOOK
+ "ʯ" // U+02AF: LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ "ḣ" // U+1E23: LATIN SMALL LETTER H WITH DOT ABOVE
+ "ḥ" // U+1E25: LATIN SMALL LETTER H WITH DOT BELOW
+ "ḧ" // U+1E27: LATIN SMALL LETTER H WITH DIAERESIS
+ "ḩ" // U+1E29: LATIN SMALL LETTER H WITH CEDILLA
+ "ḫ" // U+1E2B: LATIN SMALL LETTER H WITH BREVE BELOW
+ "ẖ" // U+1E96: LATIN SMALL LETTER H WITH LINE BELOW
+ "ⓗ" // U+24D7: CIRCLED LATIN SMALL LETTER H
+ "ⱨ" // U+2C68: LATIN SMALL LETTER H WITH DESCENDER
+ "ⱶ" // U+2C76: LATIN SMALL LETTER HALF H
+ "h" // U+FF48: FULLWIDTH LATIN SMALL LETTER H
,"h", // Folded result
"Ƕ" // U+01F6: LATIN CAPITAL LETTER HWAIR
,"HV", // Folded result
"⒣" // U+24A3: PARENTHESIZED LATIN SMALL LETTER H
,"(h)", // Folded result
"ƕ" // U+0195: LATIN SMALL LETTER HV
,"hv", // Folded result
"Ì" // U+00CC: LATIN CAPITAL LETTER I WITH GRAVE
+ "Í" // U+00CD: LATIN CAPITAL LETTER I WITH ACUTE
+ "Î" // U+00CE: LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+ "Ï" // U+00CF: LATIN CAPITAL LETTER I WITH DIAERESIS
+ "Ĩ" // U+0128: LATIN CAPITAL LETTER I WITH TILDE
+ "Ī" // U+012A: LATIN CAPITAL LETTER I WITH MACRON
+ "Ĭ" // U+012C: LATIN CAPITAL LETTER I WITH BREVE
+ "Į" // U+012E: LATIN CAPITAL LETTER I WITH OGONEK
+ "İ" // U+0130: LATIN CAPITAL LETTER I WITH DOT ABOVE
+ "Ɩ" // U+0196: LATIN CAPITAL LETTER IOTA
+ "Ɨ" // U+0197: LATIN CAPITAL LETTER I WITH STROKE
+ "Ǐ" // U+01CF: LATIN CAPITAL LETTER I WITH CARON
+ "Ȉ" // U+0208: LATIN CAPITAL LETTER I WITH DOUBLE GRAVE
+ "Ȋ" // U+020A: LATIN CAPITAL LETTER I WITH INVERTED BREVE
+ "ɪ" // U+026A: LATIN LETTER SMALL CAPITAL I
+ "ᵻ" // U+1D7B: LATIN SMALL CAPITAL LETTER I WITH STROKE
+ "Ḭ" // U+1E2C: LATIN CAPITAL LETTER I WITH TILDE BELOW
+ "Ḯ" // U+1E2E: LATIN CAPITAL LETTER I WITH DIAERESIS AND ACUTE
+ "Ỉ" // U+1EC8: LATIN CAPITAL LETTER I WITH HOOK ABOVE
+ "Ị" // U+1ECA: LATIN CAPITAL LETTER I WITH DOT BELOW
+ "Ⓘ" // U+24BE: CIRCLED LATIN CAPITAL LETTER I
+ "ꟾ" // U+A7FE: LATIN EPIGRAPHIC LETTER I LONGA
+ "I" // U+FF29: FULLWIDTH LATIN CAPITAL LETTER I
,"I", // Folded result
"ì" // U+00EC: LATIN SMALL LETTER I WITH GRAVE
+ "í" // U+00ED: LATIN SMALL LETTER I WITH ACUTE
+ "î" // U+00EE: LATIN SMALL LETTER I WITH CIRCUMFLEX
+ "ï" // U+00EF: LATIN SMALL LETTER I WITH DIAERESIS
+ "ĩ" // U+0129: LATIN SMALL LETTER I WITH TILDE
+ "ī" // U+012B: LATIN SMALL LETTER I WITH MACRON
+ "ĭ" // U+012D: LATIN SMALL LETTER I WITH BREVE
+ "į" // U+012F: LATIN SMALL LETTER I WITH OGONEK
+ "ı" // U+0131: LATIN SMALL LETTER DOTLESS I
+ "ǐ" // U+01D0: LATIN SMALL LETTER I WITH CARON
+ "ȉ" // U+0209: LATIN SMALL LETTER I WITH DOUBLE GRAVE
+ "ȋ" // U+020B: LATIN SMALL LETTER I WITH INVERTED BREVE
+ "ɨ" // U+0268: LATIN SMALL LETTER I WITH STROKE
+ "ᴉ" // U+1D09: LATIN SMALL LETTER TURNED I
+ "ᵢ" // U+1D62: LATIN SUBSCRIPT SMALL LETTER I
+ "ᵼ" // U+1D7C: LATIN SMALL LETTER IOTA WITH STROKE
+ "ᶖ" // U+1D96: LATIN SMALL LETTER I WITH RETROFLEX HOOK
+ "ḭ" // U+1E2D: LATIN SMALL LETTER I WITH TILDE BELOW
+ "ḯ" // U+1E2F: LATIN SMALL LETTER I WITH DIAERESIS AND ACUTE
+ "ỉ" // U+1EC9: LATIN SMALL LETTER I WITH HOOK ABOVE
+ "ị" // U+1ECB: LATIN SMALL LETTER I WITH DOT BELOW
+ "ⁱ" // U+2071: SUPERSCRIPT LATIN SMALL LETTER I
+ "ⓘ" // U+24D8: CIRCLED LATIN SMALL LETTER I
+ "i" // U+FF49: FULLWIDTH LATIN SMALL LETTER I
,"i", // Folded result
"IJ" // U+0132: LATIN CAPITAL LIGATURE IJ
,"IJ", // Folded result
"⒤" // U+24A4: PARENTHESIZED LATIN SMALL LETTER I
,"(i)", // Folded result
"ij" // U+0133: LATIN SMALL LIGATURE IJ
,"ij", // Folded result
"Ĵ" // U+0134: LATIN CAPITAL LETTER J WITH CIRCUMFLEX
+ "Ɉ" // U+0248: LATIN CAPITAL LETTER J WITH STROKE
+ "ᴊ" // U+1D0A: LATIN LETTER SMALL CAPITAL J
+ "Ⓙ" // U+24BF: CIRCLED LATIN CAPITAL LETTER J
+ "J" // U+FF2A: FULLWIDTH LATIN CAPITAL LETTER J
,"J", // Folded result
"ĵ" // U+0135: LATIN SMALL LETTER J WITH CIRCUMFLEX
+ "ǰ" // U+01F0: LATIN SMALL LETTER J WITH CARON
+ "ȷ" // U+0237: LATIN SMALL LETTER DOTLESS J
+ "ɉ" // U+0249: LATIN SMALL LETTER J WITH STROKE
+ "ɟ" // U+025F: LATIN SMALL LETTER DOTLESS J WITH STROKE
+ "ʄ" // U+0284: LATIN SMALL LETTER DOTLESS J WITH STROKE AND HOOK
+ "ʝ" // U+029D: LATIN SMALL LETTER J WITH CROSSED-TAIL
+ "ⓙ" // U+24D9: CIRCLED LATIN SMALL LETTER J
+ "ⱼ" // U+2C7C: LATIN SUBSCRIPT SMALL LETTER J
+ "j" // U+FF4A: FULLWIDTH LATIN SMALL LETTER J
,"j", // Folded result
"⒥" // U+24A5: PARENTHESIZED LATIN SMALL LETTER J
,"(j)", // Folded result
"Ķ" // U+0136: LATIN CAPITAL LETTER K WITH CEDILLA
+ "Ƙ" // U+0198: LATIN CAPITAL LETTER K WITH HOOK
+ "Ǩ" // U+01E8: LATIN CAPITAL LETTER K WITH CARON
+ "ᴋ" // U+1D0B: LATIN LETTER SMALL CAPITAL K
+ "Ḱ" // U+1E30: LATIN CAPITAL LETTER K WITH ACUTE
+ "Ḳ" // U+1E32: LATIN CAPITAL LETTER K WITH DOT BELOW
+ "Ḵ" // U+1E34: LATIN CAPITAL LETTER K WITH LINE BELOW
+ "Ⓚ" // U+24C0: CIRCLED LATIN CAPITAL LETTER K
+ "Ⱪ" // U+2C69: LATIN CAPITAL LETTER K WITH DESCENDER
+ "Ꝁ" // U+A740: LATIN CAPITAL LETTER K WITH STROKE
+ "Ꝃ" // U+A742: LATIN CAPITAL LETTER K WITH DIAGONAL STROKE
+ "Ꝅ" // U+A744: LATIN CAPITAL LETTER K WITH STROKE AND DIAGONAL STROKE
+ "K" // U+FF2B: FULLWIDTH LATIN CAPITAL LETTER K
,"K", // Folded result
"ķ" // U+0137: LATIN SMALL LETTER K WITH CEDILLA
+ "ƙ" // U+0199: LATIN SMALL LETTER K WITH HOOK
+ "ǩ" // U+01E9: LATIN SMALL LETTER K WITH CARON
+ "ʞ" // U+029E: LATIN SMALL LETTER TURNED K
+ "ᶄ" // U+1D84: LATIN SMALL LETTER K WITH PALATAL HOOK
+ "ḱ" // U+1E31: LATIN SMALL LETTER K WITH ACUTE
+ "ḳ" // U+1E33: LATIN SMALL LETTER K WITH DOT BELOW
+ "ḵ" // U+1E35: LATIN SMALL LETTER K WITH LINE BELOW
+ "ⓚ" // U+24DA: CIRCLED LATIN SMALL LETTER K
+ "ⱪ" // U+2C6A: LATIN SMALL LETTER K WITH DESCENDER
+ "ꝁ" // U+A741: LATIN SMALL LETTER K WITH STROKE
+ "ꝃ" // U+A743: LATIN SMALL LETTER K WITH DIAGONAL STROKE
+ "ꝅ" // U+A745: LATIN SMALL LETTER K WITH STROKE AND DIAGONAL STROKE
+ "k" // U+FF4B: FULLWIDTH LATIN SMALL LETTER K
,"k", // Folded result
"⒦" // U+24A6: PARENTHESIZED LATIN SMALL LETTER K
,"(k)", // Folded result
"Ĺ" // U+0139: LATIN CAPITAL LETTER L WITH ACUTE
+ "Ļ" // U+013B: LATIN CAPITAL LETTER L WITH CEDILLA
+ "Ľ" // U+013D: LATIN CAPITAL LETTER L WITH CARON
+ "Ŀ" // U+013F: LATIN CAPITAL LETTER L WITH MIDDLE DOT
+ "Ł" // U+0141: LATIN CAPITAL LETTER L WITH STROKE
+ "Ƚ" // U+023D: LATIN CAPITAL LETTER L WITH BAR
+ "ʟ" // U+029F: LATIN LETTER SMALL CAPITAL L
+ "ᴌ" // U+1D0C: LATIN LETTER SMALL CAPITAL L WITH STROKE
+ "Ḷ" // U+1E36: LATIN CAPITAL LETTER L WITH DOT BELOW
+ "Ḹ" // U+1E38: LATIN CAPITAL LETTER L WITH DOT BELOW AND MACRON
+ "Ḻ" // U+1E3A: LATIN CAPITAL LETTER L WITH LINE BELOW
+ "Ḽ" // U+1E3C: LATIN CAPITAL LETTER L WITH CIRCUMFLEX BELOW
+ "Ⓛ" // U+24C1: CIRCLED LATIN CAPITAL LETTER L
+ "Ⱡ" // U+2C60: LATIN CAPITAL LETTER L WITH DOUBLE BAR
+ "Ɫ" // U+2C62: LATIN CAPITAL LETTER L WITH MIDDLE TILDE
+ "Ꝇ" // U+A746: LATIN CAPITAL LETTER BROKEN L
+ "Ꝉ" // U+A748: LATIN CAPITAL LETTER L WITH HIGH STROKE
+ "Ꞁ" // U+A780: LATIN CAPITAL LETTER TURNED L
+ "L" // U+FF2C: FULLWIDTH LATIN CAPITAL LETTER L
,"L", // Folded result
"ĺ" // U+013A: LATIN SMALL LETTER L WITH ACUTE
+ "ļ" // U+013C: LATIN SMALL LETTER L WITH CEDILLA
+ "ľ" // U+013E: LATIN SMALL LETTER L WITH CARON
+ "ŀ" // U+0140: LATIN SMALL LETTER L WITH MIDDLE DOT
+ "ł" // U+0142: LATIN SMALL LETTER L WITH STROKE
+ "ƚ" // U+019A: LATIN SMALL LETTER L WITH BAR
+ "ȴ" // U+0234: LATIN SMALL LETTER L WITH CURL
+ "ɫ" // U+026B: LATIN SMALL LETTER L WITH MIDDLE TILDE
+ "ɬ" // U+026C: LATIN SMALL LETTER L WITH BELT
+ "ɭ" // U+026D: LATIN SMALL LETTER L WITH RETROFLEX HOOK
+ "ᶅ" // U+1D85: LATIN SMALL LETTER L WITH PALATAL HOOK
+ "ḷ" // U+1E37: LATIN SMALL LETTER L WITH DOT BELOW
+ "ḹ" // U+1E39: LATIN SMALL LETTER L WITH DOT BELOW AND MACRON
+ "ḻ" // U+1E3B: LATIN SMALL LETTER L WITH LINE BELOW
+ "ḽ" // U+1E3D: LATIN SMALL LETTER L WITH CIRCUMFLEX BELOW
+ "ⓛ" // U+24DB: CIRCLED LATIN SMALL LETTER L
+ "ⱡ" // U+2C61: LATIN SMALL LETTER L WITH DOUBLE BAR
+ "ꝇ" // U+A747: LATIN SMALL LETTER BROKEN L
+ "ꝉ" // U+A749: LATIN SMALL LETTER L WITH HIGH STROKE
+ "ꞁ" // U+A781: LATIN SMALL LETTER TURNED L
+ "l" // U+FF4C: FULLWIDTH LATIN SMALL LETTER L
,"l", // Folded result
"LJ" // U+01C7: LATIN CAPITAL LETTER LJ
,"LJ", // Folded result
"Ỻ" // U+1EFA: LATIN CAPITAL LETTER MIDDLE-WELSH LL
,"LL", // Folded result
"Lj" // U+01C8: LATIN CAPITAL LETTER L WITH SMALL LETTER J
,"Lj", // Folded result
"⒧" // U+24A7: PARENTHESIZED LATIN SMALL LETTER L
,"(l)", // Folded result
"lj" // U+01C9: LATIN SMALL LETTER LJ
,"lj", // Folded result
"ỻ" // U+1EFB: LATIN SMALL LETTER MIDDLE-WELSH LL
,"ll", // Folded result
"ʪ" // U+02AA: LATIN SMALL LETTER LS DIGRAPH
,"ls", // Folded result
"ʫ" // U+02AB: LATIN SMALL LETTER LZ DIGRAPH
,"lz", // Folded result
"Ɯ" // U+019C: LATIN CAPITAL LETTER TURNED M
+ "ᴍ" // U+1D0D: LATIN LETTER SMALL CAPITAL M
+ "Ḿ" // U+1E3E: LATIN CAPITAL LETTER M WITH ACUTE
+ "Ṁ" // U+1E40: LATIN CAPITAL LETTER M WITH DOT ABOVE
+ "Ṃ" // U+1E42: LATIN CAPITAL LETTER M WITH DOT BELOW
+ "Ⓜ" // U+24C2: CIRCLED LATIN CAPITAL LETTER M
+ "Ɱ" // U+2C6E: LATIN CAPITAL LETTER M WITH HOOK
+ "ꟽ" // U+A7FD: LATIN EPIGRAPHIC LETTER INVERTED M
+ "ꟿ" // U+A7FF: LATIN EPIGRAPHIC LETTER ARCHAIC M
+ "M" // U+FF2D: FULLWIDTH LATIN CAPITAL LETTER M
,"M", // Folded result
"ɯ" // U+026F: LATIN SMALL LETTER TURNED M
+ "ɰ" // U+0270: LATIN SMALL LETTER TURNED M WITH LONG LEG
+ "ɱ" // U+0271: LATIN SMALL LETTER M WITH HOOK
+ "ᵯ" // U+1D6F: LATIN SMALL LETTER M WITH MIDDLE TILDE
+ "ᶆ" // U+1D86: LATIN SMALL LETTER M WITH PALATAL HOOK
+ "ḿ" // U+1E3F: LATIN SMALL LETTER M WITH ACUTE
+ "ṁ" // U+1E41: LATIN SMALL LETTER M WITH DOT ABOVE
+ "ṃ" // U+1E43: LATIN SMALL LETTER M WITH DOT BELOW
+ "ⓜ" // U+24DC: CIRCLED LATIN SMALL LETTER M
+ "m" // U+FF4D: FULLWIDTH LATIN SMALL LETTER M
,"m", // Folded result
"⒨" // U+24A8: PARENTHESIZED LATIN SMALL LETTER M
,"(m)", // Folded result
"Ñ" // U+00D1: LATIN CAPITAL LETTER N WITH TILDE
+ "Ń" // U+0143: LATIN CAPITAL LETTER N WITH ACUTE
+ "Ņ" // U+0145: LATIN CAPITAL LETTER N WITH CEDILLA
+ "Ň" // U+0147: LATIN CAPITAL LETTER N WITH CARON
+ "Ŋ" // U+014A: LATIN CAPITAL LETTER ENG
+ "Ɲ" // U+019D: LATIN CAPITAL LETTER N WITH LEFT HOOK
+ "Ǹ" // U+01F8: LATIN CAPITAL LETTER N WITH GRAVE
+ "Ƞ" // U+0220: LATIN CAPITAL LETTER N WITH LONG RIGHT LEG
+ "ɴ" // U+0274: LATIN LETTER SMALL CAPITAL N
+ "ᴎ" // U+1D0E: LATIN LETTER SMALL CAPITAL REVERSED N
+ "Ṅ" // U+1E44: LATIN CAPITAL LETTER N WITH DOT ABOVE
+ "Ṇ" // U+1E46: LATIN CAPITAL LETTER N WITH DOT BELOW
+ "Ṉ" // U+1E48: LATIN CAPITAL LETTER N WITH LINE BELOW
+ "Ṋ" // U+1E4A: LATIN CAPITAL LETTER N WITH CIRCUMFLEX BELOW
+ "Ⓝ" // U+24C3: CIRCLED LATIN CAPITAL LETTER N
+ "N" // U+FF2E: FULLWIDTH LATIN CAPITAL LETTER N
,"N", // Folded result
"ñ" // U+00F1: LATIN SMALL LETTER N WITH TILDE
+ "ń" // U+0144: LATIN SMALL LETTER N WITH ACUTE
+ "ņ" // U+0146: LATIN SMALL LETTER N WITH CEDILLA
+ "ň" // U+0148: LATIN SMALL LETTER N WITH CARON
+ "ʼn" // U+0149: LATIN SMALL LETTER N PRECEDED BY APOSTROPHE
+ "ŋ" // U+014B: LATIN SMALL LETTER ENG
+ "ƞ" // U+019E: LATIN SMALL LETTER N WITH LONG RIGHT LEG
+ "ǹ" // U+01F9: LATIN SMALL LETTER N WITH GRAVE
+ "ȵ" // U+0235: LATIN SMALL LETTER N WITH CURL
+ "ɲ" // U+0272: LATIN SMALL LETTER N WITH LEFT HOOK
+ "ɳ" // U+0273: LATIN SMALL LETTER N WITH RETROFLEX HOOK
+ "ᵰ" // U+1D70: LATIN SMALL LETTER N WITH MIDDLE TILDE
+ "ᶇ" // U+1D87: LATIN SMALL LETTER N WITH PALATAL HOOK
+ "ṅ" // U+1E45: LATIN SMALL LETTER N WITH DOT ABOVE
+ "ṇ" // U+1E47: LATIN SMALL LETTER N WITH DOT BELOW
+ "ṉ" // U+1E49: LATIN SMALL LETTER N WITH LINE BELOW
+ "ṋ" // U+1E4B: LATIN SMALL LETTER N WITH CIRCUMFLEX BELOW
+ "ⁿ" // U+207F: SUPERSCRIPT LATIN SMALL LETTER N
+ "ⓝ" // U+24DD: CIRCLED LATIN SMALL LETTER N
+ "n" // U+FF4E: FULLWIDTH LATIN SMALL LETTER N
,"n", // Folded result
"NJ" // U+01CA: LATIN CAPITAL LETTER NJ
,"NJ", // Folded result
"Nj" // U+01CB: LATIN CAPITAL LETTER N WITH SMALL LETTER J
,"Nj", // Folded result
"⒩" // U+24A9: PARENTHESIZED LATIN SMALL LETTER N
,"(n)", // Folded result
"nj" // U+01CC: LATIN SMALL LETTER NJ
,"nj", // Folded result
"Ò" // U+00D2: LATIN CAPITAL LETTER O WITH GRAVE
+ "Ó" // U+00D3: LATIN CAPITAL LETTER O WITH ACUTE
+ "Ô" // U+00D4: LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+ "Õ" // U+00D5: LATIN CAPITAL LETTER O WITH TILDE
+ "Ö" // U+00D6: LATIN CAPITAL LETTER O WITH DIAERESIS
+ "Ø" // U+00D8: LATIN CAPITAL LETTER O WITH STROKE
+ "Ō" // U+014C: LATIN CAPITAL LETTER O WITH MACRON
+ "Ŏ" // U+014E: LATIN CAPITAL LETTER O WITH BREVE
+ "Ő" // U+0150: LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
+ "Ɔ" // U+0186: LATIN CAPITAL LETTER OPEN O
+ "Ɵ" // U+019F: LATIN CAPITAL LETTER O WITH MIDDLE TILDE
+ "Ơ" // U+01A0: LATIN CAPITAL LETTER O WITH HORN
+ "Ǒ" // U+01D1: LATIN CAPITAL LETTER O WITH CARON
+ "Ǫ" // U+01EA: LATIN CAPITAL LETTER O WITH OGONEK
+ "Ǭ" // U+01EC: LATIN CAPITAL LETTER O WITH OGONEK AND MACRON
+ "Ǿ" // U+01FE: LATIN CAPITAL LETTER O WITH STROKE AND ACUTE
+ "Ȍ" // U+020C: LATIN CAPITAL LETTER O WITH DOUBLE GRAVE
+ "Ȏ" // U+020E: LATIN CAPITAL LETTER O WITH INVERTED BREVE
+ "Ȫ" // U+022A: LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON
+ "Ȭ" // U+022C: LATIN CAPITAL LETTER O WITH TILDE AND MACRON
+ "Ȯ" // U+022E: LATIN CAPITAL LETTER O WITH DOT ABOVE
+ "Ȱ" // U+0230: LATIN CAPITAL LETTER O WITH DOT ABOVE AND MACRON
+ "ᴏ" // U+1D0F: LATIN LETTER SMALL CAPITAL O
+ "ᴐ" // U+1D10: LATIN LETTER SMALL CAPITAL OPEN O
+ "Ṍ" // U+1E4C: LATIN CAPITAL LETTER O WITH TILDE AND ACUTE
+ "Ṏ" // U+1E4E: LATIN CAPITAL LETTER O WITH TILDE AND DIAERESIS
+ "Ṑ" // U+1E50: LATIN CAPITAL LETTER O WITH MACRON AND GRAVE
+ "Ṓ" // U+1E52: LATIN CAPITAL LETTER O WITH MACRON AND ACUTE
+ "Ọ" // U+1ECC: LATIN CAPITAL LETTER O WITH DOT BELOW
+ "Ỏ" // U+1ECE: LATIN CAPITAL LETTER O WITH HOOK ABOVE
+ "Ố" // U+1ED0: LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND ACUTE
+ "Ồ" // U+1ED2: LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND GRAVE
+ "Ổ" // U+1ED4: LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE
+ "Ỗ" // U+1ED6: LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND TILDE
+ "Ộ" // U+1ED8: LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND DOT BELOW
+ "Ớ" // U+1EDA: LATIN CAPITAL LETTER O WITH HORN AND ACUTE
+ "Ờ" // U+1EDC: LATIN CAPITAL LETTER O WITH HORN AND GRAVE
+ "Ở" // U+1EDE: LATIN CAPITAL LETTER O WITH HORN AND HOOK ABOVE
+ "Ỡ" // U+1EE0: LATIN CAPITAL LETTER O WITH HORN AND TILDE
+ "Ợ" // U+1EE2: LATIN CAPITAL LETTER O WITH HORN AND DOT BELOW
+ "Ⓞ" // U+24C4: CIRCLED LATIN CAPITAL LETTER O
+ "Ꝋ" // U+A74A: LATIN CAPITAL LETTER O WITH LONG STROKE OVERLAY
+ "Ꝍ" // U+A74C: LATIN CAPITAL LETTER O WITH LOOP
+ "O" // U+FF2F: FULLWIDTH LATIN CAPITAL LETTER O
,"O", // Folded result
"ò" // U+00F2: LATIN SMALL LETTER O WITH GRAVE
+ "ó" // U+00F3: LATIN SMALL LETTER O WITH ACUTE
+ "ô" // U+00F4: LATIN SMALL LETTER O WITH CIRCUMFLEX
+ "õ" // U+00F5: LATIN SMALL LETTER O WITH TILDE
+ "ö" // U+00F6: LATIN SMALL LETTER O WITH DIAERESIS
+ "ø" // U+00F8: LATIN SMALL LETTER O WITH STROKE
+ "ō" // U+014D: LATIN SMALL LETTER O WITH MACRON
+ "ŏ" // U+014F: LATIN SMALL LETTER O WITH BREVE
+ "ő" // U+0151: LATIN SMALL LETTER O WITH DOUBLE ACUTE
+ "ơ" // U+01A1: LATIN SMALL LETTER O WITH HORN
+ "ǒ" // U+01D2: LATIN SMALL LETTER O WITH CARON
+ "ǫ" // U+01EB: LATIN SMALL LETTER O WITH OGONEK
+ "ǭ" // U+01ED: LATIN SMALL LETTER O WITH OGONEK AND MACRON
+ "ǿ" // U+01FF: LATIN SMALL LETTER O WITH STROKE AND ACUTE
+ "ȍ" // U+020D: LATIN SMALL LETTER O WITH DOUBLE GRAVE
+ "ȏ" // U+020F: LATIN SMALL LETTER O WITH INVERTED BREVE
+ "ȫ" // U+022B: LATIN SMALL LETTER O WITH DIAERESIS AND MACRON
+ "ȭ" // U+022D: LATIN SMALL LETTER O WITH TILDE AND MACRON
+ "ȯ" // U+022F: LATIN SMALL LETTER O WITH DOT ABOVE
+ "ȱ" // U+0231: LATIN SMALL LETTER O WITH DOT ABOVE AND MACRON
+ "ɔ" // U+0254: LATIN SMALL LETTER OPEN O
+ "ɵ" // U+0275: LATIN SMALL LETTER BARRED O
+ "ᴖ" // U+1D16: LATIN SMALL LETTER TOP HALF O
+ "ᴗ" // U+1D17: LATIN SMALL LETTER BOTTOM HALF O
+ "ᶗ" // U+1D97: LATIN SMALL LETTER OPEN O WITH RETROFLEX HOOK
+ "ṍ" // U+1E4D: LATIN SMALL LETTER O WITH TILDE AND ACUTE
+ "ṏ" // U+1E4F: LATIN SMALL LETTER O WITH TILDE AND DIAERESIS
+ "ṑ" // U+1E51: LATIN SMALL LETTER O WITH MACRON AND GRAVE
+ "ṓ" // U+1E53: LATIN SMALL LETTER O WITH MACRON AND ACUTE
+ "ọ" // U+1ECD: LATIN SMALL LETTER O WITH DOT BELOW
+ "ỏ" // U+1ECF: LATIN SMALL LETTER O WITH HOOK ABOVE
+ "ố" // U+1ED1: LATIN SMALL LETTER O WITH CIRCUMFLEX AND ACUTE
+ "ồ" // U+1ED3: LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE
+ "ổ" // U+1ED5: LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE
+ "ỗ" // U+1ED7: LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE
+ "ộ" // U+1ED9: LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW
+ "ớ" // U+1EDB: LATIN SMALL LETTER O WITH HORN AND ACUTE
+ "ờ" // U+1EDD: LATIN SMALL LETTER O WITH HORN AND GRAVE
+ "ở" // U+1EDF: LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE
+ "ỡ" // U+1EE1: LATIN SMALL LETTER O WITH HORN AND TILDE
+ "ợ" // U+1EE3: LATIN SMALL LETTER O WITH HORN AND DOT BELOW
+ "ₒ" // U+2092: LATIN SUBSCRIPT SMALL LETTER O
+ "ⓞ" // U+24DE: CIRCLED LATIN SMALL LETTER O
+ "ⱺ" // U+2C7A: LATIN SMALL LETTER O WITH LOW RING INSIDE
+ "ꝋ" // U+A74B: LATIN SMALL LETTER O WITH LONG STROKE OVERLAY
+ "ꝍ" // U+A74D: LATIN SMALL LETTER O WITH LOOP
+ "o" // U+FF4F: FULLWIDTH LATIN SMALL LETTER O
,"o", // Folded result
"Œ" // U+0152: LATIN CAPITAL LIGATURE OE
+ "ɶ" // U+0276: LATIN LETTER SMALL CAPITAL OE
,"OE", // Folded result
"Ꝏ" // U+A74E: LATIN CAPITAL LETTER OO
,"OO", // Folded result
"Ȣ" // U+0222: LATIN CAPITAL LETTER OU
+ "ᴕ" // U+1D15: LATIN LETTER SMALL CAPITAL OU
,"OU", // Folded result
"⒪" // U+24AA: PARENTHESIZED LATIN SMALL LETTER O
,"(o)", // Folded result
"œ" // U+0153: LATIN SMALL LIGATURE OE
+ "ᴔ" // U+1D14: LATIN SMALL LETTER TURNED OE
,"oe", // Folded result
"ꝏ" // U+A74F: LATIN SMALL LETTER OO
,"oo", // Folded result
"ȣ" // U+0223: LATIN SMALL LETTER OU
,"ou", // Folded result
"Ƥ" // U+01A4: LATIN CAPITAL LETTER P WITH HOOK
+ "ᴘ" // U+1D18: LATIN LETTER SMALL CAPITAL P
+ "Ṕ" // U+1E54: LATIN CAPITAL LETTER P WITH ACUTE
+ "Ṗ" // U+1E56: LATIN CAPITAL LETTER P WITH DOT ABOVE
+ "Ⓟ" // U+24C5: CIRCLED LATIN CAPITAL LETTER P
+ "Ᵽ" // U+2C63: LATIN CAPITAL LETTER P WITH STROKE
+ "Ꝑ" // U+A750: LATIN CAPITAL LETTER P WITH STROKE THROUGH DESCENDER
+ "Ꝓ" // U+A752: LATIN CAPITAL LETTER P WITH FLOURISH
+ "Ꝕ" // U+A754: LATIN CAPITAL LETTER P WITH SQUIRREL TAIL
+ "P" // U+FF30: FULLWIDTH LATIN CAPITAL LETTER P
,"P", // Folded result
"ƥ" // U+01A5: LATIN SMALL LETTER P WITH HOOK
+ "ᵱ" // U+1D71: LATIN SMALL LETTER P WITH MIDDLE TILDE
+ "ᵽ" // U+1D7D: LATIN SMALL LETTER P WITH STROKE
+ "ᶈ" // U+1D88: LATIN SMALL LETTER P WITH PALATAL HOOK
+ "ṕ" // U+1E55: LATIN SMALL LETTER P WITH ACUTE
+ "ṗ" // U+1E57: LATIN SMALL LETTER P WITH DOT ABOVE
+ "ⓟ" // U+24DF: CIRCLED LATIN SMALL LETTER P
+ "ꝑ" // U+A751: LATIN SMALL LETTER P WITH STROKE THROUGH DESCENDER
+ "ꝓ" // U+A753: LATIN SMALL LETTER P WITH FLOURISH
+ "ꝕ" // U+A755: LATIN SMALL LETTER P WITH SQUIRREL TAIL
+ "ꟼ" // U+A7FC: LATIN EPIGRAPHIC LETTER REVERSED P
+ "p" // U+FF50: FULLWIDTH LATIN SMALL LETTER P
,"p", // Folded result
"⒫" // U+24AB: PARENTHESIZED LATIN SMALL LETTER P
,"(p)", // Folded result
"Ɋ" // U+024A: LATIN CAPITAL LETTER SMALL Q WITH HOOK TAIL
+ "Ⓠ" // U+24C6: CIRCLED LATIN CAPITAL LETTER Q
+ "Ꝗ" // U+A756: LATIN CAPITAL LETTER Q WITH STROKE THROUGH DESCENDER
+ "Ꝙ" // U+A758: LATIN CAPITAL LETTER Q WITH DIAGONAL STROKE
+ "Q" // U+FF31: FULLWIDTH LATIN CAPITAL LETTER Q
,"Q", // Folded result
"ĸ" // U+0138: LATIN SMALL LETTER KRA
+ "ɋ" // U+024B: LATIN SMALL LETTER Q WITH HOOK TAIL
+ "ʠ" // U+02A0: LATIN SMALL LETTER Q WITH HOOK
+ "ⓠ" // U+24E0: CIRCLED LATIN SMALL LETTER Q
+ "ꝗ" // U+A757: LATIN SMALL LETTER Q WITH STROKE THROUGH DESCENDER
+ "ꝙ" // U+A759: LATIN SMALL LETTER Q WITH DIAGONAL STROKE
+ "q" // U+FF51: FULLWIDTH LATIN SMALL LETTER Q
,"q", // Folded result
"⒬" // U+24AC: PARENTHESIZED LATIN SMALL LETTER Q
,"(q)", // Folded result
"ȹ" // U+0239: LATIN SMALL LETTER QP DIGRAPH
,"qp", // Folded result
"Ŕ" // U+0154: LATIN CAPITAL LETTER R WITH ACUTE
+ "Ŗ" // U+0156: LATIN CAPITAL LETTER R WITH CEDILLA
+ "Ř" // U+0158: LATIN CAPITAL LETTER R WITH CARON
+ "Ȑ" // U+0210: LATIN CAPITAL LETTER R WITH DOUBLE GRAVE
+ "Ȓ" // U+0212: LATIN CAPITAL LETTER R WITH INVERTED BREVE
+ "Ɍ" // U+024C: LATIN CAPITAL LETTER R WITH STROKE
+ "ʀ" // U+0280: LATIN LETTER SMALL CAPITAL R
+ "ʁ" // U+0281: LATIN LETTER SMALL CAPITAL INVERTED R
+ "ᴙ" // U+1D19: LATIN LETTER SMALL CAPITAL REVERSED R
+ "ᴚ" // U+1D1A: LATIN LETTER SMALL CAPITAL TURNED R
+ "Ṙ" // U+1E58: LATIN CAPITAL LETTER R WITH DOT ABOVE
+ "Ṛ" // U+1E5A: LATIN CAPITAL LETTER R WITH DOT BELOW
+ "Ṝ" // U+1E5C: LATIN CAPITAL LETTER R WITH DOT BELOW AND MACRON
+ "Ṟ" // U+1E5E: LATIN CAPITAL LETTER R WITH LINE BELOW
+ "Ⓡ" // U+24C7: CIRCLED LATIN CAPITAL LETTER R
+ "Ɽ" // U+2C64: LATIN CAPITAL LETTER R WITH TAIL
+ "Ꝛ" // U+A75A: LATIN CAPITAL LETTER R ROTUNDA
+ "Ꞃ" // U+A782: LATIN CAPITAL LETTER INSULAR R
+ "R" // U+FF32: FULLWIDTH LATIN CAPITAL LETTER R
,"R", // Folded result
"ŕ" // U+0155: LATIN SMALL LETTER R WITH ACUTE
+ "ŗ" // U+0157: LATIN SMALL LETTER R WITH CEDILLA
+ "ř" // U+0159: LATIN SMALL LETTER R WITH CARON
+ "ȑ" // U+0211: LATIN SMALL LETTER R WITH DOUBLE GRAVE
+ "ȓ" // U+0213: LATIN SMALL LETTER R WITH INVERTED BREVE
+ "ɍ" // U+024D: LATIN SMALL LETTER R WITH STROKE
+ "ɼ" // U+027C: LATIN SMALL LETTER R WITH LONG LEG
+ "ɽ" // U+027D: LATIN SMALL LETTER R WITH TAIL
+ "ɾ" // U+027E: LATIN SMALL LETTER R WITH FISHHOOK
+ "ɿ" // U+027F: LATIN SMALL LETTER REVERSED R WITH FISHHOOK
+ "ᵣ" // U+1D63: LATIN SUBSCRIPT SMALL LETTER R
+ "ᵲ" // U+1D72: LATIN SMALL LETTER R WITH MIDDLE TILDE
+ "ᵳ" // U+1D73: LATIN SMALL LETTER R WITH FISHHOOK AND MIDDLE TILDE
+ "ᶉ" // U+1D89: LATIN SMALL LETTER R WITH PALATAL HOOK
+ "ṙ" // U+1E59: LATIN SMALL LETTER R WITH DOT ABOVE
+ "ṛ" // U+1E5B: LATIN SMALL LETTER R WITH DOT BELOW
+ "ṝ" // U+1E5D: LATIN SMALL LETTER R WITH DOT BELOW AND MACRON
+ "ṟ" // U+1E5F: LATIN SMALL LETTER R WITH LINE BELOW
+ "ⓡ" // U+24E1: CIRCLED LATIN SMALL LETTER R
+ "ꝛ" // U+A75B: LATIN SMALL LETTER R ROTUNDA
+ "ꞃ" // U+A783: LATIN SMALL LETTER INSULAR R
+ "r" // U+FF52: FULLWIDTH LATIN SMALL LETTER R
,"r", // Folded result
"⒭" // U+24AD: PARENTHESIZED LATIN SMALL LETTER R
,"(r)", // Folded result
"Ś" // U+015A: LATIN CAPITAL LETTER S WITH ACUTE
+ "Ŝ" // U+015C: LATIN CAPITAL LETTER S WITH CIRCUMFLEX
+ "Ş" // U+015E: LATIN CAPITAL LETTER S WITH CEDILLA
+ "Š" // U+0160: LATIN CAPITAL LETTER S WITH CARON
+ "Ș" // U+0218: LATIN CAPITAL LETTER S WITH COMMA BELOW
+ "Ṡ" // U+1E60: LATIN CAPITAL LETTER S WITH DOT ABOVE
+ "Ṣ" // U+1E62: LATIN CAPITAL LETTER S WITH DOT BELOW
+ "Ṥ" // U+1E64: LATIN CAPITAL LETTER S WITH ACUTE AND DOT ABOVE
+ "Ṧ" // U+1E66: LATIN CAPITAL LETTER S WITH CARON AND DOT ABOVE
+ "Ṩ" // U+1E68: LATIN CAPITAL LETTER S WITH DOT BELOW AND DOT ABOVE
+ "Ⓢ" // U+24C8: CIRCLED LATIN CAPITAL LETTER S
+ "ꜱ" // U+A731: LATIN LETTER SMALL CAPITAL S
+ "ꞅ" // U+A785: LATIN SMALL LETTER INSULAR S
+ "S" // U+FF33: FULLWIDTH LATIN CAPITAL LETTER S
,"S", // Folded result
"ś" // U+015B: LATIN SMALL LETTER S WITH ACUTE
+ "ŝ" // U+015D: LATIN SMALL LETTER S WITH CIRCUMFLEX
+ "ş" // U+015F: LATIN SMALL LETTER S WITH CEDILLA
+ "š" // U+0161: LATIN SMALL LETTER S WITH CARON
+ "ſ" // U+017F: LATIN SMALL LETTER LONG S
+ "ș" // U+0219: LATIN SMALL LETTER S WITH COMMA BELOW
+ "ȿ" // U+023F: LATIN SMALL LETTER S WITH SWASH TAIL
+ "ʂ" // U+0282: LATIN SMALL LETTER S WITH HOOK
+ "ᵴ" // U+1D74: LATIN SMALL LETTER S WITH MIDDLE TILDE
+ "ᶊ" // U+1D8A: LATIN SMALL LETTER S WITH PALATAL HOOK
+ "ṡ" // U+1E61: LATIN SMALL LETTER S WITH DOT ABOVE
+ "ṣ" // U+1E63: LATIN SMALL LETTER S WITH DOT BELOW
+ "ṥ" // U+1E65: LATIN SMALL LETTER S WITH ACUTE AND DOT ABOVE
+ "ṧ" // U+1E67: LATIN SMALL LETTER S WITH CARON AND DOT ABOVE
+ "ṩ" // U+1E69: LATIN SMALL LETTER S WITH DOT BELOW AND DOT ABOVE
+ "ẜ" // U+1E9C: LATIN SMALL LETTER LONG S WITH DIAGONAL STROKE
+ "ẝ" // U+1E9D: LATIN SMALL LETTER LONG S WITH HIGH STROKE
+ "ⓢ" // U+24E2: CIRCLED LATIN SMALL LETTER S
+ "Ꞅ" // U+A784: LATIN CAPITAL LETTER INSULAR S
+ "s" // U+FF53: FULLWIDTH LATIN SMALL LETTER S
,"s", // Folded result
"ẞ" // U+1E9E: LATIN CAPITAL LETTER SHARP S
,"SS", // Folded result
"⒮" // U+24AE: PARENTHESIZED LATIN SMALL LETTER S
,"(s)", // Folded result
"ß" // U+00DF: LATIN SMALL LETTER SHARP S
,"ss", // Folded result
"st" // U+FB06: LATIN SMALL LIGATURE ST
,"st", // Folded result
"Ţ" // U+0162: LATIN CAPITAL LETTER T WITH CEDILLA
+ "Ť" // U+0164: LATIN CAPITAL LETTER T WITH CARON
+ "Ŧ" // U+0166: LATIN CAPITAL LETTER T WITH STROKE
+ "Ƭ" // U+01AC: LATIN CAPITAL LETTER T WITH HOOK
+ "Ʈ" // U+01AE: LATIN CAPITAL LETTER T WITH RETROFLEX HOOK
+ "Ț" // U+021A: LATIN CAPITAL LETTER T WITH COMMA BELOW
+ "Ⱦ" // U+023E: LATIN CAPITAL LETTER T WITH DIAGONAL STROKE
+ "ᴛ" // U+1D1B: LATIN LETTER SMALL CAPITAL T
+ "Ṫ" // U+1E6A: LATIN CAPITAL LETTER T WITH DOT ABOVE
+ "Ṭ" // U+1E6C: LATIN CAPITAL LETTER T WITH DOT BELOW
+ "Ṯ" // U+1E6E: LATIN CAPITAL LETTER T WITH LINE BELOW
+ "Ṱ" // U+1E70: LATIN CAPITAL LETTER T WITH CIRCUMFLEX BELOW
+ "Ⓣ" // U+24C9: CIRCLED LATIN CAPITAL LETTER T
+ "Ꞇ" // U+A786: LATIN CAPITAL LETTER INSULAR T
+ "T" // U+FF34: FULLWIDTH LATIN CAPITAL LETTER T
,"T", // Folded result
"ţ" // U+0163: LATIN SMALL LETTER T WITH CEDILLA
+ "ť" // U+0165: LATIN SMALL LETTER T WITH CARON
+ "ŧ" // U+0167: LATIN SMALL LETTER T WITH STROKE
+ "ƫ" // U+01AB: LATIN SMALL LETTER T WITH PALATAL HOOK
+ "ƭ" // U+01AD: LATIN SMALL LETTER T WITH HOOK
+ "ț" // U+021B: LATIN SMALL LETTER T WITH COMMA BELOW
+ "ȶ" // U+0236: LATIN SMALL LETTER T WITH CURL
+ "ʇ" // U+0287: LATIN SMALL LETTER TURNED T
+ "ʈ" // U+0288: LATIN SMALL LETTER T WITH RETROFLEX HOOK
+ "ᵵ" // U+1D75: LATIN SMALL LETTER T WITH MIDDLE TILDE
+ "ṫ" // U+1E6B: LATIN SMALL LETTER T WITH DOT ABOVE
+ "ṭ" // U+1E6D: LATIN SMALL LETTER T WITH DOT BELOW
+ "ṯ" // U+1E6F: LATIN SMALL LETTER T WITH LINE BELOW
+ "ṱ" // U+1E71: LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW
+ "ẗ" // U+1E97: LATIN SMALL LETTER T WITH DIAERESIS
+ "ⓣ" // U+24E3: CIRCLED LATIN SMALL LETTER T
+ "ⱦ" // U+2C66: LATIN SMALL LETTER T WITH DIAGONAL STROKE
+ "t" // U+FF54: FULLWIDTH LATIN SMALL LETTER T
,"t", // Folded result
"Þ" // U+00DE: LATIN CAPITAL LETTER THORN
+ "Ꝧ" // U+A766: LATIN CAPITAL LETTER THORN WITH STROKE THROUGH DESCENDER
,"TH", // Folded result
"Ꜩ" // U+A728: LATIN CAPITAL LETTER TZ
,"TZ", // Folded result
"⒯" // U+24AF: PARENTHESIZED LATIN SMALL LETTER T
,"(t)", // Folded result
"ʨ" // U+02A8: LATIN SMALL LETTER TC DIGRAPH WITH CURL
,"tc", // Folded result
"þ" // U+00FE: LATIN SMALL LETTER THORN
+ "ᵺ" // U+1D7A: LATIN SMALL LETTER TH WITH STRIKETHROUGH
+ "ꝧ" // U+A767: LATIN SMALL LETTER THORN WITH STROKE THROUGH DESCENDER
,"th", // Folded result
"ʦ" // U+02A6: LATIN SMALL LETTER TS DIGRAPH
,"ts", // Folded result
"ꜩ" // U+A729: LATIN SMALL LETTER TZ
,"tz", // Folded result
"Ù" // U+00D9: LATIN CAPITAL LETTER U WITH GRAVE
+ "Ú" // U+00DA: LATIN CAPITAL LETTER U WITH ACUTE
+ "Û" // U+00DB: LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+ "Ü" // U+00DC: LATIN CAPITAL LETTER U WITH DIAERESIS
+ "Ũ" // U+0168: LATIN CAPITAL LETTER U WITH TILDE
+ "Ū" // U+016A: LATIN CAPITAL LETTER U WITH MACRON
+ "Ŭ" // U+016C: LATIN CAPITAL LETTER U WITH BREVE
+ "Ů" // U+016E: LATIN CAPITAL LETTER U WITH RING ABOVE
+ "Ű" // U+0170: LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
+ "Ų" // U+0172: LATIN CAPITAL LETTER U WITH OGONEK
+ "Ư" // U+01AF: LATIN CAPITAL LETTER U WITH HORN
+ "Ǔ" // U+01D3: LATIN CAPITAL LETTER U WITH CARON
+ "Ǖ" // U+01D5: LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON
+ "Ǘ" // U+01D7: LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE
+ "Ǚ" // U+01D9: LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON
+ "Ǜ" // U+01DB: LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE
+ "Ȕ" // U+0214: LATIN CAPITAL LETTER U WITH DOUBLE GRAVE
+ "Ȗ" // U+0216: LATIN CAPITAL LETTER U WITH INVERTED BREVE
+ "Ʉ" // U+0244: LATIN CAPITAL LETTER U BAR
+ "ᴜ" // U+1D1C: LATIN LETTER SMALL CAPITAL U
+ "ᵾ" // U+1D7E: LATIN SMALL CAPITAL LETTER U WITH STROKE
+ "Ṳ" // U+1E72: LATIN CAPITAL LETTER U WITH DIAERESIS BELOW
+ "Ṵ" // U+1E74: LATIN CAPITAL LETTER U WITH TILDE BELOW
+ "Ṷ" // U+1E76: LATIN CAPITAL LETTER U WITH CIRCUMFLEX BELOW
+ "Ṹ" // U+1E78: LATIN CAPITAL LETTER U WITH TILDE AND ACUTE
+ "Ṻ" // U+1E7A: LATIN CAPITAL LETTER U WITH MACRON AND DIAERESIS
+ "Ụ" // U+1EE4: LATIN CAPITAL LETTER U WITH DOT BELOW
+ "Ủ" // U+1EE6: LATIN CAPITAL LETTER U WITH HOOK ABOVE
+ "Ứ" // U+1EE8: LATIN CAPITAL LETTER U WITH HORN AND ACUTE
+ "Ừ" // U+1EEA: LATIN CAPITAL LETTER U WITH HORN AND GRAVE
+ "Ử" // U+1EEC: LATIN CAPITAL LETTER U WITH HORN AND HOOK ABOVE
+ "Ữ" // U+1EEE: LATIN CAPITAL LETTER U WITH HORN AND TILDE
+ "Ự" // U+1EF0: LATIN CAPITAL LETTER U WITH HORN AND DOT BELOW
+ "Ⓤ" // U+24CA: CIRCLED LATIN CAPITAL LETTER U
+ "U" // U+FF35: FULLWIDTH LATIN CAPITAL LETTER U
,"U", // Folded result
"ù" // U+00F9: LATIN SMALL LETTER U WITH GRAVE
+ "ú" // U+00FA: LATIN SMALL LETTER U WITH ACUTE
+ "û" // U+00FB: LATIN SMALL LETTER U WITH CIRCUMFLEX
+ "ü" // U+00FC: LATIN SMALL LETTER U WITH DIAERESIS
+ "ũ" // U+0169: LATIN SMALL LETTER U WITH TILDE
+ "ū" // U+016B: LATIN SMALL LETTER U WITH MACRON
+ "ŭ" // U+016D: LATIN SMALL LETTER U WITH BREVE
+ "ů" // U+016F: LATIN SMALL LETTER U WITH RING ABOVE
+ "ű" // U+0171: LATIN SMALL LETTER U WITH DOUBLE ACUTE
+ "ų" // U+0173: LATIN SMALL LETTER U WITH OGONEK
+ "ư" // U+01B0: LATIN SMALL LETTER U WITH HORN
+ "ǔ" // U+01D4: LATIN SMALL LETTER U WITH CARON
+ "ǖ" // U+01D6: LATIN SMALL LETTER U WITH DIAERESIS AND MACRON
+ "ǘ" // U+01D8: LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE
+ "ǚ" // U+01DA: LATIN SMALL LETTER U WITH DIAERESIS AND CARON
+ "ǜ" // U+01DC: LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE
+ "ȕ" // U+0215: LATIN SMALL LETTER U WITH DOUBLE GRAVE
+ "ȗ" // U+0217: LATIN SMALL LETTER U WITH INVERTED BREVE
+ "ʉ" // U+0289: LATIN SMALL LETTER U BAR
+ "ᵤ" // U+1D64: LATIN SUBSCRIPT SMALL LETTER U
+ "ᶙ" // U+1D99: LATIN SMALL LETTER U WITH RETROFLEX HOOK
+ "ṳ" // U+1E73: LATIN SMALL LETTER U WITH DIAERESIS BELOW
+ "ṵ" // U+1E75: LATIN SMALL LETTER U WITH TILDE BELOW
+ "ṷ" // U+1E77: LATIN SMALL LETTER U WITH CIRCUMFLEX BELOW
+ "ṹ" // U+1E79: LATIN SMALL LETTER U WITH TILDE AND ACUTE
+ "ṻ" // U+1E7B: LATIN SMALL LETTER U WITH MACRON AND DIAERESIS
+ "ụ" // U+1EE5: LATIN SMALL LETTER U WITH DOT BELOW
+ "ủ" // U+1EE7: LATIN SMALL LETTER U WITH HOOK ABOVE
+ "ứ" // U+1EE9: LATIN SMALL LETTER U WITH HORN AND ACUTE
+ "ừ" // U+1EEB: LATIN SMALL LETTER U WITH HORN AND GRAVE
+ "ử" // U+1EED: LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE
+ "ữ" // U+1EEF: LATIN SMALL LETTER U WITH HORN AND TILDE
+ "ự" // U+1EF1: LATIN SMALL LETTER U WITH HORN AND DOT BELOW
+ "ⓤ" // U+24E4: CIRCLED LATIN SMALL LETTER U
+ "u" // U+FF55: FULLWIDTH LATIN SMALL LETTER U
,"u", // Folded result
"⒰" // U+24B0: PARENTHESIZED LATIN SMALL LETTER U
,"(u)", // Folded result
"ᵫ" // U+1D6B: LATIN SMALL LETTER UE
,"ue", // Folded result
"Ʋ" // U+01B2: LATIN CAPITAL LETTER V WITH HOOK
+ "Ʌ" // U+0245: LATIN CAPITAL LETTER TURNED V
+ "ᴠ" // U+1D20: LATIN LETTER SMALL CAPITAL V
+ "Ṽ" // U+1E7C: LATIN CAPITAL LETTER V WITH TILDE
+ "Ṿ" // U+1E7E: LATIN CAPITAL LETTER V WITH DOT BELOW
+ "Ỽ" // U+1EFC: LATIN CAPITAL LETTER MIDDLE-WELSH V
+ "Ⓥ" // U+24CB: CIRCLED LATIN CAPITAL LETTER V
+ "Ꝟ" // U+A75E: LATIN CAPITAL LETTER V WITH DIAGONAL STROKE
+ "Ꝩ" // U+A768: LATIN CAPITAL LETTER VEND
+ "V" // U+FF36: FULLWIDTH LATIN CAPITAL LETTER V
,"V", // Folded result
"ʋ" // U+028B: LATIN SMALL LETTER V WITH HOOK
+ "ʌ" // U+028C: LATIN SMALL LETTER TURNED V
+ "ᵥ" // U+1D65: LATIN SUBSCRIPT SMALL LETTER V
+ "ᶌ" // U+1D8C: LATIN SMALL LETTER V WITH PALATAL HOOK
+ "ṽ" // U+1E7D: LATIN SMALL LETTER V WITH TILDE
+ "ṿ" // U+1E7F: LATIN SMALL LETTER V WITH DOT BELOW
+ "ⓥ" // U+24E5: CIRCLED LATIN SMALL LETTER V
+ "ⱱ" // U+2C71: LATIN SMALL LETTER V WITH RIGHT HOOK
+ "ⱴ" // U+2C74: LATIN SMALL LETTER V WITH CURL
+ "ꝟ" // U+A75F: LATIN SMALL LETTER V WITH DIAGONAL STROKE
+ "v" // U+FF56: FULLWIDTH LATIN SMALL LETTER V
,"v", // Folded result
"Ꝡ" // U+A760: LATIN CAPITAL LETTER VY
,"VY", // Folded result
"⒱" // U+24B1: PARENTHESIZED LATIN SMALL LETTER V
,"(v)", // Folded result
"ꝡ" // U+A761: LATIN SMALL LETTER VY
,"vy", // Folded result
"Ŵ" // U+0174: LATIN CAPITAL LETTER W WITH CIRCUMFLEX
+ "Ƿ" // U+01F7: LATIN CAPITAL LETTER WYNN
+ "ᴡ" // U+1D21: LATIN LETTER SMALL CAPITAL W
+ "Ẁ" // U+1E80: LATIN CAPITAL LETTER W WITH GRAVE
+ "Ẃ" // U+1E82: LATIN CAPITAL LETTER W WITH ACUTE
+ "Ẅ" // U+1E84: LATIN CAPITAL LETTER W WITH DIAERESIS
+ "Ẇ" // U+1E86: LATIN CAPITAL LETTER W WITH DOT ABOVE
+ "Ẉ" // U+1E88: LATIN CAPITAL LETTER W WITH DOT BELOW
+ "Ⓦ" // U+24CC: CIRCLED LATIN CAPITAL LETTER W
+ "Ⱳ" // U+2C72: LATIN CAPITAL LETTER W WITH HOOK
+ "W" // U+FF37: FULLWIDTH LATIN CAPITAL LETTER W
,"W", // Folded result
"ŵ" // U+0175: LATIN SMALL LETTER W WITH CIRCUMFLEX
+ "ƿ" // U+01BF: LATIN LETTER WYNN
+ "ʍ" // U+028D: LATIN SMALL LETTER TURNED W
+ "ẁ" // U+1E81: LATIN SMALL LETTER W WITH GRAVE
+ "ẃ" // U+1E83: LATIN SMALL LETTER W WITH ACUTE
+ "ẅ" // U+1E85: LATIN SMALL LETTER W WITH DIAERESIS
+ "ẇ" // U+1E87: LATIN SMALL LETTER W WITH DOT ABOVE
+ "ẉ" // U+1E89: LATIN SMALL LETTER W WITH DOT BELOW
+ "ẘ" // U+1E98: LATIN SMALL LETTER W WITH RING ABOVE
+ "ⓦ" // U+24E6: CIRCLED LATIN SMALL LETTER W
+ "ⱳ" // U+2C73: LATIN SMALL LETTER W WITH HOOK
+ "w" // U+FF57: FULLWIDTH LATIN SMALL LETTER W
,"w", // Folded result
"⒲" // U+24B2: PARENTHESIZED LATIN SMALL LETTER W
,"(w)", // Folded result
"Ẋ" // U+1E8A: LATIN CAPITAL LETTER X WITH DOT ABOVE
+ "Ẍ" // U+1E8C: LATIN CAPITAL LETTER X WITH DIAERESIS
+ "Ⓧ" // U+24CD: CIRCLED LATIN CAPITAL LETTER X
+ "X" // U+FF38: FULLWIDTH LATIN CAPITAL LETTER X
,"X", // Folded result
"ᶍ" // U+1D8D: LATIN SMALL LETTER X WITH PALATAL HOOK
+ "ẋ" // U+1E8B: LATIN SMALL LETTER X WITH DOT ABOVE
+ "ẍ" // U+1E8D: LATIN SMALL LETTER X WITH DIAERESIS
+ "ₓ" // U+2093: LATIN SUBSCRIPT SMALL LETTER X
+ "ⓧ" // U+24E7: CIRCLED LATIN SMALL LETTER X
+ "x" // U+FF58: FULLWIDTH LATIN SMALL LETTER X
,"x", // Folded result
"⒳" // U+24B3: PARENTHESIZED LATIN SMALL LETTER X
,"(x)", // Folded result
"Ý" // U+00DD: LATIN CAPITAL LETTER Y WITH ACUTE
+ "Ŷ" // U+0176: LATIN CAPITAL LETTER Y WITH CIRCUMFLEX
+ "Ÿ" // U+0178: LATIN CAPITAL LETTER Y WITH DIAERESIS
+ "Ƴ" // U+01B3: LATIN CAPITAL LETTER Y WITH HOOK
+ "Ȳ" // U+0232: LATIN CAPITAL LETTER Y WITH MACRON
+ "Ɏ" // U+024E: LATIN CAPITAL LETTER Y WITH STROKE
+ "ʏ" // U+028F: LATIN LETTER SMALL CAPITAL Y
+ "Ẏ" // U+1E8E: LATIN CAPITAL LETTER Y WITH DOT ABOVE
+ "Ỳ" // U+1EF2: LATIN CAPITAL LETTER Y WITH GRAVE
+ "Ỵ" // U+1EF4: LATIN CAPITAL LETTER Y WITH DOT BELOW
+ "Ỷ" // U+1EF6: LATIN CAPITAL LETTER Y WITH HOOK ABOVE
+ "Ỹ" // U+1EF8: LATIN CAPITAL LETTER Y WITH TILDE
+ "Ỿ" // U+1EFE: LATIN CAPITAL LETTER Y WITH LOOP
+ "Ⓨ" // U+24CE: CIRCLED LATIN CAPITAL LETTER Y
+ "Y" // U+FF39: FULLWIDTH LATIN CAPITAL LETTER Y
,"Y", // Folded result
"ý" // U+00FD: LATIN SMALL LETTER Y WITH ACUTE
+ "ÿ" // U+00FF: LATIN SMALL LETTER Y WITH DIAERESIS
+ "ŷ" // U+0177: LATIN SMALL LETTER Y WITH CIRCUMFLEX
+ "ƴ" // U+01B4: LATIN SMALL LETTER Y WITH HOOK
+ "ȳ" // U+0233: LATIN SMALL LETTER Y WITH MACRON
+ "ɏ" // U+024F: LATIN SMALL LETTER Y WITH STROKE
+ "ʎ" // U+028E: LATIN SMALL LETTER TURNED Y
+ "ẏ" // U+1E8F: LATIN SMALL LETTER Y WITH DOT ABOVE
+ "ẙ" // U+1E99: LATIN SMALL LETTER Y WITH RING ABOVE
+ "ỳ" // U+1EF3: LATIN SMALL LETTER Y WITH GRAVE
+ "ỵ" // U+1EF5: LATIN SMALL LETTER Y WITH DOT BELOW
+ "ỷ" // U+1EF7: LATIN SMALL LETTER Y WITH HOOK ABOVE
+ "ỹ" // U+1EF9: LATIN SMALL LETTER Y WITH TILDE
+ "ỿ" // U+1EFF: LATIN SMALL LETTER Y WITH LOOP
+ "ⓨ" // U+24E8: CIRCLED LATIN SMALL LETTER Y
+ "y" // U+FF59: FULLWIDTH LATIN SMALL LETTER Y
,"y", // Folded result
"⒴" // U+24B4: PARENTHESIZED LATIN SMALL LETTER Y
,"(y)", // Folded result
"Ź" // U+0179: LATIN CAPITAL LETTER Z WITH ACUTE
+ "Ż" // U+017B: LATIN CAPITAL LETTER Z WITH DOT ABOVE
+ "Ž" // U+017D: LATIN CAPITAL LETTER Z WITH CARON
+ "Ƶ" // U+01B5: LATIN CAPITAL LETTER Z WITH STROKE
+ "Ȝ" // U+021C: LATIN CAPITAL LETTER YOGH
+ "Ȥ" // U+0224: LATIN CAPITAL LETTER Z WITH HOOK
+ "ᴢ" // U+1D22: LATIN LETTER SMALL CAPITAL Z
+ "Ẑ" // U+1E90: LATIN CAPITAL LETTER Z WITH CIRCUMFLEX
+ "Ẓ" // U+1E92: LATIN CAPITAL LETTER Z WITH DOT BELOW
+ "Ẕ" // U+1E94: LATIN CAPITAL LETTER Z WITH LINE BELOW
+ "Ⓩ" // U+24CF: CIRCLED LATIN CAPITAL LETTER Z
+ "Ⱬ" // U+2C6B: LATIN CAPITAL LETTER Z WITH DESCENDER
+ "Ꝣ" // U+A762: LATIN CAPITAL LETTER VISIGOTHIC Z
+ "Z" // U+FF3A: FULLWIDTH LATIN CAPITAL LETTER Z
,"Z", // Folded result
"ź" // U+017A: LATIN SMALL LETTER Z WITH ACUTE
+ "ż" // U+017C: LATIN SMALL LETTER Z WITH DOT ABOVE
+ "ž" // U+017E: LATIN SMALL LETTER Z WITH CARON
+ "ƶ" // U+01B6: LATIN SMALL LETTER Z WITH STROKE
+ "ȝ" // U+021D: LATIN SMALL LETTER YOGH
+ "ȥ" // U+0225: LATIN SMALL LETTER Z WITH HOOK
+ "ɀ" // U+0240: LATIN SMALL LETTER Z WITH SWASH TAIL
+ "ʐ" // U+0290: LATIN SMALL LETTER Z WITH RETROFLEX HOOK
+ "ʑ" // U+0291: LATIN SMALL LETTER Z WITH CURL
+ "ᵶ" // U+1D76: LATIN SMALL LETTER Z WITH MIDDLE TILDE
+ "ᶎ" // U+1D8E: LATIN SMALL LETTER Z WITH PALATAL HOOK
+ "ẑ" // U+1E91: LATIN SMALL LETTER Z WITH CIRCUMFLEX
+ "ẓ" // U+1E93: LATIN SMALL LETTER Z WITH DOT BELOW
+ "ẕ" // U+1E95: LATIN SMALL LETTER Z WITH LINE BELOW
+ "ⓩ" // U+24E9: CIRCLED LATIN SMALL LETTER Z
+ "ⱬ" // U+2C6C: LATIN SMALL LETTER Z WITH DESCENDER
+ "ꝣ" // U+A763: LATIN SMALL LETTER VISIGOTHIC Z
+ "z" // U+FF5A: FULLWIDTH LATIN SMALL LETTER Z
,"z", // Folded result
"⒵" // U+24B5: PARENTHESIZED LATIN SMALL LETTER Z
,"(z)", // Folded result
"⁰" // U+2070: SUPERSCRIPT ZERO
+ "₀" // U+2080: SUBSCRIPT ZERO
+ "⓪" // U+24EA: CIRCLED DIGIT ZERO
+ "⓿" // U+24FF: NEGATIVE CIRCLED DIGIT ZERO
+ "0" // U+FF10: FULLWIDTH DIGIT ZERO
,"0", // Folded result
"¹" // U+00B9: SUPERSCRIPT ONE
+ "₁" // U+2081: SUBSCRIPT ONE
+ "①" // U+2460: CIRCLED DIGIT ONE
+ "⓵" // U+24F5: DOUBLE CIRCLED DIGIT ONE
+ "❶" // U+2776: DINGBAT NEGATIVE CIRCLED DIGIT ONE
+ "➀" // U+2780: DINGBAT CIRCLED SANS-SERIF DIGIT ONE
+ "➊" // U+278A: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ONE
+ "1" // U+FF11: FULLWIDTH DIGIT ONE
,"1", // Folded result
"⒈" // U+2488: DIGIT ONE FULL STOP
,"1.", // Folded result
"⑴" // U+2474: PARENTHESIZED DIGIT ONE
,"(1)", // Folded result
"²" // U+00B2: SUPERSCRIPT TWO
+ "₂" // U+2082: SUBSCRIPT TWO
+ "②" // U+2461: CIRCLED DIGIT TWO
+ "⓶" // U+24F6: DOUBLE CIRCLED DIGIT TWO
+ "❷" // U+2777: DINGBAT NEGATIVE CIRCLED DIGIT TWO
+ "➁" // U+2781: DINGBAT CIRCLED SANS-SERIF DIGIT TWO
+ "➋" // U+278B: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT TWO
+ "2" // U+FF12: FULLWIDTH DIGIT TWO
,"2", // Folded result
"⒉" // U+2489: DIGIT TWO FULL STOP
,"2.", // Folded result
"⑵" // U+2475: PARENTHESIZED DIGIT TWO
,"(2)", // Folded result
"³" // U+00B3: SUPERSCRIPT THREE
+ "₃" // U+2083: SUBSCRIPT THREE
+ "③" // U+2462: CIRCLED DIGIT THREE
+ "⓷" // U+24F7: DOUBLE CIRCLED DIGIT THREE
+ "❸" // U+2778: DINGBAT NEGATIVE CIRCLED DIGIT THREE
+ "➂" // U+2782: DINGBAT CIRCLED SANS-SERIF DIGIT THREE
+ "➌" // U+278C: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT THREE
+ "3" // U+FF13: FULLWIDTH DIGIT THREE
,"3", // Folded result
"⒊" // U+248A: DIGIT THREE FULL STOP
,"3.", // Folded result
"⑶" // U+2476: PARENTHESIZED DIGIT THREE
,"(3)", // Folded result
"⁴" // U+2074: SUPERSCRIPT FOUR
+ "₄" // U+2084: SUBSCRIPT FOUR
+ "④" // U+2463: CIRCLED DIGIT FOUR
+ "⓸" // U+24F8: DOUBLE CIRCLED DIGIT FOUR
+ "❹" // U+2779: DINGBAT NEGATIVE CIRCLED DIGIT FOUR
+ "➃" // U+2783: DINGBAT CIRCLED SANS-SERIF DIGIT FOUR
+ "➍" // U+278D: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT FOUR
+ "4" // U+FF14: FULLWIDTH DIGIT FOUR
,"4", // Folded result
"⒋" // U+248B: DIGIT FOUR FULL STOP
,"4.", // Folded result
"⑷" // U+2477: PARENTHESIZED DIGIT FOUR
,"(4)", // Folded result
"⁵" // U+2075: SUPERSCRIPT FIVE
+ "₅" // U+2085: SUBSCRIPT FIVE
+ "⑤" // U+2464: CIRCLED DIGIT FIVE
+ "⓹" // U+24F9: DOUBLE CIRCLED DIGIT FIVE
+ "❺" // U+277A: DINGBAT NEGATIVE CIRCLED DIGIT FIVE
+ "➄" // U+2784: DINGBAT CIRCLED SANS-SERIF DIGIT FIVE
+ "➎" // U+278E: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT FIVE
+ "5" // U+FF15: FULLWIDTH DIGIT FIVE
,"5", // Folded result
"⒌" // U+248C: DIGIT FIVE FULL STOP
,"5.", // Folded result
"⑸" // U+2478: PARENTHESIZED DIGIT FIVE
,"(5)", // Folded result
"⁶" // U+2076: SUPERSCRIPT SIX
+ "₆" // U+2086: SUBSCRIPT SIX
+ "⑥" // U+2465: CIRCLED DIGIT SIX
+ "⓺" // U+24FA: DOUBLE CIRCLED DIGIT SIX
+ "❻" // U+277B: DINGBAT NEGATIVE CIRCLED DIGIT SIX
+ "➅" // U+2785: DINGBAT CIRCLED SANS-SERIF DIGIT SIX
+ "➏" // U+278F: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT SIX
+ "6" // U+FF16: FULLWIDTH DIGIT SIX
,"6", // Folded result
"⒍" // U+248D: DIGIT SIX FULL STOP
,"6.", // Folded result
"⑹" // U+2479: PARENTHESIZED DIGIT SIX
,"(6)", // Folded result
"⁷" // U+2077: SUPERSCRIPT SEVEN
+ "₇" // U+2087: SUBSCRIPT SEVEN
+ "⑦" // U+2466: CIRCLED DIGIT SEVEN
+ "⓻" // U+24FB: DOUBLE CIRCLED DIGIT SEVEN
+ "❼" // U+277C: DINGBAT NEGATIVE CIRCLED DIGIT SEVEN
+ "➆" // U+2786: DINGBAT CIRCLED SANS-SERIF DIGIT SEVEN
+ "➐" // U+2790: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT SEVEN
+ "7" // U+FF17: FULLWIDTH DIGIT SEVEN
,"7", // Folded result
"⒎" // U+248E: DIGIT SEVEN FULL STOP
,"7.", // Folded result
"⑺" // U+247A: PARENTHESIZED DIGIT SEVEN
,"(7)", // Folded result
"⁸" // U+2078: SUPERSCRIPT EIGHT
+ "₈" // U+2088: SUBSCRIPT EIGHT
+ "⑧" // U+2467: CIRCLED DIGIT EIGHT
+ "⓼" // U+24FC: DOUBLE CIRCLED DIGIT EIGHT
+ "❽" // U+277D: DINGBAT NEGATIVE CIRCLED DIGIT EIGHT
+ "➇" // U+2787: DINGBAT CIRCLED SANS-SERIF DIGIT EIGHT
+ "➑" // U+2791: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT EIGHT
+ "8" // U+FF18: FULLWIDTH DIGIT EIGHT
,"8", // Folded result
"⒏" // U+248F: DIGIT EIGHT FULL STOP
,"8.", // Folded result
"⑻" // U+247B: PARENTHESIZED DIGIT EIGHT
,"(8)", // Folded result
"⁹" // U+2079: SUPERSCRIPT NINE
+ "₉" // U+2089: SUBSCRIPT NINE
+ "⑨" // U+2468: CIRCLED DIGIT NINE
+ "⓽" // U+24FD: DOUBLE CIRCLED DIGIT NINE
+ "❾" // U+277E: DINGBAT NEGATIVE CIRCLED DIGIT NINE
+ "➈" // U+2788: DINGBAT CIRCLED SANS-SERIF DIGIT NINE
+ "➒" // U+2792: DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT NINE
+ "9" // U+FF19: FULLWIDTH DIGIT NINE
,"9", // Folded result
"⒐" // U+2490: DIGIT NINE FULL STOP
,"9.", // Folded result
"⑼" // U+247C: PARENTHESIZED DIGIT NINE
,"(9)", // Folded result
"⑩" // U+2469: CIRCLED NUMBER TEN
+ "⓾" // U+24FE: DOUBLE CIRCLED NUMBER TEN
+ "❿" // U+277F: DINGBAT NEGATIVE CIRCLED NUMBER TEN
+ "➉" // U+2789: DINGBAT CIRCLED SANS-SERIF NUMBER TEN
+ "➓" // U+2793: DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
,"10", // Folded result
"⒑" // U+2491: NUMBER TEN FULL STOP
,"10.", // Folded result
"⑽" // U+247D: PARENTHESIZED NUMBER TEN
,"(10)", // Folded result
"⑪" // U+246A: CIRCLED NUMBER ELEVEN
+ "⓫" // U+24EB: NEGATIVE CIRCLED NUMBER ELEVEN
,"11", // Folded result
"⒒" // U+2492: NUMBER ELEVEN FULL STOP
,"11.", // Folded result
"⑾" // U+247E: PARENTHESIZED NUMBER ELEVEN
,"(11)", // Folded result
"⑫" // U+246B: CIRCLED NUMBER TWELVE
+ "⓬" // U+24EC: NEGATIVE CIRCLED NUMBER TWELVE
,"12", // Folded result
"⒓" // U+2493: NUMBER TWELVE FULL STOP
,"12.", // Folded result
"⑿" // U+247F: PARENTHESIZED NUMBER TWELVE
,"(12)", // Folded result
"⑬" // U+246C: CIRCLED NUMBER THIRTEEN
+ "⓭" // U+24ED: NEGATIVE CIRCLED NUMBER THIRTEEN
,"13", // Folded result
"⒔" // U+2494: NUMBER THIRTEEN FULL STOP
,"13.", // Folded result
"⒀" // U+2480: PARENTHESIZED NUMBER THIRTEEN
,"(13)", // Folded result
"⑭" // U+246D: CIRCLED NUMBER FOURTEEN
+ "⓮" // U+24EE: NEGATIVE CIRCLED NUMBER FOURTEEN
,"14", // Folded result
"⒕" // U+2495: NUMBER FOURTEEN FULL STOP
,"14.", // Folded result
"⒁" // U+2481: PARENTHESIZED NUMBER FOURTEEN
,"(14)", // Folded result
"⑮" // U+246E: CIRCLED NUMBER FIFTEEN
+ "⓯" // U+24EF: NEGATIVE CIRCLED NUMBER FIFTEEN
,"15", // Folded result
"⒖" // U+2496: NUMBER FIFTEEN FULL STOP
,"15.", // Folded result
"⒂" // U+2482: PARENTHESIZED NUMBER FIFTEEN
,"(15)", // Folded result
"⑯" // U+246F: CIRCLED NUMBER SIXTEEN
+ "⓰" // U+24F0: NEGATIVE CIRCLED NUMBER SIXTEEN
,"16", // Folded result
"⒗" // U+2497: NUMBER SIXTEEN FULL STOP
,"16.", // Folded result
"⒃" // U+2483: PARENTHESIZED NUMBER SIXTEEN
,"(16)", // Folded result
"⑰" // U+2470: CIRCLED NUMBER SEVENTEEN
+ "⓱" // U+24F1: NEGATIVE CIRCLED NUMBER SEVENTEEN
,"17", // Folded result
"⒘" // U+2498: NUMBER SEVENTEEN FULL STOP
,"17.", // Folded result
"⒄" // U+2484: PARENTHESIZED NUMBER SEVENTEEN
,"(17)", // Folded result
"⑱" // U+2471: CIRCLED NUMBER EIGHTEEN
+ "⓲" // U+24F2: NEGATIVE CIRCLED NUMBER EIGHTEEN
,"18", // Folded result
"⒙" // U+2499: NUMBER EIGHTEEN FULL STOP
,"18.", // Folded result
"⒅" // U+2485: PARENTHESIZED NUMBER EIGHTEEN
,"(18)", // Folded result
"⑲" // U+2472: CIRCLED NUMBER NINETEEN
+ "⓳" // U+24F3: NEGATIVE CIRCLED NUMBER NINETEEN
,"19", // Folded result
"⒚" // U+249A: NUMBER NINETEEN FULL STOP
,"19.", // Folded result
"⒆" // U+2486: PARENTHESIZED NUMBER NINETEEN
,"(19)", // Folded result
"⑳" // U+2473: CIRCLED NUMBER TWENTY
+ "⓴" // U+24F4: NEGATIVE CIRCLED NUMBER TWENTY
,"20", // Folded result
"⒛" // U+249B: NUMBER TWENTY FULL STOP
,"20.", // Folded result
"⒇" // U+2487: PARENTHESIZED NUMBER TWENTY
,"(20)", // Folded result
"«" // U+00AB: LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ "»" // U+00BB: RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ "“" // U+201C: LEFT DOUBLE QUOTATION MARK
+ "”" // U+201D: RIGHT DOUBLE QUOTATION MARK
+ "„" // U+201E: DOUBLE LOW-9 QUOTATION MARK
+ "″" // U+2033: DOUBLE PRIME
+ "‶" // U+2036: REVERSED DOUBLE PRIME
+ "❝" // U+275D: HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT
+ "❞" // U+275E: HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT
+ "❮" // U+276E: HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ "❯" // U+276F: HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ """ // U+FF02: FULLWIDTH QUOTATION MARK
,"\"", // Folded result
"‘" // U+2018: LEFT SINGLE QUOTATION MARK
+ "’" // U+2019: RIGHT SINGLE QUOTATION MARK
+ "‚" // U+201A: SINGLE LOW-9 QUOTATION MARK
+ "‛" // U+201B: SINGLE HIGH-REVERSED-9 QUOTATION MARK
+ "′" // U+2032: PRIME
+ "‵" // U+2035: REVERSED PRIME
+ "‹" // U+2039: SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ "›" // U+203A: SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ "❛" // U+275B: HEAVY SINGLE TURNED COMMA QUOTATION MARK ORNAMENT
+ "❜" // U+275C: HEAVY SINGLE COMMA QUOTATION MARK ORNAMENT
+ "'" // U+FF07: FULLWIDTH APOSTROPHE
,"'", // Folded result
"‐" // U+2010: HYPHEN
+ "‑" // U+2011: NON-BREAKING HYPHEN
+ "‒" // U+2012: FIGURE DASH
+ "–" // U+2013: EN DASH
+ "—" // U+2014: EM DASH
+ "⁻" // U+207B: SUPERSCRIPT MINUS
+ "₋" // U+208B: SUBSCRIPT MINUS
+ "-" // U+FF0D: FULLWIDTH HYPHEN-MINUS
,"-", // Folded result
"⁅" // U+2045: LEFT SQUARE BRACKET WITH QUILL
+ "❲" // U+2772: LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ "[" // U+FF3B: FULLWIDTH LEFT SQUARE BRACKET
,"[", // Folded result
"⁆" // U+2046: RIGHT SQUARE BRACKET WITH QUILL
+ "❳" // U+2773: LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ "]" // U+FF3D: FULLWIDTH RIGHT SQUARE BRACKET
,"]", // Folded result
"⁽" // U+207D: SUPERSCRIPT LEFT PARENTHESIS
+ "₍" // U+208D: SUBSCRIPT LEFT PARENTHESIS
+ "❨" // U+2768: MEDIUM LEFT PARENTHESIS ORNAMENT
+ "❪" // U+276A: MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ "(" // U+FF08: FULLWIDTH LEFT PARENTHESIS
,"(", // Folded result
"⸨" // U+2E28: LEFT DOUBLE PARENTHESIS
,"((", // Folded result
"⁾" // U+207E: SUPERSCRIPT RIGHT PARENTHESIS
+ "₎" // U+208E: SUBSCRIPT RIGHT PARENTHESIS
+ "❩" // U+2769: MEDIUM RIGHT PARENTHESIS ORNAMENT
+ "❫" // U+276B: MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ ")" // U+FF09: FULLWIDTH RIGHT PARENTHESIS
,")", // Folded result
"⸩" // U+2E29: RIGHT DOUBLE PARENTHESIS
,"))", // Folded result
"❬" // U+276C: MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ "❰" // U+2770: HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ "<" // U+FF1C: FULLWIDTH LESS-THAN SIGN
,"<", // Folded result
"❭" // U+276D: MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ "❱" // U+2771: HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ ">" // U+FF1E: FULLWIDTH GREATER-THAN SIGN
,">", // Folded result
"❴" // U+2774: MEDIUM LEFT CURLY BRACKET ORNAMENT
+ "{" // U+FF5B: FULLWIDTH LEFT CURLY BRACKET
,"{", // Folded result
"❵" // U+2775: MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ "}" // U+FF5D: FULLWIDTH RIGHT CURLY BRACKET
,"}", // Folded result
"⁺" // U+207A: SUPERSCRIPT PLUS SIGN
+ "₊" // U+208A: SUBSCRIPT PLUS SIGN
+ "+" // U+FF0B: FULLWIDTH PLUS SIGN
,"+", // Folded result
"⁼" // U+207C: SUPERSCRIPT EQUALS SIGN
+ "₌" // U+208C: SUBSCRIPT EQUALS SIGN
+ "=" // U+FF1D: FULLWIDTH EQUALS SIGN
,"=", // Folded result
"!" // U+FF01: FULLWIDTH EXCLAMATION MARK
,"!", // Folded result
"‼" // U+203C: DOUBLE EXCLAMATION MARK
,"!!", // Folded result
"⁉" // U+2049: EXCLAMATION QUESTION MARK
,"!?", // Folded result
"#" // U+FF03: FULLWIDTH NUMBER SIGN
,"#", // Folded result
"$" // U+FF04: FULLWIDTH DOLLAR SIGN
,"$", // Folded result
"⁒" // U+2052: COMMERCIAL MINUS SIGN
+ "%" // U+FF05: FULLWIDTH PERCENT SIGN
,"%", // Folded result
"&" // U+FF06: FULLWIDTH AMPERSAND
,"&", // Folded result
"⁎" // U+204E: LOW ASTERISK
+ "*" // U+FF0A: FULLWIDTH ASTERISK
,"*", // Folded result
"," // U+FF0C: FULLWIDTH COMMA
,",", // Folded result
"." // U+FF0E: FULLWIDTH FULL STOP
,".", // Folded result
"⁄" // U+2044: FRACTION SLASH
+ "/" // U+FF0F: FULLWIDTH SOLIDUS
,"/", // Folded result
":" // U+FF1A: FULLWIDTH COLON
,":", // Folded result
"⁏" // U+204F: REVERSED SEMICOLON
+ ";" // U+FF1B: FULLWIDTH SEMICOLON
,";", // Folded result
"?" // U+FF1F: FULLWIDTH QUESTION MARK
,"?", // Folded result
"⁇" // U+2047: DOUBLE QUESTION MARK
,"??", // Folded result
"⁈" // U+2048: QUESTION EXCLAMATION MARK
,"?!", // Folded result
"@" // U+FF20: FULLWIDTH COMMERCIAL AT
,"@", // Folded result
"\" // U+FF3C: FULLWIDTH REVERSE SOLIDUS
,"\\", // Folded result
"‸" // U+2038: CARET
+ "^" // U+FF3E: FULLWIDTH CIRCUMFLEX ACCENT
,"^", // Folded result
"_" // U+FF3F: FULLWIDTH LOW LINE
,"_", // Folded result
"⁓" // U+2053: SWUNG DASH
+ "~" // U+FF5E: FULLWIDTH TILDE
,"~", // Folded result
};
// Construct input text and expected output tokens
List<String> expectedUnfoldedTokens = new ArrayList<String>();
List<String> expectedFoldedTokens = new ArrayList<String>();
StringBuilder inputText = new StringBuilder();
for (int n = 0 ; n < foldings.length ; n += 2) {
if (n > 0) {
inputText.append(' '); // Space between tokens
}
inputText.append(foldings[n]);
// Construct the expected output tokens: both the unfolded and folded string,
// with the folded duplicated as many times as the number of characters in
// the input text.
StringBuilder expected = new StringBuilder();
int numChars = foldings[n].length();
for (int m = 0 ; m < numChars; ++m) {
expected.append(foldings[n + 1]);
}
expectedUnfoldedTokens.add(foldings[n]);
expectedFoldedTokens.add(expected.toString());
}
TokenStream stream = new MockTokenizer(new StringReader(inputText.toString()), MockTokenizer.WHITESPACE, false);
ASCIIFoldingFilter filter = new ASCIIFoldingFilter(stream, random().nextBoolean());
CharTermAttribute termAtt = filter.getAttribute(CharTermAttribute.class);
Iterator<String> unfoldedIter = expectedUnfoldedTokens.iterator();
Iterator<String> foldedIter = expectedFoldedTokens.iterator();
filter.reset();
while (foldedIter.hasNext()) {
assertNextTerms(unfoldedIter.next(), foldedIter.next(), filter, termAtt);
}
assertFalse(filter.incrementToken());
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer,
new ASCIIFoldingFilter(tokenizer, random().nextBoolean()));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new KeywordTokenizer(reader);
return new TokenStreamComponents(tokenizer,
new ASCIIFoldingFilter(tokenizer, random().nextBoolean()));
}
};
checkOneTerm(a, "", "");
}
}
| apache-2.0 |
SciGaP/seagrid-rich-client | src/main/java/cct/dialogs/JEditorFrame_AboutBox.java | 7972 | /* ***** BEGIN LICENSE BLOCK *****
Version: Apache 2.0/GPL 3.0/LGPL 3.0
CCT - Computational Chemistry Tools
Jamberoo - Java Molecules Editor
Copyright 2008-2015 Dr. Vladislav Vasilyev
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributor(s):
Dr. Vladislav Vasilyev <vvv900@gmail.com> (original author)
Alternatively, the contents of this file may be used under the terms of
either the GNU General Public License Version 2 or later (the "GPL"), or
the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
in which case the provisions of the GPL or the LGPL are applicable instead
of those above. If you wish to allow use of your version of this file only
under the terms of either the GPL or the LGPL, and not to allow others to
use your version of this file under the terms of the Apache 2.0, indicate your
decision by deleting the provisions above and replace them with the notice
and other provisions required by the GPL or the LGPL. If you do not delete
the provisions above, a recipient may use your version of this file under
the terms of any one of the Apache 2.0, the GPL or the LGPL.
***** END LICENSE BLOCK *****/
package cct.dialogs;
import cct.GlobalSettings;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Frame;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
/**
 * "About" dialog for the Jamberoo editor.
 *
 * <p>Shows the application icon, the product name and version, and two
 * hyperlink-style labels: one opens the project home page in the default
 * browser, the other starts an e-mail to the support address. Both labels
 * turn blue on mouse roll-over (see {@link #mouseEntered(MouseEvent)}).
 *
 * @author not attributable
 * @version 1.0
 */
public class JEditorFrame_AboutBox
    extends JDialog implements ActionListener, FocusListener, MouseListener {

  // Layout containers: panel1 is the root; panel2 (NORTH) holds the icon and
  // text; insetsPanel1 (SOUTH) holds the OK button; insetsPanel2 holds the
  // icon; insetsPanel3 holds the four text lines in a 4x1 grid.
  private final JPanel panel1 = new JPanel();
  private final JPanel panel2 = new JPanel();
  private final JPanel insetsPanel1 = new JPanel();
  private final JPanel insetsPanel2 = new JPanel();
  private final JPanel insetsPanel3 = new JPanel();
  private final JButton button1 = new JButton();
  private final JLabel imageLabel = new JLabel();
  private final JLabel label1 = new JLabel();
  private final JLabel label2 = new JLabel();
  // Clickable pseudo-links; activation is handled in mousePressed().
  private final JLabel homePageLabel = new JLabel();
  private final JLabel emailLabel = new JLabel();
  private ImageIcon image1 = new ImageIcon(); // replaced with the real icon in jbInit()
  private final BorderLayout borderLayout1 = new BorderLayout();
  private final BorderLayout borderLayout2 = new BorderLayout();
  private final FlowLayout flowLayout1 = new FlowLayout();
  private final GridLayout gridLayout1 = new GridLayout();
  private final String product = "Jamberoo - open-source project since 2005";
  private String version = "Version 0.7 build 0626"; // rebuilt in jbInit() from Version/Build
  private final String copyright = "Jamberoo Home Page";
  private final String comments = "Send e-mail for help";
  // Version/build strings supplied by the caller; shown in the version label.
  private final String Version;
  private final String Build;

  /**
   * Creates the dialog.
   *
   * @param parent  owning frame (may be {@code null})
   * @param version application version string, e.g. "0.7"
   * @param build   build identifier, e.g. "0626"
   */
  public JEditorFrame_AboutBox(Frame parent, String version, String build) {
    super(parent);
    Version = version;
    Build = build;
    try {
      setDefaultCloseOperation(DISPOSE_ON_CLOSE);
      jbInit();
    } catch (Exception exception) {
      exception.printStackTrace();
    }
  }

  /** Convenience constructor using placeholder version/build values. */
  public JEditorFrame_AboutBox() {
    this(null, "07", "0526");
  }

  /**
   * Builds and wires all Swing components.
   *
   * @throws Exception propagated from component initialization
   */
  private void jbInit() throws Exception {
    version = "Version " + Version + " build " + Build;
    // NOTE(review): relative resource path — getResource() resolves it against
    // JamberooFrame's package, and returns null when the icon is missing,
    // which would make the ImageIcon constructor throw a NullPointerException.
    // Confirm the icon really lives at this location.
    image1 = new ImageIcon(JamberooFrame.class.getResource(
        "cct/images/jmoleditor-128x128.png"));
    //"images/about-32x32.gif"));
    imageLabel.setIcon(image1);
    setTitle("About");
    panel1.setLayout(borderLayout1);
    panel2.setLayout(borderLayout2);
    insetsPanel1.setLayout(flowLayout1);
    insetsPanel2.setLayout(flowLayout1);
    insetsPanel2.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
    gridLayout1.setRows(4);
    gridLayout1.setColumns(1);
    label1.setToolTipText("");
    label1.setText(product);
    label2.setText(version);
    homePageLabel.setText(copyright);
    homePageLabel.setToolTipText("Go to the Jamberoo Home Page using default browser");
    homePageLabel.addMouseListener(this);
    emailLabel.setToolTipText("Send e-mail request using default e-mail client");
    emailLabel.setText(comments);
    emailLabel.addMouseListener(this);
    insetsPanel3.setLayout(gridLayout1);
    insetsPanel3.setBorder(BorderFactory.createEmptyBorder(10, 60, 10, 10));
    button1.setText("OK");
    button1.addActionListener(this);
    insetsPanel2.add(imageLabel, null);
    panel2.add(insetsPanel2, BorderLayout.WEST);
    getContentPane().add(panel1, null);
    insetsPanel3.add(label1, null);
    insetsPanel3.add(label2, null);
    insetsPanel3.add(homePageLabel, null);
    insetsPanel3.add(emailLabel, null);
    panel2.add(insetsPanel3, BorderLayout.CENTER);
    insetsPanel1.add(button1, null);
    panel1.add(insetsPanel1, BorderLayout.SOUTH);
    panel1.add(panel2, BorderLayout.NORTH);
    setResizable(true);
  }

  /**
   * Closes the dialog when the OK button is pressed.
   *
   * @param actionEvent ActionEvent
   */
  @Override
  public void actionPerformed(ActionEvent actionEvent) {
    if (actionEvent.getSource() == button1) {
      dispose();
    }
  }

  /**
   * Highlights the home-page link when it gains keyboard focus.
   *
   * @param e focus event
   */
  @Override
  public void focusGained(FocusEvent e) {
    if (e.getSource() == homePageLabel) {
      homePageLabel.setForeground(Color.BLUE);
    }
  }

  /**
   * Restores the home-page link color when it loses keyboard focus.
   *
   * @param e focus event
   */
  @Override
  public void focusLost(FocusEvent e) {
    if (e.getSource() == homePageLabel) {
      homePageLabel.setForeground(Color.BLACK);
    }
  }

  /**
   * Unused; link activation is handled in {@link #mousePressed(MouseEvent)}.
   *
   * @param e mouse event
   */
  @Override
  public void mouseClicked(MouseEvent e) {
  }

  /**
   * Blue roll-over highlight for the two link labels.
   *
   * @param e mouse event
   */
  @Override
  public void mouseEntered(MouseEvent e) {
    if (e.getSource() == homePageLabel) {
      homePageLabel.setForeground(Color.BLUE);
    } else if (e.getSource() == emailLabel) {
      emailLabel.setForeground(Color.BLUE);
    }
  }

  /**
   * Removes the roll-over highlight from the link labels.
   *
   * @param e mouse event
   */
  @Override
  public void mouseExited(MouseEvent e) {
    if (e.getSource() == homePageLabel) {
      homePageLabel.setForeground(Color.BLACK);
    } else if (e.getSource() == emailLabel) {
      emailLabel.setForeground(Color.BLACK);
    }
  }

  /**
   * Opens the home page in the default browser, or starts an e-mail to the
   * support address, depending on which link was pressed. Failures are
   * reported in an error dialog.
   *
   * @param e mouse event
   */
  @Override
  public void mousePressed(MouseEvent e) {
    if (e.getSource() == homePageLabel) {
      try {
        GlobalSettings.showInDefaultBrowser(GlobalSettings.getProperty(GlobalSettings.URL_HOME_PAGE));
      } catch (Exception ex) {
        JOptionPane.showMessageDialog(this, ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
      }
    } else if (e.getSource() == emailLabel) {
      try {
        GlobalSettings.mailUsingDefaultClient(GlobalSettings.getProperty(GlobalSettings.HELP_EMAIL), "Jamberoo request", "");
      } catch (Exception ex) {
        JOptionPane.showMessageDialog(this, ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
        System.err.println(ex.getMessage());
      }
    }
  }

  /**
   * Unused.
   *
   * @param e mouse event
   */
  @Override
  public void mouseReleased(MouseEvent e) {
  }
}
| apache-2.0 |
joewalnes/idea-community | plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnCommittedViewTest.java | 12316 | package org.jetbrains.idea.svn;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vcs.CommittedChangesProvider;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.versionBrowser.ChangeBrowserSettings;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.history.SvnChangeList;
import org.jetbrains.idea.svn.history.SvnRepositoryLocation;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public class SvnCommittedViewTest extends SvnTestCase {
@Test
public void testAdd() throws Exception {
enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
final VirtualFile d1 = createDirInCommand(myWorkingCopyDir, "d1");
final VirtualFile f11 = createFileInCommand(d1, "f11.txt", "123\n456");
final VirtualFile f12 = createFileInCommand(d1, "f12.txt", "----");
// r1, addition without history
checkin();
final SvnVcs vcs = SvnVcs.getInstance(myProject);
final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
final List<SvnChangeList> changeListList =
committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
new SvnRepositoryLocation(myRepoUrl), 0);
checkList(changeListList, 1, new Data[] {new Data(absPath(f11), FileStatus.ADDED, null),
new Data(absPath(f12), FileStatus.ADDED, null), new Data(absPath(d1), FileStatus.ADDED, null)});
}
  /**
   * Deleting a file (r2) and then its parent directory (r3) must surface as
   * DELETED entries in the corresponding committed change lists.
   */
  @Test
  public void testDelete() throws Exception {
    // Auto-confirm VCS add/remove prompts so file operations run silently.
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);

    final VirtualFile d1 = createDirInCommand(myWorkingCopyDir, "d1");
    final VirtualFile f11 = createFileInCommand(d1, "f11.txt", "123\n456");
    final VirtualFile f12 = createFileInCommand(d1, "f12.txt", "----");

    // r1, addition without history
    checkin();

    // r2: delete a single file
    deleteFileInCommand(f11);
    checkin();

    // bring the working copy up to r2 before removing the directory
    update();

    // r3: delete the whole directory
    deleteFileInCommand(d1);
    checkin();

    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
    final List<SvnChangeList> changeListList =
        committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
                                                     new SvnRepositoryLocation(myRepoUrl), 0);
    // r2 reports the file deletion, r3 the directory deletion
    checkList(changeListList, 2, new Data[] {new Data(absPath(f11), FileStatus.DELETED, null)});
    checkList(changeListList, 3, new Data[] {new Data(absPath(d1), FileStatus.DELETED, null)});
  }
  /**
   * svn delete followed by svn add on the same path makes Subversion record a
   * replacement; the committed changes view must show that path as MODIFIED
   * with a "- replaced" annotation.
   */
  @Test
  public void testReplaced() throws Exception {
    // Auto-confirm VCS add/remove prompts so file operations run silently.
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);

    VirtualFile d1 = createDirInCommand(myWorkingCopyDir, "d1");
    VirtualFile f11 = createFileInCommand(d1, "f11.txt", "123\n456");
    VirtualFile f12 = createFileInCommand(d1, "f12.txt", "----");

    // r1, addition without history
    checkin();

    // Schedule the directory for deletion, then re-add it in place; r2 then
    // commits this as a replacement of the same path.
    final String d1Path = new File(d1.getPath()).getAbsolutePath();
    verify(runSvn("delete", d1Path));
    verify(runSvn("add", d1Path));
    checkin();

    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
    final List<SvnChangeList> changeListList =
        committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
                                                     new SvnRepositoryLocation(myRepoUrl), 0);
    checkList(changeListList, 2, new Data[] {new Data(absPath(d1), FileStatus.MODIFIED, "- replaced")});
  }
  /**
   * Moving a directory (r2) must show up as a MODIFIED entry at the new
   * location annotated with its origin, plus a DELETED entry at the old path.
   */
  @Test
  public void testMoveDir() throws Exception {
    // Auto-confirm VCS add/remove prompts so file operations run silently.
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);

    VirtualFile d1 = createDirInCommand(myWorkingCopyDir, "d1");
    VirtualFile d2 = createDirInCommand(myWorkingCopyDir, "d2");
    VirtualFile f11 = createFileInCommand(d1, "f11.txt", "123\n456");
    VirtualFile f12 = createFileInCommand(d1, "f12.txt", "----");

    // r1, addition without history
    checkin();

    // Capture the pre-move path; after moveFileInCommand the d1 VirtualFile
    // refers to the new location under d2.
    final String oldPath = absPath(d1);
    moveFileInCommand(d1, d2);
    // presumably gives the VFS/file system time to settle — TODO confirm
    Thread.sleep(100);

    // r2: the move itself
    checkin();

    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
    final List<SvnChangeList> changeListList =
        committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
                                                     new SvnRepositoryLocation(myRepoUrl), 0);
    checkList(changeListList, 2, new Data[] {new Data(absPath(d1), FileStatus.MODIFIED, "- moved from .." + File.separatorChar + "d1"),
        new Data(oldPath, FileStatus.DELETED, null)});
  }
  /**
   * Moving a directory and editing one of its files in the same commit (r2)
   * must report: the directory moved, the old directory path deleted, and the
   * edited file as MODIFIED with a "moved from" annotation.
   */
  @Test
  public void testMoveDirChangeFile() throws Exception {
    // Auto-confirm VCS add/remove prompts so file operations run silently.
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);

    VirtualFile d1 = createDirInCommand(myWorkingCopyDir, "d1");
    VirtualFile d2 = createDirInCommand(myWorkingCopyDir, "d2");
    VirtualFile f11 = createFileInCommand(d1, "f11.txt", "123\n456");
    VirtualFile f12 = createFileInCommand(d1, "f12.txt", "----");

    // r1, addition without history
    checkin();

    // Capture pre-move paths; after the move both d1 and f11 VirtualFiles
    // refer to the new location under d2.
    final String oldPath = absPath(d1);
    final String oldF11Path = new File(f11.getPath()).getAbsolutePath();
    moveFileInCommand(d1, d2);
    // edit the file at its new location, within the same pending change
    editFileInCommand(myProject, f11, "new");
    // presumably gives the VFS/file system time to settle — TODO confirm
    Thread.sleep(100);

    // r2: move + edit committed together
    checkin();

    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
    final List<SvnChangeList> changeListList =
        committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
                                                     new SvnRepositoryLocation(myRepoUrl), 0);
    checkList(changeListList, 2, new Data[] {new Data(absPath(d1), FileStatus.MODIFIED, "- moved from .." + File.separatorChar + "d1"),
        new Data(oldPath, FileStatus.DELETED, null),
        new Data(absPath(f11), FileStatus.MODIFIED, "- moved from " + oldF11Path)});
  }
  /**
   * Copying a URL to another URL ("svn copy trunk branch", r2) must show the
   * branch directory as ADDED with a "- copied from /trunk" annotation.
   */
  @Test
  public void testCopyDir() throws Exception {
    // Build trunk/folder/{f1.txt,f2.txt} on disk, outside the working copy.
    final File trunk = new File(myTempDirFixture.getTempDirPath(), "trunk");
    trunk.mkdir();
    // presumably lets file timestamps/VFS settle between steps — TODO confirm
    Thread.sleep(100);
    final File folder = new File(trunk, "folder");
    folder.mkdir();
    Thread.sleep(100);
    new File(folder, "f1.txt").createNewFile();
    new File(folder, "f2.txt").createNewFile();
    Thread.sleep(100);

    // r1: import the tree; r2: server-side copy of trunk to branch.
    verify(runSvn("import", "-m", "test", trunk.getAbsolutePath(), myRepoUrl + "/trunk"));
    verify(runSvn("copy", "-m", "test", myRepoUrl + "/trunk", myRepoUrl + "/branch"));

    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
    final List<SvnChangeList> changeListList =
        committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
                                                     new SvnRepositoryLocation(myRepoUrl + "/branch"), 0);
    checkList(changeListList, 2, new Data[] {new Data(new File(myWorkingCopyDir.getPath(), "branch").getAbsolutePath(), FileStatus.ADDED, "- copied from /trunk")});
  }
  @Test
  public void testCopyAndModify() throws Exception {
    // Verifies that a working-copy "svn copy" followed by a property change on
    // a copied subfolder reports BOTH paths with "- copied from" origin info:
    // the branch root as ADDED and the modified subfolder as MODIFIED.
    final File trunk = new File(myTempDirFixture.getTempDirPath(), "trunk");
    trunk.mkdir();
    // NOTE(review): sleeps presumably guard against timestamp-granularity
    // issues when SVN compares files — confirm they are still required.
    Thread.sleep(100);
    final File folder = new File(trunk, "folder");
    folder.mkdir();
    Thread.sleep(100);
    new File(folder, "f1.txt").createNewFile();
    new File(folder, "f2.txt").createNewFile();
    Thread.sleep(100);
    // r1: import the tree, then bring it into the working copy.
    verify(runSvn("import", "-m", "test", trunk.getAbsolutePath(), myRepoUrl + "/trunk"));
    update();
    // Copy inside the working copy and modify a property of the copied folder,
    // then commit both in a single revision (r2).
    verify(runSvn("copy", myWorkingCopyDir.getPath() + "/trunk", myWorkingCopyDir.getPath() + "/branch"));
    verify(runSvn("propset", "testprop", "testval", myWorkingCopyDir.getPath() + "/branch/folder"));
    checkin();
    final SvnVcs vcs = SvnVcs.getInstance(myProject);
    final CommittedChangesProvider<SvnChangeList,ChangeBrowserSettings> committedChangesProvider = vcs.getCommittedChangesProvider();
    final List<SvnChangeList> changeListList =
      committedChangesProvider.getCommittedChanges(committedChangesProvider.createDefaultSettings(),
                                                   new SvnRepositoryLocation(myRepoUrl + "/branch"), 0);
    checkList(changeListList, 2, new Data[] {new Data(new File(myWorkingCopyDir.getPath(), "branch").getAbsolutePath(), FileStatus.ADDED, "- copied from /trunk"),
      new Data(new File(myWorkingCopyDir.getPath(), "branch/folder").getAbsolutePath(), FileStatus.MODIFIED, "- copied from /trunk/folder")});
  }
protected String absPath(final VirtualFile vf) {
return new File(vf.getPath()).getAbsolutePath();
}
protected static class Data {
public final String myLocalPath;
public final FileStatus myStatus;
@Nullable
public final String myOriginText;
protected Data(@NotNull final String localPath, @NotNull final FileStatus status, @Nullable final String originText) {
myLocalPath = localPath;
myStatus = status;
myOriginText = originText;
}
public boolean shouldBeComparedWithChange(final Change change) {
if (FileStatus.DELETED.equals(myStatus) && (change.getAfterRevision() == null)) {
// before path
return (change.getBeforeRevision() != null) && myLocalPath.equals(change.getBeforeRevision().getFile().getIOFile().getAbsolutePath());
} else {
return (change.getAfterRevision() != null) && myLocalPath.equals(change.getAfterRevision().getFile().getIOFile().getAbsolutePath());
}
}
}
protected void checkList(final List<SvnChangeList> lists, final long revision, final Data[] content) throws Exception {
SvnChangeList list = null;
for (SvnChangeList changeList : lists) {
if (changeList.getNumber() == revision) {
list = changeList;
}
}
Assert.assertNotNull("Change list #" + revision + " not found.", list);
final Collection<Change> changes = new ArrayList<Change>(list.getChanges());
Assert.assertNotNull("Null changes list", changes);
Assert.assertEquals(changes.size(), content.length);
for (Data data : content) {
boolean found = false;
for (Change change : changes) {
if (data.shouldBeComparedWithChange(change)) {
Assert.assertTrue(Comparing.equal(data.myOriginText, change.getOriginText(myProject)));
Assert.assertEquals(data.myStatus, change.getFileStatus());
found = true;
break;
}
}
Assert.assertTrue(printChanges(data, changes), found);
}
}
private static String printChanges(final Data data, final Collection<Change> changes) {
final StringBuilder sb = new StringBuilder("Data: ").append(data.myLocalPath).append(" exists: ").
append(new File(data.myLocalPath).exists()).append(" Changes: ");
for (Change change : changes) {
final ContentRevision cr = change.getAfterRevision() == null ? change.getBeforeRevision() : change.getAfterRevision();
final File ioFile = cr.getFile().getIOFile();
sb.append("'").append(ioFile.getAbsolutePath()).append("' exists: ").append(ioFile.exists()).append(" | ");
}
return sb.toString();
}
}
| apache-2.0 |
google-code-export/google-api-dfp-java | src/com/google/api/ads/dfp/v201208/LineItemServiceInterface.java | 6029 | /**
* LineItemServiceInterface.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Apr 22, 2006 (06:55:48 PDT) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.v201208;
/**
 * Remote service interface for creating, retrieving, updating and performing
 * actions on {@code LineItem} objects in the DFP API (v201208).
 * <p>
 * NOTE: this interface was auto-generated from the DFP WSDL by Apache Axis
 * (WSDL2Java); avoid hand-editing, as regeneration will overwrite changes.
 */
public interface LineItemServiceInterface extends java.rmi.Remote {
    /**
     * Creates a new {@link LineItem}.
     *
     * These fields are required:
     * <ul>
     * <li>{@link LineItem#costPerUnit}</li>
     * <li>{@link LineItem#costType}</li>
     * <li>{@link LineItem#endDateTime}</li>
     * <li>{@link LineItem#lineItemType}</li>
     * <li>{@link LineItem#name}</li>
     * <li>{@link LineItem#orderId}</li>
     * <li>{@link LineItem#startDateTime}</li>
     * <li>{@link LineItem#unitsBought}</li>
     * <li>{@link LineItem#creativePlaceholders}</li>
     * <li>{@link LineItem#targeting}</li>
     * </ul>
     *
     *
     * @param lineItem the line item to create
     *
     * @return the line item with its ID filled in
     */
    public com.google.api.ads.dfp.v201208.LineItem createLineItem(com.google.api.ads.dfp.v201208.LineItem lineItem) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
    /**
     * Creates new {@link LineItem} objects.
     *
     *
     * @param lineItems the line items to create
     *
     * @return the created line items with their IDs filled in
     */
    public com.google.api.ads.dfp.v201208.LineItem[] createLineItems(com.google.api.ads.dfp.v201208.LineItem[] lineItems) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
    /**
     * Returns the {@link LineItem} uniquely identified by the given
     * ID.
     *
     *
     * @param lineItemId the ID of the line item, which must already exist
     *
     * @return the {@code LineItem} uniquely identified by the given ID
     */
    public com.google.api.ads.dfp.v201208.LineItem getLineItem(java.lang.Long lineItemId) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
    /**
     * Gets a {@link LineItemPage} of {@link LineItem} objects that
     * satisfy the
     * given {@link Statement#query}. The following fields are supported
     * for
     * filtering:
     *
     * <table>
     * <tr>
     * <th scope="col">PQL Property</th> <th scope="col">Object Property</th>
     * </tr>
     * <tr>
     * <td>{@code costType}</td>
     * <td>{@link LineItem#costType}</td>
     * </tr>
     * <tr>
     * <td>{@code id}</td>
     * <td>{@link LineItem#id}</td>
     * </tr>
     * <tr>
     * <td>{@code lineItemType}</td>
     * <td>{@link LineItem#lineItemType}</td>
     * </tr>
     * <tr>
     * <td>{@code name}</td>
     * <td>{@link LineItem#name}</td>
     * </tr>
     * <tr>
     * <td>{@code orderId}</td>
     * <td>{@link LineItem#orderId}</td>
     * </tr>
     * <tr>
     * <td>{@code status}</td>
     * <td>{@link LineItem#status}</td>
     * </tr>
     * <tr>
     * <td>{@code unitsBought}</td>
     * <td>{@link LineItem#unitsBought}</td>
     * </tr>
     * <tr>
     * <td>{@code deliveryRateType}</td>
     * <td>{@link LineItem#deliveryRateType}</td>
     * </tr>
     * <tr>
     * <td>{@code lastModifiedDateTime}</td>
     * <td>{@link LineItem#lastModifiedDateTime}</td>
     * </tr>
     * <tr>
     * <td>{@code creationDateTime}</td>
     * <td>{@link LineItem#creationDateTime}</td>
     * </tr>
     * <tr>
     * <td>{@code isMissingCreatives}</td>
     * <td>{@link LineItem#isMissingCreatives}</td>
     * </tr>
     * </table>
     *
     *
     * @param filterStatement a Publisher Query Language statement used to
     * filter
     * a set of line items.
     *
     * @return the line items that match the given filter
     */
    public com.google.api.ads.dfp.v201208.LineItemPage getLineItemsByStatement(com.google.api.ads.dfp.v201208.Statement filterStatement) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
    /**
     * Performs actions on {@link LineItem} objects that match the
     * given
     * {@link Statement#query}.
     *
     *
     * @param lineItemAction the action to perform
     *
     * @param filterStatement a Publisher Query Language statement used to
     * filter
     * a set of line items
     *
     * @return the result of the action performed
     */
    public com.google.api.ads.dfp.v201208.UpdateResult performLineItemAction(com.google.api.ads.dfp.v201208.LineItemAction lineItemAction, com.google.api.ads.dfp.v201208.Statement filterStatement) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
    /**
     * Updates the specified {@link LineItem}.
     *
     *
     * @param lineItem the line item to update
     *
     * @return the updated line item
     */
    public com.google.api.ads.dfp.v201208.LineItem updateLineItem(com.google.api.ads.dfp.v201208.LineItem lineItem) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
    /**
     * Updates the specified {@link LineItem} objects.
     *
     *
     * @param lineItems the line items to update
     *
     * @return the updated line items
     */
    public com.google.api.ads.dfp.v201208.LineItem[] updateLineItems(com.google.api.ads.dfp.v201208.LineItem[] lineItems) throws java.rmi.RemoteException, com.google.api.ads.dfp.v201208.ApiException;
}
| apache-2.0 |
jdsjlzx/LRecyclerView | app/src/main/java/com/lzx/demo/adapter/StickyTestAdapter.java | 2399 | package com.lzx.demo.adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.lzx.demo.ItemDecoration.StickyHeaderAdapter;
import com.lzx.demo.R;
/**
 * Demo adapter showing sticky headers: exposes a fixed number of "Item N" rows
 * and groups every run of consecutive rows under one "Header M" sticky header.
 */
public class StickyTestAdapter extends RecyclerView.Adapter<StickyTestAdapter.ViewHolder> implements
        StickyHeaderAdapter<StickyTestAdapter.HeaderHolder> {

    /** Fixed number of rows displayed by this demo adapter. */
    private static final int ITEM_COUNT = 50;
    /** Each run of this many consecutive rows shares a single sticky header. */
    private static final int ROWS_PER_HEADER = 7;

    private final LayoutInflater inflater;
    private final Context context;

    public StickyTestAdapter(Context context) {
        this.context = context;
        this.inflater = LayoutInflater.from(context);
    }

    @Override
    public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        return new ViewHolder(inflater.inflate(R.layout.item_test, parent, false));
    }

    @Override
    public void onBindViewHolder(ViewHolder holder, final int position) {
        holder.item.setText("Item " + position);
    }

    @Override
    public int getItemCount() {
        return ITEM_COUNT;
    }

    @Override
    public long getHeaderId(int position) {
        // Integer division groups positions 0..6 under header 0, 7..13 under 1, etc.
        return (long) position / ROWS_PER_HEADER;
    }

    @Override
    public HeaderHolder onCreateHeaderViewHolder(ViewGroup parent) {
        return new HeaderHolder(inflater.inflate(R.layout.header_test, parent, false));
    }

    @Override
    public void onBindHeaderViewHolder(final HeaderHolder viewholder, final int position) {
        viewholder.header.setText("Header " + getHeaderId(position));
        viewholder.itemView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Toast.makeText(context, "onBindHeaderViewHolder item clicked position = " + position, Toast.LENGTH_SHORT).show();
            }
        });
    }

    /** Row holder: the whole item view is the text view. */
    static class ViewHolder extends RecyclerView.ViewHolder {
        public TextView item;

        public ViewHolder(View itemView) {
            super(itemView);
            item = (TextView) itemView;
        }
    }

    /** Header holder: the whole header view is the text view. */
    static class HeaderHolder extends RecyclerView.ViewHolder {
        public TextView header;

        public HeaderHolder(View itemView) {
            super(itemView);
            header = (TextView) itemView;
        }
    }
}
| apache-2.0 |
elblancoboss/CI346-Semester-2 | src/main/java/com/hussein/ci346/Database.java | 1539 | package com.hussein.ci346;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
public class Database {

    // Connection settings for the local staff database.
    // NOTE(review): credentials are hard-coded; consider externalizing them
    // (system properties / config file) instead of shipping them in source.
    private static final String URL = "jdbc:mysql://localhost:3300/staff";
    private static final String USER = "root";
    private static final String PASSWORD = "usbw";

    /**
     * Loads the MySQL JDBC driver, connects to the staff database and prints
     * the first four columns of every row of the {@code staff} table.
     *
     * @throws Exception if the driver cannot be loaded or the query fails
     */
    public Database() throws Exception {
        Class.forName("com.mysql.jdbc.Driver");
        // try-with-resources closes ResultSet, Statement and Connection even on
        // failure; the original leaked the Statement/ResultSet and skipped
        // close() entirely when an exception was thrown.
        try (Connection connection = DriverManager.getConnection(URL, USER, PASSWORD);
             Statement statement = connection.createStatement();
             ResultSet results = statement.executeQuery("SELECT * FROM staff")) {
            while (results.next()) {
                System.out.println(results.getInt(1) + " " + results.getString(2) + " " + results.getString(3) + " " + results.getString(4));
            }
        }
    }

    /**
     * Executes the given query and returns its result set.
     * <p>
     * The underlying Connection and Statement intentionally remain open,
     * because closing them would invalidate the returned ResultSet; callers
     * must close {@code resultSet.getStatement()} and its connection when done.
     * The SQL string is executed verbatim — never build it from untrusted
     * input (SQL injection risk); prefer a PreparedStatement-based API for
     * parameterized queries.
     *
     * @param sql the SELECT statement to run
     * @return the query's open result set
     * @throws Exception if connecting or executing fails
     */
    public ResultSet execute(String sql) throws Exception {
        Connection connection = DriverManager.getConnection(URL, USER, PASSWORD);
        Statement statement = connection.createStatement();
        return statement.executeQuery(sql);
    }

    /**
     * Executes the given DML/DDL statement and releases all JDBC resources.
     *
     * @param sql the statement to execute (same injection caveat as
     *            {@link #execute(String)})
     * @return {@code true} if the statement produced a result set,
     *         {@code false} otherwise (see {@link Statement#execute(String)})
     * @throws Exception if connecting or executing fails
     */
    public boolean update(String sql) throws Exception {
        // The original leaked the Connection here; nothing needs it after the
        // call, so close everything deterministically.
        try (Connection connection = DriverManager.getConnection(URL, USER, PASSWORD);
             Statement statement = connection.createStatement()) {
            return statement.execute(sql);
        }
    }
}
douraid/cleandroid | src/main/java/org/cleandroid/core/view/AdapterViewListener.java | 774 | /*
* Cleandroid Framework
* @author: Douraid Arfaoui <douraid.arfaoui@gmail.com>
*
* Copyright (c) 2015, Douraid Arfaoui, or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the Apache 2
* License, as published by the Apache Software Foundation.
*
*/
package org.cleandroid.core.view;
import android.view.View;
/**
 * Observer contract for adapter-view events (view creation, selection, clicks)
 * on items of type {@code T}.
 *
 * @param <T> the type of the data item bound to each view
 */
public interface AdapterViewListener<T> {
	/** Invoked with the view created for {@code item}, allowing additional view setup. */
	void onCreateView(View view, T item);
	/** Invoked when {@code item} becomes the selected item. */
	void onItemSelected(T item);
	/** Invoked when the selection is cleared and no item is selected. */
	void onNothingSelected();
	/** Invoked when {@code item} is clicked. */
	void onItemClicked(T item);
	// NOTE(review): by Android listener convention the return value presumably
	// indicates whether the long-click was consumed — confirm at the dispatch site.
	boolean onItemLongClicked(T item);
}
| apache-2.0 |
sealuzh/Permo | Permo/src/ch/uzh/ifi/seal/permo/common/core/model/types/ReferenceType.java | 1982 | /*******************************************************************************
* Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package ch.uzh.ifi.seal.permo.common.core.model.types;
/**
* Representation of a reference type.
*/
/**
 * Representation of a reference type, i.e. a named type living in a package.
 */
public class ReferenceType extends NonArrayType {

	/** Separator between the package name and the simple type name. */
	private static final String SEPARATOR = ".";

	private final String packageName;

	private ReferenceType(final String pkg, final String simpleName) {
		super(simpleName);
		this.packageName = pkg;
	}

	/**
	 * A static factory method to create a new instance of this class.
	 *
	 * @param packageName
	 *            the name of the package the type belongs to
	 * @param name
	 *            the simple name of the type
	 * @return a new instance of {@link ReferenceType}
	 */
	public static ReferenceType of(final String packageName, final String name) {
		return new ReferenceType(packageName, name);
	}

	/**
	 * Returns the name of the package the type belongs to.
	 *
	 * @return the name of the package the type belongs to
	 */
	public String getPackageName() {
		return packageName;
	}

	/**
	 * Returns the qualified name of the type: the package name and simple name
	 * joined by {@code "."}.
	 *
	 * @return the qualified name of the type
	 */
	@Override
	public String getQualifiedName() {
		return String.join(SEPARATOR, getPackageName(), getName());
	}
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/datamodeling/unmarshallers/CalendarUnmarshaller.java | 1383 | /*
* Copyright 2014-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://aws.amazon.com/apache2.0
*
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.datamodeling.unmarshallers;
import java.util.Calendar;
import java.util.GregorianCalendar;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.util.DateUtils;
/**
* An unmarshaller that unmarshals ISO-8601-formatted dates as Java
* {@code Calendar} objects.
*/
public class CalendarUnmarshaller extends SUnmarshaller {

    // Eagerly created singleton; the class is stateless, so one shared
    // instance suffices.
    private static final CalendarUnmarshaller INSTANCE = new CalendarUnmarshaller();

    private CalendarUnmarshaller() {
    }

    /** Returns the shared singleton instance. */
    public static CalendarUnmarshaller instance() {
        return INSTANCE;
    }

    /**
     * Parses the attribute's string value as an ISO-8601 date and returns it
     * wrapped in a {@link Calendar}.
     */
    @Override
    public Object unmarshall(AttributeValue value) {
        final Calendar calendar = GregorianCalendar.getInstance();
        calendar.setTime(DateUtils.parseISO8601Date(value.getS()));
        return calendar;
    }
}
| apache-2.0 |
zouzhberk/ambaridemo | demo-server/src/main/java/org/apache/ambari/server/view/configuration/InstanceConfig.java | 3093 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.view.configuration;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import java.util.Collections;
import java.util.List;
/**
 * View instance configuration, populated from XML via JAXB.
 * <p>
 * Uses {@link XmlAccessType#FIELD} access: the private field names below map
 * directly to XML element names, so do not rename fields without updating the
 * corresponding view configuration schema.
 */
@XmlAccessorType(XmlAccessType.FIELD)
public class InstanceConfig {
  /**
   * The instance name.
   */
  private String name;
  /**
   * The public view name.
   */
  private String label;
  /**
   * The instance description.
   */
  private String description;
  /**
   * Indicates whether or not the instance should be visible.
   * Defaults to {@code true} when the XML element is absent.
   */
  private boolean visible = true;
  /**
   * The icon path in the view archive. Overrides the view
   * level icon attribute.
   */
  private String icon;
  /**
   * The big icon path in the view archive. Overrides the view
   * level big icon attribute.
   */
  private String icon64;
  /**
   * The instance properties. May be left {@code null} by JAXB when no
   * {@code <property>} elements are present; see {@link #getProperties()}.
   */
  @XmlElement(name="property")
  private List<PropertyConfig> properties;
  /**
   * Get the instance name.
   *
   * @return the name
   */
  public String getName() {
    return name;
  }
  /**
   * Get the public view instance label.
   *
   * @return the view instance label
   */
  public String getLabel() {
    return label;
  }
  /**
   * Get the view instance description.
   *
   * @return the view instance description
   */
  public String getDescription() {
    return description;
  }
  /**
   * Indicates whether or not the instance should be visible.
   *
   * @return true if the instance should be visible; false otherwise
   */
  public boolean isVisible() {
    return visible;
  }
  /**
   * Get the icon path in the view archive. Overrides the view
   * level icon attribute.
   *
   * @return the icon path
   */
  public String getIcon() {
    return icon;
  }
  /**
   * Get the big icon path in the view archive. Overrides the view
   * level big icon attribute.
   *
   * @return the big icon path
   */
  public String getIcon64() {
    return icon64;
  }
  /**
   * Get the instance properties. Never returns {@code null}: an absent
   * property list is reported as an empty list.
   *
   * @return the instance properties
   */
  public List<PropertyConfig> getProperties() {
    return properties == null ? Collections.<PropertyConfig>emptyList() : properties;
  }
}
| apache-2.0 |
JoelMarcey/buck | src/com/facebook/buck/jvm/java/stepsbuilder/javacd/serialization/CompilerOutputPathsValueSerializer.java | 2516 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.jvm.java.stepsbuilder.javacd.serialization;
import com.facebook.buck.javacd.model.OutputPathsValue;
import com.facebook.buck.jvm.java.CompilerOutputPaths;
import com.facebook.buck.jvm.java.CompilerOutputPathsValue;
/** {@link CompilerOutputPathsValue} to protobuf serializer */
public class CompilerOutputPathsValueSerializer {

  /** Utility class; not instantiable. */
  private CompilerOutputPathsValueSerializer() {}

  /** Serializes {@link CompilerOutputPathsValue} into javacd model's {@link OutputPathsValue}. */
  public static OutputPathsValue serialize(CompilerOutputPathsValue value) {
    return OutputPathsValue.newBuilder()
        .setLibraryPaths(CompilerOutputPathsSerializer.serialize(value.getLibraryCompilerOutputPath()))
        .setSourceAbiPaths(
            CompilerOutputPathsSerializer.serialize(value.getSourceAbiCompilerOutputPath()))
        .setSourceOnlyAbiPaths(
            CompilerOutputPathsSerializer.serialize(value.getSourceOnlyAbiCompilerOutputPath()))
        .setLibraryTargetFullyQualifiedName(value.getLibraryTargetFullyQualifiedName())
        .build();
  }

  /** Deserializes javacd model's {@link OutputPathsValue} into {@link CompilerOutputPathsValue}. */
  public static CompilerOutputPathsValue deserialize(OutputPathsValue outputPathsValue) {
    return CompilerOutputPathsValue.of(
        outputPathsValue.getLibraryTargetFullyQualifiedName(),
        CompilerOutputPathsSerializer.deserialize(outputPathsValue.getLibraryPaths()),
        CompilerOutputPathsSerializer.deserialize(outputPathsValue.getSourceAbiPaths()),
        CompilerOutputPathsSerializer.deserialize(outputPathsValue.getSourceOnlyAbiPaths()));
  }
}
| apache-2.0 |
mwkang/zeppelin | zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheckThread.java | 3163 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.launcher;
import org.apache.zeppelin.cluster.ClusterCallback;
import org.apache.zeppelin.cluster.ClusterManagerServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import static org.apache.zeppelin.cluster.meta.ClusterMeta.INTP_TSERVER_HOST;
import static org.apache.zeppelin.cluster.meta.ClusterMeta.INTP_TSERVER_PORT;
// Metadata registered in the cluster by the interpreter process,
// Keep the interpreter process started
public class ClusterInterpreterCheckThread extends Thread {

  private static final Logger LOGGER =
      LoggerFactory.getLogger(ClusterInterpreterCheckThread.class);

  private final InterpreterClient intpProcess;
  private final String intpGroupId;
  private final int connectTimeout;

  ClusterInterpreterCheckThread(InterpreterClient intpProcess,
                                String intpGroupId,
                                int connectTimeout) {
    this.intpProcess = intpProcess;
    this.intpGroupId = intpGroupId;
    this.connectTimeout = connectTimeout;
  }

  @Override
  public void run() {
    LOGGER.info("ClusterInterpreterCheckThread run() >>>");
    final ClusterManagerServer clusterServer = ClusterManagerServer.getInstance();
    // Ask the cluster for the interpreter's thrift-server metadata and notify
    // the interpreter process object once (and if) it comes online.
    clusterServer.getIntpProcessStatus(intpGroupId, connectTimeout,
        new ClusterCallback<HashMap<String, Object>>() {
          @Override
          public InterpreterClient online(HashMap<String, Object> result) {
            final String host = (String) result.get(INTP_TSERVER_HOST);
            final int port = (int) result.get(INTP_TSERVER_PORT);
            LOGGER.info("Found cluster interpreter {}:{}", host, port);
            if (intpProcess instanceof DockerInterpreterProcess) {
              ((DockerInterpreterProcess) intpProcess).processStarted(port, host);
            } else if (intpProcess instanceof ClusterInterpreterProcess) {
              ((ClusterInterpreterProcess) intpProcess).processStarted(port, host);
            } else {
              LOGGER.error("Unknown type !");
            }
            return null;
          }

          @Override
          public void offline() {
            LOGGER.error("Can not found cluster interpreter!");
          }
        });
    LOGGER.info("ClusterInterpreterCheckThread run() <<<");
  }
}
| apache-2.0 |
advantageous/boon | json/src/main/java/io/advantageous/boon/json/serializers/DateSerializer.java | 1831 | /*
* Copyright 2013-2014 Richard M. Hightower
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* __________ _____ __ .__
* \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____
* | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\
* | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ >
* |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ /
* \/ \/ \/ \/ \/ \//_____/
* ____. ___________ _____ ______________.___.
* | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | |
* | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | |
* /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ |
* \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______|
* \/ \/ \/ \/ \/ \/
*/
package io.advantageous.boon.json.serializers;
import io.advantageous.boon.primitive.CharBuf;
import java.util.Date;
/**
 * Strategy interface for writing a {@link Date} value into the output
 * {@link CharBuf} during JSON serialization.
 *
 * Created by rick on 1/1/14.
 */
public interface DateSerializer {
    /**
     * Serializes {@code date} into {@code builder}.
     *
     * @param jsonSerializer the serializer driving the current serialization
     * @param date           the date value to write
     * @param builder        the character buffer receiving the JSON output
     */
    void serializeDate (JsonSerializerInternal jsonSerializer, Date date, CharBuf builder );
}
| apache-2.0 |
PerfCake/PerfClipse | org.perfclipse.ui/src/org/perfclipse/ui/editors/palettefactories/DestinationFactory.java | 524 | package org.perfclipse.ui.editors.palettefactories;
import org.perfcake.model.ObjectFactory;
import org.perfcake.model.Scenario.Reporting.Reporter.Destination;
/**
 * Palette factory producing {@link Destination} model elements whose
 * {@code clazz} attribute is pre-set to the factory's configured parameter.
 */
public class DestinationFactory extends ParametrizedSimpleFactory {

	public DestinationFactory(Class<?> aClass, String parameter) {
		super(aClass, parameter);
	}

	@Override
	public Object getNewObject() {
		final Destination destination = new ObjectFactory().createScenarioReportingReporterDestination();
		destination.setClazz(parameter);
		return destination;
	}
}
| apache-2.0 |
misberner/automatalib | util/src/main/java/net/automatalib/util/automata/ads/LeeYannakakis.java | 20397 | /* Copyright (C) 2013-2020 TU Dortmund
* This file is part of AutomataLib, http://www.automatalib.net/.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.automatalib.util.automata.ads;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import net.automatalib.automata.transducers.MealyMachine;
import net.automatalib.commons.util.Pair;
import net.automatalib.graphs.ads.ADSNode;
import net.automatalib.graphs.ads.impl.ADSLeafNode;
import net.automatalib.graphs.base.compact.CompactEdge;
import net.automatalib.graphs.base.compact.CompactSimpleGraph;
import net.automatalib.util.graphs.Path;
import net.automatalib.util.graphs.ShortestPaths;
import net.automatalib.util.graphs.traversal.GraphTraversal;
import net.automatalib.words.Alphabet;
import net.automatalib.words.Word;
/**
* Algorithm of Lee and Yannakakis for computing adaptive distinguishing sequences (of length at most n^2) in O(n^2)
* time (where n denotes the number of states of the automaton).
* <p>
* See: D. Lee and M. Yannakakis - "Testing Finite-State Machines: State Identification and Verification", IEEE
* Transactions on Computers 43.3 (1994)
*
* @author frohme
*/
@SuppressWarnings("nullness")
public final class LeeYannakakis {
    // Utility class: static methods only, no instances.
    private LeeYannakakis() {}
/**
* Computes an ADS using the algorithm of Lee and Yannakakis.
*
* @param automaton
* The automaton for which an ADS should be computed
* @param input
* the input alphabet of the automaton
* @param <S>
* (hypothesis) state type
* @param <I>
* input alphabet type
* @param <O>
* output alphabet type
*
* @return A {@link LYResult} containing an adaptive distinguishing sequence (if existent) and a possible set of
* indistinguishable states.
*/
    public static <S, I, O> LYResult<S, I, O> compute(final MealyMachine<S, I, ?, O> automaton,
                                                      final Alphabet<I> input) {
        // Build the split tree first; an ADS can be extracted iff the tree
        // construction succeeded (result "is present").
        final SplitTreeResult<S, I, O> str = computeSplitTree(automaton, input);
        if (str.isPresent()) {
            final Set<S> states = new HashSet<>(automaton.getStates());
            // Extraction starts from the full state set with the identity
            // mapping (each state is initially its own "current" state).
            return new LYResult<>(extractADS(automaton,
                                             str.get(),
                                             states,
                                             states.stream()
                                                   .collect(Collectors.toMap(Function.identity(), Function.identity())),
                                             null));
        }
        // No ADS exists: report the states that could not be distinguished.
        return new LYResult<>(str.getIndistinguishableStates());
    }
    /**
     * Builds the Lee &amp; Yannakakis split tree for the given automaton by iteratively refining the
     * coarsest partition (all states in one block) until every leaf is a singleton, or until some
     * partition admits no valid splitting input (in which case its states are reported as
     * indistinguishable).
     */
    private static <S, I, O> SplitTreeResult<S, I, O> computeSplitTree(final MealyMachine<S, I, ?, O> automaton,
                                                                       final Alphabet<I> input) {
        // Root contains all states; 'leaves' tracks the current (unrefined) partition blocks.
        final SplitTree<S, I, O> st = new SplitTree<>(new HashSet<>(automaton.getStates()));
        final Set<SplitTree<S, I, O>> leaves = Sets.newHashSetWithExpectedSize(automaton.size());
        leaves.add(st);
        while (leaves.stream().anyMatch(LeeYannakakis::needsRefinement)) {
            // Only the cardinality-maximal leaves (set R) are refined in each round.
            final int maxCardinality = leaves.stream().mapToInt(x -> x.getPartition().size()).max().getAsInt();
            final Set<SplitTree<S, I, O>> R =
                    leaves.stream().filter(x -> x.getPartition().size() == maxCardinality).collect(Collectors.toSet());
            final Map<Validity, Set<Pair<Word<I>, SplitTree<S, I, O>>>> validitySetMap =
                    computeValidities(automaton, input, R, leaves);
            // Any invalid partition means its states cannot be separated -> abort with that state set.
            if (!validitySetMap.get(Validity.INVALID).isEmpty()) {
                final Set<Pair<Word<I>, SplitTree<S, I, O>>> set = validitySetMap.get(Validity.INVALID);
                final Set<S> indistinguishableStates = new HashSet<>();
                for (final Pair<Word<I>, SplitTree<S, I, O>> pair : set) {
                    indistinguishableStates.addAll(pair.getSecond().getPartition());
                }
                return new SplitTreeResult<>(indistinguishableStates);
            }
            // a-valid partitions: a single input already yields differing outputs, so the block is
            // split directly by grouping states on the output they produce.
            for (final Pair<Word<I>, SplitTree<S, I, O>> aPartition : validitySetMap.get(Validity.A_VALID)) {
                assert aPartition.getFirst().size() == 1 : "a-valid inputs should always contain exactly 1 symbol";
                final I aValidInput = aPartition.getFirst().firstSymbol();
                final SplitTree<S, I, O> nodeToRefine = aPartition.getSecond();
                final Map<O, Set<S>> successorMap = nodeToRefine.getPartition()
                                                                .stream()
                                                                .collect(Collectors.groupingBy(s -> automaton.getOutput(
                                                                        s,
                                                                        aValidInput), Collectors.toSet()));
                nodeToRefine.setSequence(Word.fromSymbols(aValidInput));
                leaves.remove(nodeToRefine);
                // Each distinct output spawns a child leaf holding the states that produced it.
                for (Map.Entry<O, Set<S>> entry : successorMap.entrySet()) {
                    final SplitTree<S, I, O> child = new SplitTree<>(entry.getValue());
                    nodeToRefine.getSuccessors().put(entry.getKey(), child);
                    leaves.add(child);
                }
                for (final S s : nodeToRefine.getPartition()) {
                    nodeToRefine.getMapping().put(s, automaton.getSuccessor(s, aValidInput));
                }
            }
            // b-valid partitions: the input maps the block's states into several existing partitions,
            // so the splitting sequence of the lowest common split-tree node is reused (prefixed by
            // the b-valid input symbol).
            for (final Pair<Word<I>, SplitTree<S, I, O>> bPartition : validitySetMap.get(Validity.B_VALID)) {
                assert bPartition.getFirst().size() == 1 : "b-valid inputs should always contain exactly 1 symbol";
                final I bValidInput = bPartition.getFirst().firstSymbol();
                final SplitTree<S, I, O> nodeToRefine = bPartition.getSecond();
                final Map<S, S> successorsToNodes = nodeToRefine.getPartition()
                                                                .stream()
                                                                .collect(Collectors.toMap(x -> automaton.getSuccessor(x,
                                                                                                                      bValidInput),
                                                                                          Function.identity()));
                final SplitTree<S, I, O> v =
                        st.findLowestSubsetNode(successorsToNodes.keySet()).orElseThrow(IllegalStateException::new);
                nodeToRefine.setSequence(v.getSequence().prepend(bValidInput));
                leaves.remove(nodeToRefine);
                // Project v's children back onto the original states via the successor mapping.
                for (final Map.Entry<O, SplitTree<S, I, O>> entry : v.getSuccessors().entrySet()) {
                    final Set<S> wSet = entry.getValue().getPartition();
                    final Set<S> intersection = new HashSet<>(successorsToNodes.keySet());
                    intersection.retainAll(wSet);
                    if (!intersection.isEmpty()) {
                        final Set<S> indistinguishableNodes =
                                intersection.stream().map(successorsToNodes::get).collect(Collectors.toSet());
                        final SplitTree<S, I, O> newChild = new SplitTree<>(indistinguishableNodes);
                        nodeToRefine.getSuccessors().put(entry.getKey(), newChild);
                        leaves.add(newChild);
                    }
                }
                for (final S s : nodeToRefine.getPartition()) {
                    nodeToRefine.getMapping().put(s, v.getMapping().get(automaton.getSuccessor(s, bValidInput)));
                }
            }
            // c-valid partitions: reachable (via the implication graph) from an a-/b-valid partition;
            // the transfer word leading there is concatenated with that node's splitting sequence.
            for (final Pair<Word<I>, SplitTree<S, I, O>> cPartition : validitySetMap.get(Validity.C_VALID)) {
                final Word<I> cValidInput = cPartition.getFirst();
                final SplitTree<S, I, O> nodeToRefine = cPartition.getSecond();
                final Map<S, S> successorsToNodes = nodeToRefine.getPartition()
                                                                .stream()
                                                                .collect(Collectors.toMap(x -> automaton.getSuccessor(x,
                                                                                                                      cValidInput),
                                                                                          Function.identity()));
                final SplitTree<S, I, O> C =
                        st.findLowestSubsetNode(successorsToNodes.keySet()).orElseThrow(IllegalStateException::new);
                nodeToRefine.setSequence(cValidInput.concat(C.getSequence()));
                leaves.remove(nodeToRefine);
                // Same projection as in the b-valid case, but relative to node C.
                for (final Map.Entry<O, SplitTree<S, I, O>> entry : C.getSuccessors().entrySet()) {
                    final Set<S> wSet = entry.getValue().getPartition();
                    final Set<S> intersection = new HashSet<>(successorsToNodes.keySet());
                    intersection.retainAll(wSet);
                    if (!intersection.isEmpty()) {
                        final Set<S> indistinguishableNodes =
                                intersection.stream().map(successorsToNodes::get).collect(Collectors.toSet());
                        final SplitTree<S, I, O> newChild = new SplitTree<>(indistinguishableNodes);
                        nodeToRefine.getSuccessors().put(entry.getKey(), newChild);
                        leaves.add(newChild);
                    }
                }
                for (final S s : nodeToRefine.getPartition()) {
                    nodeToRefine.getMapping().put(s, C.getMapping().get(automaton.getSuccessor(s, cValidInput)));
                }
            }
        }
        return new SplitTreeResult<>(st);
    }
    /**
     * Recursively turns a completed split tree into an adaptive distinguishing sequence.
     *
     * @param automaton the automaton under consideration
     * @param st the root of the (complete) split tree
     * @param currentSet the set of current states still to be distinguished
     * @param currentToInitialMapping maps each current state back to the initial state it evolved from,
     *        so leaves can report the original (initial) state
     * @param predecessor the ADS node to attach the generated subtree to (null for the root)
     * @return the head of the generated ADS (sub-)tree
     */
    private static <S, I, O> ADSNode<S, I, O> extractADS(final MealyMachine<S, I, ?, O> automaton,
                                                         final SplitTree<S, I, O> st,
                                                         final Set<S> currentSet,
                                                         final Map<S, S> currentToInitialMapping,
                                                         final ADSNode<S, I, O> predecessor) {
        // Base case: a single remaining state is fully identified -> emit a leaf for its initial state.
        if (currentSet.size() == 1) {
            final S currentNode = currentSet.iterator().next();
            assert currentToInitialMapping.containsKey(currentNode);
            return new ADSLeafNode<>(predecessor, currentToInitialMapping.get(currentNode));
        }
        // Apply the splitting sequence of the lowest split-tree node covering the current set.
        final SplitTree<S, I, O> u = st.findLowestSubsetNode(currentSet).orElseThrow(IllegalStateException::new);
        final Pair<ADSNode<S, I, O>, ADSNode<S, I, O>> ads =
                ADSUtil.buildFromTrace(automaton, u.getSequence(), currentSet.iterator().next());
        final ADSNode<S, I, O> head = ads.getFirst();
        final ADSNode<S, I, O> tail = ads.getSecond();
        head.setParent(predecessor);
        // Recurse into each output class of u that intersects the current set.
        for (final Map.Entry<O, SplitTree<S, I, O>> entry : u.getSuccessors().entrySet()) {
            final O output = entry.getKey();
            final SplitTree<S, I, O> tree = entry.getValue();
            final Set<S> intersection = new HashSet<>(tree.getPartition());
            intersection.retainAll(currentSet);
            if (!intersection.isEmpty()) {
                // Advance both the current states and the back-mapping through u's state mapping.
                final Map<S, S> nextCurrentToInitialMapping = intersection.stream()
                                                                          .collect(Collectors.toMap(key -> u.getMapping()
                                                                                                            .get(key),
                                                                                                    currentToInitialMapping::get));
                final Set<S> nextCurrent =
                        intersection.stream().map(x -> u.getMapping().get(x)).collect(Collectors.toSet());
                tail.getChildren()
                    .put(output, extractADS(automaton, st, nextCurrent, nextCurrentToInitialMapping, tail));
            }
        }
        return head;
    }
private static <S, I, O> boolean needsRefinement(final SplitTree<S, I, O> node) {
return node.getPartition().size() > 1;
}
private static <S, I, O> boolean isValidInput(final MealyMachine<S, I, ?, O> automaton,
final I input,
final Set<S> states) {
final Map<O, Set<S>> successors = new HashMap<>();
for (final S s : states) {
final O output = automaton.getOutput(s, input);
final S successor = automaton.getSuccessor(s, input);
if (!successors.containsKey(output)) {
successors.put(output, new HashSet<>());
}
if (!successors.get(output).add(successor)) {
return false;
}
}
return true;
}
    /**
     * Classifies each partition in {@code R} as a-, b-, or c-valid (or invalid) and pairs it with the
     * input word that justifies the classification (null for invalid partitions).
     *
     * @param automaton the automaton under consideration
     * @param inputs the input alphabet
     * @param R the cardinality-maximal partitions to classify
     * @param pi the complete current partition (all leaves), used to index states by partition
     * @return for every {@link Validity}, the set of (witness word, partition) pairs
     */
    private static <S, I, O> Map<Validity, Set<Pair<Word<I>, SplitTree<S, I, O>>>> computeValidities(final MealyMachine<S, I, ?, O> automaton,
                                                                                                     final Alphabet<I> inputs,
                                                                                                     final Set<SplitTree<S, I, O>> R,
                                                                                                     final Set<SplitTree<S, I, O>> pi) {
        final Map<Validity, Set<Pair<Word<I>, SplitTree<S, I, O>>>> result = new EnumMap<>(Validity.class);
        // Number the partitions and remember which partition each state belongs to.
        final Map<S, Integer> stateToPartitionMap = new HashMap<>();
        final BiMap<Integer, SplitTree<S, I, O>> partitionToNodeMap = HashBiMap.create();
        int counter = 0;
        for (SplitTree<S, I, O> partition : pi) {
            for (final S s : partition.getPartition()) {
                final Integer previousValue = stateToPartitionMap.put(s, counter);
                assert previousValue == null : "Not a true partition";
            }
            partitionToNodeMap.put(counter, partition);
            counter++;
        }
        for (final Validity v : Validity.values()) {
            result.put(v, new HashSet<>());
        }
        // Potential c-valid partitions plus the implication graph whose edges record "applying input i
        // moves partition p into partition q"; graph nodes correspond to the partition numbers above.
        final Set<SplitTree<S, I, O>> pendingCs = new HashSet<>();
        final Map<Integer, Validity> partitionToClassificationMap = new HashMap<>();
        final CompactSimpleGraph<I> implicationGraph = new CompactSimpleGraph<>(partitionToNodeMap.size());
        for (int i = 0; i < partitionToNodeMap.size(); i++) {
            implicationGraph.addIntNode();
        }
        partitionLoop:
        for (final SplitTree<S, I, O> B : R) {
            // general validity: precompute which inputs do not merge any two states of B
            final Map<I, Boolean> validInputMap = Maps.newHashMapWithExpectedSize(inputs.size());
            for (final I i : inputs) {
                validInputMap.put(i, isValidInput(automaton, i, B.getPartition()));
            }
            // a-valid: some valid input already produces more than one output on B
            for (final I i : inputs) {
                if (!validInputMap.get(i)) {
                    continue;
                }
                final Set<O> outputs =
                        B.getPartition().stream().map(s -> automaton.getOutput(s, i)).collect(Collectors.toSet());
                if (outputs.size() > 1) {
                    result.get(Validity.A_VALID).add(Pair.of(Word.fromSymbols(i), B));
                    partitionToClassificationMap.put(stateToPartitionMap.get(B.getPartition().iterator().next()),
                                                     Validity.A_VALID);
                    continue partitionLoop;
                }
            }
            // b-valid: some valid input scatters B's states over more than one existing partition
            for (final I i : inputs) {
                if (!validInputMap.get(i)) {
                    continue;
                }
                final Set<Integer> successors = B.getPartition()
                                                 .stream()
                                                 .map(s -> stateToPartitionMap.get(automaton.getSuccessor(s, i)))
                                                 .collect(Collectors.toSet());
                if (successors.size() > 1) {
                    result.get(Validity.B_VALID).add(Pair.of(Word.fromSymbols(i), B));
                    partitionToClassificationMap.put(stateToPartitionMap.get(B.getPartition().iterator().next()),
                                                     Validity.B_VALID);
                    continue partitionLoop;
                }
            }
            // c valid
            // we defer evaluation to a later point in time, because we need to check if the target
            // partitions are a- or b-valid; for now only record the implication edges.
            for (final I i : inputs) {
                if (!validInputMap.get(i)) {
                    continue;
                }
                final S nodeInPartition = B.getPartition().iterator().next();
                final S successor = automaton.getSuccessor(nodeInPartition, i);
                final Integer partition = stateToPartitionMap.get(nodeInPartition);
                final Integer successorPartition = stateToPartitionMap.get(successor);
                if (!partition.equals(successorPartition)) {
                    implicationGraph.connect(partition, successorPartition, i);
                    pendingCs.add(B);
                }
            }
            if (pendingCs.contains(B)) {
                continue partitionLoop;
            }
            //if we haven't continued the loop up until here, there is no valid input
            result.get(Validity.INVALID).add(Pair.of(null, B));
        }
        //check remaining potential Cs: a pending partition is c-valid iff the implication graph
        //reaches an a- or b-valid partition; the shortest such path yields the transfer word.
        pendingCLoop:
        for (final SplitTree<S, I, O> pendingC : pendingCs) {
            final Integer pendingPartition = partitionToNodeMap.inverse().get(pendingC);
            final Iterator<Integer> iter =
                    GraphTraversal.bfIterator(implicationGraph, Collections.singleton(pendingPartition));
            while (iter.hasNext()) {
                final Integer successor = iter.next();
                final Validity successorValidity = partitionToClassificationMap.get(successor);
                if (successorValidity == Validity.A_VALID || successorValidity == Validity.B_VALID) {
                    final Path<Integer, CompactEdge<I>> path = ShortestPaths.shortestPath(implicationGraph,
                                                                                          pendingPartition,
                                                                                          implicationGraph.size(),
                                                                                          successor);
                    final Word<I> word =
                            path.edgeList().stream().map(CompactEdge::getProperty).collect(Word.collector());
                    result.get(Validity.C_VALID).add(Pair.of(word, pendingC));
                    continue pendingCLoop;
                }
            }
            result.get(Validity.INVALID).add(Pair.of(null, pendingC));
        }
        return result;
    }
    /**
     * Classification of a partition during split-tree refinement (see {@code computeValidities}):
     * a-valid partitions are split directly by an input producing differing outputs; b-valid
     * partitions are split because an input scatters their states over several existing partitions;
     * c-valid partitions reach an a-/b-valid partition via a transfer word; invalid partitions admit
     * no valid input at all (their states are indistinguishable).
     */
    private enum Validity {
        // an input yields more than one output on the partition
        A_VALID,
        // an input maps the partition's states into more than one existing partition
        B_VALID,
        // an a-/b-valid partition is reachable through the implication graph
        C_VALID,
        // no valid input exists for the partition
        INVALID
    }
}
| apache-2.0 |
apache/tapestry4 | framework/src/java/org/apache/tapestry/engine/ServiceEncodingImpl.java | 3169 | // Copyright 2004, 2005 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.engine;
import java.util.Map;
import org.apache.hivemind.util.Defense;
import org.apache.tapestry.util.QueryParameterMap;
/**
* Implementation of {@link org.apache.tapestry.engine.ServiceEncoding}, which adds the ability to
* determine when the encoding has been modified.
*
* @author Howard M. Lewis Ship
* @since 4.0
*/
public class ServiceEncodingImpl implements ServiceEncoding
{
private String _servletPath;
private String _pathInfo;
/**
* Map of query parameter values; key is string name, value is either a string, an array of
* strings, or null. Could have done this with subclassing rather than delegation.
*/
private final QueryParameterMap _parameters;
private boolean _modified;
public boolean isModified()
{
return _modified;
}
public void resetModified()
{
_modified = false;
}
/**
* Creates a new instance with a new map of parameters.
*/
public ServiceEncodingImpl(String servletPath)
{
this(servletPath, null, new QueryParameterMap());
}
public ServiceEncodingImpl(String servletPath, Map parametersMap)
{
this(servletPath, null, new QueryParameterMap(parametersMap));
}
public ServiceEncodingImpl(String servletPath, String pathInfo, QueryParameterMap parameters)
{
Defense.notNull(servletPath, "servletPath");
Defense.notNull(parameters, "parameters");
_servletPath = servletPath;
_pathInfo = pathInfo;
_parameters = parameters;
}
public String getParameterValue(String name)
{
return _parameters.getParameterValue(name);
}
public String[] getParameterValues(String name)
{
return _parameters.getParameterValues(name);
}
public void setServletPath(String servletPath)
{
Defense.notNull(servletPath, "servletPath");
_servletPath = servletPath;
_modified = true;
}
public void setParameterValue(String name, String value)
{
_parameters.setParameterValue(name, value);
_modified = true;
}
public void setParameterValues(String name, String[] values)
{
_parameters.setParameterValues(name, values);
_modified = true;
}
public String getServletPath()
{
return _servletPath;
}
public String[] getParameterNames()
{
return _parameters.getParameterNames();
}
public String getPathInfo()
{
return _pathInfo;
}
} | apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-cloudfunctions/v2beta/1.31.0/com/google/api/services/cloudfunctions/v2beta/model/ListRuntimesResponse.java | 2172 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudfunctions.v2beta.model;
/**
* Response for the `ListRuntimes` method.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Functions API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ListRuntimesResponse extends com.google.api.client.json.GenericJson {

  /**
   * The runtimes that match the request; {@code null} when unset.
   */
  @com.google.api.client.util.Key
  private java.util.List<Runtime> runtimes;

  /**
   * Returns the runtimes that match the request.
   *
   * @return the matching runtimes, or {@code null} for none
   */
  public java.util.List<Runtime> getRuntimes() {
    return this.runtimes;
  }

  /**
   * Replaces the runtimes that match the request.
   *
   * @param runtimes the matching runtimes, or {@code null} for none
   * @return this object, to allow method-call chaining
   */
  public ListRuntimesResponse setRuntimes(java.util.List<Runtime> runtimes) {
    this.runtimes = runtimes;
    return this;
  }

  @Override
  public ListRuntimesResponse set(String fieldName, Object value) {
    return (ListRuntimesResponse) super.set(fieldName, value);
  }

  @Override
  public ListRuntimesResponse clone() {
    return (ListRuntimesResponse) super.clone();
  }
}
| apache-2.0 |
kohii/smoothcsv | smoothcsv-app-modules/smoothcsv-core/src/main/java/command/app/CloseAllCommand.java | 1166 | /*
* Copyright 2016 kohii
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package command.app;
import com.smoothcsv.framework.SCApplication;
import com.smoothcsv.framework.command.Command;
import com.smoothcsv.framework.component.SCTabbedPane;
/**
* @author kohii
*/
public class CloseAllCommand extends Command {

  /**
   * Closes tabs until none remain, by repeatedly running {@link CloseCommand} on the currently
   * selected view. If a close attempt leaves the selection unchanged (presumably because the user
   * cancelled an unsaved-changes prompt -- TODO confirm the veto path), the loop stops instead of
   * spinning forever on the same tab.
   *
   * @see com.smoothcsv.framework.commands.Command#run()
   */
  @Override
  public void run() {
    SCTabbedPane tabbedPane = SCApplication.components().getTabbedPane();
    CloseCommand closeCommand = new CloseCommand();
    Object currentView = tabbedPane.getSelectedView();
    while (currentView != null) {
      closeCommand.run();
      Object nextView = tabbedPane.getSelectedView();
      if (nextView == currentView) {
        // CloseCommand did not remove the selected view; bail out to avoid an infinite loop.
        break;
      }
      currentView = nextView;
    }
  }
}
| apache-2.0 |
mufaddalq/cloudstack-datera-driver | awsapi/src/com/amazon/ec2/AssignPrivateIpAddressesResponseType.java | 26668 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* AssignPrivateIpAddressesResponseType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
* AssignPrivateIpAddressesResponseType bean class
*/
// NOTE(review): This class was auto-generated by Apache Axis2 (ADB) from a WSDL; hand edits will
// be lost on regeneration. Comments below only explain the generated structure.
public class AssignPrivateIpAddressesResponseType
        implements org.apache.axis2.databinding.ADBBean{
        /* This type was generated from the piece of schema that had
                name = AssignPrivateIpAddressesResponseType
                Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
                Namespace Prefix = ns1
                */

        // Returns the fixed "ns1" prefix for the EC2 namespace; any other namespace gets a fresh
        // unique prefix from the Axis2 bean utilities.
        private static java.lang.String generatePrefix(java.lang.String namespace) {
            if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
                return "ns1";
            }
            return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }

        /**
         * field for RequestId (required element; serialization throws if null)
         */
        protected java.lang.String localRequestId ;

        /**
         * Auto generated getter method
         * @return java.lang.String
         */
        public java.lang.String getRequestId(){
            return localRequestId;
        }

        /**
         * Auto generated setter method
         * @param param RequestId
         */
        public void setRequestId(java.lang.String param){
            this.localRequestId=param;
        }

        /**
         * field for _return (the boolean success flag of the EC2 response)
         */
        protected boolean local_return ;

        /**
         * Auto generated getter method
         * @return boolean
         */
        public boolean get_return(){
            return local_return;
        }

        /**
         * Auto generated setter method
         * @param param _return
         */
        public void set_return(boolean param){
            this.local_return=param;
        }

        /**
         * isReaderMTOMAware
         * Defensively probes the reader's data-handler property; readers that reject the property
         * lookup are treated as not MTOM-aware.
         * @return true if the reader supports MTOM
         */
        public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
            boolean isReaderMTOMAware = false;
            try{
                isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
            }catch(java.lang.IllegalArgumentException e){
                isReaderMTOMAware = false;
            }
            return isReaderMTOMAware;
        }

        /**
         * Wraps this bean in a lazily-serializing OMElement: serialization is deferred until the
         * element is actually consumed (via the ADBDataSource callback).
         * @param parentQName
         * @param factory
         * @return org.apache.axiom.om.OMElement
         */
        public org.apache.axiom.om.OMElement getOMElement (
                final javax.xml.namespace.QName parentQName,
                final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
            org.apache.axiom.om.OMDataSource dataSource =
                    new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                    AssignPrivateIpAddressesResponseType.this.serialize(parentQName,factory,xmlWriter);
                }
            };
            return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                    parentQName,factory,dataSource);
        }

        // Convenience overload: serialize without emitting an explicit xsi:type attribute.
        public void serialize(final javax.xml.namespace.QName parentQName,
                              final org.apache.axiom.om.OMFactory factory,
                              org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
            serialize(parentQName,factory,xmlWriter,false);
        }

        // Writes this bean as XML: the wrapping element (named by parentQName), an optional
        // xsi:type attribute, then the child elements <requestId> and <return> in schema order.
        public void serialize(final javax.xml.namespace.QName parentQName,
                              final org.apache.axiom.om.OMFactory factory,
                              org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                              boolean serializeType)
                throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

            java.lang.String prefix = null;
            java.lang.String namespace = null;

            prefix = parentQName.getPrefix();
            namespace = parentQName.getNamespaceURI();

            // Open the wrapping element, registering the namespace/prefix on first use.
            if ((namespace != null) && (namespace.trim().length() > 0)) {
                java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
                if (writerPrefix != null) {
                    xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
                } else {
                    if (prefix == null) {
                        prefix = generatePrefix(namespace);
                    }

                    xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                }
            } else {
                xmlWriter.writeStartElement(parentQName.getLocalPart());
            }

            // Optionally emit an explicit xsi:type so readers can dispatch on the schema type.
            if (serializeType){
                java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
                if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                    writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                            namespacePrefix+":AssignPrivateIpAddressesResponseType",
                            xmlWriter);
                } else {
                    writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                            "AssignPrivateIpAddressesResponseType",
                            xmlWriter);
                }
            }

            // <requestId> -- required element.
            namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
            if (! namespace.equals("")) {
                prefix = xmlWriter.getPrefix(namespace);

                if (prefix == null) {
                    prefix = generatePrefix(namespace);

                    xmlWriter.writeStartElement(prefix,"requestId", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                } else {
                    xmlWriter.writeStartElement(namespace,"requestId");
                }
            } else {
                xmlWriter.writeStartElement("requestId");
            }

            if (localRequestId==null){
                // write the nil attribute
                throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
            }else{
                xmlWriter.writeCharacters(localRequestId);
            }

            xmlWriter.writeEndElement();

            // <return> -- primitive boolean, so it can never be null.
            namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
            if (! namespace.equals("")) {
                prefix = xmlWriter.getPrefix(namespace);

                if (prefix == null) {
                    prefix = generatePrefix(namespace);

                    xmlWriter.writeStartElement(prefix,"return", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                } else {
                    xmlWriter.writeStartElement(namespace,"return");
                }
            } else {
                xmlWriter.writeStartElement("return");
            }

            if (false) {
                throw new org.apache.axis2.databinding.ADBException("return cannot be null!!");
            } else {
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));
            }

            xmlWriter.writeEndElement();

            xmlWriter.writeEndElement();
        }

        /**
         * Util method to write an attribute with the ns prefix
         */
        private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                    java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
            if (xmlWriter.getPrefix(namespace) == null) {
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }

            xmlWriter.writeAttribute(namespace,attName,attValue);
        }

        /**
         * Util method to write an attribute without the ns prefix
         */
        private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                    java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
            if (namespace.equals(""))
            {
                xmlWriter.writeAttribute(attName,attValue);
            }
            else
            {
                registerPrefix(xmlWriter, namespace);
                xmlWriter.writeAttribute(namespace,attName,attValue);
            }
        }

        /**
         * Util method to write an attribute without the ns prefix
         * (the attribute value itself is a QName, rendered as "prefix:localPart")
         */
        private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                         javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

            java.lang.String attributeNamespace = qname.getNamespaceURI();
            java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
            if (attributePrefix == null) {
                attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
            }
            java.lang.String attributeValue;
            if (attributePrefix.trim().length() > 0) {
                attributeValue = attributePrefix + ":" + qname.getLocalPart();
            } else {
                attributeValue = qname.getLocalPart();
            }

            if (namespace.equals("")) {
                xmlWriter.writeAttribute(attName, attributeValue);
            } else {
                registerPrefix(xmlWriter, namespace);
                xmlWriter.writeAttribute(namespace, attName, attributeValue);
            }
        }

        /**
         * method to handle Qnames
         */
        private void writeQName(javax.xml.namespace.QName qname,
                                javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
            java.lang.String namespaceURI = qname.getNamespaceURI();
            if (namespaceURI != null) {
                java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
                if (prefix == null) {
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }

                if (prefix.trim().length() > 0){
                    xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
                } else {
                    // i.e this is the default namespace
                    xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
                }

            } else {
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        }

        // Writes a whitespace-separated list of QNames as element text.
        private void writeQNames(javax.xml.namespace.QName[] qnames,
                                 javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

            if (qnames != null) {
                // we have to store this data until last moment since it is not possible to write any
                // namespace data after writing the charactor data
                java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
                java.lang.String namespaceURI = null;
                java.lang.String prefix = null;

                for (int i = 0; i < qnames.length; i++) {
                    if (i > 0) {
                        stringToWrite.append(" ");
                    }
                    namespaceURI = qnames[i].getNamespaceURI();
                    if (namespaceURI != null) {
                        prefix = xmlWriter.getPrefix(namespaceURI);
                        if ((prefix == null) || (prefix.length() == 0)) {
                            prefix = generatePrefix(namespaceURI);
                            xmlWriter.writeNamespace(prefix, namespaceURI);
                            xmlWriter.setPrefix(prefix,namespaceURI);
                        }

                        if (prefix.trim().length() > 0){
                            stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                        } else {
                            stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                        }
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                }
                xmlWriter.writeCharacters(stringToWrite.toString());
            }

        }

        /**
         * Register a namespace prefix: reuses an existing binding, otherwise generates a prefix
         * (retrying until it is unused in the current context) and binds it on the writer.
         */
        private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
            java.lang.String prefix = xmlWriter.getPrefix(namespace);

            if (prefix == null) {
                prefix = generatePrefix(namespace);

                while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                    prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
                }

                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }

            return prefix;
        }

        /**
         * databinding method to get an XML representation of this object
         * (a pull-parser view over the element-name/value list; requestId must be set)
         */
        public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
                throws org.apache.axis2.databinding.ADBException{

            java.util.ArrayList elementList = new java.util.ArrayList();
            java.util.ArrayList attribList = new java.util.ArrayList();

            elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                    "requestId"));

            if (localRequestId != null){
                elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRequestId));
            } else {
                throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
            }

            elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                    "return"));

            elementList.add(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));

            return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
        }

        /**
         * Factory class that keeps the parse method
         */
        public static class Factory{

            /**
             * static method to create the object
             * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
             * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
             * Postcondition: If this object is an element, the reader is positioned at its end element
             * If this object is a complex type, the reader is positioned at the end element of its outer element
             */
            public static AssignPrivateIpAddressesResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
                AssignPrivateIpAddressesResponseType object =
                        new AssignPrivateIpAddressesResponseType();

                int event;
                java.lang.String nillableValue = null;
                java.lang.String prefix ="";
                java.lang.String namespaceuri ="";
                try {

                    // Advance to the first start element of this object.
                    while (!reader.isStartElement() && !reader.isEndElement())
                        reader.next();

                    // If an xsi:type names a subtype, delegate parsing to the ExtensionMapper.
                    if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                        java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                                "type");
                        if (fullTypeName!=null){
                            java.lang.String nsPrefix = null;
                            if (fullTypeName.indexOf(":") > -1){
                                nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                            }
                            nsPrefix = nsPrefix==null?"":nsPrefix;

                            java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);

                            if (!"AssignPrivateIpAddressesResponseType".equals(type)){
                                //find namespace for the prefix
                                java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                                return (AssignPrivateIpAddressesResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                        nsUri,type,reader);
                            }
                        }
                    }

                    // Note all attributes that were handled. Used to differ normal attributes
                    // from anyAttributes.
                    java.util.Vector handledAttributes = new java.util.Vector();

                    reader.next();

                    // <requestId> -- required; anything else is a schema violation.
                    while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                    if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","requestId").equals(reader.getName())){

                        java.lang.String content = reader.getElementText();

                        object.setRequestId(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));

                        reader.next();

                    } // End of if for expected property start element
                    else{
                        // A start element we are not expecting indicates an invalid parameter was passed
                        throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                    }

                    // <return> -- required boolean.
                    while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                    if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","return").equals(reader.getName())){

                        java.lang.String content = reader.getElementText();

                        object.set_return(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToBoolean(content));

                        reader.next();

                    } // End of if for expected property start element
                    else{
                        // A start element we are not expecting indicates an invalid parameter was passed
                        throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                    }

                    while (!reader.isStartElement() && !reader.isEndElement())
                        reader.next();

                    if (reader.isStartElement())
                        // A start element we are not expecting indicates a trailing invalid property
                        throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

                } catch (javax.xml.stream.XMLStreamException e) {
                    throw new java.lang.Exception(e);
                }

                return object;
            }

        }//end of factory class

    }
| apache-2.0 |
NuwanSameera/syncope | netbeans-plugin/src/main/java/org/apache/syncope/netbeans/plugin/service/MailTemplateManagerService.java | 2511 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.netbeans.plugin.service;
import java.io.InputStream;
import java.util.List;
import javax.ws.rs.core.Response;
import org.apache.syncope.client.lib.SyncopeClient;
import org.apache.syncope.client.lib.SyncopeClientFactoryBean;
import org.apache.syncope.common.lib.to.MailTemplateTO;
import org.apache.syncope.common.lib.types.MailTemplateFormat;
import org.apache.syncope.common.rest.api.service.MailTemplateService;
/**
 * Thin client-side facade over the Syncope {@link MailTemplateService} REST
 * endpoint, exposing CRUD operations on mail templates and their per-format
 * (HTML / plain text) bodies.
 */
public class MailTemplateManagerService {

    private final MailTemplateService service;

    /**
     * Builds a REST client bound to the given Syncope deployment.
     *
     * @param url base address of the Syncope REST API
     * @param userName user to authenticate as
     * @param password password of {@code userName}
     */
    public MailTemplateManagerService(final String url, final String userName, final String password) {
        SyncopeClient syncopeClient = new SyncopeClientFactoryBean().setAddress(url).create(userName, password);
        service = syncopeClient.getService(MailTemplateService.class);
    }

    /** @return all mail templates known to the server */
    public List<MailTemplateTO> list() {
        return service.list();
    }

    /**
     * Creates a new mail template.
     *
     * @return {@code true} if the server answered 201 Created
     */
    public boolean create(final MailTemplateTO mailTemplateTO) {
        return Response.Status.CREATED.getStatusCode() == service.create(mailTemplateTO).getStatus();
    }

    /** @return the template identified by {@code key} */
    public MailTemplateTO read(final String key) {
        return service.read(key);
    }

    /**
     * Deletes the template identified by {@code key}.
     *
     * @return {@code true} if the service call completed without throwing
     */
    public boolean delete(final String key) {
        service.delete(key);
        return true;
    }

    /** @return the template body for the given format, as returned by the server */
    public Object getFormat(final String key, final MailTemplateFormat format) {
        return service.getFormat(key, format).getEntity();
    }

    /** Uploads {@code templateIn} as the body for the given template format. */
    public void setFormat(final String key, final MailTemplateFormat format, final InputStream templateIn) {
        service.setFormat(key, format, templateIn);
    }

    /**
     * Removes the body for the given template format.
     *
     * <p>Was previously a stub that always returned {@code false} without
     * contacting the server; now delegates to the REST service.</p>
     *
     * @return {@code true} if the service call completed without throwing
     */
    public boolean removeFormat(final String key, final MailTemplateFormat format) {
        service.removeFormat(key, format);
        return true;
    }
}
| apache-2.0 |
man4j/protobeans | modules/mvc/src/main/java/org/protobeans/mvc/config/MvcConfig.java | 6715 | package org.protobeans.mvc.config;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.protobeans.core.annotation.InjectFrom;
import org.protobeans.mvc.annotation.EnableMvc;
import org.protobeans.mvc.util.FileUtils;
import org.protobeans.mvc.util.FilterBean;
import org.protobeans.mvc.util.GlobalModelAttribute;
import org.protobeans.mvc.util.PathUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.validation.Validator;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import org.springframework.web.WebApplicationInitializer;
import org.springframework.web.context.ConfigurableWebApplicationContext;
import org.springframework.web.multipart.MultipartResolver;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;
import org.springframework.web.servlet.LocaleResolver;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.PathMatchConfigurer;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.i18n.CookieLocaleResolver;
import org.springframework.web.servlet.i18n.LocaleChangeInterceptor;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonParser.Feature;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
// Central Spring MVC configuration for protobeans; property fields are filled in
// from the @EnableMvc annotation via @InjectFrom.
@EnableWebMvc
@Configuration
@InjectFrom(EnableMvc.class)
@Import(MvcValidatorConfig.class)
@ComponentScan(basePackages = "org.protobeans.mvc.controller")
public class MvcConfig implements WebMvcConfigurer {
    // Populated by @InjectFrom(EnableMvc.class); never assigned directly in this class.
    private String resourcesPath;
    // Populated by @InjectFrom(EnableMvc.class); never assigned directly in this class.
    private String resourcesUrl;
    // Optional extra HTTP message converters contributed by the application context;
    // appended to the defaults in configureMessageConverters().
    @Autowired(required = false)
    private List<HttpMessageConverter<?>> converters = new ArrayList<>();
    // Optional holder of servlet filters, handed over to MvcInitializer below.
    @Autowired(required = false)
    private FilterBean filterBean;
    @Autowired
    private LocalValidatorFactoryBean localValidatorFactoryBean;
    /**
     * Publishes the WebApplicationInitializer class to bootstrap the servlet
     * container, passing the root context (and optional filters) to
     * MvcInitializer through its static fields.
     */
    @Bean
    public Class<? extends WebApplicationInitializer> mvcInitializer(ConfigurableWebApplicationContext ctx) {
        MvcInitializer.rootApplicationContext = ctx;
        if (filterBean != null) {
            MvcInitializer.filters = filterBean.getFilters();
        }
        return MvcInitializer.class;
    }
    /**
     * Jackson mapper: field-only (de)serialization, empty values omitted,
     * unknown properties and unquoted control characters tolerated.
     */
    @Bean
    public ObjectMapper mapper() {
        return new ObjectMapper().setVisibility(PropertyAccessor.FIELD, Visibility.ANY)
                                 .setVisibility(PropertyAccessor.GETTER, Visibility.NONE)
                                 .setVisibility(PropertyAccessor.IS_GETTER, Visibility.NONE)
                                 .setVisibility(PropertyAccessor.SETTER, Visibility.NONE)
                                 .setVisibility(PropertyAccessor.CREATOR, Visibility.NONE)
                                 .setSerializationInclusion(Include.NON_EMPTY)
                                 .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
                                 .configure(Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true);
    }
    // JSON converter backed by the mapper() bean above.
    @Bean
    HttpMessageConverter<?> jacksonMessageConverter() {
        return new MappingJackson2HttpMessageConverter(mapper());
    }
    // Plain-text converter forced to UTF-8 (default would be ISO-8859-1).
    @Bean
    HttpMessageConverter<?> stringMessageConverter() {
        return new StringHttpMessageConverter(StandardCharsets.UTF_8);
    }
    // Disable suffix pattern matching so "/foo.json" does not match a "/foo" handler.
    @Override
    public void configurePathMatch(PathMatchConfigurer configurer) {
        configurer.setUseSuffixPatternMatch(false);
    }
    @Override
    public Validator getValidator() {
        return localValidatorFactoryBean;
    }
    /**
     * Exposes a global "resourcesPrefix" model attribute: the dashed resources
     * URL with the last-modified timestamp appended (cache busting), or null
     * when no resources path/URL is configured.
     */
    @Bean
    public GlobalModelAttribute globalModelAttribute() {
        String value = null;
        if (!resourcesPath.isEmpty() && !resourcesUrl.isEmpty()) {
            String dashedResourcesPath = PathUtils.dashedPath(resourcesPath);
            String dashedResourcesUrl = PathUtils.dashedPath(resourcesUrl);
            long lastModified = FileUtils.getLastModified(dashedResourcesPath);
            value = dashedResourcesUrl + lastModified;
        }
        return new GlobalModelAttribute("resourcesPrefix", value);
    }
    /**
     * Serves Swagger UI, webjars, and — when configured — the application's
     * static resources under a timestamped URL with a ~1 year cache period
     * (31556926 s), matching the prefix exposed by globalModelAttribute().
     */
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        if (!resourcesPath.isEmpty() && !resourcesUrl.isEmpty()) {
            String dashedResourcesPath = PathUtils.dashedPath(resourcesPath);
            String dashedResourcesUrl = PathUtils.dashedPath(resourcesUrl);
            long lastModified = FileUtils.getLastModified(dashedResourcesPath);
            registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
            registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
            registry.addResourceHandler(dashedResourcesUrl + lastModified + "/**")
                    .addResourceLocations("classpath:" + dashedResourcesPath)
                    .setCachePeriod(31556926);
        }
    }
    // Append any context-contributed converters after Spring's defaults.
    @Override
    public void configureMessageConverters(List<HttpMessageConverter<?>> defaultConverters) {
        defaultConverters.addAll(converters);
    }
    // Allow switching the locale via the default "locale" request parameter.
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(new LocaleChangeInterceptor());
    }
    // Cookie-based locale storage, defaulting to ru_RU.
    @Bean
    public LocaleResolver localeResolver() {
        CookieLocaleResolver resolver = new CookieLocaleResolver();
        resolver.setDefaultLocale(new Locale("ru", "RU"));
        return resolver;
    }
    // Commons FileUpload based multipart handling with UTF-8 form field encoding.
    @Bean
    public MultipartResolver multipartResolver(){
        CommonsMultipartResolver resolver = new CommonsMultipartResolver();
        resolver.setDefaultEncoding("UTF-8");
        return resolver;
    }
}
| apache-2.0 |
syntelos/gwtcc | src/com/google/gwt/dev/util/arg/OptionEnableGeneratingOnShards.java | 1032 | /*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.dev.util.arg;
/**
 * Enables running generators on compile shards rather than during the
 * Precompile stage.
 */
public interface OptionEnableGeneratingOnShards {
  /**
   * Returns <code>true</code> if generation is allowed to happen on shards.
   *
   * @return whether generator execution may be deferred to compile shards
   */
  boolean isEnabledGeneratingOnShards();
  /**
   * Sets whether generation may happen on shards.
   *
   * @param allowed <code>true</code> to allow generators to run on shards
   */
  void setEnabledGeneratingOnShards(boolean allowed);
}
| apache-2.0 |
quarkusio/quarkus | test-framework/common/src/main/java/io/quarkus/test/common/LauncherUtil.java | 14603 | package io.quarkus.test.common;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.spi.ConfigProviderResolver;
import io.quarkus.runtime.LaunchMode;
import io.quarkus.runtime.configuration.ConfigUtils;
import io.quarkus.runtime.configuration.QuarkusConfigFactory;
import io.quarkus.test.common.http.TestHTTPResourceManager;
import io.smallrye.config.SmallRyeConfig;
/**
 * Helpers for launching a packaged Quarkus application as an external process
 * during integration tests: starting it, tailing its log to discover the
 * listening address, and propagating that address into the test configuration.
 */
public final class LauncherUtil {

    /** Interval (ms) between polls of the process log file. */
    public static final int LOG_CHECK_INTERVAL = 500;

    private LauncherUtil() {
    }

    /**
     * Builds a fresh SmallRye config, installs it as the current config, and
     * releases any previously installed one.
     *
     * @return the newly installed config
     */
    public static Config installAndGetSomeConfig() {
        final SmallRyeConfig config = ConfigUtils.configBuilder(false, LaunchMode.NORMAL).build();
        QuarkusConfigFactory.setConfig(config);
        final ConfigProviderResolver cpr = ConfigProviderResolver.instance();
        try {
            final Config installed = cpr.getConfig();
            if (installed != config) {
                cpr.releaseConfig(installed);
            }
        } catch (IllegalStateException ignored) {
            // no config was installed yet - nothing to release
        }
        return config;
    }

    /**
     * Launches a process using the supplied arguments and makes sure the process's output is drained to standard out
     */
    static Process launchProcess(List<String> args) throws IOException {
        Process process = Runtime.getRuntime().exec(args.toArray(new String[0]));
        new Thread(new ProcessReader(process.getInputStream())).start();
        new Thread(new ProcessReader(process.getErrorStream())).start();
        return process;
    }

    /**
     * Waits (for a maximum of {@code waitTimeSeconds} seconds) until the launched process indicates the address it is
     * listening on.
     * If the wait time is exceeded an {@code IllegalStateException} is thrown.
     */
    static ListeningAddress waitForCapturedListeningData(Process quarkusProcess, Path logFile, long waitTimeSeconds) {
        ensureProcessIsAlive(quarkusProcess);
        CountDownLatch signal = new CountDownLatch(1);
        AtomicReference<ListeningAddress> resultReference = new AtomicReference<>();
        CaptureListeningDataReader captureListeningDataReader = new CaptureListeningDataReader(logFile,
                Duration.ofSeconds(waitTimeSeconds), signal, resultReference);
        new Thread(captureListeningDataReader, "capture-listening-data").start();
        try {
            signal.await(waitTimeSeconds + 2, TimeUnit.SECONDS); // wait enough for the signal to be given by the capturing thread
            ListeningAddress result = resultReference.get();
            if (result != null) {
                return result;
            }
            // a null result means that we could not determine the status of the process so we need to abort testing
            destroyProcess(quarkusProcess);
            throw new IllegalStateException(
                    "Unable to determine the status of the running process. See the above logs for details");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status for callers up the stack
            throw new RuntimeException("Interrupted while waiting to capture listening process port and protocol");
        }
    }

    /**
     * Verifies shortly after launch that the process has not already exited;
     * throws with the exit code otherwise.
     */
    private static void ensureProcessIsAlive(Process quarkusProcess) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException ignored) {
            Thread.currentThread().interrupt(); // preserve the interrupt status for callers up the stack
            throw new RuntimeException(
                    "Interrupted while waiting to determine the status of process '" + quarkusProcess.pid() + "'.");
        }
        if (!quarkusProcess.isAlive()) {
            throw new RuntimeException("Unable to successfully launch process '" + quarkusProcess.pid() + "'. Exit code is: '"
                    + quarkusProcess.exitValue() + "'.");
        }
    }

    /**
     * Try to destroy the process normally a few times
     * and resort to forceful destruction if necessary
     */
    private static void destroyProcess(Process quarkusProcess) {
        quarkusProcess.destroy();
        int i = 0;
        while (i++ < 10) {
            try {
                Thread.sleep(LOG_CHECK_INTERVAL);
            } catch (InterruptedException ignored) {
            }
            if (!quarkusProcess.isAlive()) {
                break;
            }
        }
        if (quarkusProcess.isAlive()) {
            quarkusProcess.destroyForcibly();
        }
    }

    /**
     * Collects all {@link IntegrationTestStartedNotifier} service-loader
     * implementations into a single started-check function.
     *
     * @return the combined check, or {@code null} when no notifier is registered
     */
    static Function<IntegrationTestStartedNotifier.Context, IntegrationTestStartedNotifier.Result> createStartedFunction() {
        List<IntegrationTestStartedNotifier> startedNotifiers = new ArrayList<>();
        for (IntegrationTestStartedNotifier i : ServiceLoader.load(IntegrationTestStartedNotifier.class)) {
            startedNotifiers.add(i);
        }
        if (startedNotifiers.isEmpty()) {
            return null;
        }
        return (ctx) -> {
            for (IntegrationTestStartedNotifier startedNotifier : startedNotifiers) {
                IntegrationTestStartedNotifier.Result result = startedNotifier.check(ctx);
                if (result.isStarted()) {
                    return result;
                }
            }
            return IntegrationTestStartedNotifier.Result.NotStarted.INSTANCE;
        };
    }

    /**
     * Waits for {@code startedFunction} to indicate that the application has started.
     *
     * @return the {@link io.quarkus.test.common.IntegrationTestStartedNotifier.Result} indicating a successful start
     * @throws RuntimeException if no successful start was indicated by {@code startedFunction}
     */
    static IntegrationTestStartedNotifier.Result waitForStartedFunction(
            Function<IntegrationTestStartedNotifier.Context, IntegrationTestStartedNotifier.Result> startedFunction,
            Process quarkusProcess, long waitTimeSeconds, Path logFile) {
        long bailout = System.currentTimeMillis() + waitTimeSeconds * 1000;
        IntegrationTestStartedNotifier.Result result = null;
        SimpleContext context = new SimpleContext(logFile);
        while (System.currentTimeMillis() < bailout) {
            if (!quarkusProcess.isAlive()) {
                throw new RuntimeException("Failed to start target quarkus application, process has exited");
            }
            try {
                Thread.sleep(100);
                result = startedFunction.apply(context);
                if (result.isStarted()) {
                    break;
                }
            } catch (Exception ignored) {
            }
        }
        if (result == null || !result.isStarted()) {
            // A stale not-started result after the bailout window is just as fatal as no
            // result at all; previously it was returned to the caller as if startup succeeded.
            destroyProcess(quarkusProcess);
            throw new RuntimeException("Unable to start target quarkus application in " + waitTimeSeconds + "s");
        }
        return result;
    }

    /**
     * Updates the configuration necessary to make all test systems knowledgeable about the port on which the launched
     * process is listening
     */
    static void updateConfigForPort(Integer effectivePort) {
        if (effectivePort != null) {
            System.setProperty("quarkus.http.port", effectivePort.toString()); //set the port as a system property in order to have it applied to Config
            System.setProperty("quarkus.http.test-port", effectivePort.toString()); // needed for RestAssuredManager
            installAndGetSomeConfig(); // reinitialize the configuration to make sure the actual port is used
            System.clearProperty("test.url"); // make sure the old value does not interfere with setting the new one
            System.setProperty("test.url", TestHTTPResourceManager.getUri());
        }
    }

    /**
     * Thread that reads a process output file looking for the line that indicates the address the application
     * is listening on.
     */
    private static class CaptureListeningDataReader implements Runnable {

        private final Path processOutput;
        private final Duration waitTime;
        private final CountDownLatch signal;
        private final AtomicReference<ListeningAddress> resultReference;
        private final Pattern listeningRegex = Pattern.compile("Listening on:\\s+(https?)://\\S*:(\\d+)");
        private final Pattern startedRegex = Pattern.compile(".*Quarkus .* started in \\d+.*s.*");

        public CaptureListeningDataReader(Path processOutput, Duration waitTime, CountDownLatch signal,
                AtomicReference<ListeningAddress> resultReference) {
            this.processOutput = processOutput;
            this.waitTime = waitTime;
            this.signal = signal;
            this.resultReference = resultReference;
        }

        @Override
        public void run() {
            if (!ensureProcessOutputFileExists()) {
                unableToDetermineData("Log file '" + processOutput.toAbsolutePath() + "' was not created.");
                return;
            }
            long bailoutTime = System.currentTimeMillis() + waitTime.toMillis();
            try (BufferedReader reader = new BufferedReader(new FileReader(processOutput.toFile()))) {
                long timeStarted = Long.MAX_VALUE;
                boolean started = false;
                // generally, we want to start as soon as info about Quarkus having started is printed
                // but just in case the line with http host and port is printed later, let's wait a bit more
                while (true) {
                    if (reader.ready()) { // avoid blocking as the input is a file which continually gets more data added
                        String line = reader.readLine();
                        if (startedRegex.matcher(line).matches()) {
                            timeStarted = System.currentTimeMillis();
                            started = true;
                        }
                        Matcher regexMatcher = listeningRegex.matcher(line);
                        if (regexMatcher.find()) {
                            dataDetermined(regexMatcher.group(1), Integer.valueOf(regexMatcher.group(2)));
                            return;
                        } else {
                            if (line.contains("Failed to start application (with profile")) {
                                unableToDetermineData("Application was not started: " + line);
                                return;
                            }
                        }
                    } else {
                        //wait until there is more of the file for us to read
                        long now = System.currentTimeMillis();
                        // if we have seen info that the app is started in the log a while ago
                        // or waiting the the next check interval will exceed the bailout time, it's time to finish waiting:
                        if (now + LOG_CHECK_INTERVAL > bailoutTime || now - 2 * LOG_CHECK_INTERVAL > timeStarted) {
                            if (started) {
                                dataDetermined(null, null); // no http, all is null
                            } else {
                                unableToDetermineData("Waited " + waitTime.getSeconds() + " seconds for " + processOutput
                                        + " to contain info about the listening port and protocol but no such info was found");
                            }
                            return;
                        }
                        try {
                            Thread.sleep(LOG_CHECK_INTERVAL);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt(); // preserve the interrupt status before giving up
                            unableToDetermineData(
                                    "Thread interrupted while waiting for more data to become available in process output file: "
                                            + processOutput.toAbsolutePath());
                            return;
                        }
                    }
                }
            } catch (Exception e) {
                unableToDetermineData("Exception occurred while reading process output from file " + processOutput);
                e.printStackTrace();
            }
        }

        /**
         * Polls until the log file appears or the wait time elapses.
         *
         * @return {@code true} if the file exists
         */
        private boolean ensureProcessOutputFileExists() {
            long bailoutTime = System.currentTimeMillis() + waitTime.toMillis();
            while (System.currentTimeMillis() < bailoutTime) {
                if (Files.exists(processOutput)) {
                    return true;
                } else {
                    try {
                        Thread.sleep(200);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // preserve the interrupt status before giving up
                        unableToDetermineData("Thread interrupted while waiting for process output file to be created");
                        return false;
                    }
                }
            }
            return false;
        }

        // Publish a successful detection and wake up the waiting thread.
        private void dataDetermined(String protocolValue, Integer portValue) {
            this.resultReference.set(new ListeningAddress(portValue, protocolValue));
            signal.countDown();
        }

        // Publish a failure (null result) and wake up the waiting thread.
        private void unableToDetermineData(String errorMessage) {
            System.err.println(errorMessage);
            this.resultReference.set(null);
            signal.countDown();
        }
    }

    /**
     * Used to drain the input of a launched process
     */
    private static class ProcessReader implements Runnable {

        private final InputStream inputStream;

        private ProcessReader(InputStream inputStream) {
            this.inputStream = inputStream;
        }

        @Override
        public void run() {
            byte[] b = new byte[100];
            int i;
            try {
                while ((i = inputStream.read(b)) > 0) {
                    System.out.print(new String(b, 0, i, StandardCharsets.UTF_8));
                }
            } catch (IOException e) {
                //ignore
            }
        }
    }

    /** Minimal {@link IntegrationTestStartedNotifier.Context} exposing only the log file. */
    private static class SimpleContext implements IntegrationTestStartedNotifier.Context {

        private final Path logFile;

        public SimpleContext(Path logFile) {
            this.logFile = logFile;
        }

        @Override
        public Path logFile() {
            return logFile;
        }
    }
}
| apache-2.0 |
acsukesh/java-chassis | handlers/handler-loadbalance/src/test/java/org/apache/servicecomb/loadbalance/MyServerListFilterExt.java | 1428 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.loadbalance;
import java.util.ArrayList;
import java.util.List;
import org.apache.servicecomb.core.Invocation;
import com.netflix.loadbalancer.Server;
/**
 * Test implementation of {@link ServerListFilterExt}: filters out every server
 * when the invocation targets the "test" application, otherwise passes the
 * list through untouched.
 */
public class MyServerListFilterExt implements ServerListFilterExt {

  private Invocation invocation;

  @Override
  public List<Server> getFilteredListOfServers(List<Server> serverList) {
    // getAppId() is dereferenced unconditionally, matching the original semantics.
    return invocation.getAppId().equals("test") ? new ArrayList<>() : serverList;
  }

  @Override
  public void setInvocation(Invocation invocation) {
    this.invocation = invocation;
  }
}
| apache-2.0 |
eENVplus/tf-exploitation-server | TF_Exploitation_Server_web/src/main/java/net/disy/eenvplus/tfes/web/validation/utilities/Ensure.java | 895 | //Copyright (c) 2014 by Disy Informationssysteme GmbH
package net.disy.eenvplus.tfes.web.validation.utilities;
import java.util.Collection;
import net.disy.eenvplus.tfes.web.api.exceptions.IErrorResponseCode;
import net.disy.eenvplus.tfes.web.endpoint.core.IllegalQueryArgumentException;
// NOT_PUBLISHED
/**
 * Static precondition helpers that translate failed collection-membership
 * checks into {@link IllegalQueryArgumentException}s carrying a service
 * error response code.
 */
public class Ensure {

  private Ensure() {
    // utility class - not instantiable
  }

  /**
   * Ensures that {@code collection} contains {@code element}.
   *
   * @throws IllegalQueryArgumentException tagged with {@code errorResponseCode}
   *           if the element is missing
   */
  public static <T> void collectionContains(
      Collection<T> collection,
      T element,
      IErrorResponseCode errorResponseCode) {
    if (!collection.contains(element)) {
      // String.valueOf avoids an NPE when the missing element is null
      // (collections may legitimately be probed with a null element).
      throw new IllegalQueryArgumentException(errorResponseCode, String.valueOf(element));
    }
  }

  /**
   * Ensures that every element of {@code subset} is contained in {@code superSet};
   * fails on the first missing element.
   */
  public static <T> void collectionContainsAll(
      Collection<T> superSet,
      Collection<T> subset,
      IErrorResponseCode errorResponseCode) {
    for (T element : subset) {
      collectionContains(superSet, element, errorResponseCode);
    }
  }
}
| apache-2.0 |
sajavadi/pinot | pinot-controller/src/main/java/com/linkedin/pinot/controller/helix/core/minion/generator/TaskGeneratorRegistry.java | 2462 | /**
* Copyright (C) 2014-2016 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.controller.helix.core.minion.generator;
import com.clearspring.analytics.util.Preconditions;
import com.linkedin.pinot.controller.helix.core.minion.ClusterInfoProvider;
import com.linkedin.pinot.controller.helix.core.minion.PinotHelixTaskResourceManager;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Registry for all {@link PinotTaskGenerator}s, keyed by task type.
 */
public class TaskGeneratorRegistry {
  private final Map<String, PinotTaskGenerator> _taskGeneratorRegistry = new HashMap<>();

  public TaskGeneratorRegistry(@Nonnull ClusterInfoProvider clusterInfoProvider) {
    // Built-in generators are registered up front.
    registerTaskGenerator(new ConvertToRawIndexTaskGenerator(clusterInfoProvider));
  }

  /**
   * Register a task generator.
   * <p>The task type must not contain the task name separator because Helix task
   * names are built by joining components with it; a separator inside the type
   * would make those names ambiguous.
   *
   * @param pinotTaskGenerator Task generator to be registered
   */
  public void registerTaskGenerator(@Nonnull PinotTaskGenerator pinotTaskGenerator) {
    String taskType = pinotTaskGenerator.getTaskType();
    // Message describes the actual constraint (the separator constant) instead of
    // hard-coding "underscore", so it cannot drift if TASK_NAME_SEPARATOR changes.
    Preconditions.checkArgument(!taskType.contains(PinotHelixTaskResourceManager.TASK_NAME_SEPARATOR),
        "Task type: %s cannot contain the task name separator", taskType);
    _taskGeneratorRegistry.put(taskType, pinotTaskGenerator);
  }

  /**
   * Get all registered task types.
   *
   * @return Set of all registered task types
   */
  @Nonnull
  public Set<String> getAllTaskTypes() {
    return _taskGeneratorRegistry.keySet();
  }

  /**
   * Get the task generator for the given task type.
   *
   * @param taskType Task type
   * @return Task generator for the given task type, or {@code null} if none is registered
   */
  @Nullable
  public PinotTaskGenerator getTaskGenerator(@Nonnull String taskType) {
    return _taskGeneratorRegistry.get(taskType);
  }
}
| apache-2.0 |
FengShaduVIP/onLineSys | src/main/java/com/twp/utils/CallShell.java | 2044 | package com.twp.utils;
import java.io.File;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional
public class CallShell {
private Logger logger = LoggerFactory.getLogger(getClass());
/**
* 编译代码
* @param workPath 临时文件根目录路径
* @param shellPath shell 命令
* @return
* @throws IOException
*/
public String LinuxJudge(String workPath,String shellPath) throws IOException{
logger.info("开始编译 代码"+workPath+" 文件里面的文件名字");
Runtime rt = Runtime.getRuntime();
File dir = new File(workPath);
String str[] = { "sh", "-c", "/bin/sh " + shellPath };
Process pcs = null ;
try{
pcs = rt.exec(str, null, dir);
pcs.waitFor();
}catch (Exception e){
try{
pcs.getErrorStream().close();
pcs.getInputStream().close();
pcs.getOutputStream().close();
}
catch(Exception ee){}
e.printStackTrace();
}finally{
pcs.destroy();
}
String result = OperateFile.readTiJiao(workPath+"/comp_log");
logger.info("编译代码 comp_log内容---》"+result);
return result;
}
/**
*
* @param filePath 临时文件根目录路径
* @param shellPath shell 命令
* @return
* @throws IOException
*/
public String Linux(String filePath,String shellPath) throws IOException{
Runtime rt = Runtime.getRuntime();
File dir = new File(filePath);
String str[] = { "sh", "-c", "/bin/sh " + shellPath };
Process pcs = null ;
try{
pcs = rt.exec(str, null, dir);
pcs.waitFor();
}catch (Exception e){
try{
pcs.getErrorStream().close();
pcs.getInputStream().close();
pcs.getOutputStream().close();
}
catch(Exception ee){}
}finally{
pcs.destroy();
}
OperateFile reade = new OperateFile();
String result = reade.readTiJiao(filePath+File.separator+"sim_log");
logger.info("执行代码 sim_log内容---》"+result);
return result;
}
} | apache-2.0 |
zillachan/LibZilla | app/src/main/java/com/zilla/libraryzilla/test/api/model/Org.java | 2187 | /*
* Copyright (c) 2015. Zilla Chen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zilla.libraryzilla.test.api.model;
import java.io.Serializable;
/**
 * Serializable model for an organization entry; field names mirror a
 * GitHub-style JSON API response (presumably — TODO confirm against the API
 * this test module targets).
 * Created by zilla on 10/9/15.
 */
public class Org implements Serializable {

    // Pinned so serialized instances stay compatible if the class evolves;
    // without it the JVM derives a fragile, compiler-dependent UID.
    private static final long serialVersionUID = 1L;

    private int id;
    private String name;
    private String full_name;
    private Owner owner;
    private String html_url;
    private Permission permissions;

    public Org() {
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getFull_name() {
        return full_name;
    }

    public void setFull_name(String full_name) {
        this.full_name = full_name;
    }

    public Owner getOwner() {
        return owner;
    }

    public void setOwner(Owner owner) {
        this.owner = owner;
    }

    public String getHtml_url() {
        return html_url;
    }

    public void setHtml_url(String html_url) {
        this.html_url = html_url;
    }

    public Permission getPermissions() {
        return permissions;
    }

    public void setPermissions(Permission permissions) {
        this.permissions = permissions;
    }

    @Override
    public String toString() {
        return "Org{" +
                "id=" + id +
                ", name='" + name + '\'' +
                ", full_name='" + full_name + '\'' +
                ", owner=" + owner +
                ", html_url='" + html_url + '\'' +
                ", permissions=" + permissions +
                '}';
    }
}
| apache-2.0 |
Ariah-Group/Finance | af_webapp/src/main/java/org/kuali/kfs/coa/businessobject/options/ChartValuesFinder.java | 1874 | /*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.coa.businessobject.options;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.kuali.kfs.coa.businessobject.Chart;
import org.kuali.kfs.coa.service.ChartService;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.rice.core.api.util.ConcreteKeyValue;
import org.kuali.rice.core.api.util.KeyValue;
import org.kuali.rice.krad.keyvalues.KeyValuesBase;
/**
 * Builds the drop-down value list for charts: a blank placeholder entry
 * followed by one entry per active chart, keyed by chart code and labeled
 * with the chart's code-and-description text.
 */
public class ChartValuesFinder extends KeyValuesBase {

    /**
     * @see org.kuali.rice.kns.lookup.keyvalues.KeyValuesFinder#getKeyValues()
     */
    @Override
    public List<KeyValue> getKeyValues() {
        Collection<Chart> activeCharts = SpringContext.getBean(ChartService.class).getAllActiveCharts();
        // Pre-size for the blank entry plus one entry per chart.
        List<KeyValue> chartLabels = new ArrayList<KeyValue>(activeCharts.size() + 1);
        chartLabels.add(new ConcreteKeyValue("", ""));
        for (Chart activeChart : activeCharts) {
            chartLabels.add(new ConcreteKeyValue(activeChart.getChartOfAccountsCode(), activeChart.getCodeAndDescription()));
        }
        return chartLabels;
    }
}
| apache-2.0 |
giancosta86/Arcontes-fx | src/main/java/info/gianlucacosta/arcontes/fx/canvas/metainfo/DefaultSelectionRectangleInfo.java | 3337 | /*§
===========================================================================
Arcontes - FX
===========================================================================
Copyright (C) 2013-2015 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.arcontes.fx.canvas.metainfo;
import info.gianlucacosta.helios.fx.serialization.SerializableColor;
import javafx.scene.paint.Color;
import java.util.Objects;
/**
* Implementation of SelectionRectangleInfo
*/
public class DefaultSelectionRectangleInfo implements SelectionRectangleInfo {
    private SerializableColor backgroundColor;
    private SerializableColor borderColor;
    private double borderSize;
    private double opacity;

    /** Creates an instance with the default look (light-pink fill, violet border). */
    public DefaultSelectionRectangleInfo() {
        this.backgroundColor = new SerializableColor(Color.LIGHTPINK);
        this.borderColor = new SerializableColor(Color.VIOLET);
        this.borderSize = 1;
        this.opacity = 0.45;
    }

    /** Copy constructor: duplicates the visual properties of the given source. */
    public DefaultSelectionRectangleInfo(SelectionRectangleInfo source) {
        this.backgroundColor = new SerializableColor(source.getBackgroundColor());
        this.borderColor = new SerializableColor(source.getBorderColor());
        this.borderSize = source.getBorderSize();
        this.opacity = source.getOpacity();
    }

    @Override
    public Color getBackgroundColor() {
        return backgroundColor.getFxColor();
    }

    public void setBackgroundColor(Color backgroundColor) {
        this.backgroundColor = new SerializableColor(backgroundColor);
    }

    @Override
    public Color getBorderColor() {
        return borderColor.getFxColor();
    }

    public void setBorderColor(Color borderColor) {
        this.borderColor = new SerializableColor(borderColor);
    }

    @Override
    public double getBorderSize() {
        return borderSize;
    }

    public void setBorderSize(double borderSize) {
        this.borderSize = borderSize;
    }

    @Override
    public double getOpacity() {
        return opacity;
    }

    public void setOpacity(double opacity) {
        this.opacity = opacity;
    }

    /**
     * Two instances are equal when all four visual properties match.
     * FIX: compare the JavaFX {@link Color} values on both sides. The old code
     * compared the internal SerializableColor wrapper against the other side's
     * Color, which only worked if SerializableColor.equals accepted a plain
     * Color — comparing via the getters is correct regardless.
     */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof SelectionRectangleInfo)) {
            return false;
        }
        SelectionRectangleInfo other = (SelectionRectangleInfo) obj;
        return Objects.equals(getBackgroundColor(), other.getBackgroundColor())
                && Objects.equals(getBorderColor(), other.getBorderColor())
                && (borderSize == other.getBorderSize())
                && (opacity == other.getOpacity());
    }

    /**
     * FIX: previously returned super.hashCode() (identity hash), which broke
     * the equals/hashCode contract for value-equal instances. Now derived
     * from the same properties that equals() compares.
     */
    @Override
    public int hashCode() {
        return Objects.hash(getBackgroundColor(), getBorderColor(), borderSize, opacity);
    }
}
| apache-2.0 |
Radomiej/JavityEngine-TestApp | my-javity-game/core/src/main/java/pl/radomiej/citizen/world/GraphNodeConnection.java | 600 | package pl.radomiej.citizen.world;
import pl.radomiej.citizen.world.ai.FlatTiledConnection;
public class GraphNodeConnection {
    public final GraphNode roomA, roomB;
    private FlatTiledConnection connection;

    /**
     * Links the two rooms and registers this connection with both of them.
     * Note: registration happens in the constructor, so both rooms see the
     * connection immediately.
     */
    public GraphNodeConnection(GraphNode roomA, GraphNode roomB) {
        this.roomA = roomA;
        this.roomB = roomB;
        roomA.getConnections().add(this);
        roomB.getConnections().add(this);
    }

    /** The pathfinding connection backing this link, if one has been set. */
    public FlatTiledConnection getConnection() {
        return connection;
    }

    public void setConnection(FlatTiledConnection connection) {
        this.connection = connection;
    }
}
| apache-2.0 |
caimingqin/fj | src/main/java/com/fj/common/persistence/Image.java | 1141 | package com.fj.common.persistence;
/**
 * Base entity for images stored at several resolutions (source, large,
 * medium, thumbnail) with a title and a sort order.
 */
public abstract class Image<T> extends DataEntity<T>{
	/** Serialization version. */
	private static final long serialVersionUID = 8037252067242617171L;
	/** Title */
	private String title;
	/** Original (source) image */
	private String source;
	/** Large image */
	private String large;
	/** Medium image */
	private String medium;
	/** Thumbnail image */
	private String thumbnail;
	/** Sort order */
	private Integer order;
	public String getTitle() {
		return title;
	}
	public void setTitle(String title) {
		this.title = title;
	}
	public String getSource() {
		return source;
	}
	public void setSource(String source) {
		this.source = source;
	}
	public String getLarge() {
		return large;
	}
	public void setLarge(String large) {
		this.large = large;
	}
	public String getMedium() {
		return medium;
	}
	public void setMedium(String medium) {
		this.medium = medium;
	}
	public String getThumbnail() {
		return thumbnail;
	}
	public void setThumbnail(String thumbnail) {
		this.thumbnail = thumbnail;
	}
	public Integer getOrder() {
		return order;
	}
	public void setOrder(Integer order) {
		this.order = order;
	}
}
| apache-2.0 |
KoehlerSB747/sd-tools | src/main/java/org/sd/atn/interp/InterpContainer.java | 3888 | /*
Copyright 2011 Semantic Discovery, Inc. (www.semanticdiscovery.com)
This file is part of the Semantic Discovery Toolkit.
The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Semantic Discovery Toolkit is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with The Semantic Discovery Toolkit. If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.atn.interp;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.sd.atn.Parse;
import org.sd.util.tree.Tree;
import org.sd.xml.DataProperties;
import org.sd.xml.XmlLite;
/**
* Base class containing an interpretation component (field or sub-record).
* <p>
* @author Spence Koehler
*/
public class InterpContainer {

  private InterpContainer parentContainer;
  private Map<String, List<InterpContainer>> fields;

  public final Tree<XmlLite.Data> interpNode;
  public final String cmd;
  public final String fieldName;
  public final DataProperties overrides;
  public final Parse parse;
  public final Tree<String> parseNode;
  public final Tree<XmlLite.Data> parentInterpNode;

  protected InterpContainer(Tree<XmlLite.Data> interpNode, String cmd, String fieldName,
                            DataProperties overrides, Parse parse, Tree<String> parseNode,
                            Tree<XmlLite.Data> parentInterpNode) {
    this.parentContainer = null;
    this.fields = null;
    this.interpNode = interpNode;
    this.cmd = cmd;
    this.fieldName = fieldName;
    this.overrides = overrides;
    this.parse = parse;
    this.parseNode = parseNode;
    this.parentInterpNode = parentInterpNode;
  }

  /**
   * Safe downcast to RecordInterpContainer; null here, overridden by records.
   */
  public RecordInterpContainer asRecord() {
    return null;
  }

  /**
   * Safe downcast to FieldInterpContainer; null here, overridden by fields.
   */
  public FieldInterpContainer asField() {
    return null;
  }

  /** True when this container is a record (asRecord() succeeds). */
  public boolean isRecord() {
    return asRecord() != null;
  }

  /** True when this container is a field (asField() succeeds). */
  public boolean isField() {
    return asField() != null;
  }

  public boolean hasParentContainer() {
    return parentContainer != null;
  }

  public InterpContainer getParentContainer() {
    return parentContainer;
  }

  public void setParentContainer(InterpContainer parentContainer) {
    this.parentContainer = parentContainer;
  }

  /** True when at least one named field has been added. */
  public boolean hasFields() {
    return fields != null && !fields.isEmpty();
  }

  public Map<String, List<InterpContainer>> getFields() {
    return fields;
  }

  /** Returns the containers stored under the name, or null when absent. */
  public List<InterpContainer> getField(String name) {
    if (fields == null) {
      return null;
    }
    return fields.get(name);
  }

  public boolean hasField(String name) {
    return fields != null && fields.containsKey(name);
  }

  /** Adds a child under the name, making this container its parent. */
  public void addField(String name, InterpContainer childContainer) {
    addField(name, childContainer, true);
  }

  /**
   * Adds a child under the name (fields preserve insertion order),
   * optionally setting this container as the child's parent.
   */
  public void addField(String name, InterpContainer childContainer, boolean setChildParent) {
    if (fields == null) {
      fields = new LinkedHashMap<String, List<InterpContainer>>();
    }
    List<InterpContainer> bucket = fields.get(name);
    if (bucket == null) {
      bucket = new ArrayList<InterpContainer>();
      fields.put(name, bucket);
    }
    bucket.add(childContainer);
    if (setChildParent) {
      childContainer.setParentContainer(this);
    }
  }
}
| apache-2.0 |
yavski/fab-speed-dial | library/src/main/java/io/github/yavski/fabspeeddial/SimpleMenuListenerAdapter.java | 631 | package io.github.yavski.fabspeeddial;
import android.support.design.internal.NavigationMenu;
import android.view.MenuItem;
/**
* This adapter class provides empty implementations of the methods from
* {@link FabSpeedDial.MenuListener}.
* Created by yavorivanov on 03/01/2016.
*/
public class SimpleMenuListenerAdapter implements FabSpeedDial.MenuListener {
    // Default: returns true; subclasses may populate/modify the menu first.
    @Override
    public boolean onPrepareMenu(NavigationMenu navigationMenu) {
        return true;
    }
    // Default: returns false (selection not consumed here).
    @Override
    public boolean onMenuItemSelected(MenuItem menuItem) {
        return false;
    }
    // Default: no-op.
    @Override
    public void onMenuClosed() {
    }
}
| apache-2.0 |
JNOSQL/diana | diana/diana-core/src/test/java/org/eclipse/jnosql/diana/writer/ValueWriterDecoratorTest.java | 1686 | /*
*
* Copyright (c) 2017 Otávio Santana and others
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
*
* Otavio Santana
*
*/
package org.eclipse.jnosql.diana.writer;
import jakarta.nosql.ValueWriter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.time.temporal.Temporal;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ValueWriterDecoratorTest {
    private ValueWriter valueWriter;
    @BeforeEach
    public void setUp() {
        valueWriter = ValueWriterDecorator.getInstance();
    }
    // Optional and Temporal are reported compatible; Boolean is not.
    @Test
    public void shouldVerifyCompatibility() {
        assertTrue(valueWriter.isCompatible(Optional.class));
        assertTrue(valueWriter.isCompatible(Temporal.class));
        assertFalse(valueWriter.isCompatible(Boolean.class));
    }
    // Writing an Optional unwraps it to its contained value.
    @SuppressWarnings("unchecked")
    @Test
    public void shouldConvert() {
        String diana = "diana";
        Optional<String> optional = Optional.of(diana);
        Object result = valueWriter.write(optional);
        assertEquals(diana, result);
    }
} | apache-2.0 |
CenturyLinkCloud/mdw | mdw-common/src/com/centurylink/mdw/constant/OwnerType.java | 2117 | package com.centurylink.mdw.constant;
/**
* This file contains all the generic or common owner types for diff entities
* This can be extended for more customization
*/
// NOTE: constant-interface pattern kept as-is for API compatibility —
// many entities reference these names directly.
public interface OwnerType {
    String SYSTEM = "SYSTEM";
    String TESTER = "TESTER";
    String PROCESS = "PROCESS";
    String PROCESS_INSTANCE = "PROCESS_INSTANCE";
    String MAIN_PROCESS_INSTANCE = "MAIN_PROCESS_INSTANCE"; // for embedded proc inst
    String ACTIVITY = "ACTIVITY";
    String ACTIVITY_INSTANCE = "ACTIVITY_INSTANCE";
    String ACTIVITY_IMPLEMENTOR = "ACTIVITY_IMPLEMENTOR";
    String TASK = "TASK";
    String TASK_INSTANCE = "TASK_INSTANCE";
    String WORK_TRANSITION = "WORK_TRANSITION";
    String WORK_TRANSITION_INSTANCE = "WORK_TRANSITION_INSTANCE";
    String USER = "USER";
    String USER_GROUP = "USER_GROUP";
    String USER_GROUP_MAP = "USER_GROUP_MAP";
    String INTERNAL_EVENT = "INTERNAL_EVENT";
    String VARIABLE_INSTANCE = "VARIABLE_INSTANCE";
    // Adapter/listener owner types; the comments document what the owner ID refers to.
    String ADAPTER = "ADAPTER"; // owner ID is activity instance ID
    String ADAPTER_REQUEST = "ADAPTER_REQUEST"; // owner ID is activity instance ID
    String ADAPTER_RESPONSE = "ADAPTER_RESPONSE"; // owner ID is activity instance ID
    String ADAPTER_REQUEST_META = "ADAPTER_REQUEST_META"; // owner ID is document ID of ADAPTER_REQUEST
    String ADAPTER_RESPONSE_META = "ADAPTER_RESPONSE_META"; // owner ID is document ID of ADAPTER_RESPONSE
    String LISTENER_REQUEST = "LISTENER_REQUEST"; // owner ID is processInstanceId (if present)
    String LISTENER_RESPONSE = "LISTENER_RESPONSE"; // owner ID is the document ID of LISTENER_REQUEST
    String LISTENER_REQUEST_META = "LISTENER_REQUEST_META"; // owner ID is the document ID of the LISTENER_REQUEST
    String LISTENER_RESPONSE_META = "LISTENER_RESPONSE_META"; // owner ID is the document ID of the LISTENER_RESPONSE
    String DOCUMENT = "DOCUMENT";
    String ERROR = "ERROR";
    String SLA = "SLA";
    String SOLUTION = "SOLUTION";
    String PROCESS_RUN = "PROCESS_RUN";
    String PROCESS_INSTANCE_DEF = "PROCESS_INSTANCE_DEF";
    String COMMENT = "COMMENT";
}
| apache-2.0 |
eduarddrenth/Configuration | src/main/java/com/vectorprint/configuration/parameters/ClassParameter.java | 1155 |
package com.vectorprint.configuration.parameters;
/*-
* #%L
* Config
* %%
* Copyright (C) 2015 - 2018 VectorPrint
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class ClassParameter extends ParameterImpl<Class> {

    /**
     * Creates a parameter whose value is a {@link Class}.
     *
     * @param key  the parameter key
     * @param help the help text for this parameter
     */
    public ClassParameter(String key, String help) {
        super(key, help, Class.class);
    }

    /**
     * Serializes the value as its fully qualified class name, or the empty
     * string when no value is set.
     */
    @Override
    protected String valueToString(Class value) {
        if (value == null) {
            return "";
        }
        return value.getName();
    }

    @Override
    public Parameter<Class> clone() throws CloneNotSupportedException {
        return super.clone();
    }
}
| apache-2.0 |
Blaxcraft/MineRad | src/main/java/us/mcsw/minerad/items/ItemUraniumNugget.java | 357 | package us.mcsw.minerad.items;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.ItemStack;
import us.mcsw.core.ItemMR;
public class ItemUraniumNugget extends ItemMR {
	/** Registers the item under the name "nuggetUranium". */
	public ItemUraniumNugget() {
		super("nuggetUranium");
	}
	/** Uranium nuggets are shown with "uncommon" rarity. */
	@Override
	public EnumRarity getRarity(ItemStack it) {
		return EnumRarity.uncommon;
	}
}
| apache-2.0 |
jmarranz/itsnat_droid | apptest/src/main/java/org/itsnat/itsnatdroidtest/testact/TestActivityPagerAdapter.java | 2458 | package org.itsnat.itsnatdroidtest.testact;
/**
* Created by jmarranz on 12/08/14.
*/
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.res.Resources;
import android.support.v13.app.FragmentPagerAdapter;
import org.itsnat.itsnatdroidtest.R;
import java.util.ArrayList;
import java.util.Locale;
/**
* A {@link android.support.v13.app.FragmentPagerAdapter} that returns a fragment corresponding to
* one of the sections/tabs/pages.
*/
public class TestActivityPagerAdapter extends FragmentPagerAdapter
{
    /** Fixed number of tabs; also used to presize and populate the fragment list. */
    private static final int PAGE_COUNT = 3;

    protected ArrayList<TestActivityTabFragment> fragmentArray;
    protected Resources resources;

    public TestActivityPagerAdapter(FragmentManager fm,Resources resources) {
        super(fm);
        this.resources = resources;
        // FIX: use the constant instead of the overridable getCount() —
        // calling an overridable method from a constructor is unsafe.
        this.fragmentArray = new ArrayList<TestActivityTabFragment>(PAGE_COUNT);
        for(int i = 0; i < PAGE_COUNT; i++)
            fragmentArray.add( TestActivityTabFragment.newInstance(i + 1) );
    }

    @Override
    public Fragment getItem(int position)
    {
        return fragmentArray.get(position);
    }

    @Override
    public int getCount() {
        return PAGE_COUNT;
    }

    /** Upper-cases the tab title resource for the default locale. */
    @Override
    public CharSequence getPageTitle(int position) {
        Locale l = Locale.getDefault();
        switch (position) {
            case 0:
                return resources.getString(R.string.title_tab1).toUpperCase(l);
            case 1:
                return resources.getString(R.string.title_tab2).toUpperCase(l);
            case 2:
                return resources.getString(R.string.title_tab3).toUpperCase(l);
        }
        return null;
    }

    /**
     * Forces the pager to re-create a fragment that flagged itself as changed.
     * Approach inspired by:
     * http://stackoverflow.com/questions/10849552/android-viewpager-cant-update-dynamically/10852046#10852046
     * https://code.google.com/p/android/issues/detail?id=19001
     * To add/remove tabs dynamically, deriving from FragmentStatePagerAdapter
     * is probably the better fit.
     */
    @Override
    public int getItemPosition(Object item) {
        TestActivityTabFragment fragment = (TestActivityTabFragment)item;
        if (fragment.changed)
        {
            // POSITION_NONE makes getItem(int) be called again so the page is redrawn.
            int res = POSITION_NONE;
            fragment.changed = false;
            return res;
        }
        else
        {
            return POSITION_UNCHANGED;
        }
    }
}
| apache-2.0 |
vespa-engine/vespa | docproc/src/main/java/com/yahoo/docproc/package-info.java | 251 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
/**
 * Document processing (docproc) package. The annotations below export the
 * package from the OSGi bundle and mark it as public API.
 */
@ExportPackage
@PublicApi
package com.yahoo.docproc;
import com.yahoo.api.annotations.PublicApi;
import com.yahoo.osgi.annotation.ExportPackage;
| apache-2.0 |
jembi/openxds | iheos/src/main/java/gov/nist/registry/ws/serviceclasses/RGForceError.java | 925 | package gov.nist.registry.ws.serviceclasses;
import org.apache.axiom.om.OMElement;
import org.apache.axis2.context.MessageContext;
public class RGForceError extends RG {
	/**
	 * Reads the string value of a forced-error parameter configured on the
	 * current Axis2 message context.
	 * NOTE(review): assumes the parameter is always configured for this
	 * service; a missing parameter throws a NullPointerException here,
	 * exactly as the previous inline lookups did — confirm deployment config.
	 */
	private static String forcedParameter(String name) {
		return (String) MessageContext.getCurrentMessageContext().getParameter(name).getValue();
	}
	/** Answers every stored query with the configured forced error. */
	public OMElement AdhocQueryRequest(OMElement ahqr) {
		return AdhocQueryRequestForceError(ahqr, "XCA", "XGQ",
				forcedParameter("forcedError"), forcedParameter("forcedErrorMessage"));
	}
	/** Answers every retrieve request with the configured forced error. */
	public OMElement RetrieveDocumentSetRequest(OMElement rdsr) {
		return RetrieveDocumentForceError(rdsr,
				forcedParameter("forcedError"), forcedParameter("forcedErrorMessage"));
	}
}
| apache-2.0 |
marc-urschick/a_whattobuy | app/src/main/java/com/wgmc/whattobuy/service/SettingsService.java | 2422 | package com.wgmc.whattobuy.service;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.util.Log;
import com.wgmc.whattobuy.R;
import java.util.HashMap;
import java.util.Map;
/**
* Created by proxie on 5.4.17.
*/
// Settings management: handles saving and loading of system settings and preferences
public class SettingsService extends DefaultService {
    private static SettingsService instance;

    /**
     * Returns the process-wide singleton, creating it lazily.
     * NOTE(review): not synchronized — safe only if always called from a
     * single (UI) thread; confirm before calling from background threads.
     */
    public static SettingsService getInstance() {
        if (instance == null)
            instance = new SettingsService();
        return instance;
    }

    // Settings keys; populated from string resources by initSettingKeys().
    public static String SETTING_ENABLE_SHAKE_TO_CHECK_ITEMS;
    public static String SETTING_SHOW_STARTUP_TOOLTIP_TUTORIAL;

    /** In-memory settings cache, mirrored to SharedPreferences on save. */
    private Map<String, Object> settings;

    private SettingsService() {
        settings = new HashMap<>();
    }

    /** Loads all persisted preferences of the given activity into the cache. */
    public void loadSettings(Activity c) {
        SharedPreferences sp = c.getPreferences(Context.MODE_PRIVATE);
        Map<String, ?> all = sp.getAll();
        for (Map.Entry<String, ?> e : all.entrySet()) {
            settings.put(e.getKey(), e.getValue());
        }
    }

    public Map<String, Object> getSettings() {
        return settings;
    }

    public Object getSetting(String key) {
        return settings.get(key);
    }

    public void putSetting(String key, Object val) {
        settings.put(key, val);
    }

    /**
     * Persists the in-memory cache to the activity's preferences.
     * NOTE(review): values are written via toString(), so non-String values
     * load back as Strings on the next loadSettings() — verify callers cope.
     */
    public void saveSettings(Activity c) {
        SharedPreferences pref = c.getPreferences(Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = pref.edit();
        for (Map.Entry<String, Object> e : settings.entrySet()) {
            Log.d("Settings", e.getKey() + ": " + e.getValue());
            editor.putString(e.getKey(), e.getValue().toString());
        }
        // FIX: apply() persists asynchronously on a framework-managed thread.
        // The previous hand-rolled daemon thread calling commit() could be
        // killed on process exit before the write completed, losing settings.
        editor.apply();
    }

    /** Resolves the settings key constants from the activity's string resources. */
    public void initSettingKeys(Activity a) {
        SETTING_ENABLE_SHAKE_TO_CHECK_ITEMS = a.getString(R.string.SETTING_ENABLE_SHAKE_TO_CHECK_ITEMS);
        SETTING_SHOW_STARTUP_TOOLTIP_TUTORIAL = a.getString(R.string.SETTING_SHOW_STARTUP_TOOLTIP_TUTORIAL);
    }
}
| apache-2.0 |
codefollower/Open-Source-Research | Douyu-0.7.1/douyu-netty/src/main/java/com/codefollower/douyu/netty/channel/AbstractChannel.java | 11131 | /*
* Copyright 2009 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.codefollower.douyu.netty.channel;
import java.net.SocketAddress;
import java.util.concurrent.ConcurrentMap;
import com.codefollower.douyu.netty.util.internal.ConcurrentHashMap;
/**
* A skeletal {@link Channel} implementation.
*
* @author <a href="http://www.jboss.org/netty/">The Netty Project</a>
* @author <a href="http://gleamynode.net/">Trustin Lee</a>
*
* @version $Rev$, $Date$
*
*/
public abstract class AbstractChannel implements Channel {

    // Global registry of live channels keyed by ID; doubles as the mechanism
    // that guarantees ID uniqueness (see allocateId).
    static final ConcurrentMap<Integer, Channel> allChannels = new ConcurrentHashMap<Integer, Channel>();

    // Removes a channel from the registry once its close future completes.
    private static final IdDeallocator ID_DEALLOCATOR = new IdDeallocator();

    // Starts from the identity hash and probes upward until putIfAbsent wins,
    // so allocation is race-free against concurrent channel creation.
    private static Integer allocateId(Channel channel) {
        Integer id = Integer.valueOf(System.identityHashCode(channel));
        for (;;) {
            // Loop until a unique ID is acquired.
            // It should be found in one loop practically.
            if (allChannels.putIfAbsent(id, channel) == null) {
                // Successfully acquired.
                return id;
            } else {
                // Taken by other channel at almost the same moment.
                id = Integer.valueOf(id.intValue() + 1);
            }
        }
    }

    private static final class IdDeallocator implements ChannelFutureListener {
        IdDeallocator() {
            super();
        }

        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            allChannels.remove(future.getChannel().getId());
        }
    }

    private final Integer id;
    private final Channel parent;
    private final ChannelFactory factory;
    private final ChannelPipeline pipeline;
    private final ChannelFuture succeededFuture = new SucceededChannelFuture(this);
    private final ChannelCloseFuture closeFuture = new ChannelCloseFuture();
    // volatile: read/written from I/O and user threads without a lock.
    private volatile int interestOps = OP_READ;

    /** Cache for the string representation of this channel */
    private boolean strValConnected;
    private String strVal;

    /**
     * Creates a new instance.
     *
     * @param parent
     *        the parent of this channel. {@code null} if there's no parent.
     * @param factory
     *        the factory which created this channel
     * @param pipeline
     *        the pipeline which is going to be attached to this channel
     * @param sink
     *        the sink which will receive downstream events from the pipeline
     *        and send upstream events to the pipeline
     */
    protected AbstractChannel(
            Channel parent, ChannelFactory factory,
            ChannelPipeline pipeline, ChannelSink sink) {

        this.parent = parent;
        this.factory = factory;
        this.pipeline = pipeline;

        id = allocateId(this);
        // Deregister from the global map when the channel finally closes.
        closeFuture.addListener(ID_DEALLOCATOR);

        pipeline.attach(this, sink);
    }

    /**
     * (Internal use only) Creates a new temporary instance with the specified
     * ID.  Note: unlike the normal constructor, the ID is not registered in
     * the global channel map and no deallocation listener is installed.
     *
     * @param parent
     *        the parent of this channel. {@code null} if there's no parent.
     * @param factory
     *        the factory which created this channel
     * @param pipeline
     *        the pipeline which is going to be attached to this channel
     * @param sink
     *        the sink which will receive downstream events from the pipeline
     *        and send upstream events to the pipeline
     */
    protected AbstractChannel(
            Integer id,
            Channel parent, ChannelFactory factory,
            ChannelPipeline pipeline, ChannelSink sink) {
        this.id = id;
        this.parent = parent;
        this.factory = factory;
        this.pipeline = pipeline;
        pipeline.attach(this, sink);
    }

    @Override
    public final Integer getId() {
        return id;
    }

    @Override
    public Channel getParent() {
        return parent;
    }

    @Override
    public ChannelFactory getFactory() {
        return factory;
    }

    @Override
    public ChannelPipeline getPipeline() {
        return pipeline;
    }

    /**
     * Returns the cached {@link SucceededChannelFuture} instance.
     */
    protected ChannelFuture getSucceededFuture() {
        return succeededFuture;
    }

    /**
     * Returns the {@link FailedChannelFuture} whose cause is an
     * {@link UnsupportedOperationException}.
     */
    protected ChannelFuture getUnsupportedOperationFuture() {
        return new FailedChannelFuture(this, new UnsupportedOperationException());
    }

    /**
     * Returns the {@linkplain System#identityHashCode(Object) identity hash code}
     * of this channel.
     */
    @Override
    public final int hashCode() {
        return System.identityHashCode(this);
    }

    /**
     * Returns {@code true} if and only if the specified object is identical
     * with this channel (i.e: {@code this == o}).
     */
    @Override
    public final boolean equals(Object o) {
        return this == o;
    }

    /**
     * Compares the {@linkplain #getId() ID} of the two channels.
     */
    @Override
    public final int compareTo(Channel o) {
        return getId().compareTo(o.getId());
    }

    @Override
    public boolean isOpen() {
        // Open exactly until the close future has completed.
        return !closeFuture.isDone();
    }

    /**
     * Marks this channel as closed. This method is intended to be called by
     * an internal component - please do not call it unless you know what you
     * are doing.
     *
     * @return {@code true} if and only if this channel was not marked as
     *         closed yet
     */
    protected boolean setClosed() {
        return closeFuture.setClosed();
    }

    @Override
    public ChannelFuture bind(SocketAddress localAddress) {
        return Channels.bind(this, localAddress);
    }

    @Override
    public ChannelFuture unbind() {
        return Channels.unbind(this);
    }

    @Override
    public ChannelFuture close() {
        ChannelFuture returnedCloseFuture = Channels.close(this);
        // close() must always report through the one shared close future.
        assert closeFuture == returnedCloseFuture;
        return closeFuture;
    }

    @Override
    public ChannelFuture getCloseFuture() {
        return closeFuture;
    }

    @Override
    public ChannelFuture connect(SocketAddress remoteAddress) {
        return Channels.connect(this, remoteAddress);
    }

    @Override
    public ChannelFuture disconnect() {
        return Channels.disconnect(this);
    }

    @Override
    public int getInterestOps() {
        return interestOps;
    }

    @Override
    public ChannelFuture setInterestOps(int interestOps) {
        return Channels.setInterestOps(this, interestOps);
    }

    /**
     * Sets the {@link #getInterestOps() interestOps} property of this channel
     * immediately. This method is intended to be called by an internal
     * component - please do not call it unless you know what you are doing.
     * (Unlike {@link #setInterestOps(int)} it bypasses the pipeline.)
     */
    protected void setInterestOpsNow(int interestOps) {
        this.interestOps = interestOps;
    }

    @Override
    public boolean isReadable() {
        return (getInterestOps() & OP_READ) != 0;
    }

    @Override
    public boolean isWritable() {
        // Writable when the OP_WRITE "suspend" bit is NOT set.
        return (getInterestOps() & OP_WRITE) == 0;
    }

    @Override
    public ChannelFuture setReadable(boolean readable) {
        if (readable) {
            return setInterestOps(getInterestOps() | OP_READ);
        } else {
            return setInterestOps(getInterestOps() & ~OP_READ);
        }
    }

    @Override
    public ChannelFuture write(Object message) {
        return Channels.write(this, message);
    }

    @Override
    public ChannelFuture write(Object message, SocketAddress remoteAddress) {
        return Channels.write(this, message, remoteAddress);
    }

    /**
     * Returns the {@link String} representation of this channel. The returned
     * string contains the {@linkplain #getId() ID}, {@linkplain #getLocalAddress() local address},
     * and {@linkplain #getRemoteAddress() remote address} of this channel for
     * easier identification.
     */
    @Override
    public String toString() {
        boolean connected = isConnected();
        // Reuse the cached string only while the connected state is unchanged.
        if (strValConnected == connected && strVal != null) {
            return strVal;
        }

        StringBuilder buf = new StringBuilder(128);
        buf.append("[id: 0x");
        buf.append(getIdString());

        SocketAddress localAddress = getLocalAddress();
        SocketAddress remoteAddress = getRemoteAddress();
        if (remoteAddress != null) {
            buf.append(", ");
            if (getParent() == null) {
                buf.append(localAddress);
                buf.append(connected? " => " : " :> ");
                buf.append(remoteAddress);
            } else {
                buf.append(remoteAddress);
                buf.append(connected? " => " : " :> ");
                buf.append(localAddress);
            }
        } else if (localAddress != null) {
            buf.append(", ");
            buf.append(localAddress);
        }

        buf.append(']');

        String strVal = buf.toString();
        this.strVal = strVal;
        strValConnected = connected;
        return strVal;
    }

    // Left-pads the hexadecimal form of the ID to exactly 8 characters.
    private String getIdString() {
        String answer = Integer.toHexString(id.intValue());
        switch (answer.length()) {
        case 0:
            answer = "00000000";
            break;
        case 1:
            answer = "0000000" + answer;
            break;
        case 2:
            answer = "000000" + answer;
            break;
        case 3:
            answer = "00000" + answer;
            break;
        case 4:
            answer = "0000" + answer;
            break;
        case 5:
            answer = "000" + answer;
            break;
        case 6:
            answer = "00" + answer;
            break;
        case 7:
            answer = "0" + answer;
            break;
        }
        return answer;
    }

    // Close future that can only be completed internally via setClosed();
    // external success/failure attempts are rejected.
    private final class ChannelCloseFuture extends DefaultChannelFuture {

        public ChannelCloseFuture() {
            super(AbstractChannel.this, false);
        }

        @Override
        public boolean setSuccess() {
            // User is not supposed to call this method - ignore silently.
            return false;
        }

        @Override
        public boolean setFailure(Throwable cause) {
            // User is not supposed to call this method - ignore silently.
            return false;
        }

        boolean setClosed() {
            return super.setSuccess();
        }
    }
}
| apache-2.0 |
topsale/spring-boot-train | spring-boot-train-ch1/src/main/java/funtl/microservice/train/spring/boot/ch1/javaconfig/JavaConfig.java | 1508 | package funtl.microservice.train.spring.boot.ch1.javaconfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Configuration class.
 * (1) @Configuration marks this class as a configuration class, meaning it may contain
 *     zero or more @Bean annotations; no component scanning is used here because every
 *     bean is defined in this class.
 * (2) @Bean declares that the return value of the functionService() method is a bean;
 *     the bean's name is the method name.
 * (3) The FunctionService bean is injected by calling functionService() directly.
 * (4) Alternative injection style: pass FunctionService as a method parameter — a
 *     feature the Spring container provides. As long as a bean exists in the container,
 *     it can be injected into another bean's declaration method via its parameters.
 */
@Configuration // 1
public class JavaConfig {
    @Bean // 2
    public FunctionService functionService() {
        return new FunctionService();
    }
    @Bean
    public UseFunctionService useFunctionService() {
        UseFunctionService useFunctionService = new UseFunctionService();
        useFunctionService.setFunctionService(functionService()); // 3
        return useFunctionService;
    }
//    @Bean
//    public UseFunctionService useFunctionService(FunctionService functionService) { // 4
//        UseFunctionService useFunctionService = new UseFunctionService();
//        useFunctionService.setFunctionService(functionService);
//        return useFunctionService;
//    }
}
| apache-2.0 |
zuoweitan/Hitalk | app/src/main/java/com/vivifram/second/hitalk/ui/page/layout/ChatPrimaryMenuLayout.java | 7750 | package com.vivifram.second.hitalk.ui.page.layout;
import android.content.Context;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import com.vivifram.second.hitalk.R;
import com.vivifram.second.hitalk.ui.layout.EdittextWatcher;
/**
 * Primary menu bar of the chat input area.
 */
public class ChatPrimaryMenuLayout extends ChatPrimaryMenuBaseLayout implements OnClickListener {
private EditText editText;
private View buttonSetModeKeyboard;
private RelativeLayout edittext_layout;
private View buttonSetModeVoice;
private View buttonSend;
private View buttonPressToSpeak;
private ImageView faceNormal;
private ImageView faceChecked;
private Button buttonMore;
private RelativeLayout faceLayout;
private Context context;
private VoiceRecorderView voiceRecorderView;
    /**
     * Standard View constructors; all of them end up in
     * init(Context, AttributeSet), which inflates and wires the menu.
     */
    public ChatPrimaryMenuLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
    }
    public ChatPrimaryMenuLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    public ChatPrimaryMenuLayout(Context context) {
        super(context);
        init(context, null);
    }
    /**
     * Inflates R.layout.chat_primary_menu into this view, looks up all child
     * widgets, and wires the click/text/touch listeners.
     */
    private void init(final Context context, AttributeSet attrs) {
        this.context = context;
        LayoutInflater.from(context).inflate(R.layout.chat_primary_menu, this);
        editText = (EditText) findViewById(R.id.et_sendmessage);
        buttonSetModeKeyboard = findViewById(R.id.btn_set_mode_keyboard);
        edittext_layout = (RelativeLayout) findViewById(R.id.edittext_layout);
        buttonSetModeVoice = findViewById(R.id.btn_set_mode_voice);
        buttonSend = findViewById(R.id.btn_send);
        buttonPressToSpeak = findViewById(R.id.btn_press_to_speak);
        faceNormal = (ImageView) findViewById(R.id.iv_face_normal);
        faceChecked = (ImageView) findViewById(R.id.iv_face_checked);
        faceLayout = (RelativeLayout) findViewById(R.id.rl_face);
        buttonMore = (Button) findViewById(R.id.btn_more);
        buttonSend.setOnClickListener(this);
        buttonSetModeKeyboard.setOnClickListener(this);
        buttonSetModeVoice.setOnClickListener(this);
        buttonMore.setOnClickListener(this);
        faceLayout.setOnClickListener(this);
        editText.setOnClickListener(this);
        editText.requestFocus();
        // Watch the text box: show the send button while there is text,
        // otherwise show the "more" button.
        editText.addTextChangedListener(new EdittextWatcher(){
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                if (!TextUtils.isEmpty(s)) {
                    buttonMore.setVisibility(View.GONE);
                    buttonSend.setVisibility(View.VISIBLE);
                } else {
                    buttonMore.setVisibility(View.VISIBLE);
                    buttonSend.setVisibility(View.GONE);
                }
            }
        });
        // Forward press-to-speak touch events to the registered listener, if any.
        buttonPressToSpeak.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if(listener != null){
                    return listener.onPressToSpeakBtnTouch(v, event);
                }
                return false;
            }
        });
    }
/**
* 设置长按说话录制控件
* @param voiceRecorderView
*/
public void setPressToSpeakRecorderView(VoiceRecorderView voiceRecorderView){
this.voiceRecorderView = voiceRecorderView;
}
/**
* 表情输入
* @param emojiContent
*/
public void onEmojiconInputEvent(CharSequence emojiContent){
editText.append(emojiContent);
}
/**
* 表情删除
*/
public void onEmojiconDeleteEvent(){
if (!TextUtils.isEmpty(editText.getText())) {
KeyEvent event = new KeyEvent(0, 0, 0, KeyEvent.KEYCODE_DEL, 0, 0, 0, 0, KeyEvent.KEYCODE_ENDCALL);
editText.dispatchKeyEvent(event);
}
}
/**
* 点击事件
* @param view
*/
@Override
public void onClick(View view){
int id = view.getId();
if (id == R.id.btn_send) {
if(listener != null){
String s = editText.getText().toString();
editText.setText("");
listener.onSendBtnClicked(s);
}
} else if (id == R.id.btn_set_mode_voice) {
setModeVoice();
showNormalFaceImage();
if(listener != null)
listener.onToggleVoiceBtnClicked();
} else if (id == R.id.btn_set_mode_keyboard) {
setModeKeyboard();
showNormalFaceImage();
if(listener != null)
listener.onToggleVoiceBtnClicked();
} else if (id == R.id.btn_more) {
buttonSetModeVoice.setVisibility(View.VISIBLE);
buttonSetModeKeyboard.setVisibility(View.GONE);
edittext_layout.setVisibility(View.VISIBLE);
buttonPressToSpeak.setVisibility(View.GONE);
showNormalFaceImage();
if(listener != null)
listener.onToggleExtendClicked();
} else if (id == R.id.et_sendmessage) {
faceNormal.setVisibility(View.VISIBLE);
faceChecked.setVisibility(View.INVISIBLE);
if(listener != null)
listener.onEditTextClicked();
} else if (id == R.id.rl_face) {
toggleFaceImage();
if(listener != null){
listener.onToggleEmojiconClicked();
}
} else {
}
}
/**
* 显示语音图标按钮
*
*/
protected void setModeVoice() {
hideKeyboard();
edittext_layout.setVisibility(View.GONE);
buttonSetModeVoice.setVisibility(View.GONE);
buttonSetModeKeyboard.setVisibility(View.VISIBLE);
buttonSend.setVisibility(View.GONE);
buttonMore.setVisibility(View.VISIBLE);
buttonPressToSpeak.setVisibility(View.VISIBLE);
faceNormal.setVisibility(View.VISIBLE);
faceChecked.setVisibility(View.INVISIBLE);
}
/**
* 显示键盘图标
*/
protected void setModeKeyboard() {
edittext_layout.setVisibility(View.VISIBLE);
buttonSetModeKeyboard.setVisibility(View.GONE);
buttonSetModeVoice.setVisibility(View.VISIBLE);
// mEditTextContent.setVisibility(View.VISIBLE);
editText.requestFocus();
// buttonSend.setVisibility(View.VISIBLE);
buttonPressToSpeak.setVisibility(View.GONE);
if (TextUtils.isEmpty(editText.getText())) {
buttonMore.setVisibility(View.VISIBLE);
buttonSend.setVisibility(View.GONE);
} else {
buttonMore.setVisibility(View.GONE);
buttonSend.setVisibility(View.VISIBLE);
}
}
protected void toggleFaceImage(){
if(faceNormal.getVisibility() == View.VISIBLE){
showSelectedFaceImage();
}else{
showNormalFaceImage();
}
}
private void showNormalFaceImage(){
faceNormal.setVisibility(View.VISIBLE);
faceChecked.setVisibility(View.INVISIBLE);
}
private void showSelectedFaceImage(){
faceNormal.setVisibility(View.INVISIBLE);
faceChecked.setVisibility(View.VISIBLE);
}
@Override
public void onExtendMenuContainerHide() {
showNormalFaceImage();
}
}
| apache-2.0 |
eclipse/hudson.plugins.legacy-maven | maven-plugin/src/main/java/hudson/maven/reporters/MavenMailer.java | 2367 | /*******************************************************************************
*
* Copyright (c) 2004-2009 Oracle Corporation.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
*
* Kohsuke Kawaguchi, Bruce Chapman
*
*
*******************************************************************************/
package hudson.maven.reporters;
import hudson.Launcher;
import hudson.Extension;
import hudson.model.BuildListener;
import hudson.tasks.MailSender;
import hudson.tasks.Mailer;
import hudson.maven.MavenBuild;
import hudson.maven.MavenReporter;
import hudson.maven.MavenReporterDescriptor;
import org.kohsuke.stapler.StaplerRequest;
import java.io.IOException;
import net.sf.json.JSONObject;
/**
* Sends out an e-mail notification for Maven build result.
* @author Kohsuke Kawaguchi
*/
public class MavenMailer extends MavenReporter {
/**
* @see Mailer
*/
public String recipients;
public boolean dontNotifyEveryUnstableBuild;
public boolean sendToIndividuals;
public boolean end(MavenBuild build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
new MailSender(recipients,dontNotifyEveryUnstableBuild,sendToIndividuals).execute(build,listener);
return true;
}
@Extension
public static final class DescriptorImpl extends MavenReporterDescriptor {
public String getDisplayName() {
return Messages.MavenMailer_DisplayName();
}
public String getHelpFile() {
return "/help/project-config/mailer.html";
}
// reuse the config from the mailer.
@Override
public String getConfigPage() {
return getViewPage(Mailer.class,"config.jelly");
}
public MavenReporter newInstance(StaplerRequest req, JSONObject formData) throws FormException {
MavenMailer m = new MavenMailer();
req.bindParameters(m,"mailer_");
m.dontNotifyEveryUnstableBuild = req.getParameter("mailer_notifyEveryUnstableBuild")==null;
return m;
}
}
private static final long serialVersionUID = 1L;
}
| apache-2.0 |
shangtech/eshop | shop-base/src/main/java/net/shangtech/eshop/solr/SolrService.java | 4465 | package net.shangtech.eshop.solr;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import net.shangtech.eshop.product.entity.Brand;
import net.shangtech.eshop.product.entity.Category;
import net.shangtech.eshop.product.entity.Inventory;
import net.shangtech.eshop.product.entity.Sku;
import net.shangtech.eshop.product.service.BrandService;
import net.shangtech.eshop.product.service.CategoryService;
import net.shangtech.eshop.product.service.InventoryService;
import net.shangtech.eshop.product.service.SkuService;
import net.shangtech.framework.dao.support.Pagination;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import com.alibaba.fastjson.JSON;
public class SolrService implements InitializingBean {
private static final Logger logger = LoggerFactory.getLogger(SolrService.class);
@Autowired private SkuService skuService;
@Autowired private InventoryService inventoryService;
@Autowired private CategoryService categoryService;
@Autowired private BrandService brandService;
private HttpSolrServer server = null;
private String url;
@Override
public void afterPropertiesSet() throws Exception {
server = new HttpSolrServer(url);
// server.setAllowCompression(allowCompression);
// server.setConnectionTimeout(timeout);
// server.setDefaultMaxConnectionsPerHost(max);
}
public void saveSku(Sku sku) throws IOException, SolrServerException{
SolrSku solrSku = sku2Solr(sku);
server.addBean(solrSku);
if(logger.isDebugEnabled()){
logger.debug("ready to add {}", JSON.toJSON(solrSku));
}
server.commit();
}
public void saveSkuList(List<Sku> list) throws IOException, SolrServerException{
if(!CollectionUtils.isEmpty(list)){
List<SolrSku> solrSkus = new ArrayList<SolrSku>(list.size());
for(Sku sku : list){
solrSkus.add(sku2Solr(sku));
}
server.addBeans(solrSkus);
server.commit();
}
}
public void deleteByIds(List<String> ids) throws SolrServerException, IOException{
server.deleteById(ids);
server.commit();
}
public Pagination<SolrSku> findByCategory(String[] categoryCodes, Pagination<SolrSku> pagination) throws SolrServerException{
SolrQuery query = new SolrQuery("*:*");
for(String code : categoryCodes){
query.addFilterQuery("categoryCodes:" + code);
}
query.setStart(pagination.getStart());
query.setRows(pagination.getLimit());
QueryResponse qr = server.query(query);
pagination.setItems(qr.getBeans(SolrSku.class));
pagination.setTotalCount(new Long(qr.getResults().getNumFound()).intValue());
return pagination;
}
private SolrSku sku2Solr(Sku sku){
SolrSku solrSku = new SolrSku();
solrSku.setId(sku.getId());
solrSku.setCode(sku.getCode());
solrSku.setName(sku.getName());
solrSku.setMarketPrice(sku.getMarketPrice());
solrSku.setSellPrice(sku.getSellPrice());
solrSku.setImage(sku.getImage());
if(sku.getCategoryId() != null){
List<Category> categoryWithParents = categoryService.findWithParents(sku.getCategoryId());
if(!CollectionUtils.isEmpty(categoryWithParents)){
List<String> categories = new ArrayList<String>(categoryWithParents.size());
List<String> categoryCodes = new ArrayList<String>(categoryWithParents.size());
for(Category category : categoryWithParents){
categories.add(category.getName());
categoryCodes.add(category.getCode());
}
solrSku.setCategories(categories);
solrSku.setCategoryCodes(categoryCodes);
}
}
if(sku.getBrandId() != null){
Brand brand = brandService.find(sku.getBrandId());
if(brand != null){
solrSku.setBrandCode(brand.getCode());
solrSku.setBrandName(brand.getName());
}
}
List<String> sizes = new ArrayList<String>();
List<Inventory> inventories = inventoryService.findBySkuId(sku.getId());
if(!CollectionUtils.isEmpty(inventories)){
for(Inventory inventory : inventories){
sizes.add(inventory.getSize());
}
}
solrSku.setSizes(sizes);
return solrSku;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}
| apache-2.0 |
cbuil/sparql-dqp | src/test/ext/java/es/upm/fi/dia/oeg/test/sparqldqp/utils/ASTUtilTest.java | 2239 | package es.upm.fi.dia.oeg.test.sparqldqp.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.antlr.runtime.tree.CommonTree;
import junit.framework.TestCase;
import es.upm.fi.dia.oeg.ogsadai.sparql.parser.ASTConstants;
import es.upm.fi.dia.oeg.ogsadai.sparql.parser.ASTUtil;
import es.upm.fi.dia.oeg.ogsadai.sparql.parser.SPARQLParserException;
import es.upm.fi.dia.oeg.ogsadai.sparql.parser.SimpleSparqlQueryParser;
import es.upm.fi.dia.oeg.ogsadai.sparql.query.Prefix;
public class ASTUtilTest extends TestCase
{
/** Copyright statement. */
private static final String COPYRIGHT_NOTICE = "Copyright (c) Universidad Politecnica de Madrid, 2009-2010.";
private SimpleSparqlQueryParser mParser;
private Map<String, String> mTestQueries;
/**
* Runs the test cases.
*
* @param args
* Not used
*/
public static void main(String[] args)
{
junit.textui.TestRunner.run(ASTUtilTest.class);
}
/**
* Constructor.
*
* @param name
* Test case name.
* @throws Exception
* If any problems arise in reading the test properties.
*/
public ASTUtilTest(String name) throws Exception
{
super(name);
}
public void setUp() throws Exception
{
super.setUp();
mParser = SimpleSparqlQueryParser.getInstance();
mTestQueries = new HashMap<>();
mTestQueries.put("testGetPredicateFromBGP",
" a:test1 :b ?c ");
mTestQueries.put("testGetPredicate",
" ?a :b ?c . a:test1 :b ?c");
}
public void testGetPredicateFromBGP()
{
CommonTree abstractSyntaxTree;
String query = mTestQueries.get("testGetPredicateFromBGP");
try
{
abstractSyntaxTree = mParser.parseTripleBlock(query);
CommonTree predicate = ASTUtil.getPredicateFromBGP(abstractSyntaxTree);
assertTrue(predicate.equals(abstractSyntaxTree));
}
catch (SPARQLParserException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} | apache-2.0 |
davidzchen/bazel | src/test/java/com/google/devtools/build/lib/starlark/StarlarkRuleContextTest.java | 117277 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.starlark;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.common.truth.Truth8.assertThat;
import static com.google.devtools.build.lib.analysis.ToolchainCollection.DEFAULT_EXEC_GROUP_NAME;
import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST;
import static org.junit.Assert.assertThrows;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ActionsProvider;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.ExecGroupCollection;
import com.google.devtools.build.lib.analysis.ResolvedToolchainContext;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.actions.StarlarkAction;
import com.google.devtools.build.lib.analysis.configuredtargets.FileConfiguredTarget;
import com.google.devtools.build.lib.analysis.starlark.StarlarkRuleContext;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.analysis.util.MockRule;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.Depset;
import com.google.devtools.build.lib.packages.Provider;
import com.google.devtools.build.lib.packages.StarlarkInfo;
import com.google.devtools.build.lib.packages.StarlarkProvider;
import com.google.devtools.build.lib.packages.StarlarkProviderIdentifier;
import com.google.devtools.build.lib.packages.StructImpl;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.rules.java.JavaSourceJarsProvider;
import com.google.devtools.build.lib.rules.python.PyProviderUtils;
import com.google.devtools.build.lib.starlark.util.BazelEvaluationTestCase;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import com.google.devtools.build.lib.util.FileTypeSet;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import net.starlark.java.eval.Dict;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.Mutability;
import net.starlark.java.eval.Sequence;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkInt;
import net.starlark.java.eval.StarlarkList;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link StarlarkRuleContext}. */
@RunWith(JUnit4.class)
public final class StarlarkRuleContextTest extends BuildViewTestCase {
  /**
   * Builds a {@link StarlarkRuleContext} for the configured target identified by {@code label},
   * evaluated under the current Starlark semantics and with no aspect attached.
   */
  private StarlarkRuleContext createRuleContext(String label) throws Exception {
    return new StarlarkRuleContext(
        getRuleContextForStarlark(getConfiguredTarget(label)), null, getStarlarkSemantics());
  }
private final BazelEvaluationTestCase ev = new BazelEvaluationTestCase();
  /**
   * A test rule that exercises the semantics of mandatory providers: its {@code deps} attribute
   * accepts a dependency only if the dependency advertises legacy provider "a", or both "b" and
   * "c" (each inner list is one alternative that must be satisfied in full).
   */
  private static final MockRule TESTING_RULE_FOR_MANDATORY_PROVIDERS =
      () ->
          MockRule.define(
              "testing_rule_for_mandatory_providers",
              (builder, env) ->
                  builder
                      .setUndocumented()
                      .add(attr("srcs", LABEL_LIST).allowedFileTypes(FileTypeSet.ANY_FILE))
                      .add(
                          attr("deps", LABEL_LIST)
                              .legacyAllowAnyFileType()
                              .mandatoryProvidersList(
                                  ImmutableList.of(
                                      ImmutableList.of(StarlarkProviderIdentifier.forLegacy("a")),
                                      ImmutableList.of(
                                          StarlarkProviderIdentifier.forLegacy("b"),
                                          StarlarkProviderIdentifier.forLegacy("c"))))));
@Override
protected ConfiguredRuleClassProvider createRuleClassProvider() {
ConfiguredRuleClassProvider.Builder builder =
new ConfiguredRuleClassProvider.Builder()
.addRuleDefinition(TESTING_RULE_FOR_MANDATORY_PROVIDERS);
TestRuleClassProvider.addStandardRules(builder);
return builder.build();
}
  /**
   * Writes the MyInfo provider used by several tests, plus the //foo package of fixture targets
   * (genrules, java/android libraries, a java_import, and a cc_library with per-target features)
   * that the ctx.attr / ctx.files tests below read from.
   */
  @Before
  public final void setupMyInfoAndGenerateBuildFile() throws Exception {
    scratch.file("myinfo/myinfo.bzl", "MyInfo = provider()");
    scratch.file("myinfo/BUILD");
    scratch.file(
        "foo/BUILD",
        "package(features = ['-f1', 'f2', 'f3'])",
        "genrule(name = 'foo',",
        "  cmd = 'dummy_cmd',",
        "  srcs = ['a.txt', 'b.img'],",
        "  tools = ['t.exe'],",
        "  outs = ['c.txt'])",
        "genrule(name = 'foo2',",
        "  cmd = 'dummy_cmd',",
        "  outs = ['e.txt'])",
        "genrule(name = 'bar',",
        "  cmd = 'dummy_cmd',",
        "  srcs = [':jl', ':gl'],",
        "  outs = ['d.txt'])",
        "java_library(name = 'jl',",
        "  srcs = ['a.java'])",
        "android_library(name = 'androidlib',",
        "  srcs = ['a.java'])",
        "java_import(name = 'asr',",
        "  jars = [ 'asr.jar' ],",
        "  srcjar = 'asr-src.jar',",
        ")",
        "genrule(name = 'gl',",
        "  cmd = 'touch $(OUTS)',",
        "  srcs = ['a.go'],",
        "  outs = [ 'gl.a', 'gl.gcgox', ],",
        "  output_to_bindir = 1,",
        ")",
        "cc_library(name = 'cc_with_features',",
        "           srcs = ['dummy.cc'],",
        "           features = ['f1', '-f3'],",
        ")");
  }
  /** Binds {@code ctx} as the variable "ruleContext" in the Starlark evaluation environment. */
  private void setRuleContext(StarlarkRuleContext ctx) throws Exception {
    ev.update("ruleContext", ctx);
  }
  /**
   * Sets up the fixtures for the attribute-error location tests: a Starlark rule whose {@code
   * deps} attribute requires the legacy provider 'some_provider', instantiated directly and via
   * both a native-rule macro and a Starlark-rule macro. All instantiations dep on a plain
   * java_library, which lacks the provider, so analysis is expected to fail.
   */
  private void setUpAttributeErrorTest() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('//test:macros.bzl', 'macro_native_rule', 'macro_starlark_rule', 'starlark_rule')",
        "macro_native_rule(name = 'm_native',",
        "  deps = [':jlib'])",
        "macro_starlark_rule(name = 'm_starlark',",
        "  deps = [':jlib'])",
        "java_library(name = 'jlib',",
        "  srcs = ['bla.java'])",
        "cc_library(name = 'cclib',",
        "  deps = [':jlib'])",
        "starlark_rule(name = 'skyrule',",
        "  deps = [':jlib'])");
    scratch.file(
        "test/macros.bzl",
        "def _impl(ctx):",
        "  return",
        "starlark_rule = rule(",
        "  implementation = _impl,",
        "  attrs = {",
        "    'deps': attr.label_list(providers = ['some_provider'], allow_files=True)",
        "  }",
        ")",
        "def macro_native_rule(name, deps): ",
        "  native.cc_library(name = name, deps = deps)",
        "def macro_starlark_rule(name, deps):",
        "  starlark_rule(name = name, deps = deps)");
    // Errors are expected; don't abort the test on the first reported event.
    reporter.removeHandler(failFastHandler);
  }
  /** An attribute error on a native rule instantiated by a macro should name the macro. */
  @Test
  public void hasCorrectLocationForRuleAttributeError_NativeRuleWithMacro() throws Exception {
    setUpAttributeErrorTest();
    assertThrows(Exception.class, () -> createRuleContext("//test:m_native"));
    assertContainsEvent("misplaced here");
    // Skip the part of the error message that has details about the allowed deps since the mocks
    // for the mac tests might have different values for them.
    assertContainsEvent(
        ". Since this "
            + "rule was created by the macro 'macro_native_rule', the error might have been caused "
            + "by the macro implementation");
  }
  /** An attribute error on a Starlark rule instantiated by a macro should name the macro. */
  @Test
  public void hasCorrectLocationForRuleAttributeError_StarlarkRuleWithMacro() throws Exception {
    setUpAttributeErrorTest();
    assertThrows(Exception.class, () -> createRuleContext("//test:m_starlark"));
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:4:20: in deps attribute of starlark_rule rule "
            + "//test:m_starlark: '//test:jlib' does not have mandatory providers:"
            + " 'some_provider'. "
            + "Since this rule was created by the macro 'macro_starlark_rule', the error might "
            + "have been caused by the macro implementation");
  }
  /** An attribute error on a directly instantiated native rule must not mention any macro. */
  @Test
  public void hasCorrectLocationForRuleAttributeError_NativeRule() throws Exception {
    setUpAttributeErrorTest();
    assertThrows(Exception.class, () -> createRuleContext("//test:cclib"));
    assertContainsEvent("misplaced here");
    // Skip the part of the error message that has details about the allowed deps since the mocks
    // for the mac tests might have different values for them.
    assertDoesNotContainEvent("Since this rule was created by the macro");
  }
  /** An attribute error on a directly instantiated Starlark rule points at the BUILD file. */
  @Test
  public void hasCorrectLocationForRuleAttributeError_StarlarkRule() throws Exception {
    setUpAttributeErrorTest();
    assertThrows(Exception.class, () -> createRuleContext("//test:skyrule"));
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:10:14: in deps attribute of "
            + "starlark_rule rule //test:skyrule: '//test:jlib' does not have mandatory providers: "
            + "'some_provider'");
  }
  /**
   * With providers = [['a'], ['b', 'c']] on a Starlark rule, a dep exposing only 'a' is accepted
   * while a dep exposing only 'b' is rejected (neither alternative is fully satisfied).
   */
  @Test
  public void testMandatoryProvidersListWithStarlark() throws Exception {
    setBuildLanguageOptions("--incompatible_disallow_struct_provider_syntax=false");
    scratch.file(
        "test/BUILD",
        "load('//test:rules.bzl', 'starlark_rule', 'my_rule', 'my_other_rule')",
        "my_rule(name = 'mylib',",
        "  srcs = ['a.py'])",
        "starlark_rule(name = 'skyrule1',",
        "  deps = [':mylib'])",
        "my_other_rule(name = 'my_other_lib',",
        "  srcs = ['a.py'])",
        "starlark_rule(name = 'skyrule2',",
        "  deps = [':my_other_lib'])");
    scratch.file(
        "test/rules.bzl",
        "def _impl(ctx):",
        "  return",
        "starlark_rule = rule(",
        "  implementation = _impl,",
        "  attrs = {",
        "    'deps': attr.label_list(providers = [['a'], ['b', 'c']],",
        "    allow_files=True)",
        "  }",
        ")",
        "def my_rule_impl(ctx):",
        "  return struct(a = [])",
        "my_rule = rule(implementation = my_rule_impl, ",
        "  attrs = { 'srcs' : attr.label_list(allow_files=True)})",
        "def my_other_rule_impl(ctx):",
        "  return struct(b = [])",
        "my_other_rule = rule(implementation = my_other_rule_impl, ",
        "  attrs = { 'srcs' : attr.label_list(allow_files=True)})");
    reporter.removeHandler(failFastHandler);
    assertThat(getConfiguredTarget("//test:skyrule1")).isNotNull();
    assertThrows(Exception.class, () -> createRuleContext("//test:skyrule2"));
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:8:14: in deps attribute of "
            + "starlark_rule rule //test:skyrule2: '//test:my_other_lib' does not have "
            + "mandatory providers: 'a' or 'c'");
  }
  /**
   * Same mandatory-providers check via the native testing rule: a dep exposing only legacy
   * provider 'b' is rejected since neither the ['a'] nor the ['b', 'c'] alternative is satisfied.
   */
  @Test
  public void testMandatoryProvidersListWithNative() throws Exception {
    setBuildLanguageOptions("--incompatible_disallow_struct_provider_syntax=false");
    scratch.file(
        "test/BUILD",
        "load('//test:rules.bzl', 'my_rule', 'my_other_rule')",
        "my_rule(name = 'mylib',",
        "  srcs = ['a.py'])",
        "testing_rule_for_mandatory_providers(name = 'skyrule1',",
        "  deps = [':mylib'])",
        "my_other_rule(name = 'my_other_lib',",
        "  srcs = ['a.py'])",
        "testing_rule_for_mandatory_providers(name = 'skyrule2',",
        "  deps = [':my_other_lib'])");
    scratch.file(
        "test/rules.bzl",
        "def my_rule_impl(ctx):",
        "  return struct(a = [])",
        "my_rule = rule(implementation = my_rule_impl, ",
        "  attrs = { 'srcs' : attr.label_list(allow_files=True)})",
        "def my_other_rule_impl(ctx):",
        "  return struct(b = [])",
        "my_other_rule = rule(implementation = my_other_rule_impl, ",
        "  attrs = { 'srcs' : attr.label_list(allow_files=True)})");
    reporter.removeHandler(failFastHandler);
    assertThat(getConfiguredTarget("//test:skyrule1")).isNotNull();
    assertThrows(Exception.class, () -> createRuleContext("//test:skyrule2"));
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:8:37: in deps attribute of "
            + "testing_rule_for_mandatory_providers rule //test:skyrule2: '//test:my_other_lib' "
            + "does not have mandatory providers: 'a' or 'c'");
  }
/* Sharing setup code between the testPackageBoundaryError*() methods is not possible since the
* errors already happen when loading the file. Consequently, all tests would fail at the same
* statement. */
  /** A native rule referencing a file under a subpackage gets a package-boundary error. */
  @Test
  public void testPackageBoundaryError_nativeRule() throws Exception {
    scratch.file("test/BUILD", "cc_library(name = 'cclib',", "  srcs = ['sub/my_sub_lib.h'])");
    scratch.file("test/sub/BUILD", "cc_library(name = 'my_sub_lib', srcs = ['my_sub_lib.h'])");
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//test:cclib");
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:1:11: Label '//test:sub/my_sub_lib.h' is invalid because "
            + "'test/sub' is a subpackage; perhaps you meant to put the colon here: "
            + "'//test/sub:my_sub_lib.h'?");
  }
  /** A Starlark rule referencing a file under a subpackage gets a package-boundary error. */
  @Test
  public void testPackageBoundaryError_starlarkRule() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('//test:macros.bzl', 'starlark_rule')",
        "starlark_rule(name = 'skyrule',",
        "  srcs = ['sub/my_sub_lib.h'])");
    scratch.file("test/sub/BUILD", "cc_library(name = 'my_sub_lib', srcs = ['my_sub_lib.h'])");
    scratch.file(
        "test/macros.bzl",
        "def _impl(ctx):",
        "  return",
        "starlark_rule = rule(",
        "  implementation = _impl,",
        "  attrs = {",
        "    'srcs': attr.label_list(allow_files=True)",
        "  }",
        ")");
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//test:skyrule");
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:2:14: Label '//test:sub/my_sub_lib.h' is invalid because "
            + "'test/sub' is a subpackage; perhaps you meant to put the colon here: "
            + "'//test/sub:my_sub_lib.h'?");
  }
  /** A subpackage-crossing label passed through a Starlark macro is reported at the macro call. */
  @Test
  public void testPackageBoundaryError_starlarkMacro() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('//test:macros.bzl', 'macro_starlark_rule')",
        "macro_starlark_rule(name = 'm_starlark',",
        "  srcs = ['sub/my_sub_lib.h'])");
    scratch.file("test/sub/BUILD", "cc_library(name = 'my_sub_lib', srcs = ['my_sub_lib.h'])");
    scratch.file(
        "test/macros.bzl",
        "def _impl(ctx):",
        "  return",
        "starlark_rule = rule(",
        "  implementation = _impl,",
        "  attrs = {",
        "    'srcs': attr.label_list(allow_files=True)",
        "  }",
        ")",
        "def macro_starlark_rule(name, srcs=[]):",
        "  starlark_rule(name = name, srcs = srcs)");
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//test:m_starlark");
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:2:20: Label '//test:sub/my_sub_lib.h' is invalid because"
            + " 'test/sub' is a subpackage; perhaps you meant to put the colon here: "
            + "'//test/sub:my_sub_lib.h'?");
  }
/* The error message for this case used to be wrong. */
  /** Referencing a file that lives inside an external repository crosses the repo boundary. */
  @Test
  public void testPackageBoundaryError_externalRepository_boundary() throws Exception {
    scratch.file("r/WORKSPACE");
    scratch.file("r/BUILD");
    scratch.overwriteFile(
        "WORKSPACE",
        new ImmutableList.Builder<String>()
            .addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
            .add("local_repository(name='r', path='r')")
            .build());
    scratch.file("BUILD", "cc_library(name = 'cclib',", "  srcs = ['r/my_sub_lib.h'])");
    invalidatePackages(
        /*alsoConfigs=*/ false); // Repository shuffling messes with toolchain labels.
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//:cclib");
    assertContainsEvent(
        "/workspace/BUILD:1:11: Label '//:r/my_sub_lib.h' is invalid because "
            + "'@r//' is a subpackage");
  }
/* The error message for this case used to be wrong. */
  /** A subpackage-crossing label entirely inside an external repository is reported there. */
  @Test
  public void testPackageBoundaryError_externalRepository_entirelyInside() throws Exception {
    scratch.file("/r/WORKSPACE");
    scratch.file("/r/BUILD", "cc_library(name = 'cclib',", "  srcs = ['sub/my_sub_lib.h'])");
    scratch.file("/r/sub/BUILD", "cc_library(name = 'my_sub_lib', srcs = ['my_sub_lib.h'])");
    scratch.overwriteFile(
        "WORKSPACE",
        new ImmutableList.Builder<String>()
            .addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
            .add("local_repository(name='r', path='/r')")
            .build());
    invalidatePackages(
        /*alsoConfigs=*/ false); // Repository shuffling messes with toolchain labels.
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("@r//:cclib");
    assertContainsEvent(
        "/external/r/BUILD:1:11: Label '@r//:sub/my_sub_lib.h' is invalid because "
            + "'@r//sub' is a subpackage; perhaps you meant to put the colon here: "
            + "'@r//sub:my_sub_lib.h'?");
  }
/*
* Making the location in BUILD file the default for "crosses boundary of subpackage" errors does
* not work in this case since the error actually happens in the bzl file. However, because of
* the current design, we can neither show the location in the bzl file nor display both
* locations (BUILD + bzl).
*
* Since this case is less common than having such an error in a BUILD file, we can live
* with it.
*/
  /**
   * The boundary error is still reported (with a BUILD-file location) even though the offending
   * label is introduced inside the .bzl macro body rather than in the BUILD file.
   */
  @Test
  public void testPackageBoundaryError_starlarkMacroWithErrorInBzlFile() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('//test:macros.bzl', 'macro_starlark_rule')",
        "macro_starlark_rule(name = 'm_starlark')");
    scratch.file("test/sub/BUILD", "cc_library(name = 'my_sub_lib', srcs = ['my_sub_lib.h'])");
    scratch.file(
        "test/macros.bzl",
        "def _impl(ctx):",
        "  return",
        "starlark_rule = rule(",
        "  implementation = _impl,",
        "  attrs = {",
        "    'srcs': attr.label_list(allow_files=True)",
        "  }",
        ")",
        "def macro_starlark_rule(name, srcs=[]):",
        "  starlark_rule(name = name, srcs = srcs + ['sub/my_sub_lib.h'])");
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//test:m_starlark");
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:2:20: Label '//test:sub/my_sub_lib.h' "
            + "is invalid because 'test/sub' is a subpackage");
  }
  /** A subpackage-crossing label passed through a native macro is reported at the macro call. */
  @Test
  public void testPackageBoundaryError_nativeMacro() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('//test:macros.bzl', 'macro_native_rule')",
        "macro_native_rule(name = 'm_native',",
        "  srcs = ['sub/my_sub_lib.h'])");
    scratch.file("test/sub/BUILD", "cc_library(name = 'my_sub_lib', srcs = ['my_sub_lib.h'])");
    scratch.file(
        "test/macros.bzl",
        "def macro_native_rule(name, deps=[], srcs=[]): ",
        "  native.cc_library(name = name, deps = deps, srcs = srcs)");
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//test:m_native");
    assertContainsEvent(
        "ERROR /workspace/test/BUILD:2:18: Label '//test:sub/my_sub_lib.h' "
            + "is invalid because 'test/sub' is a subpackage");
  }
@Test
public void shouldGetPrerequisiteArtifacts() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.files.srcs");
assertArtifactList(result, ImmutableList.of("a.txt", "b.img"));
}
private static void assertArtifactList(Object result, List<String> artifacts) {
assertThat(result).isInstanceOf(Sequence.class);
Sequence<?> resultList = (Sequence) result;
assertThat(resultList).hasSize(artifacts.size());
int i = 0;
for (String artifact : artifacts) {
assertThat(((Artifact) resultList.get(i++)).getFilename()).isEqualTo(artifact);
}
}
@Test
public void shouldGetPrerequisites() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:bar");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.srcs");
// Check for a known provider
TransitiveInfoCollection tic1 = (TransitiveInfoCollection) ((Sequence) result).get(0);
assertThat(JavaInfo.getProvider(JavaSourceJarsProvider.class, tic1)).isNotNull();
// Check an unimplemented provider too
assertThat(PyProviderUtils.hasLegacyProvider(tic1)).isFalse();
}
@Test
public void shouldGetPrerequisite() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:asr");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.srcjar");
TransitiveInfoCollection tic = (TransitiveInfoCollection) result;
assertThat(tic).isInstanceOf(FileConfiguredTarget.class);
assertThat(tic.getLabel().getName()).isEqualTo("asr-src.jar");
}
@Test
public void testGetRuleAttributeListType() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.outs");
assertThat(result).isInstanceOf(Sequence.class);
}
  /** BUILD parsing must succeed when native.existing_rule() observes a select() attribute. */
  @Test
  public void testGetRuleSelect() throws Exception {
    scratch.file("test/starlark/BUILD");
    scratch.file(
        "test/starlark/rulestr.bzl", "def rule_dict(name):", "  return native.existing_rule(name)");
    scratch.file(
        "test/getrule/BUILD",
        "load('//test/starlark:rulestr.bzl', 'rule_dict')",
        "cc_library(name ='x', ",
        "   srcs = select({'//conditions:default': []})",
        ")",
        "rule_dict('x')");
    // Parse the BUILD file, to make sure select() makes it out of native.rule().
    createRuleContext("//test/getrule:x");
  }
@Test
public void testExistingRuleReturnNone() throws Exception {
scratch.file(
"test/rulestr.bzl",
"def test_rule(name, x):",
" print(native.existing_rule(x))",
" if native.existing_rule(x) == None:",
" native.cc_library(name = name)");
scratch.file(
"test/BUILD",
"load('//test:rulestr.bzl', 'test_rule')",
"test_rule('a', 'does not exist')",
"test_rule('b', 'BUILD')");
assertThat(getConfiguredTarget("//test:a")).isNotNull();
assertThat(getConfiguredTarget("//test:b")).isNotNull();
}
@Test
public void existingRuleWithSelect() throws Exception {
scratch.file(
"test/existing_rule.bzl",
"def macro():",
" s = select({'//foo': ['//bar']})",
" native.cc_library(name = 'x', srcs = s)",
" print(native.existing_rule('x')['srcs'])");
scratch.file(
"test/BUILD",
"load('//test:existing_rule.bzl', 'macro')",
"macro()",
"cc_library(name = 'a', srcs = [])");
getConfiguredTarget("//test:a");
assertContainsEvent("select({Label(\"//foo:foo\"): [Label(\"//bar:bar\")]})");
}
@Test
public void testGetRule() throws Exception {
scratch.file("test/starlark/BUILD");
scratch.file(
"test/starlark/rulestr.bzl",
"def rule_dict(name):",
" return native.existing_rule(name)",
"def rules_dict():",
" return native.existing_rules()",
"def nop(ctx):",
" pass",
"nop_rule = rule(attrs = {'x': attr.label()}, implementation = nop)",
"consume_rule = rule(attrs = {'s': attr.string_list()}, implementation = nop)");
scratch.file(
"test/getrule/BUILD",
"load('//test/starlark:rulestr.bzl', 'rules_dict', 'rule_dict', 'nop_rule',"
+ "'consume_rule')",
"genrule(name = 'a', outs = ['a.txt'], ",
" licenses = ['notice'],",
" output_to_bindir = False,",
" tools = [ '//test:bla' ], cmd = 'touch $@')",
"nop_rule(name = 'c', x = ':a')",
"rlist= rules_dict()",
"consume_rule(name = 'all_str', s = [rlist['a']['kind'], rlist['a']['name'], ",
" rlist['c']['kind'], rlist['c']['name']])",
"adict = rule_dict('a')",
"cdict = rule_dict('c')",
"consume_rule(name = 'a_str', ",
" s = [adict['kind'], adict['name'], adict['outs'][0], adict['tools'][0]])",
"consume_rule(name = 'genrule_attr', ",
" s = adict.keys())",
"consume_rule(name = 'c_str', s = [cdict['kind'], cdict['name'], cdict['x']])");
StarlarkRuleContext allContext = createRuleContext("//test/getrule:all_str");
setRuleContext(allContext);
List<?> result = (List) ev.eval("ruleContext.attr.s");
assertThat(result).containsExactly("genrule", "a", "nop_rule", "c");
setRuleContext(createRuleContext("//test/getrule:a_str"));
result = (List) ev.eval("ruleContext.attr.s");
assertThat(result).containsExactly("genrule", "a", ":a.txt", "//test:bla");
setRuleContext(createRuleContext("//test/getrule:c_str"));
result = (List) ev.eval("ruleContext.attr.s");
assertThat(result).containsExactly("nop_rule", "c", ":a");
setRuleContext(createRuleContext("//test/getrule:genrule_attr"));
result = (List) ev.eval("ruleContext.attr.s");
assertThat(result)
.containsAtLeast(
"name",
"visibility",
"transitive_configs",
"tags",
"generator_name",
"generator_function",
"generator_location",
"features",
"compatible_with",
"restricted_to",
"srcs",
"tools",
"toolchains",
"outs",
"cmd",
"output_to_bindir",
"local",
"message",
"executable",
"stamp",
"heuristic_label_expansion",
"kind");
}
@Test
public void testExistingRuleDictIsMutable() throws Exception {
scratch.file(
"test/BUILD",
"load('inc.bzl', 'f')", //
"f()");
scratch.file(
"test/inc.bzl", //
"def f():",
" native.config_setting(name='x', define_values={'key': 'value'})",
" r = native.existing_rule('x')",
" r['define_values']['key'] = 123"); // mutate the dict
// Logically this belongs among the loading-phase tests of existing_rules. Where are they?
assertThat(getConfiguredTarget("//test:BUILD")).isNotNull(); // no error
}
@Test
public void testGetRuleAttributeListValue() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.outs");
assertThat(((Sequence) result)).hasSize(1);
}
@Test
public void testGetRuleAttributeListValueNoGet() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.outs");
assertThat(((Sequence) result)).hasSize(1);
}
@Test
public void testGetRuleAttributeStringTypeValue() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.cmd");
assertThat((String) result).isEqualTo("dummy_cmd");
}
@Test
public void testGetRuleAttributeStringTypeValueNoGet() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.attr.cmd");
assertThat((String) result).isEqualTo("dummy_cmd");
}
@Test
public void testGetRuleAttributeBadAttributeName() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
ev.checkEvalErrorContains("No attribute 'bad'", "ruleContext.attr.bad");
}
@Test
public void testGetLabel() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.label");
assertThat(((Label) result).toString()).isEqualTo("//foo:foo");
}
@Test
public void testRuleError() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
ev.checkEvalErrorContains("message", "fail('message')");
}
@Test
public void testAttributeError() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
ev.checkEvalErrorContains("attribute srcs: message", "fail(attr='srcs', msg='message')");
}
@Test
public void testGetExecutablePrerequisite() throws Exception {
setRuleContext(createRuleContext("//foo:androidlib"));
Object result = ev.eval("ruleContext.executable._idlclass");
assertThat(((Artifact) result).getFilename()).matches("^IdlClass(\\.exe){0,1}$");
}
@Test
public void testCreateSpawnActionArgumentsWithExecutableFilesToRunProvider() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:androidlib");
setRuleContext(ruleContext);
ev.exec(
"ruleContext.actions.run(",
" inputs = ruleContext.files.srcs,",
" outputs = ruleContext.files.srcs,",
" arguments = ['--a','--b'],",
" executable = ruleContext.executable._idlclass)");
StarlarkAction action =
(StarlarkAction)
Iterables.getOnlyElement(
ruleContext.getRuleContext().getAnalysisEnvironment().getRegisteredActions());
assertThat(action.getCommandFilename()).matches("^.*/IdlClass(\\.exe){0,1}$");
}
@Test
public void testCreateStarlarkActionArgumentsWithUnusedInputsList() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
ev.exec(
"ruleContext.actions.run(",
" inputs = ruleContext.files.srcs,",
" outputs = ruleContext.files.srcs,",
" executable = 'executable',",
" unused_inputs_list = ruleContext.files.srcs[0])");
StarlarkAction action =
(StarlarkAction)
Iterables.getOnlyElement(
ruleContext.getRuleContext().getAnalysisEnvironment().getRegisteredActions());
assertThat(action.getUnusedInputsList()).isPresent();
assertThat(action.getUnusedInputsList().get().getFilename()).isEqualTo("a.txt");
assertThat(action.discoversInputs()).isTrue();
assertThat(action.isShareable()).isFalse();
}
@Test
public void testCreateStarlarkActionArgumentsWithoutUnusedInputsList() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
ev.exec(
"ruleContext.actions.run(",
" inputs = ruleContext.files.srcs,",
" outputs = ruleContext.files.srcs,",
" executable = 'executable',",
" unused_inputs_list = None)");
StarlarkAction action =
(StarlarkAction)
Iterables.getOnlyElement(
ruleContext.getRuleContext().getAnalysisEnvironment().getRegisteredActions());
assertThat(action.getUnusedInputsList()).isEmpty();
assertThat(action.discoversInputs()).isFalse();
}
@Test
public void testOutputs() throws Exception {
setRuleContext(createRuleContext("//foo:bar"));
Iterable<?> result = (Iterable) ev.eval("ruleContext.outputs.outs");
assertThat(((Artifact) Iterables.getOnlyElement(result)).getFilename()).isEqualTo("d.txt");
}
@Test
public void testStarlarkRuleContextGetDefaultShellEnv() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.configuration.default_shell_env");
assertThat(result).isInstanceOf(Dict.class);
}
@Test
public void testCheckPlaceholders() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.check_placeholders('%{name}', ['name'])");
assertThat(result).isEqualTo(true);
}
@Test
public void testCheckPlaceholdersBadPlaceholder() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.check_placeholders('%{name}', ['abc'])");
assertThat(result).isEqualTo(false);
}
@Test
public void testExpandMakeVariables() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.expand_make_variables('cmd', '$(ABC)', {'ABC': 'DEF'})");
assertThat(result).isEqualTo("DEF");
}
@Test
public void testExpandMakeVariablesShell() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.expand_make_variables('cmd', '$$ABC', {})");
assertThat(result).isEqualTo("$ABC");
}
private void setUpMakeVarToolchain() throws Exception {
scratch.file(
"vars/vars.bzl",
"def _make_var_supplier_impl(ctx):",
" val = ctx.attr.value",
" return [platform_common.TemplateVariableInfo({'MAKE_VAR_VALUE': val})]",
"make_var_supplier = rule(",
" implementation = _make_var_supplier_impl,",
" attrs = {",
" 'value': attr.string(mandatory = True),",
" })",
"def _make_var_user_impl(ctx):",
" return []",
"make_var_user = rule(",
" implementation = _make_var_user_impl,",
")");
scratch.file(
"vars/BUILD",
"load(':vars.bzl', 'make_var_supplier', 'make_var_user')",
"make_var_supplier(name = 'supplier', value = 'foo')",
"cc_toolchain_alias(name = 'current_cc_toolchain')",
"make_var_user(",
" name = 'vars',",
" toolchains = [':supplier', ':current_cc_toolchain'],",
")");
}
@Test
public void testExpandMakeVariables_cc() throws Exception {
setUpMakeVarToolchain();
setRuleContext(createRuleContext("//vars:vars"));
String result = (String) ev.eval("ruleContext.expand_make_variables('cmd', '$(CC)', {})");
assertThat(result).isNotEmpty();
}
@Test
public void testExpandMakeVariables_toolchain() throws Exception {
setUpMakeVarToolchain();
setRuleContext(createRuleContext("//vars:vars"));
Object result = ev.eval("ruleContext.expand_make_variables('cmd', '$(MAKE_VAR_VALUE)', {})");
assertThat(result).isEqualTo("foo");
}
@Test
public void testVar_toolchain() throws Exception {
setUpMakeVarToolchain();
setRuleContext(createRuleContext("//vars:vars"));
Object result = ev.eval("ruleContext.var['MAKE_VAR_VALUE']");
assertThat(result).isEqualTo("foo");
}
@Test
public void testConfiguration() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.configuration");
assertThat(ruleContext.getRuleContext().getConfiguration()).isSameInstanceAs(result);
}
@Test
public void testFeatures() throws Exception {
setRuleContext(createRuleContext("//foo:cc_with_features"));
Object result = ev.eval("ruleContext.features");
assertThat((Sequence) result).containsExactly("f1", "f2");
}
@Test
public void testDisabledFeatures() throws Exception {
setRuleContext(createRuleContext("//foo:cc_with_features"));
Object result = ev.eval("ruleContext.disabled_features");
assertThat((Sequence) result).containsExactly("f3");
}
@Test
public void testHostConfiguration() throws Exception {
StarlarkRuleContext ruleContext = createRuleContext("//foo:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.host_configuration");
assertThat(ruleContext.getRuleContext().getHostConfiguration()).isSameInstanceAs(result);
}
@Test
public void testWorkspaceName() throws Exception {
assertThat(ruleClassProvider.getRunfilesPrefix()).isNotNull();
assertThat(ruleClassProvider.getRunfilesPrefix()).isNotEmpty();
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.workspace_name");
assertThat(ruleClassProvider.getRunfilesPrefix()).isEqualTo(result);
}
@Test
public void testDeriveArtifactLegacy() throws Exception {
setBuildLanguageOptions("--incompatible_new_actions_api=false");
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.new_file(ruleContext.genfiles_dir," + " 'a/b.txt')");
PathFragment fragment = ((Artifact) result).getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/a/b.txt");
}
@Test
public void testDeriveArtifact() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.actions.declare_file('a/b.txt')");
PathFragment fragment = ((Artifact) result).getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/a/b.txt");
}
@Test
public void testDeriveTreeArtifact() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result = ev.eval("ruleContext.actions.declare_directory('a/b')");
Artifact artifact = (Artifact) result;
PathFragment fragment = artifact.getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/a/b");
assertThat(artifact.isTreeArtifact()).isTrue();
}
@Test
public void testDeriveTreeArtifactType() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
String result = (String) ev.eval("type(ruleContext.actions.declare_directory('a/b'))");
assertThat(result).isEqualTo("File");
}
@Test
public void testDeriveTreeArtifactNextToSibling() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Artifact artifact =
(Artifact)
ev.eval(
"ruleContext.actions.declare_directory('c',"
+ " sibling=ruleContext.actions.declare_directory('a/b'))");
PathFragment fragment = artifact.getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/a/c");
assertThat(artifact.isTreeArtifact()).isTrue();
}
@Test
public void testParamFileLegacy() throws Exception {
setBuildLanguageOptions("--incompatible_new_actions_api=false");
setRuleContext(createRuleContext("//foo:foo"));
Object result =
ev.eval(
"ruleContext.new_file(ruleContext.bin_dir," + "ruleContext.files.tools[0], '.params')");
PathFragment fragment = ((Artifact) result).getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/t.exe.params");
}
@Test
public void testParamFileSuffixLegacy() throws Exception {
setBuildLanguageOptions("--incompatible_new_actions_api=false");
setRuleContext(createRuleContext("//foo:foo"));
Object result =
ev.eval(
"ruleContext.new_file(ruleContext.files.tools[0], "
+ "ruleContext.files.tools[0].basename + '.params')");
PathFragment fragment = ((Artifact) result).getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/t.exe.params");
}
@Test
public void testParamFileSuffix() throws Exception {
setRuleContext(createRuleContext("//foo:foo"));
Object result =
ev.eval(
"ruleContext.actions.declare_file(ruleContext.files.tools[0].basename + '.params', "
+ "sibling = ruleContext.files.tools[0])");
PathFragment fragment = ((Artifact) result).getRootRelativePath();
assertThat(fragment.getPathString()).isEqualTo("foo/t.exe.params");
}
@Test
public void testLabelKeyedStringDictConvertsToTargetToStringMap() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(),",
" }",
")");
scratch.file(
"BUILD",
"filegroup(name='dep')",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={':dep': 'value'})");
invalidatePackages();
setRuleContext(createRuleContext("//:r"));
Label keyLabel = (Label) ev.eval("ruleContext.attr.label_dict.keys()[0].label");
assertThat(keyLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
String valueString = (String) ev.eval("ruleContext.attr.label_dict.values()[0]");
assertThat(valueString).isEqualTo("value");
}
@Test
public void testLabelKeyedStringDictTranslatesAliases() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(),",
" }",
")");
scratch.file(
"BUILD",
"filegroup(name='dep')",
"alias(name='alias', actual='dep')",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={':alias': 'value'})");
invalidatePackages();
setRuleContext(createRuleContext("//:r"));
Label keyLabel = (Label) ev.eval("ruleContext.attr.label_dict.keys()[0].label");
assertThat(keyLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
String valueString = (String) ev.eval("ruleContext.attr.label_dict.values()[0]");
assertThat(valueString).isEqualTo("value");
}
@Test
public void testLabelKeyedStringDictAcceptsDefaultValues() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(default={Label('//:default'): 'defs'}),",
" }",
")");
scratch.file(
"BUILD",
"filegroup(name='default')",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r')");
invalidatePackages();
setRuleContext(createRuleContext("//:r"));
Label keyLabel = (Label) ev.eval("ruleContext.attr.label_dict.keys()[0].label");
assertThat(keyLabel).isEqualTo(Label.parseAbsolute("//:default", ImmutableMap.of()));
String valueString = (String) ev.eval("ruleContext.attr.label_dict.values()[0]");
assertThat(valueString).isEqualTo("defs");
}
@Test
public void testLabelKeyedStringDictAllowsFilesWhenAllowFilesIsTrue() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(allow_files=True),",
" }",
")");
scratch.file("myfile.cc");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={'myfile.cc': 'value'})");
invalidatePackages();
createRuleContext("//:r");
assertNoEvents();
}
@Test
public void testLabelKeyedStringDictAllowsFilesOfAppropriateTypes() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(allow_files=['.cc']),",
" }",
")");
scratch.file("myfile.cc");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={'myfile.cc': 'value'})");
invalidatePackages();
createRuleContext("//:r");
assertNoEvents();
}
@Test
public void testLabelKeyedStringDictForbidsFilesOfIncorrectTypes() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(allow_files=['.cc']),",
" }",
")");
scratch.file("myfile.cpp");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={'myfile.cpp': 'value'})");
invalidatePackages();
getConfiguredTarget("//:r");
assertContainsEvent("file '//:myfile.cpp' is misplaced here (expected .cc)");
}
@Test
public void testLabelKeyedStringDictForbidsFilesWhenAllowFilesIsFalse() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(allow_files=False),",
" }",
")");
scratch.file("myfile.cpp");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={'myfile.cpp': 'value'})");
invalidatePackages();
getConfiguredTarget("//:r");
assertContainsEvent(
"in label_dict attribute of my_rule rule //:r: "
+ "source file '//:myfile.cpp' is misplaced here (expected no files)");
}
@Test
public void testLabelKeyedStringDictAllowsRulesWithRequiredProviders_legacy() throws Exception {
setBuildLanguageOptions("--incompatible_disallow_struct_provider_syntax=false");
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(providers=[['my_provider']]),",
" }",
")",
"def _dep_impl(ctx):",
" return struct(my_provider=5)",
"my_dep_rule = rule(",
" implementation = _dep_impl,",
" attrs = {}",
")");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule', 'my_dep_rule')",
"my_dep_rule(name='dep')",
"my_rule(name='r',",
" label_dict={':dep': 'value'})");
invalidatePackages();
createRuleContext("//:r");
assertNoEvents();
}
@Test
public void testLabelKeyedStringDictAllowsRulesWithRequiredProviders() throws Exception {
scratch.file(
"my_rule.bzl",
"load('//myinfo:myinfo.bzl', 'MyInfo')",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(providers=[MyInfo]),",
" }",
")",
"def _dep_impl(ctx):",
" return MyInfo(my_provider=5)",
"my_dep_rule = rule(",
" implementation = _dep_impl,",
" attrs = {}",
")");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule', 'my_dep_rule')",
"my_dep_rule(name='dep')",
"my_rule(name='r',",
" label_dict={':dep': 'value'})");
invalidatePackages();
createRuleContext("//:r");
assertNoEvents();
}
@Test
public void testLabelKeyedStringDictForbidsRulesMissingRequiredProviders() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(providers=[['my_provider']]),",
" }",
")",
"def _dep_impl(ctx):",
" return",
"my_dep_rule = rule(",
" implementation = _dep_impl,",
" attrs = {}",
")");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule', 'my_dep_rule')",
"my_dep_rule(name='dep')",
"my_rule(name='r',",
" label_dict={':dep': 'value'})");
invalidatePackages();
getConfiguredTarget("//:r");
assertContainsEvent(
"in label_dict attribute of my_rule rule //:r: "
+ "'//:dep' does not have mandatory providers: 'my_provider'");
}
@Test
public void testLabelKeyedStringDictForbidsEmptyDictWhenAllowEmptyIsFalse() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(allow_empty=False),",
" }",
")");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={})");
invalidatePackages();
getConfiguredTarget("//:r");
assertContainsEvent(
"in label_dict attribute of my_rule rule //:r: " + "attribute must be non empty");
}
@Test
public void testLabelKeyedStringDictAllowsEmptyDictWhenAllowEmptyIsTrue() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(allow_empty=True),",
" }",
")");
scratch.file(
"BUILD",
"load('//:my_rule.bzl', 'my_rule')",
"my_rule(name='r',",
" label_dict={})");
invalidatePackages();
createRuleContext("//:r");
assertNoEvents();
}
@Test
public void testLabelKeyedStringDictForbidsMissingAttributeWhenMandatoryIsTrue()
throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(mandatory=True),",
" }",
")");
scratch.file("BUILD", "load('//:my_rule.bzl', 'my_rule')", "my_rule(name='r')");
invalidatePackages();
getConfiguredTarget("//:r");
assertContainsEvent("missing value for mandatory attribute 'label_dict' in 'my_rule' rule");
}
@Test
public void testLabelKeyedStringDictAllowsMissingAttributeWhenMandatoryIsFalse()
throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'label_dict': attr.label_keyed_string_dict(mandatory=False),",
" }",
")");
scratch.file("BUILD", "load('//:my_rule.bzl', 'my_rule')", "my_rule(name='r')");
invalidatePackages();
createRuleContext("//:r");
assertNoEvents();
}
@Test
public void testLabelAttributeDefault() throws Exception {
scratch.file(
"my_rule.bzl",
"def _impl(ctx):",
" return",
"my_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'explicit_dep': attr.label(default = Label('//:dep')),",
" '_implicit_dep': attr.label(default = Label('//:dep')),",
" 'explicit_dep_list': attr.label_list(default = [Label('//:dep')]),",
" '_implicit_dep_list': attr.label_list(default = [Label('//:dep')]),",
" }",
")");
scratch.file(
"BUILD", "filegroup(name='dep')", "load('//:my_rule.bzl', 'my_rule')", "my_rule(name='r')");
invalidatePackages();
setRuleContext(createRuleContext("//:r"));
Label explicitDepLabel = (Label) ev.eval("ruleContext.attr.explicit_dep.label");
assertThat(explicitDepLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
Label implicitDepLabel = (Label) ev.eval("ruleContext.attr._implicit_dep.label");
assertThat(implicitDepLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
Label explicitDepListLabel = (Label) ev.eval("ruleContext.attr.explicit_dep_list[0].label");
assertThat(explicitDepListLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
Label implicitDepListLabel = (Label) ev.eval("ruleContext.attr._implicit_dep_list[0].label");
assertThat(implicitDepListLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
}
@Test
public void testRelativeLabelInExternalRepository() throws Exception {
scratch.file(
"external_rule.bzl",
"def _impl(ctx):",
" return",
"external_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'internal_dep': attr.label(default = Label('//:dep'))",
" }",
")");
scratch.file("BUILD", "filegroup(name='dep')");
scratch.file("/r/WORKSPACE");
scratch.file(
"/r/a/BUILD", "load('@//:external_rule.bzl', 'external_rule')", "external_rule(name='r')");
scratch.overwriteFile(
"WORKSPACE",
new ImmutableList.Builder<String>()
.addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
.add("local_repository(name='r', path='/r')")
.build());
invalidatePackages(
/*alsoConfigs=*/ false); // Repository shuffling messes with toolchain labels.
setRuleContext(createRuleContext("@r//a:r"));
Label depLabel = (Label) ev.eval("ruleContext.attr.internal_dep.label");
assertThat(depLabel).isEqualTo(Label.parseAbsolute("//:dep", ImmutableMap.of()));
}
@Test
public void testCallerRelativeLabelInExternalRepository() throws Exception {
scratch.file("BUILD");
scratch.file(
"external_rule.bzl",
"def _impl(ctx):",
" return",
"external_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'internal_dep': attr.label(",
" default = Label('//:dep', relative_to_caller_repository = True)",
" )",
" }",
")");
scratch.file("/r/WORKSPACE");
scratch.file("/r/BUILD", "filegroup(name='dep')");
scratch.file(
"/r/a/BUILD", "load('@//:external_rule.bzl', 'external_rule')", "external_rule(name='r')");
scratch.overwriteFile(
"WORKSPACE",
new ImmutableList.Builder<String>()
.addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
.add("local_repository(name='r', path='/r')")
.build());
invalidatePackages(
/*alsoConfigs=*/ false); // Repository shuffling messes with toolchain labels.
setRuleContext(createRuleContext("@r//a:r"));
Label depLabel = (Label) ev.eval("ruleContext.attr.internal_dep.label");
assertThat(depLabel).isEqualTo(Label.parseAbsolute("@r//:dep", ImmutableMap.of()));
}
@Test
public void testExternalWorkspaceLoad() throws Exception {
scratch.file(
"/r1/BUILD",
"filegroup(name = 'test',",
" srcs = ['test.txt'],",
" visibility = ['//visibility:public'],",
")");
scratch.file("/r1/WORKSPACE");
scratch.file("/r2/BUILD", "exports_files(['test.bzl'])");
scratch.file(
"/r2/test.bzl",
"def macro(name, path):",
" native.local_repository(name = name, path = path)");
scratch.file("/r2/WORKSPACE");
scratch.file(
"/r2/other_test.bzl", "def other_macro(name, path):", " print(name + ': ' + path)");
scratch.file("BUILD");
scratch.overwriteFile(
"WORKSPACE",
new ImmutableList.Builder<String>()
.addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
.add("local_repository(name='r2', path='/r2')")
.add("load('@r2//:test.bzl', 'macro')")
.add("macro('r1', '/r1')")
.add("NEXT_NAME = 'r3'")
// We can still refer to r2 in other chunks:
.add("load('@r2//:other_test.bzl', 'other_macro')")
.add("macro(NEXT_NAME, '/r2')") // and we can still use macro outside of its chunk.
.build());
invalidatePackages(
/*alsoConfigs=*/ false); // Repository shuffling messes with toolchain labels.
assertThat(getConfiguredTarget("@r1//:test")).isNotNull();
}
@Test
@SuppressWarnings("unchecked")
public void testLoadBlockRepositoryRedefinition() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("/bar/WORKSPACE");
scratch.file("/bar/bar.txt");
scratch.file("/bar/BUILD", "filegroup(name = 'baz', srcs = ['bar.txt'])");
scratch.file("/baz/WORKSPACE");
scratch.file("/baz/baz.txt");
scratch.file("/baz/BUILD", "filegroup(name = 'baz', srcs = ['baz.txt'])");
scratch.overwriteFile(
"WORKSPACE",
new ImmutableList.Builder<String>()
.addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
.add("local_repository(name = 'foo', path = '/bar')")
.add("local_repository(name = 'foo', path = '/baz')")
.build());
invalidatePackages(
/*alsoConfigs=*/ false); // Repository shuffling messes with toolchain labels.
assertThat(
(List)
getConfiguredTargetAndData("@foo//:baz")
.getTarget()
.getAssociatedRule()
.getAttr("srcs"))
.contains(Label.parseAbsolute("@foo//:baz.txt", ImmutableMap.of()));
scratch.overwriteFile("BUILD");
scratch.overwriteFile("bar.bzl", "dummy = 1");
scratch.overwriteFile(
"WORKSPACE",
new ImmutableList.Builder<String>()
.addAll(analysisMock.getWorkspaceContents(mockToolsConfig))
.add("local_repository(name = 'foo', path = '/bar')")
.add("load('//:bar.bzl', 'dummy')")
.add("local_repository(name = 'foo', path = '/baz')")
.build());
invalidatePackages(/*alsoConfigs=*/ false); // Repository shuffling messes with toolchains.
assertThrows(Exception.class, () -> createRuleContext("@foo//:baz"));
assertContainsEvent(
"Cannot redefine repository after any load statement in the WORKSPACE file "
+ "(for repository 'foo')");
}
@Test
public void testAccessingRunfiles() throws Exception {
scratch.file("test/a.py");
scratch.file("test/b.py");
scratch.file("test/__init__.py");
scratch.file(
"test/rule.bzl",
"def _impl(ctx):",
" return",
"starlark_rule = rule(",
" implementation = _impl,",
" attrs = {",
" 'dep': attr.label(),",
" },",
")");
scratch.file(
"test/BUILD",
"load('//test:rule.bzl', 'starlark_rule')",
"py_binary(name = 'lib', srcs = ['lib.py', 'lib2.py'])",
"starlark_rule(name = 'foo', dep = ':lib')",
"py_binary(name = 'lib_with_init', srcs = ['lib_with_init.py', 'lib2.py', '__init__.py'])",
"starlark_rule(name = 'foo_with_init', dep = ':lib_with_init')");
setRuleContext(createRuleContext("//test:foo"));
Object filenames =
ev.eval("[f.short_path for f in ruleContext.attr.dep.default_runfiles.files.to_list()]");
assertThat(filenames).isInstanceOf(Sequence.class);
Sequence<?> filenamesList = (Sequence) filenames;
assertThat(filenamesList).containsAtLeast("test/lib.py", "test/lib2.py");
Object emptyFilenames =
ev.eval("ruleContext.attr.dep.default_runfiles.empty_filenames.to_list()");
assertThat(emptyFilenames).isInstanceOf(Sequence.class);
Sequence<?> emptyFilenamesList = (Sequence) emptyFilenames;
assertThat(emptyFilenamesList).containsExactly("test/__init__.py");
setRuleContext(createRuleContext("//test:foo_with_init"));
Object noEmptyFilenames =
ev.eval("ruleContext.attr.dep.default_runfiles.empty_filenames.to_list()");
assertThat(noEmptyFilenames).isInstanceOf(Sequence.class);
Sequence<?> noEmptyFilenamesList = (Sequence) noEmptyFilenames;
assertThat(noEmptyFilenamesList).isEmpty();
}
  // Verifies that symlinks set via ctx.runfiles(symlinks=...) are readable from Starlark through
  // data_runfiles.symlinks, using the legacy struct()-provider return syntax.
  @Test
  public void testAccessingRunfilesSymlinks_legacy() throws Exception {
    // Returning runfiles via struct() requires the legacy provider syntax to be enabled.
    setBuildLanguageOptions("--incompatible_disallow_struct_provider_syntax=false");
    scratch.file("test/a.py");
    scratch.file("test/b.py");
    scratch.file(
        "test/rule.bzl",
        "def symlink_impl(ctx):",
        "  symlinks = {",
        "    'symlink_' + f.short_path: f",
        "    for f in ctx.files.symlink",
        "  }",
        "  return struct(",
        "    runfiles = ctx.runfiles(",
        "      symlinks=symlinks,",
        "    )",
        "  )",
        "symlink_rule = rule(",
        "  implementation = symlink_impl,",
        "  attrs = {",
        "    'symlink': attr.label(allow_files=True),",
        "  },",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:rule.bzl', 'symlink_rule')",
        "symlink_rule(name = 'lib_with_symlink', symlink = ':a.py')",
        "sh_binary(",
        "  name = 'test_with_symlink',",
        "  srcs = ['test/b.py'],",
        "  data = [':lib_with_symlink'],",
        ")");
    setRuleContext(createRuleContext("//test:test_with_symlink"));
    // The symlink's 'path' is the dict key (with the 'symlink_' prefix added above)...
    Object symlinkPaths =
        ev.eval("[s.path for s in ruleContext.attr.data[0].data_runfiles.symlinks.to_list()]");
    assertThat(symlinkPaths).isInstanceOf(Sequence.class);
    Sequence<?> symlinkPathsList = (Sequence) symlinkPaths;
    assertThat(symlinkPathsList).containsExactly("symlink_test/a.py").inOrder();
    // ...while 'target_file' is the artifact the symlink points at.
    Object symlinkFilenames =
        ev.eval(
            "[s.target_file.short_path for s in"
                + " ruleContext.attr.data[0].data_runfiles.symlinks.to_list()]");
    assertThat(symlinkFilenames).isInstanceOf(Sequence.class);
    Sequence<?> symlinkFilenamesList = (Sequence) symlinkFilenames;
    assertThat(symlinkFilenamesList).containsExactly("test/a.py").inOrder();
  }
  // Same check as testAccessingRunfilesSymlinks_legacy, but the rule returns its runfiles via the
  // modern DefaultInfo provider instead of the legacy struct() syntax.
  @Test
  public void testAccessingRunfilesSymlinks() throws Exception {
    scratch.file("test/a.py");
    scratch.file("test/b.py");
    scratch.file(
        "test/rule.bzl",
        "def symlink_impl(ctx):",
        "  symlinks = {",
        "    'symlink_' + f.short_path: f",
        "    for f in ctx.files.symlink",
        "  }",
        "  return DefaultInfo(",
        "    runfiles = ctx.runfiles(",
        "      symlinks=symlinks,",
        "    )",
        "  )",
        "symlink_rule = rule(",
        "  implementation = symlink_impl,",
        "  attrs = {",
        "    'symlink': attr.label(allow_files=True),",
        "  },",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:rule.bzl', 'symlink_rule')",
        "symlink_rule(name = 'lib_with_symlink', symlink = ':a.py')",
        "sh_binary(",
        "  name = 'test_with_symlink',",
        "  srcs = ['test/b.py'],",
        "  data = [':lib_with_symlink'],",
        ")");
    setRuleContext(createRuleContext("//test:test_with_symlink"));
    // 'path' is the key used in the symlinks dict ('symlink_' prefix added by the rule impl).
    Object symlinkPaths =
        ev.eval("[s.path for s in ruleContext.attr.data[0].data_runfiles.symlinks.to_list()]");
    assertThat(symlinkPaths).isInstanceOf(Sequence.class);
    Sequence<?> symlinkPathsList = (Sequence) symlinkPaths;
    assertThat(symlinkPathsList).containsExactly("symlink_test/a.py").inOrder();
    // 'target_file' is the artifact the symlink resolves to.
    Object symlinkFilenames =
        ev.eval(
            "[s.target_file.short_path for s in"
                + " ruleContext.attr.data[0].data_runfiles.symlinks.to_list()]");
    assertThat(symlinkFilenames).isInstanceOf(Sequence.class);
    Sequence<?> symlinkFilenamesList = (Sequence) symlinkFilenames;
    assertThat(symlinkFilenamesList).containsExactly("test/a.py").inOrder();
  }
  // Verifies that root_symlinks set via ctx.runfiles(root_symlinks=...) are readable through
  // data_runfiles.root_symlinks, using the legacy struct()-provider return syntax.
  @Test
  public void testAccessingRunfilesRootSymlinks_legacy() throws Exception {
    // Returning runfiles via struct() requires the legacy provider syntax to be enabled.
    setBuildLanguageOptions("--incompatible_disallow_struct_provider_syntax=false");
    scratch.file("test/a.py");
    scratch.file("test/b.py");
    scratch.file(
        "test/rule.bzl",
        "def root_symlink_impl(ctx):",
        "  root_symlinks = {",
        "    'root_symlink_' + f.short_path: f",
        "    for f in ctx.files.root_symlink",
        "  }",
        "  return struct(",
        "    runfiles = ctx.runfiles(",
        "      root_symlinks=root_symlinks,",
        "    )",
        "  )",
        "root_symlink_rule = rule(",
        "  implementation = root_symlink_impl,",
        "  attrs = {",
        "    'root_symlink': attr.label(allow_files=True)",
        "  },",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:rule.bzl', 'root_symlink_rule')",
        "root_symlink_rule(name = 'lib_with_root_symlink', root_symlink = ':a.py')",
        "sh_binary(",
        "  name = 'test_with_root_symlink',",
        "  srcs = ['test/b.py'],",
        "  data = [':lib_with_root_symlink'],",
        ")");
    setRuleContext(createRuleContext("//test:test_with_root_symlink"));
    // 'path' is the dict key ('root_symlink_' prefix added by the rule impl)...
    Object rootSymlinkPaths =
        ev.eval("[s.path for s in ruleContext.attr.data[0].data_runfiles.root_symlinks.to_list()]");
    assertThat(rootSymlinkPaths).isInstanceOf(Sequence.class);
    Sequence<?> rootSymlinkPathsList = (Sequence) rootSymlinkPaths;
    assertThat(rootSymlinkPathsList).containsExactly("root_symlink_test/a.py").inOrder();
    // ...while 'target_file' is the artifact the symlink points at.
    Object rootSymlinkFilenames =
        ev.eval(
            "[s.target_file.short_path for s in"
                + " ruleContext.attr.data[0].data_runfiles.root_symlinks.to_list()]");
    assertThat(rootSymlinkFilenames).isInstanceOf(Sequence.class);
    Sequence<?> rootSymlinkFilenamesList = (Sequence) rootSymlinkFilenames;
    assertThat(rootSymlinkFilenamesList).containsExactly("test/a.py").inOrder();
  }
  // Same check as testAccessingRunfilesRootSymlinks_legacy, but the rule returns its runfiles via
  // the modern DefaultInfo provider instead of the legacy struct() syntax.
  @Test
  public void testAccessingRunfilesRootSymlinks() throws Exception {
    scratch.file("test/a.py");
    scratch.file("test/b.py");
    scratch.file(
        "test/rule.bzl",
        "def root_symlink_impl(ctx):",
        "  root_symlinks = {",
        "    'root_symlink_' + f.short_path: f",
        "    for f in ctx.files.root_symlink",
        "  }",
        "  return DefaultInfo(",
        "    runfiles = ctx.runfiles(",
        "      root_symlinks=root_symlinks,",
        "    )",
        "  )",
        "root_symlink_rule = rule(",
        "  implementation = root_symlink_impl,",
        "  attrs = {",
        "    'root_symlink': attr.label(allow_files=True)",
        "  },",
        ")");
    scratch.file(
        "test/BUILD",
        "load('//test:rule.bzl', 'root_symlink_rule')",
        "root_symlink_rule(name = 'lib_with_root_symlink', root_symlink = ':a.py')",
        "sh_binary(",
        "  name = 'test_with_root_symlink',",
        "  srcs = ['test/b.py'],",
        "  data = [':lib_with_root_symlink'],",
        ")");
    setRuleContext(createRuleContext("//test:test_with_root_symlink"));
    // 'path' is the dict key ('root_symlink_' prefix added by the rule impl)...
    Object rootSymlinkPaths =
        ev.eval("[s.path for s in ruleContext.attr.data[0].data_runfiles.root_symlinks.to_list()]");
    assertThat(rootSymlinkPaths).isInstanceOf(Sequence.class);
    Sequence<?> rootSymlinkPathsList = (Sequence) rootSymlinkPaths;
    assertThat(rootSymlinkPathsList).containsExactly("root_symlink_test/a.py").inOrder();
    // ...while 'target_file' is the artifact the symlink points at.
    Object rootSymlinkFilenames =
        ev.eval(
            "[s.target_file.short_path for s in"
                + " ruleContext.attr.data[0].data_runfiles.root_symlinks.to_list()]");
    assertThat(rootSymlinkFilenames).isInstanceOf(Sequence.class);
    Sequence<?> rootSymlinkFilenamesList = (Sequence) rootSymlinkFilenames;
    assertThat(rootSymlinkFilenamesList).containsExactly("test/a.py").inOrder();
  }
  // Verifies that a file from an external repository has a short_path that is relative to the
  // main workspace's runfiles root, i.e. prefixed with "../<repo name>/".
  @Test
  public void testExternalShortPath() throws Exception {
    scratch.file("/bar/WORKSPACE");
    scratch.file("/bar/bar.txt");
    scratch.file("/bar/BUILD", "exports_files(['bar.txt'])");
    FileSystemUtils.appendIsoLatin1(
        scratch.resolve("WORKSPACE"), "local_repository(name = 'foo', path = '/bar')");
    scratch.file(
        "test/BUILD",
        "genrule(",
        "  name = 'lib',",
        "  srcs = ['@foo//:bar.txt'],",
        "  cmd = 'echo $(SRCS) $@',",
        "  outs = ['lib.out'],",
        "  executable = 1,",
        ")");
    // The WORKSPACE file changed, so cached packages must be re-evaluated.
    invalidatePackages();
    StarlarkRuleContext ruleContext = createRuleContext("//test:lib");
    setRuleContext(ruleContext);
    String filename = ev.eval("ruleContext.files.srcs[0].short_path").toString();
    assertThat(filename).isEqualTo("../foo/bar.txt");
  }
// Borrowed from Scratch.java.
private static String linesAsString(String... lines) {
StringBuilder builder = new StringBuilder();
for (String line : lines) {
builder.append(line);
builder.append('\n');
}
return builder.toString();
}
  // The common structure of the following actions tests is a rule under test depended upon by
  // a testing rule, where the rule under test has one output and one caller-supplied action.
  //
  // Builds the .bzl source for the "undertest_rule": a rule with a single declared output 'out'
  // and a body formed from the caller-supplied action lines. When withStarlarkTestable is set,
  // the rule is marked _skylark_testable so its actions are visible via the Actions provider.
  private static String getSimpleUnderTestDefinition(
      boolean withStarlarkTestable, String[] actionLines) {
    return linesAsString(
        // TODO(b/153667498): Just passing fail to map_each parameter of Args.add_all does not work.
        "def fail_with_message(s):",
        "  fail(s)",
        "",
        "def _undertest_impl(ctx):",
        "  out = ctx.outputs.out",
        // Indent the caller's action lines so they sit inside _undertest_impl's body.
        "  " + Joiner.on("\n  ").join(actionLines),
        "undertest_rule = rule(",
        "  implementation = _undertest_impl,",
        "  outputs = {'out': '%{name}.txt'},",
        withStarlarkTestable ? "  _skylark_testable = True," : "",
        ")");
  }
  // Convenience overload: an undertest_rule that is Starlark-testable (actions inspectable).
  private static String getSimpleUnderTestDefinition(String... actionLines) {
    return getSimpleUnderTestDefinition(true, actionLines);
  }
  // Convenience overload: an undertest_rule WITHOUT _skylark_testable, for tests asserting that
  // action introspection is denied.
  private static String getSimpleNontestableUnderTestDefinition(String... actionLines) {
    return getSimpleUnderTestDefinition(false, actionLines);
  }
  // Starlark source for "testing_rule": a trivial rule whose only job is to depend on the rule
  // under test through its 'dep' attribute so tests can inspect the dependency's providers.
  private final String testingRuleDefinition =
      linesAsString(
          "def _testing_impl(ctx):",
          "  pass",
          "testing_rule = rule(",
          "  implementation = _testing_impl,",
          "  attrs = {'dep': attr.label()},",
          ")");
  // BUILD file instantiating one undertest_rule target and one testing_rule target that
  // depends on it; shared by the action-introspection tests.
  private final String simpleBuildDefinition =
      linesAsString(
          "load(':rules.bzl', 'undertest_rule', 'testing_rule')",
          "undertest_rule(",
          "    name = 'undertest',",
          ")",
          "testing_rule(",
          "    name = 'testing',",
          "    dep = ':undertest',",
          ")");
  // Verifies that a _skylark_testable dependency exposes its actions through the Actions
  // provider, keyed by output file.
  @Test
  public void testDependencyActionsProvider() throws Exception {
    scratch.file(
        "test/rules.bzl",
        getSimpleUnderTestDefinition(
            "ctx.actions.run_shell(outputs=[out], command='echo foo123 > ' + out.path)"),
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext)
    Object provider = ev.eval("ruleContext.attr.dep[Actions]");
    assertThat(provider).isInstanceOf(StructImpl.class);
    assertThat(((StructImpl) provider).getProvider()).isEqualTo(ActionsProvider.INSTANCE);
    ev.update("actions", provider);
    // by_file maps each declared output to the action that produces it; only one action exists.
    Map<?, ?> mapping = (Dict<?, ?>) ev.eval("actions.by_file");
    assertThat(mapping).hasSize(1);
    ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
    Object actionUnchecked = ev.eval("actions.by_file[file]");
    assertThat(actionUnchecked).isInstanceOf(ActionAnalysisMetadata.class);
  }
  // Verifies that without _skylark_testable the Actions provider is NOT attached, so indexing
  // the dependency with [Actions] fails.
  @Test
  public void testNoAccessToDependencyActionsWithoutStarlarkTest() throws Exception {
    // The missing provider surfaces as an analysis error; don't fail fast on reported events.
    reporter.removeHandler(failFastHandler);
    scratch.file(
        "test/rules.bzl",
        getSimpleNontestableUnderTestDefinition(
            "ctx.actions.run_shell(outputs=[out], command='echo foo123 > ' + out.path)"),
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    Exception e = assertThrows(Exception.class, () -> ev.eval("ruleContext.attr.dep[Actions]"));
    assertThat(e)
        .hasMessageThat()
        .contains(
            "<target //test:undertest> (rule 'undertest_rule') doesn't contain "
                + "declared provider 'Actions'");
  }
  // Exercises the generic Action interface fields (inputs, outputs, argv, content,
  // substitutions) across two chained actions: a file write feeding a shell copy.
  @Test
  public void testAbstractActionInterface() throws Exception {
    setBuildLanguageOptions(
        "--incompatible_disallow_struct_provider_syntax=false",
        "--incompatible_no_rule_outputs_param=false");
    scratch.file(
        "test/rules.bzl",
        "def _undertest_impl(ctx):",
        "  out1 = ctx.outputs.out1",
        "  out2 = ctx.outputs.out2",
        "  ctx.actions.write(output=out1, content='foo123')",
        "  ctx.actions.run_shell(outputs=[out2], inputs=[out1],",
        "                        command='cp ' + out1.path + ' ' + out2.path)",
        "  return struct(out1=out1, out2=out2)",
        "undertest_rule = rule(",
        "  implementation = _undertest_impl,",
        "  outputs = {'out1': '%{name}1.txt',",
        "             'out2': '%{name}2.txt'},",
        "  _skylark_testable = True,",
        ")",
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    ev.update("file1", ev.eval("ruleContext.attr.dep.out1"));
    ev.update("file2", ev.eval("ruleContext.attr.dep.out2"));
    ev.update("action1", ev.eval("ruleContext.attr.dep[Actions].by_file[file1]"))
    ev.update("action2", ev.eval("ruleContext.attr.dep[Actions].by_file[file2]"));
    assertThat(ev.eval("action1.inputs")).isInstanceOf(Depset.class);
    assertThat(ev.eval("action1.outputs")).isInstanceOf(Depset.class);
    // Fields that don't apply to an action kind evaluate to None: a write action has no argv,
    // a shell action has no content, and neither has template substitutions.
    assertThat(ev.eval("action1.argv")).isEqualTo(Starlark.NONE);
    assertThat(ev.eval("action2.content")).isEqualTo(Starlark.NONE);
    assertThat(ev.eval("action1.substitutions")).isEqualTo(Starlark.NONE);
    assertThat(ev.eval("action1.inputs.to_list()")).isEqualTo(ev.eval("[]"));
    assertThat(ev.eval("action1.outputs.to_list()")).isEqualTo(ev.eval("[file1]"));
    assertThat(ev.eval("action2.inputs.to_list()")).isEqualTo(ev.eval("[file1]"));
    assertThat(ev.eval("action2.outputs.to_list()")).isEqualTo(ev.eval("[file2]"));
  }
// For created_actions() tests, the "undertest" rule represents both the code under test and the
// Starlark user test code itself.
@Test
public void testCreatedActions() throws Exception {
setBuildLanguageOptions(
"--incompatible_disallow_struct_provider_syntax=false",
"--incompatible_no_rule_outputs_param=false");
// createRuleContext() gives us the context for a rule upon entry into its analysis function.
// But we need to inspect the result of calling created_actions() after the rule context has
// been modified by creating actions. So we'll call created_actions() from within the analysis
// function and pass it along as a provider.
scratch.file(
"test/rules.bzl",
"def _undertest_impl(ctx):",
" out1 = ctx.outputs.out1",
" out2 = ctx.outputs.out2",
" ctx.actions.run_shell(outputs=[out1], command='echo foo123 > ' + out1.path,",
" mnemonic='foo')",
" v = ctx.created_actions().by_file",
" ctx.actions.run_shell(outputs=[out2], command='echo bar123 > ' + out2.path)",
" return struct(v=v, out1=out1, out2=out2)",
"undertest_rule = rule(",
" implementation = _undertest_impl,",
" outputs = {'out1': '%{name}1.txt',",
" 'out2': '%{name}2.txt'},",
" _skylark_testable = True,",
")",
testingRuleDefinition);
scratch.file("test/BUILD", simpleBuildDefinition);
StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
setRuleContext(ruleContext);
Object mapUnchecked = ev.eval("ruleContext.attr.dep.v");
assertThat(mapUnchecked).isInstanceOf(Dict.class);
Map<?, ?> map = (Dict) mapUnchecked;
// Should only have the first action because created_actions() was called
// before the second action was created.
Object file = ev.eval("ruleContext.attr.dep.out1");
assertThat(map).hasSize(1);
assertThat(map).containsKey(file);
Object actionUnchecked = map.get(file);
assertThat(actionUnchecked).isInstanceOf(ActionAnalysisMetadata.class);
assertThat(((ActionAnalysisMetadata) actionUnchecked).getMnemonic()).isEqualTo("foo");
}
  // Verifies that created_actions() returns None (instead of a map) for a rule that is not
  // marked _skylark_testable.
  @Test
  public void testNoAccessToCreatedActionsWithoutStarlarkTest() throws Exception {
    scratch.file(
        "test/rules.bzl",
        getSimpleNontestableUnderTestDefinition(
            "ctx.actions.run_shell(outputs=[out], command='echo foo123 > ' + out.path)"));
    scratch.file(
        "test/BUILD",
        "load(':rules.bzl', 'undertest_rule')",
        "undertest_rule(",
        "    name = 'undertest',",
        ")");
    StarlarkRuleContext ruleContext = createRuleContext("//test:undertest");
    setRuleContext(ruleContext);
    Object result = ev.eval("ruleContext.created_actions()");
    assertThat(result).isEqualTo(Starlark.NONE);
  }
@Test
public void testSpawnActionInterface() throws Exception {
scratch.file(
"test/rules.bzl",
getSimpleUnderTestDefinition(
"ctx.actions.run_shell(outputs=[out], command='echo foo123 > ' + out.path)"),
testingRuleDefinition);
scratch.file("test/BUILD", simpleBuildDefinition);
StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
setRuleContext(ruleContext);
ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
ev.update("action", ev.eval("ruleContext.attr.dep[Actions].by_file[file]"));
assertThat(ev.eval("type(action)")).isEqualTo("Action");
Object argvUnchecked = ev.eval("action.argv");
assertThat(argvUnchecked).isInstanceOf(StarlarkList.class);
StarlarkList<?> argv = (StarlarkList) argvUnchecked;
assertThat(argv).hasSize(3);
assertThat(argv.isImmutable()).isTrue();
Object result = ev.eval("action.argv[2].startswith('echo foo123')");
assertThat((Boolean) result).isTrue();
}
@Test
public void testRunShellUsesHelperScriptForLongCommand() throws Exception {
setBuildLanguageOptions(
"--incompatible_disallow_struct_provider_syntax=false",
"--incompatible_no_rule_outputs_param=false");
// createRuleContext() gives us the context for a rule upon entry into its analysis function.
// But we need to inspect the result of calling created_actions() after the rule context has
// been modified by creating actions. So we'll call created_actions() from within the analysis
// function and pass it along as a provider.
scratch.file(
"test/rules.bzl",
"def _undertest_impl(ctx):",
" out1 = ctx.outputs.out1",
" out2 = ctx.outputs.out2",
" out3 = ctx.outputs.out3",
" ctx.actions.run_shell(outputs=[out1],",
" command='( %s ; ) > $1' % (",
" ' ; '.join(['echo xxx%d' % i for i in range(0, 7000)])),",
" mnemonic='mnemonic1',",
" arguments=[out1.path])",
" ctx.actions.run_shell(outputs=[out2],",
" command='echo foo > ' + out2.path,",
" mnemonic='mnemonic2')",
" ctx.actions.run_shell(outputs=[out3],",
" command='( %s ; ) > $1' % (",
" ' ; '.join(['echo yyy%d' % i for i in range(0, 7000)])),",
" mnemonic='mnemonic3',",
" arguments=[out3.path])",
" v = ctx.created_actions().by_file",
" return struct(v=v, out1=out1, out2=out2, out3=out3)",
"",
"undertest_rule = rule(",
" implementation=_undertest_impl,",
" outputs={'out1': '%{name}1.txt',",
" 'out2': '%{name}2.txt',",
" 'out3': '%{name}3.txt'},",
" _skylark_testable = True,",
")",
testingRuleDefinition);
scratch.file("test/BUILD", simpleBuildDefinition);
StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
setRuleContext(ruleContext);
Object mapUnchecked = ev.eval("ruleContext.attr.dep.v");
assertThat(mapUnchecked).isInstanceOf(Dict.class);
Map<?, ?> map = (Dict) mapUnchecked;
Object out1 = ev.eval("ruleContext.attr.dep.out1");
Object out2 = ev.eval("ruleContext.attr.dep.out2");
Object out3 = ev.eval("ruleContext.attr.dep.out3");
// 5 actions in total: 3 SpawnActions and 2 FileWriteActions for the two long commands.
assertThat(map).hasSize(5);
assertThat(map).containsKey(out1);
assertThat(map).containsKey(out2);
assertThat(map).containsKey(out3);
Object action1Unchecked = map.get(out1);
Object action2Unchecked = map.get(out2);
Object action3Unchecked = map.get(out3);
assertThat(action1Unchecked).isInstanceOf(ActionAnalysisMetadata.class);
assertThat(action2Unchecked).isInstanceOf(ActionAnalysisMetadata.class);
assertThat(action3Unchecked).isInstanceOf(ActionAnalysisMetadata.class);
ActionAnalysisMetadata spawnAction1 = (ActionAnalysisMetadata) action1Unchecked;
ActionAnalysisMetadata spawnAction2 = (ActionAnalysisMetadata) action2Unchecked;
ActionAnalysisMetadata spawnAction3 = (ActionAnalysisMetadata) action3Unchecked;
assertThat(spawnAction1.getMnemonic()).isEqualTo("mnemonic1");
assertThat(spawnAction2.getMnemonic()).isEqualTo("mnemonic2");
assertThat(spawnAction3.getMnemonic()).isEqualTo("mnemonic3");
Artifact helper1 =
Iterables.getOnlyElement(
Iterables.filter(
spawnAction1.getInputs().toList(),
a -> a.getFilename().equals("undertest.run_shell_0.sh")));
assertThat(
Iterables.filter(
spawnAction2.getInputs().toList(), a -> a.getFilename().contains("run_shell_")))
.isEmpty();
Artifact helper3 =
Iterables.getOnlyElement(
Iterables.filter(
spawnAction3.getInputs().toList(),
a -> a.getFilename().equals("undertest.run_shell_2.sh")));
assertThat(map).containsKey(helper1);
assertThat(map).containsKey(helper3);
Object action4Unchecked = map.get(helper1);
Object action5Unchecked = map.get(helper3);
assertThat(action4Unchecked).isInstanceOf(FileWriteAction.class);
assertThat(action5Unchecked).isInstanceOf(FileWriteAction.class);
FileWriteAction fileWriteAction1 = (FileWriteAction) action4Unchecked;
FileWriteAction fileWriteAction2 = (FileWriteAction) action5Unchecked;
assertThat(fileWriteAction1.getFileContents()).contains("echo xxx6999 ;");
assertThat(fileWriteAction2.getFileContents()).contains("echo yyy6999 ;");
}
  // Verifies that a mnemonic containing non-alphanumeric characters is rejected at analysis time
  // with a descriptive error.
  @Test
  public void testInvalidMnemonic() throws Exception {
    scratch.file(
        "test/rule.bzl",
        "def _impl(ctx):",
        "  out = ctx.actions.declare_file('f')",
        "  ctx.actions.run_shell(",
        "      outputs=[out], command='false', mnemonic='@@@')",
        "r = rule(implementation = _impl)");
    scratch.file("test/BUILD", "load('//test:rule.bzl', 'r')", "r(name = 'target')");
    // The error is reported as an event rather than thrown; don't fail fast on it.
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//test:target");
    assertContainsEvent(
        "mnemonic must only contain letters and/or digits, and have non-zero length, was: \"@@@\"");
  }
  // Verifies the Starlark view of a file-write action: its 'content' field is the literal string
  // passed to ctx.actions.write().
  @Test
  public void testFileWriteActionInterface() throws Exception {
    scratch.file(
        "test/rules.bzl",
        getSimpleUnderTestDefinition("ctx.actions.write(output=out, content='foo123')"),
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
    ev.update("action", ev.eval("ruleContext.attr.dep[Actions].by_file[file]"));
    assertThat(ev.eval("type(action)")).isEqualTo("Action");
    Object contentUnchecked = ev.eval("action.content");
    assertThat(contentUnchecked).isInstanceOf(String.class);
    assertThat(contentUnchecked).isEqualTo("foo123");
  }
  // Verifies that when ctx.actions.write() is given an Args object, the action's 'content'
  // reflects the expanded args (Args expansion appends a trailing newline).
  @Test
  public void testFileWriteActionInterfaceWithArgs() throws Exception {
    scratch.file(
        "test/rules.bzl",
        getSimpleUnderTestDefinition(
            "args = ctx.actions.args()",
            "args.add('foo123')",
            "ctx.actions.write(output=out, content=args)"),
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
    ev.update("action", ev.eval("ruleContext.attr.dep[Actions].by_file[file]"));
    assertThat(ev.eval("type(action)")).isEqualTo("Action");
    Object contentUnchecked = ev.eval("action.content");
    assertThat(contentUnchecked).isInstanceOf(String.class);
    // Args content ends the file with a newline
    assertThat(contentUnchecked).isEqualTo("foo123\n");
  }
  // Verifies that 'content' is None when the Args include a tree artifact (directory), since
  // directory expansion only happens at execution time, not analysis time.
  @Test
  public void testFileWriteActionInterfaceWithArgsContainingTreeArtifact() throws Exception {
    scratch.file(
        "test/rules.bzl",
        getSimpleUnderTestDefinition(
            "directory = ctx.actions.declare_directory('dir')",
            // NOTE(review): '{out}' is never substituted here, so this command would mkdir a
            // literal '{out}' if executed — harmless for this analysis-time test, but looks
            // unintended; confirm.
            "ctx.actions.run_shell(",
            "  outputs = [directory],",
            "  command = 'mkdir {out}'",
            ")",
            "args = ctx.actions.args()",
            "args.add_all([directory])",
            "ctx.actions.write(output=out, content=args)"),
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
    ev.update("action", ev.eval("ruleContext.attr.dep[Actions].by_file[file]"));
    assertThat(ev.eval("type(action)")).isEqualTo("Action");
    // If the Args contain a directory File that needs to be expanded, the contents are not known
    // at analysis time.
    Object contentUnchecked = ev.eval("action.content");
    assertThat(contentUnchecked).isEqualTo(Starlark.NONE);
  }
  // Verifies that an error raised while expanding Args (here, a map_each callback that fails)
  // propagates as an EvalException when reading 'action.content'.
  @Test
  public void testFileWriteActionInterfaceWithArgsExpansionError() throws Exception {
    scratch.file(
        "test/rules.bzl",
        getSimpleUnderTestDefinition(
            "args = ctx.actions.args()",
            "args.add_all(['args expansion error message'], map_each = fail_with_message)",
            "ctx.actions.write(output=out, content=args)"),
        testingRuleDefinition);
    scratch.file("test/BUILD", simpleBuildDefinition);
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
    ev.update("action", ev.eval("ruleContext.attr.dep[Actions].by_file[file]"));
    assertThat(ev.eval("type(action)")).isEqualTo("Action");
    // If there's a failure when expanding Args, that error message is propagated.
    EvalException e =
        assertThrows(
            "Should be an error expanding action.content",
            EvalException.class,
            () -> ev.eval("action.content"));
    // e has a trivial stack (just <expr>, aka action.content), but its message
    // contains a stack that has evidently been flattened into a string and passed
    // through an event reporter as an ERROR at :7:15 (?).
    // Ideally we would remove some of this cruft.
    // ```
    // Error expanding command line:
    //
    //     /workspace/test/rules.bzl:7:15: Traceback (most recent call last):
    //         File "/workspace/test/rules.bzl", line 2, column 9, in fail_with_message
    //     Error in fail: args expansion error message
    // ```
    // stack=[fail_with_message@rules.bzl:2, fail@<builtin>]
    assertThat(e).hasMessageThat().contains("Error expanding command line:");
    assertThat(e)
        .hasMessageThat()
        .contains("File \"/workspace/test/rules.bzl\", line 2, column 9, in fail_with_message");
    assertThat(e).hasMessageThat().contains("Error in fail: args expansion error message");
  }
  // Verifies the Starlark view of a template-expansion action: 'content' is the template with
  // substitutions applied, and 'substitutions' is the mapping that was supplied.
  @Test
  public void testTemplateExpansionActionInterface() throws Exception {
    scratch.file(
        "test/rules.bzl",
        "def _undertest_impl(ctx):",
        "  out = ctx.outputs.out",
        "  ctx.actions.expand_template(output=out,",
        "                              template=ctx.file.template, substitutions={'a': 'b'})",
        "undertest_rule = rule(",
        "  implementation = _undertest_impl,",
        "  outputs = {'out': '%{name}.txt'},",
        "  attrs = {'template': attr.label(allow_single_file=True)},",
        "  _skylark_testable = True,",
        ")",
        testingRuleDefinition);
    scratch.file("test/template.txt", "aaaaa", "bcdef");
    scratch.file(
        "test/BUILD",
        "load(':rules.bzl', 'undertest_rule', 'testing_rule')",
        "undertest_rule(",
        "    name = 'undertest',",
        "    template = ':template.txt',",
        ")",
        "testing_rule(",
        "    name = 'testing',",
        "    dep = ':undertest',",
        ")");
    StarlarkRuleContext ruleContext = createRuleContext("//test:testing");
    setRuleContext(ruleContext);
    ev.update("file", ev.eval("ruleContext.attr.dep.files.to_list()[0]"));
    ev.update("action", ev.eval("ruleContext.attr.dep[Actions].by_file[file]"));
    assertThat(ev.eval("type(action)")).isEqualTo("Action");
    Object contentUnchecked = ev.eval("action.content");
    assertThat(contentUnchecked).isInstanceOf(String.class);
    // Every 'a' in the template is replaced by 'b'.
    assertThat(contentUnchecked).isEqualTo("bbbbb\nbcdef\n");
    Object substitutionsUnchecked = ev.eval("action.substitutions");
    assertThat(substitutionsUnchecked).isInstanceOf(Dict.class);
    assertThat(substitutionsUnchecked).isEqualTo(Dict.of((Mutability) null, "a", "b"));
  }
  // Shared fixture for the coverage_instrumented() tests: //test:foo (cc_library) depending on
  // //test:bar (cc_library).
  private void setUpCoverageInstrumentedTest() throws Exception {
    scratch.file(
        "test/BUILD",
        "cc_library(",
        "  name = 'foo',",
        "  srcs = ['foo.cc'],",
        "  deps = [':bar'],",
        ")",
        "cc_library(",
        "  name = 'bar',",
        "  srcs = ['bar.cc'],",
        ")");
  }
@Test
public void testCoverageInstrumentedCoverageDisabled() throws Exception {
setUpCoverageInstrumentedTest();
useConfiguration("--nocollect_code_coverage", "--instrumentation_filter=.");
StarlarkRuleContext ruleContext = createRuleContext("//test:foo");
setRuleContext(ruleContext);
Object result = ev.eval("ruleContext.coverage_instrumented()");
assertThat((Boolean) result).isFalse();
}
  // coverage_instrumented(target) must be false when the argument is a source file label rather
  // than a rule target.
  @Test
  public void testCoverageInstrumentedFalseForSourceFileLabel() throws Exception {
    setUpCoverageInstrumentedTest();
    useConfiguration("--collect_code_coverage", "--instrumentation_filter=.");
    setRuleContext(createRuleContext("//test:foo"));
    Object result = ev.eval("ruleContext.coverage_instrumented(ruleContext.attr.srcs[0])");
    assertThat((Boolean) result).isFalse();
  }
  // coverage_instrumented() must be false for a target (//test:bar) that falls outside the
  // instrumentation filter (:foo).
  @Test
  public void testCoverageInstrumentedDoesNotMatchFilter() throws Exception {
    setUpCoverageInstrumentedTest();
    useConfiguration("--collect_code_coverage", "--instrumentation_filter=:foo");
    setRuleContext(createRuleContext("//test:bar"));
    Object result = ev.eval("ruleContext.coverage_instrumented()");
    assertThat((Boolean) result).isFalse();
  }
  // coverage_instrumented() must be true for a target (//test:foo) that matches the
  // instrumentation filter while coverage is enabled.
  @Test
  public void testCoverageInstrumentedMatchesFilter() throws Exception {
    setUpCoverageInstrumentedTest();
    useConfiguration("--collect_code_coverage", "--instrumentation_filter=:foo");
    setRuleContext(createRuleContext("//test:foo"));
    Object result = ev.eval("ruleContext.coverage_instrumented()");
    assertThat((Boolean) result).isTrue();
  }
  // coverage_instrumented(dep) must evaluate the filter against the DEPENDENCY's label, not the
  // current rule's: //test:bar does not match :foo even though the current rule does.
  @Test
  public void testCoverageInstrumentedDoesNotMatchFilterNonDefaultLabel() throws Exception {
    setUpCoverageInstrumentedTest();
    useConfiguration("--collect_code_coverage", "--instrumentation_filter=:foo");
    setRuleContext(createRuleContext("//test:foo"));
    // //test:bar does not match :foo, though //test:foo would.
    Object result = ev.eval("ruleContext.coverage_instrumented(ruleContext.attr.deps[0])");
    assertThat((Boolean) result).isFalse();
  }
  // Complement of the previous test: coverage_instrumented(dep) is true when the dependency's
  // label (//test:bar) matches the filter even though the current rule's does not.
  @Test
  public void testCoverageInstrumentedMatchesFilterNonDefaultLabel() throws Exception {
    setUpCoverageInstrumentedTest();
    useConfiguration("--collect_code_coverage", "--instrumentation_filter=:bar");
    setRuleContext(createRuleContext("//test:foo"));
    // //test:bar does match :bar, though //test:foo would not.
    Object result = ev.eval("ruleContext.coverage_instrumented(ruleContext.attr.deps[0])");
    assertThat((Boolean) result).isTrue();
  }
  // A list of attributes and methods ctx objects have. Each entry is a Starlark expression
  // evaluated against a frozen rule context ("ctx.<entry>") by the tests below to verify that
  // access outside the owning rule's implementation function is rejected.
  private final List<String> ctxAttributes =
      ImmutableList.of(
          "attr",
          "split_attr",
          "executable",
          "file",
          "files",
          "workspace_name",
          "label",
          "fragments",
          "host_fragments",
          "configuration",
          "host_configuration",
          "coverage_instrumented(dep)",
          "features",
          "bin_dir",
          "genfiles_dir",
          "outputs",
          "rule",
          "aspect_ids",
          "var",
          "tokenize('foo')",
          "expand('foo', [], Label('//test:main'))",
          "new_file('foo.txt')",
          "new_file(file, 'foo.txt')",
          "actions.declare_file('foo.txt')",
          "actions.declare_file('foo.txt', sibling = file)",
          "actions.declare_directory('foo.txt')",
          "actions.declare_directory('foo.txt', sibling = file)",
          "actions.do_nothing(mnemonic = 'foo', inputs = [file])",
          "actions.expand_template(template = file, output = file, substitutions = {})",
          "actions.run(executable = file, outputs = [file])",
          "actions.run_shell(command = 'foo', outputs = [file])",
          "actions.write(file, 'foo')",
          "check_placeholders('foo', [])",
          "runfiles()",
          "resolve_command(command = 'foo')",
          "resolve_tools()");
  // Verifies that every ctx attribute/method is inaccessible once the rule context has been
  // frozen, i.e. when a dependency's ctx (smuggled out via a provider) is used from another
  // rule's implementation function.
  @Test
  public void testFrozenRuleContextHasInaccessibleAttributes() throws Exception {
    // new_file(...) entries in ctxAttributes need the deprecated actions API.
    setBuildLanguageOptions("--incompatible_new_actions_api=false")
    scratch.file(
        "test/BUILD",
        "load('//test:rules.bzl', 'main_rule', 'dep_rule')",
        "dep_rule(name = 'dep')",
        "main_rule(name = 'main', deps = [':dep'])");
    scratch.file("test/rules.bzl");
    // Re-generate rules.bzl once per attribute and re-run analysis each time.
    for (String attribute : ctxAttributes) {
      scratch.overwriteFile(
          "test/rules.bzl",
          "load('//myinfo:myinfo.bzl', 'MyInfo')",
          "def _main_impl(ctx):",
          "  dep = ctx.attr.deps[0]",
          "  file = ctx.outputs.file",
          "  foo = dep[MyInfo].dep_ctx." + attribute,
          "main_rule = rule(",
          "  implementation = _main_impl,",
          "  attrs = {",
          "    'deps': attr.label_list()",
          "  },",
          "  outputs = {'file': 'output.txt'},",
          ")",
          "def _dep_impl(ctx):",
          "  return MyInfo(dep_ctx = ctx)",
          "dep_rule = rule(implementation = _dep_impl)");
      invalidatePackages();
      AssertionError e =
          assertThrows(
              "Should have been unable to access dep_ctx." + attribute,
              AssertionError.class,
              () -> getConfiguredTarget("//test:main"));
      // Strip any '(...)' call suffix so the message check uses just the member name.
      assertThat(e)
          .hasMessageThat()
          .contains(
              "cannot access field or method '"
                  + Iterables.get(Splitter.on('(').split(attribute), 0)
                  + "' of rule context for '//test:dep' outside of its own rule implementation "
                  + "function");
    }
  }
/**
 * Verifies that a dependency's rule context, leaked through an aspect via
 * {@code struct(ctx = ctx, rule = ctx.rule)}, is frozen: reading any {@code ctx.*} attribute
 * (from {@code ctxAttributes}) or the listed {@code ctx.rule.*} attributes from another
 * target's analysis must fail with the "cannot access field or method" error.
 */
@Test
public void testFrozenRuleContextForAspectsHasInaccessibleAttributes() throws Exception {
  List<String> attributes = new ArrayList<>();
  attributes.addAll(ctxAttributes);
  attributes.addAll(
      ImmutableList.of("rule.attr", "rule.executable", "rule.file", "rule.files", "rule.kind"));
  // Dependency chain dep <- mid <- main; the aspect propagates along 'deps'.
  scratch.file(
      "test/BUILD",
      "load('//test:rules.bzl', 'my_rule')",
      "my_rule(name = 'dep')",
      "my_rule(name = 'mid', deps = [':dep'])",
      "my_rule(name = 'main', deps = [':mid'])");
  scratch.file("test/rules.bzl");
  // One analysis round per attribute: rewrite the .bzl so the aspect reads exactly that
  // attribute off the dependency's (frozen) context.
  for (String attribute : attributes) {
    scratch.overwriteFile(
        "test/rules.bzl",
        "def _rule_impl(ctx):",
        " pass",
        "def _aspect_impl(target, ctx):",
        " if ctx.rule.attr.deps:",
        " dep = ctx.rule.attr.deps[0]",
        " file = ctx.actions.declare_file('file.txt')",
        // 'rule.*' attributes are read directly off the dep; plain ctx attributes go via dep.ctx.
        " foo = dep." + (attribute.startsWith("rule.") ? "" : "ctx.") + attribute,
        " return struct(ctx = ctx, rule=ctx.rule)",
        "MyAspect = aspect(implementation=_aspect_impl)",
        "my_rule = rule(",
        " implementation = _rule_impl,",
        " attrs = {",
        " 'deps': attr.label_list(aspects = [MyAspect])",
        " },",
        ")");
    setBuildLanguageOptions("--incompatible_new_actions_api=false");
    invalidatePackages();
    AssertionError e =
        assertThrows(
            "Should have been unable to access dep." + attribute,
            AssertionError.class,
            () -> getConfiguredTarget("//test:main"));
    // Typical value of e.getMessage():
    //
    // ERROR /workspace/test/BUILD:3:8: \
    //     in //test:rules.bzl%MyAspect aspect on my_rule rule //test:mid:
    // Traceback (most recent call last):
    //      File "/workspace/test/BUILD", line 3, column 8, in //test:rules.bzl%MyAspect
    //      File "/workspace/test/rules.bzl", line 7, column 18, in _aspect_impl
    // Error: cannot access field or method 'attr' of rule context for '//test:dep' \
    // outside of its own rule implementation function
    assertThat(e)
        .hasMessageThat()
        .contains(
            "cannot access field or method '"
                + Iterables.get(Splitter.on('(').split(attribute), 0)
                + "' of rule context for '//test:dep' outside of its own rule implementation "
                + "function");
  }
}
// The two deprecated ctx.new_file(...) overloads exercised by testIncompatibleNewActionsApi.
private static final List<String> deprecatedActionsApi =
    ImmutableList.of("new_file('foo.txt')", "new_file(file, 'foo.txt')");

/**
 * Verifies that with {@code --incompatible_new_actions_api=true} each deprecated
 * {@code ctx.new_file} form fails analysis with a message pointing the user at the flag.
 */
@Test
public void testIncompatibleNewActionsApi() throws Exception {
  scratch.file("test/BUILD", "load('//test:rules.bzl', 'main_rule')", "main_rule(name = 'main')");
  scratch.file("test/rules.bzl");
  // One analysis round per deprecated API call.
  for (String actionApi : deprecatedActionsApi) {
    scratch.overwriteFile(
        "test/rules.bzl",
        "def _main_impl(ctx):",
        " file = ctx.outputs.file",
        " foo = ctx." + actionApi,
        "main_rule = rule(",
        " implementation = _main_impl,",
        " attrs = {",
        " 'deps': attr.label_list()",
        " },",
        " outputs = {'file': 'output.txt'},",
        ")");
    setBuildLanguageOptions("--incompatible_new_actions_api=true");
    invalidatePackages();
    AssertionError e =
        assertThrows(
            "Should have reported deprecation error for: " + actionApi,
            AssertionError.class,
            () -> getConfiguredTarget("//test:main"));
    assertWithMessage(actionApi + " reported wrong error")
        .that(e)
        .hasMessageThat()
        .contains("Use --incompatible_new_actions_api=false");
  }
}
/**
 * Verifies that a {@code string_dict} attribute preserves the BUILD-file insertion order of
 * its keys when read via {@code ctx.attr.value.keys()} in Starlark.
 */
@Test
public void testMapAttributeOrdering() throws Exception {
  scratch.file(
      "a/a.bzl",
      "key_provider = provider(fields=['keys'])",
      "def _impl(ctx):",
      " return [key_provider(keys=ctx.attr.value.keys())]",
      "a = rule(implementation=_impl, attrs={'value': attr.string_dict()})");
  scratch.file(
      "a/BUILD",
      "load(':a.bzl', 'a')",
      "a(name='a', value={'c': 'c', 'b': 'b', 'a': 'a', 'f': 'f', 'e': 'e', 'd': 'd'})");
  ConfiguredTarget a = getConfiguredTarget("//a");
  StarlarkProvider.Key key =
      new StarlarkProvider.Key(
          Label.parseAbsolute("//a:a.bzl", ImmutableMap.of()), "key_provider");
  StarlarkInfo keyInfo = (StarlarkInfo) a.get(key);
  Sequence<?> keys = (Sequence) keyInfo.getValue("keys");
  // Deliberately unsorted keys: result must match the literal's insertion order exactly.
  assertThat(keys).containsExactly("c", "b", "a", "f", "e", "d").inOrder();
}
/**
 * Writes an int-typed build-setting rule ('int_flag', default 42) plus a BUILD file
 * instantiating it; shared setup for the build_setting_value tests below.
 */
private void writeIntFlagBuildSettingFiles() throws Exception {
  scratch.file(
      "test/build_setting.bzl",
      "BuildSettingInfo = provider(fields = ['name', 'value'])",
      "def _impl(ctx):",
      " return [BuildSettingInfo(name = ctx.attr.name, value = ctx.build_setting_value)]",
      "",
      "int_flag = rule(",
      " implementation = _impl,",
      " build_setting = config.int(flag = True),",
      ")");
  scratch.file(
      "test/BUILD",
      "load('//test:build_setting.bzl', 'int_flag')",
      "int_flag(name = 'int_flag', build_setting_default = 42)");
}

/** ctx.build_setting_value returns the explicitly configured value (24), not the default. */
@Test
public void testBuildSettingValue_explicitlySet() throws Exception {
  writeIntFlagBuildSettingFiles();
  useConfiguration(ImmutableMap.of("//test:int_flag", 24));
  ConfiguredTarget buildSetting = getConfiguredTarget("//test:int_flag");
  Provider.Key key =
      new StarlarkProvider.Key(
          Label.create(buildSetting.getLabel().getPackageIdentifier(), "build_setting.bzl"),
          "BuildSettingInfo");
  StructImpl buildSettingInfo = (StructImpl) buildSetting.get(key);
  assertThat(buildSettingInfo.getValue("value")).isEqualTo(StarlarkInt.of(24));
}

/** Without an explicit setting, ctx.build_setting_value falls back to build_setting_default (42). */
@Test
public void testBuildSettingValue_defaultFallback() throws Exception {
  writeIntFlagBuildSettingFiles();
  ConfiguredTarget buildSetting = getConfiguredTarget("//test:int_flag");
  Provider.Key key =
      new StarlarkProvider.Key(
          Label.create(buildSetting.getLabel().getPackageIdentifier(), "build_setting.bzl"),
          "BuildSettingInfo");
  StructImpl buildSettingInfo = (StructImpl) buildSetting.get(key);
  assertThat(buildSettingInfo.getValue("value")).isEqualTo(StarlarkInt.of(42));
}

/**
 * Reading ctx.build_setting_value from a rule that is not a build setting is an analysis
 * error naming the offending target.
 */
@Test
public void testBuildSettingValue_nonBuildSettingRule() throws Exception {
  scratch.file(
      "test/rule.bzl",
      "def _impl(ctx):",
      " foo = ctx.build_setting_value",
      " return []",
      "non_build_setting = rule(implementation = _impl)");
  scratch.file(
      "test/BUILD",
      "load('//test:rule.bzl', 'non_build_setting')",
      "non_build_setting(name = 'my_non_build_setting')");
  // The failure is reported as an event rather than thrown, so disable fail-fast first.
  reporter.removeHandler(failFastHandler);
  getConfiguredTarget("//test:my_non_build_setting");
  assertContainsEvent(
      "attempting to access 'build_setting_value' of non-build setting "
          + "//test:my_non_build_setting");
}
/**
 * Writes a toolchain rule (test_toolchain, exporting ToolchainInfo(value)), a consuming rule
 * (test_rule, exposing the resolved toolchain's value through the 'result' provider), the
 * //rule:toolchain_type definition, and two toolchains: 'foo' (target-compatible with
 * //platform:constraint_1) and 'bar' (target-compatible with //platform:constraint_2).
 */
private void createToolchains() throws Exception {
  scratch.file(
      "rule/test_toolchain.bzl",
      "def _impl(ctx):",
      " value = ctx.attr.value",
      " toolchain = platform_common.ToolchainInfo(value = value)",
      " return [toolchain]",
      "test_toolchain = rule(",
      " implementation = _impl,",
      " attrs = {'value': attr.string()},",
      ")");
  scratch.file(
      "rule/test_rule.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " toolchain = ctx.toolchains['//rule:toolchain_type']",
      " return [result(",
      " value_from_toolchain = toolchain.value,",
      " )]",
      "test_rule = rule(",
      " implementation = _impl,",
      " toolchains = ['//rule:toolchain_type'],",
      ")");
  scratch.file(
      "rule/BUILD",
      // NOTE(review): 'test_toolchain/bzl' looks like a typo for 'test_toolchain.bzl' — confirm
      // whether anything depends on this exports_files entry.
      "exports_files(['test_toolchain/bzl', 'test_rule.bzl'])",
      "toolchain_type(name = 'toolchain_type')");
  scratch.file(
      "toolchain/BUILD",
      "load('//rule:test_toolchain.bzl', 'test_toolchain')",
      "test_toolchain(",
      " name = 'foo',",
      " value = 'foo',",
      ")",
      "toolchain(",
      " name = 'foo_toolchain',",
      " toolchain_type = '//rule:toolchain_type',",
      " target_compatible_with = ['//platform:constraint_1'],",
      " toolchain = ':foo',",
      ")",
      "test_toolchain(",
      " name = 'bar',",
      " value = 'bar',",
      ")",
      "toolchain(",
      " name = 'bar_toolchain',",
      " toolchain_type = '//rule:toolchain_type',",
      " target_compatible_with = ['//platform:constraint_2'],",
      " toolchain = ':bar',",
      ")");
}

/**
 * Writes //platform: two mutually exclusive constraint values on one setting, and one
 * platform per constraint (platform_1 -> constraint_1, platform_2 -> constraint_2).
 */
private void createPlatforms() throws Exception {
  scratch.file(
      "platform/BUILD",
      "constraint_setting(name = 'setting')",
      "constraint_value(",
      " name = 'constraint_1',",
      " constraint_setting = ':setting',",
      ")",
      "constraint_value(",
      " name = 'constraint_2',",
      " constraint_setting = ':setting',",
      ")",
      "platform(",
      " name = 'platform_1',",
      " constraint_values = [':constraint_1'],",
      ")",
      "platform(",
      " name = 'platform_2',",
      " constraint_values = [':constraint_2'],",
      ")");
}
/**
 * Analyzes {@code targetName} and returns the 'value_from_toolchain' field of its 'result'
 * provider (declared in //rule:test_rule.bzl); fails the test if the provider is absent.
 */
private String getToolchainResult(String targetName) throws Exception {
  ConfiguredTarget myRuleTarget = getConfiguredTarget(targetName);
  StructImpl info =
      (StructImpl)
          myRuleTarget.get(
              new StarlarkProvider.Key(
                  Label.parseAbsolute("//rule:test_rule.bzl", ImmutableMap.of()), "result"));
  assertThat(info).isNotNull();
  return (String) info.getValue("value_from_toolchain");
}

/**
 * End-to-end toolchain resolution: with both toolchains registered, the resolved toolchain
 * follows the target platform (platform_1 -> 'foo', platform_2 -> 'bar').
 */
@Test
public void testToolchains() throws Exception {
  createToolchains();
  createPlatforms();
  scratch.file(
      "demo/BUILD",
      "load('//rule:test_rule.bzl', 'test_rule')",
      "test_rule(",
      " name = 'demo',",
      ")");
  useConfiguration(
      "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:bar_toolchain",
      "--platforms=//platform:platform_1");
  String value = getToolchainResult("//demo");
  assertThat(value).isEqualTo("foo");
  // Re-test with the other platform.
  useConfiguration(
      "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:bar_toolchain",
      "--platforms=//platform:platform_2");
  value = getToolchainResult("//demo");
  assertThat(value).isEqualTo("bar");
}
/**
 * ctx.target_platform_has_constraint reflects the configured target platform: true when the
 * platform carries //platform:constraint_1 (platform_1), false otherwise (platform_2).
 */
@Test
public void testTargetPlatformHasConstraint() throws Exception {
  createPlatforms();
  scratch.file(
      "demo/test_rule.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " constraint = ctx.attr._constraint[platform_common.ConstraintValueInfo]",
      " has_constraint = ctx.target_platform_has_constraint(constraint)",
      " return [result(",
      " has_constraint = has_constraint,",
      " )]",
      "test_rule = rule(",
      " implementation = _impl,",
      " attrs = {",
      " '_constraint': attr.label(default = '//platform:constraint_1'),",
      " },",
      ")");
  scratch.file(
      "demo/BUILD",
      "load(':test_rule.bzl', 'test_rule')",
      "test_rule(",
      " name = 'demo',",
      ")");
  useConfiguration("--platforms=//platform:platform_1");
  ConfiguredTarget myRuleTarget = getConfiguredTarget("//demo");
  StructImpl info =
      (StructImpl)
          myRuleTarget.get(
              new StarlarkProvider.Key(
                  Label.parseAbsolute("//demo:test_rule.bzl", ImmutableMap.of()), "result"));
  assertThat(info).isNotNull();
  boolean hasConstraint = (boolean) info.getValue("has_constraint");
  assertThat(hasConstraint).isTrue();
  // Re-test with the other platform.
  useConfiguration("--platforms=//platform:platform_2");
  myRuleTarget = getConfiguredTarget("//demo");
  info =
      (StructImpl)
          myRuleTarget.get(
              new StarlarkProvider.Key(
                  Label.parseAbsolute("//demo:test_rule.bzl", ImmutableMap.of()), "result"));
  assertThat(info).isNotNull();
  hasConstraint = (boolean) info.getValue("has_constraint");
  assertThat(hasConstraint).isFalse();
}
/**
 * Writes a rule whose 'dragonfruit' exec group requires //rule:toolchain_type, instantiates
 * it as //something:nectarine, enables --experimental_exec_groups, registers both toolchains
 * and targets platform_1 (so the 'foo' toolchain resolves). Shared setup for the exec-group
 * tests below.
 */
private void writeExecGroups() throws Exception {
  createToolchains();
  createPlatforms();
  scratch.file(
      "something/defs.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " exec_groups = ctx.exec_groups",
      " toolchain = ctx.exec_groups['dragonfruit'].toolchains['//rule:toolchain_type']",
      " return [result(",
      " toolchain_value = toolchain.value,",
      " exec_groups = exec_groups,",
      " )]",
      "use_exec_groups = rule(",
      " implementation = _impl,",
      " exec_groups = {",
      " 'dragonfruit': exec_group(toolchains = ['//rule:toolchain_type']),",
      " },",
      ")");
  scratch.file(
      "something/BUILD",
      "load('//something:defs.bzl', 'use_exec_groups')",
      "use_exec_groups(name = 'nectarine')");
  setBuildLanguageOptions("--experimental_exec_groups=true");
  useConfiguration(
      "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:bar_toolchain",
      "--platforms=//platform:platform_1");
}

/**
 * ctx.exec_groups exposes per-group resolved toolchains: the 'dragonfruit' group sees the
 * 'foo' toolchain while the default exec group has no toolchain requirements.
 */
@Test
public void testExecGroup_toolchain() throws Exception {
  writeExecGroups();
  ConfiguredTarget target = getConfiguredTarget("//something:nectarine");
  StructImpl info =
      (StructImpl)
          target.get(
              new StarlarkProvider.Key(
                  Label.parseAbsoluteUnchecked("//something:defs.bzl"), "result"));
  assertThat(info).isNotNull();
  assertThat(info.getValue("toolchain_value")).isEqualTo("foo");
  assertThat(info.getValue("exec_groups")).isInstanceOf(ExecGroupCollection.class);
  ImmutableMap<String, ResolvedToolchainContext> toolchainContexts =
      ((ExecGroupCollection) info.getValue("exec_groups")).getToolchainCollectionForTesting();
  assertThat(toolchainContexts.keySet()).containsExactly(DEFAULT_EXEC_GROUP_NAME, "dragonfruit");
  assertThat(toolchainContexts.get(DEFAULT_EXEC_GROUP_NAME).requiredToolchainTypes()).isEmpty();
  assertThat(toolchainContexts.get("dragonfruit").resolvedToolchainLabels())
      .containsExactly(Label.parseAbsoluteUnchecked("//toolchain:foo"));
}
// Tests for an error that occurs when two exec groups have different requirements (toolchain
// types and exec constraints), but have the same toolchain type. This also requires the toolchain
// transition to be enabled.
@Test
public void testExecGroup_duplicateToolchainType() throws Exception {
  createToolchains();
  createPlatforms();
  scratch.file(
      "something/defs.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " exec_groups = ctx.exec_groups",
      " toolchain = ctx.exec_groups['dragonfruit'].toolchains['//rule:toolchain_type']",
      " return [result(",
      " toolchain_value = toolchain.value,",
      " exec_groups = exec_groups,",
      " )]",
      "use_exec_groups = rule(",
      " implementation = _impl,",
      " exec_groups = {",
      " 'dragonfruit': exec_group(toolchains = ['//rule:toolchain_type']),",
      // 'passionfruit' shares the toolchain type but adds an exec constraint.
      " 'passionfruit': exec_group(",
      " toolchains = ['//rule:toolchain_type'],",
      " exec_compatible_with = ['//something:extra'],",
      " ),",
      " },",
      " incompatible_use_toolchain_transition = True,",
      ")");
  scratch.file(
      "something/BUILD",
      "constraint_setting(name = 'setting', default_constraint_value = ':extra')",
      "constraint_value(name = 'extra', constraint_setting = ':setting')",
      "load('//something:defs.bzl', 'use_exec_groups')",
      "use_exec_groups(name = 'nectarine')");
  setBuildLanguageOptions("--experimental_exec_groups=true");
  useConfiguration(
      "--extra_toolchains=//toolchain:foo_toolchain,//toolchain:bar_toolchain",
      "--platforms=//platform:platform_1");
  ConfiguredTarget target = getConfiguredTarget("//something:nectarine");
  StructImpl info =
      (StructImpl)
          target.get(
              new StarlarkProvider.Key(
                  Label.parseAbsoluteUnchecked("//something:defs.bzl"), "result"));
  assertThat(info).isNotNull();
  assertThat(info.getValue("toolchain_value")).isEqualTo("foo");
  assertThat(info.getValue("exec_groups")).isInstanceOf(ExecGroupCollection.class);
  ImmutableMap<String, ResolvedToolchainContext> toolchainContexts =
      ((ExecGroupCollection) info.getValue("exec_groups")).getToolchainCollectionForTesting();
  // Both named groups resolve the same 'foo' toolchain despite their differing constraints.
  assertThat(toolchainContexts.keySet())
      .containsExactly(DEFAULT_EXEC_GROUP_NAME, "dragonfruit", "passionfruit");
  assertThat(toolchainContexts.get(DEFAULT_EXEC_GROUP_NAME).requiredToolchainTypes()).isEmpty();
  assertThat(toolchainContexts.get("dragonfruit").resolvedToolchainLabels())
      .containsExactly(Label.parseAbsoluteUnchecked("//toolchain:foo"));
  assertThat(toolchainContexts.get("passionfruit").resolvedToolchainLabels())
      .containsExactly(Label.parseAbsoluteUnchecked("//toolchain:foo"));
}
/** Requesting an exec group that was never declared fails and lists the declared groups. */
@Test
public void testInvalidExecGroup() throws Exception {
  writeExecGroups();
  scratch.overwriteFile(
      "something/defs.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " exec_groups = ctx.exec_groups",
      " toolchain = ctx.exec_groups['unknown_fruit']",
      " return []",
      "use_exec_groups = rule(",
      " implementation = _impl,",
      " exec_groups = {",
      " 'dragonfruit': exec_group(toolchains = ['//rule:toolchain_type']),",
      " },",
      ")");
  assertThrows(AssertionError.class, () -> getConfiguredTarget("//something:nectarine"));
  assertContainsEvent(
      "unrecognized exec group 'unknown_fruit' requested. Available exec groups: [dragonfruit]");
}

/** The implicit default exec group cannot be looked up by name through ctx.exec_groups. */
@Test
public void testCannotAccessDefaultGroupViaExecGroups() throws Exception {
  writeExecGroups();
  scratch.overwriteFile(
      "something/defs.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " exec_groups = ctx.exec_groups",
      " toolchain = ctx.exec_groups['" + DEFAULT_EXEC_GROUP_NAME + "']",
      " return []",
      "use_exec_groups = rule(",
      " implementation = _impl,",
      " exec_groups = {",
      " 'dragonfruit': exec_group(toolchains = ['//rule:toolchain_type']),",
      " },",
      ")");
  assertThrows(AssertionError.class, () -> getConfiguredTarget("//something:nectarine"));
  assertContainsEvent(
      "unrecognized exec group '"
          + DEFAULT_EXEC_GROUP_NAME
          + "' requested. Available exec groups: [dragonfruit]");
}

/** Exec group names must be valid identifiers; '1bad-stuff-name' is rejected. */
@Test
public void testInvalidExecGroupName() throws Exception {
  writeExecGroups();
  String badName = "1bad-stuff-name";
  scratch.overwriteFile(
      "something/defs.bzl",
      "result = provider()",
      "def _impl(ctx):",
      " exec_groups = ctx.exec_groups",
      " toolchain = ctx.exec_groups['" + badName + "']",
      " return []",
      "use_exec_groups = rule(",
      " implementation = _impl,",
      " exec_groups = {",
      " '" + badName + "': exec_group(toolchains = ['//rule:toolchain_type']),",
      " },",
      ")");
  assertThrows(AssertionError.class, () -> getConfiguredTarget("//something:nectarine"));
  assertContainsEvent("Exec group name '" + badName + "' is not a valid name.");
}
}
| apache-2.0 |
JuananIBM/WebCompanies | src/main/java/com/juanan/pocs/companies/web/connection/MongoDBConnection.java | 976 | package com.juanan.pocs.companies.web.connection;
import com.juanan.pocs.companies.core.mongodb.MongoDB;
/**
 * Provides a shared {@link MongoDB} connection per supported pilot pre-rating year.
 *
 * <p>One connection per database is created eagerly at class-initialization time and reused by
 * every caller. Any year other than "2013", "2014" or "2015" falls back to the 2016 database,
 * preserving the behavior of the original if/else chain.
 */
public class MongoDBConnection {

	private static final MongoDB mongoDB2013 =
			new MongoDB(MongoDBCredentials.host, MongoDBCredentials.port, "pilotoPreRating2013");
	private static final MongoDB mongoDB2014 =
			new MongoDB(MongoDBCredentials.host, MongoDBCredentials.port, "pilotoPreRating2014");
	private static final MongoDB mongoDB2015 =
			new MongoDB(MongoDBCredentials.host, MongoDBCredentials.port, "pilotoPreRating2015");
	private static final MongoDB mongoDB2016 =
			new MongoDB(MongoDBCredentials.host, MongoDBCredentials.port, "pilotoPreRating2016");

	/**
	 * Returns the shared connection for the given year.
	 *
	 * @param anyo the year as a string ("2013".."2016"); must not be {@code null} — a null
	 *     value throws {@link NullPointerException}, exactly as the original implementation did
	 * @return the connection for that year, or the 2016 connection for any unrecognized value
	 */
	public static MongoDB getConnection(String anyo) {
		switch (anyo) {
			case "2013":
				return mongoDB2013;
			case "2014":
				return mongoDB2014;
			case "2015":
				return mongoDB2015;
			default:
				// Historical fallback: everything else (including "2016") uses the 2016 database.
				return mongoDB2016;
		}
	}
}
| apache-2.0 |
bherrmann7/jbum | fixed-src/com/thoughtworks/xstream/core/ReferenceByXPathUnmarshaller.java | 1626 | package com.thoughtworks.xstream.core;
import com.thoughtworks.xstream.alias.ClassMapper;
import com.thoughtworks.xstream.converters.ConverterLookup;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.path.Path;
import com.thoughtworks.xstream.io.path.PathTracker;
import com.thoughtworks.xstream.io.path.PathTrackingReader;
import com.thoughtworks.xstream.mapper.Mapper;
public class ReferenceByXPathUnmarshaller extends AbstractReferenceUnmarshaller {

    // Records the reader's current location; used to resolve relative xpath references
    // against the node currently being read.
    private PathTracker pathTracker = new PathTracker();

    /**
     * Creates an unmarshaller that resolves references expressed as xpath expressions.
     * The supplied reader is wrapped in a {@link PathTrackingReader} so every move through
     * the stream is mirrored into the path tracker.
     */
    public ReferenceByXPathUnmarshaller(Object root, HierarchicalStreamReader reader,
                                        ConverterLookup converterLookup, Mapper mapper) {
        super(root, reader, converterLookup, mapper);
        // Replace the inherited reader with the tracking wrapper.
        this.reader = new PathTrackingReader(reader, pathTracker);
    }

    /**
     * @deprecated As of 1.2, use {@link #ReferenceByXPathUnmarshaller(Object, HierarchicalStreamReader, ConverterLookup, Mapper)}
     */
    public ReferenceByXPathUnmarshaller(Object root, HierarchicalStreamReader reader,
                                        ConverterLookup converterLookup, ClassMapper classMapper) {
        this(root, reader, converterLookup, (Mapper)classMapper);
    }

    /**
     * Converts a reference string read from the stream into a lookup key: absolute references
     * (starting with '/') are used as-is, relative ones are applied to the current path.
     * NOTE(review): assumes {@code reference} is non-empty — an empty string would make
     * {@code charAt(0)} throw; confirm upstream guarantees this.
     */
    protected Object getReferenceKey(String reference) {
        final Path path = new Path(reference);
        // We have absolute references, if path starts with '/'
        return reference.charAt(0) != '/' ? pathTracker.getPath().apply(path) : path;
    }

    /** Returns the current path, i.e. the key under which the current object is registered. */
    protected Object getCurrentReferenceKey() {
        return pathTracker.getPath();
    }
}
| apache-2.0 |
583462423/match | match-web/src/main/java/com/sduwh/match/controller/admin/AdminScoreController.java | 6513 | package com.sduwh.match.controller.admin;
import com.sduwh.match.controller.academy.AcademyScoreController;
import com.sduwh.match.controller.base.BaseController;
import com.sduwh.match.enums.ConcludingStatementStage;
import com.sduwh.match.enums.MatchStage;
import com.sduwh.match.enums.RaterLevel;
import com.sduwh.match.jedis.JedisAdapter;
import com.sduwh.match.jedis.RedisKeyGenerator;
import com.sduwh.match.model.HostHolder;
import com.sduwh.match.model.entity.*;
import com.sduwh.match.model.to.MatchItemTO;
import com.sduwh.match.model.to.ScoreInfo;
import com.sduwh.match.service.concludingstagtement.middlecheck.ConcludingStatementService;
import com.sduwh.match.service.grade.GradeService;
import com.sduwh.match.service.matchinfo.MatchInfoService;
import com.sduwh.match.service.matchitem.MatchItemService;
import com.sduwh.match.service.stage.StageService;
import com.sduwh.match.service.tmprater.TmpRaterService;
import com.sduwh.match.service.user.UserService;
import com.sduwh.match.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import java.text.ParseException;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Created by qxg on 17-9-20.
 * Concluding-statement (final check) scoring stage, as handled by the school-level
 * administrator.
 */
@Controller
@RequestMapping("/admin")
public class AdminScoreController extends BaseController {

    // NOTE(review): logger is tagged with AcademyScoreController.class — looks like a
    // copy/paste leftover; consider AdminScoreController.class.
    private static final Logger logger = LoggerFactory.getLogger(AcademyScoreController.class);

    // View names returned by the handlers below.
    private static final String SCORE_ALL = "/admin/score_all";
    private static final String SCORE_CREATE_RATER = "/admin/score_items";

    @Autowired
    StageService stageService;

    @Autowired
    MatchItemService matchItemService;

    @Autowired
    ConcludingStatementService concludingStatementService;

    @Autowired
    JedisAdapter jedisAdapter;

    @Autowired
    HostHolder hostHolder;

    @Autowired
    MatchInfoService matchInfoService;

    @Autowired
    TmpRaterService tmpRaterService;

    @Autowired
    GradeService gradeService;

    @Autowired
    UserService userService;

    /** Lists every match currently in the scoring stage. */
    @GetMapping("/match/score/all")
    public String getMatchScoreStage(Model model){
        // Collect the MatchInfos whose items are currently in the CONCLUSION_CHECK stage.
        List<Stage> stageList = stageService.selectByStageFlag(MatchStage.CONCLUSION_CHECK.getId());
        List<MatchInfo> matchInfos = stageList.stream().flatMap(stage-> matchItemService.selectByNowStageId(stage.getId()).stream())
                .filter(item->{
                    // Drop items whose concluding statement is not in the SUPER_SCORE sub-stage.
                    ConcludingStatement concludingStatement = concludingStatementService.selectByMatchItemId(item.getId());
                    return concludingStatement.getStage() == ConcludingStatementStage.SUPER_SCORE.getCode();
                }).map(matchItem -> matchInfoService.selectByPrimaryKey(matchItem.getMatchInfoId()))
                .distinct().collect(Collectors.toList());
        model.addAttribute("infos",matchInfos);
        return SCORE_ALL;
    }

    /** Shows the scoring page for one match: all items of {@code matchInfoId} awaiting review. */
    @GetMapping("/score/info/{id}")
    public String getScoreByMatchInfoId(@PathVariable("id") int matchInfoId, Model model){
        // Look up, for the given matchInfoId, all items of this match pending review
        // by the current (school-level) rater.
        String key = RedisKeyGenerator.getSuperScoreKey(hostHolder.getUser().getId(),matchInfoId);
        ScoreInfo scoreInfo = gradeService.getScoreInfo(RaterLevel.SCHOOL,key,matchInfoId,ConcludingStatementStage.SUPER_SCORE);
        model.addAttribute("matchInfoId",scoreInfo.getMatchInfoId());
        model.addAttribute("itemTOS", scoreInfo.getMatchItemTOS());
        model.addAttribute("cnt",scoreInfo.getCnt());
        model.addAttribute("nowCnt",scoreInfo.getNowCnt());
        model.addAttribute("last",scoreInfo.getLast());
        model.addAttribute("notDoneCnt",scoreInfo.getNotDoneCnt());
        return SCORE_CREATE_RATER;
    }

    /** Ends the scoring stage for the given match. */
    @PostMapping("/rater/score/end")
    @ResponseBody
    public synchronized String endScore(@RequestParam("infoId")int infoId){
        String key = RedisKeyGenerator.getSuperScoreKey(hostHolder.getUser().getId(),infoId);
        // Fetch all items still in this stage, then move each item's concluding-statement
        // report into the finished state.
        if(jedisAdapter.sget(key) == null || jedisAdapter.sget(key).size() == 0)return setJsonResult("error","该比赛没有可设置的比赛");
        jedisAdapter.sget(key).stream().map(Integer::parseInt).map(concludingStatementService::selectByMatchItemId).forEach(cs->{
            // Advance each concluding statement to its terminal stage.
            cs.setStage(ConcludingStatementStage.ALL_DONW.getCode());
            concludingStatementService.updateByMatchItemId(cs);
            // Remove this item from the pending set in redis.
            jedisAdapter.srem(key,String.valueOf(cs.getMatchItemId()));
            // Advance the match item itself to its next stage.
            matchItemService.updateAndSetNextStage(matchItemService.selectByPrimaryKey(cs.getMatchItemId()),null);
        });
        return setJsonResult("success","true");
    }

    /** Generates temporary rater accounts for the selected match items. */
    @PostMapping("/rater/gen")
    @ResponseBody
    public String genRater(@RequestParam("matchItem") String matchItems,
                           @RequestParam("startTime") String startTime,
                           @RequestParam("endTime") String endTime,
                           @RequestParam("cnt") Integer cnt,
                           @RequestParam("matchInfoId")int matchInfoId) throws ParseException {
        // Generate temporary raters for these match items.
        // Reject the request if any field is missing or blank.
        if(StringUtils.nullOrEmpty(matchItems,startTime,endTime,String.valueOf(cnt),String.valueOf(matchInfoId)))
            return setJsonResult("error","输入内容不得为空!");
        List<TmpRater> result =tmpRaterService.createRater(matchItems,startTime,endTime,cnt, RaterLevel.SCHOOL.getLevel(),matchInfoId);
        // Build a printable listing of the generated rater credentials, one account/password
        // pair per line.
        StringBuilder ss = new StringBuilder();
        result.forEach(t->{
            ss.append("帐号:").append(t.getUsername()).append(" 密码:").append(t.getPassword()).append("\n");
        });
        return setJsonResult("success","true","raterInfo",ss.toString());
    }
}
| apache-2.0 |
Yannic/closure-compiler | src/com/google/debugging/sourcemap/super/com/google/debugging/sourcemap/proto/Mapping.java | 2886 | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.debugging.sourcemap.proto;
/**
* GWT compatible replacement of {@code Mapping}, which is a generated Java protocol buffer
* unsuitable for use in the GWT Closure.
*
* This is not intended to match the generated class exactly, it just implements the required
* methods.
*/
public final class Mapping {

  /** Static holder only; never instantiated. */
  private Mapping() {}

  /** Immutable description of a position in an original source file. */
  public static final class OriginalMapping {

    private final String originalFile;
    private final int lineNumber;
    private final int columnPosition;
    private final String identifier;

    OriginalMapping(String originalFile, int lineNumber, int columnPosition, String identifier) {
      this.originalFile = originalFile;
      this.lineNumber = lineNumber;
      this.columnPosition = columnPosition;
      this.identifier = identifier;
    }

    /** Returns a builder with all fields at their defaults. */
    public static Builder newBuilder() {
      return new Builder();
    }

    public String getOriginalFile() {
      return originalFile;
    }

    public int getLineNumber() {
      return lineNumber;
    }

    public int getColumnPosition() {
      return columnPosition;
    }

    public String getIdentifier() {
      return identifier;
    }

    /** Whether an identifier was supplied for this mapping. */
    public boolean hasIdentifier() {
      return identifier != null;
    }

    /** Returns a builder pre-populated with this mapping's values. */
    public Builder toBuilder() {
      Builder builder = new Builder();
      builder.setIdentifier(identifier);
      builder.setColumnPosition(columnPosition);
      builder.setLineNumber(lineNumber);
      builder.setOriginalFile(originalFile);
      return builder;
    }

    /** Mutable builder for {@link OriginalMapping}. */
    public static final class Builder {

      private String originalFile;
      private int lineNumber;
      private int columnPosition;
      private String identifier;

      public Builder setOriginalFile(String value) {
        originalFile = value;
        return this;
      }

      public Builder setLineNumber(int value) {
        lineNumber = value;
        return this;
      }

      public Builder setColumnPosition(int value) {
        columnPosition = value;
        return this;
      }

      public Builder setIdentifier(String value) {
        identifier = value;
        return this;
      }

      public OriginalMapping build() {
        return new OriginalMapping(originalFile, lineNumber, columnPosition, identifier);
      }
    }
  }
}
| apache-2.0 |
desruisseaux/sis | core/sis-referencing/src/test/java/org/apache/sis/parameter/ParameterValueGroupWrapper.java | 2416 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.parameter;
import java.util.List;
import org.opengis.parameter.ParameterValue;
import org.opengis.parameter.GeneralParameterValue;
import org.opengis.parameter.ParameterValueGroup;
import org.opengis.parameter.ParameterDescriptorGroup;
/**
 * {@link ParameterValueGroup} wrapper that hides the implementation class, preventing
 * optimizations triggered by checks such as
 * {@code if (x instanceof DefaultParameterValueGroup)}. Every interface method forwards
 * unchanged to the wrapped instance.
 *
 * @author Martin Desruisseaux (Geomatys)
 * @since 0.4
 * @version 0.4
 * @module
 */
@SuppressWarnings("CloneInNonCloneableClass")
final strictfp class ParameterValueGroupWrapper implements ParameterValueGroup {
    /**
     * The wrapped implementation that all calls are delegated to.
     */
    private final ParameterValueGroup impl;

    /**
     * Wraps the given implementation.
     */
    ParameterValueGroupWrapper(final ParameterValueGroup impl) {
        this.impl = impl;
    }

    /** Forwards to the wrapped group's own {@code clone()}; intentionally not calling super. */
    @Override
    @SuppressWarnings("CloneDoesntCallSuperClone")
    public ParameterValueGroup clone() {
        return impl.clone();
    }

    /** Forwards to the wrapped group. */
    @Override
    public ParameterDescriptorGroup getDescriptor() {
        return impl.getDescriptor();
    }

    /** Forwards to the wrapped group. */
    @Override
    public List<GeneralParameterValue> values() {
        return impl.values();
    }

    /** Forwards to the wrapped group. */
    @Override
    public ParameterValue<?> parameter(final String name) {
        return impl.parameter(name);
    }

    /** Forwards to the wrapped group. */
    @Override
    public List<ParameterValueGroup> groups(final String name) {
        return impl.groups(name);
    }

    /** Forwards to the wrapped group. */
    @Override
    public ParameterValueGroup addGroup(final String name) {
        return impl.addGroup(name);
    }
}
| apache-2.0 |