repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
nano-projects/nano-framework | nano-orm/nano-orm-kafka/src/test/java/org/nanoframework/orm/kafka/AbstractTests.java | 2826 | /*
* Copyright 2015-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nanoframework.orm.kafka;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import org.junit.BeforeClass;
import org.nanoframework.commons.util.CollectionUtils;
import org.nanoframework.core.globals.Globals;
import org.nanoframework.core.plugins.Module;
import org.nanoframework.core.plugins.defaults.module.SPIModule;
import org.nanoframework.core.spi.SPILoader;
import com.google.common.collect.Lists;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.name.Names;
/**
*
* @author yanghe
* @since 1.4.9
*/
public abstract class AbstractTests {

    // Stub ServletConfig handed to Module.config(); every accessor returns
    // null because the kafka test modules never read servlet parameters.
    private static final ServletConfig config = new ServletConfig() {
        @Override
        public String getServletName() {
            return null;
        }

        @Override
        public ServletContext getServletContext() {
            return null;
        }

        @Override
        public Enumeration<String> getInitParameterNames() {
            return null;
        }

        @Override
        public String getInitParameter(final String name) {
            return null;
        }
    };

    /**
     * Bootstraps the NanoFramework plugin system once per test class:
     * creates a root Guice injector with the SPI bindings, discovers every
     * {@link Module} registered via SPI, lets each module configure itself
     * and contribute child bindings, then publishes the resulting injector
     * through {@link Globals}.
     *
     * @throws Throwable if SPI discovery or module loading/configuration fails
     */
    @BeforeClass
    public static void setup() throws Throwable {
        final Injector injector = Guice.createInjector().createChildInjector(new SPIModule());
        // Publish the SPI-aware injector first so code running during
        // module.load() can already resolve it from Globals.
        Globals.set(Injector.class, injector);
        final Set<String> moduleNames = SPILoader.spiNames(Module.class);
        if (!CollectionUtils.isEmpty(moduleNames)) {
            final List<Module> loadedModules = Lists.newArrayList();
            for (final String moduleName : moduleNames) {
                // Each SPI module is bound under its SPI name via @Named.
                final Module module = injector.getInstance(Key.get(Module.class, Names.named(moduleName)));
                module.config(config);
                loadedModules.addAll(module.load());
            }
            // Replace the published injector with a child that also carries
            // the bindings contributed by the loaded modules.
            Globals.set(Injector.class, injector.createChildInjector(loadedModules));
        }
    }

    // Injects @Inject-annotated members of the concrete test subclass using
    // the injector published by setup().
    protected void injects() {
        final Injector injector = Globals.get(Injector.class);
        injector.createChildInjector(binder -> binder.requestInjection(this));
    }
}
| apache-2.0 |
tateshitah/jspwiki | jspwiki-war/src/test/java/org/apache/wiki/providers/VerySimpleProvider.java | 4213 | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.providers;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.Vector;
import org.apache.wiki.WikiEngine;
import org.apache.wiki.WikiPage;
import org.apache.wiki.api.exceptions.ProviderException;
import org.apache.wiki.search.QueryItem;
/**
* This is a simple provider that is used by some of the tests. It has some
* specific behaviours, like it always contains a single page.
*/
public class VerySimpleProvider implements WikiPageProvider
{
    /** The last request is stored here. */
    public String m_latestReq = null;
    /** The version number of the last request is stored here. */
    public int m_latestVers = -123989;
    /**
     * This provider has only a single page, when you ask
     * a list of all pages.
     */
    public static final String PAGENAME = "foo";
    /**
     * The author reported for every page returned by this provider.
     */
    public static final String AUTHOR = "default-author";

    // Engine handed in by initialize(); used when constructing WikiPages.
    private WikiEngine m_engine;

    /** Remembers the engine; ignores the supplied properties. */
    public void initialize( WikiEngine engine, Properties props )
    {
        m_engine = engine;
    }

    /** Returns a short human-readable description of this provider. */
    public String getProviderInfo()
    {
        return "Very Simple Provider.";
    }

    /** Does nothing; this provider silently discards stored text. */
    public void putPageText( WikiPage page, String text )
        throws ProviderException
    {
    }

    /**
     * Always returns true.
     */
    public boolean pageExists( String page )
    {
        return true;
    }

    /**
     * Always returns true.
     */
    public boolean pageExists( String page, int version )
    {
        return true;
    }

    /**
     * Always returns null.
     */
    // Raw Collection return type is mandated by the WikiPageProvider interface.
    public Collection findPages( QueryItem[] query )
    {
        return null;
    }

    /**
     * Returns always a valid WikiPage. Also records the requested page name
     * and version in the public m_latestReq / m_latestVers fields so tests
     * can inspect what was asked for.
     */
    public WikiPage getPageInfo( String page, int version )
    {
        m_latestReq = page;
        m_latestVers = version;

        WikiPage p = new WikiPage( m_engine, page );
        p.setVersion( 5 );
        p.setAuthor( AUTHOR );
        p.setLastModified( new Date(0L) );
        return p;
    }

    /**
     * Returns a single page.
     */
    public Collection getAllPages()
    {
        Vector<WikiPage> v = new Vector<WikiPage>();
        v.add( getPageInfo( PAGENAME, 5 ) );
        return v;
    }

    /**
     * Returns the same as getAllPages().
     */
    public Collection getAllChangedSince( Date date )
    {
        return getAllPages();
    }

    /**
     * Always returns 1.
     */
    public int getPageCount()
    {
        return 1;
    }

    /**
     * Always returns an empty list.
     */
    public List getVersionHistory( String page )
    {
        return new Vector();
    }

    /**
     * Stores the page and version into public fields of this class,
     * then returns an empty string.
     */
    public String getPageText( String page, int version )
    {
        m_latestReq = page;
        m_latestVers = version;
        return "";
    }

    /** No-op: versions are never actually stored. */
    public void deleteVersion( String page, int version )
    {
    }

    /** No-op: pages are never actually stored. */
    public void deletePage( String page )
    {
    }

    /* (non-Javadoc)
     * @see org.apache.wiki.providers.WikiPageProvider#movePage(java.lang.String, java.lang.String)
     */
    public void movePage( String from, String to ) throws ProviderException
    {
        // TODO Auto-generated method stub
    }
}
| apache-2.0 |
mvs5465/jpo-ode | jpo-ode-plugins/src/main/java/us/dot/its/jpo/ode/plugin/j2735/J2735WiperStatus.java | 207 | package us.dot.its.jpo.ode.plugin.j2735;
import us.dot.its.jpo.ode.plugin.asn1.Asn1Object;
// Wiper status values for the J2735 plugin.
// NOTE(review): the lowerCamelCase constant names presumably mirror the
// ASN.1 WiperStatus enumeration of SAE J2735, and declaration order would
// then carry the ASN.1 ordinal — confirm against the spec before reordering
// or renaming (serialized/decoded values depend on these names).
public enum J2735WiperStatus {
    unavailable,
    off,
    intermittent,
    low,
    high,
    washerInUse,
    automaticPresent
}
| apache-2.0 |
yanguangkun/KunSoftware_Tour | src/main/java/com/kunsoftware/bean/CustomizeRequestBean.java | 1729 | package com.kunsoftware.bean;
import org.springframework.web.multipart.MultipartFile;
/**
 * Form-backing bean for a "customize" request, carrying three title lines,
 * a summary, destination / product-resource references, an optional uploaded
 * image and a front-desk flag. Plain mutable JavaBean: every property is
 * exposed through a conventional getter/setter pair.
 */
public class CustomizeRequestBean {

    private String title1;              // first title line
    private String title2;              // second title line
    private String title3;              // third title line
    private String summary;             // free-text summary
    private Integer destination;        // destination id
    private Integer productResourceId;  // referenced product resource id
    private String productResourceName; // referenced product resource name
    private MultipartFile imageFile;    // uploaded image, may be absent
    private String frontDesk;           // front-desk flag/value

    public String getTitle1() { return title1; }

    public void setTitle1(String value) { this.title1 = value; }

    public String getTitle2() { return title2; }

    public void setTitle2(String value) { this.title2 = value; }

    public String getTitle3() { return title3; }

    public void setTitle3(String value) { this.title3 = value; }

    public String getSummary() { return summary; }

    public void setSummary(String value) { this.summary = value; }

    public Integer getDestination() { return destination; }

    public void setDestination(Integer value) { this.destination = value; }

    public Integer getProductResourceId() { return productResourceId; }

    public void setProductResourceId(Integer value) { this.productResourceId = value; }

    public String getProductResourceName() { return productResourceName; }

    public void setProductResourceName(String value) { this.productResourceName = value; }

    public MultipartFile getImageFile() { return imageFile; }

    public void setImageFile(MultipartFile value) { this.imageFile = value; }

    public String getFrontDesk() { return frontDesk; }

    public void setFrontDesk(String value) { this.frontDesk = value; }
}
| apache-2.0 |
highill-practice/highill-practice-spark | highill-practice-spark/src/main/java/com/highill/practice/spark/mllib/rdd/linear/SparkMLLinearSupportVectorMachines.java | 3403 | package com.highill.practice.spark.mllib.rdd.linear;
import java.io.File;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.classification.SVMModel;
import org.apache.spark.mllib.classification.SVMWithSGD;
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics;
import org.apache.spark.mllib.optimization.L1Updater;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.util.MLUtils;
import scala.Tuple2;
import com.highill.practice.spark.JavaRDDSparkContextMain;
import com.highill.practice.spark.tool.FileTool;
/**
*
* https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/mllib/JavaSVMWithSGDExample.java
*
*/
public class SparkMLLinearSupportVectorMachines {

    /**
     * Demonstrates training a linear SVM with SGD on the standard libsvm
     * sample data, evaluating it with area-under-ROC, and training a second
     * model with L1 regularization.
     */
    public static void main(String[] args) {
        // Clear any model left over from a previous run before (re)saving.
        String savePath = "target/tmp/SparkMLLinearSVMWithSGDModel";
        FileTool.deletePath(savePath);
        String dataPath = "data/mllib/sample_libsvm_data.txt";
        JavaSparkContext javaSparkContext = JavaRDDSparkContextMain.javaSparkContext("SparkMLLinearSupportVectorMachines", "local[*]");
        JavaRDD<LabeledPoint> labeledData = MLUtils.loadLibSVMFile(javaSparkContext.sc(), dataPath).toJavaRDD();
        System.out.println("-----labeledData count: " + labeledData.count());
        System.out.println("-----labeledData take 1: " + labeledData.take(1));
        // Split initial RDD into tow collection, [60% training data, 40% testing data]
        // sample(withReplacement=false, fraction=0.6, seed=11L) keeps the split reproducible.
        JavaRDD<LabeledPoint> trainingData = labeledData.sample(false, 0.6, 11L);
        System.out.println("-----trainingData count: " + trainingData.count());
        // Cache: the training set is traversed many times by the SGD iterations.
        trainingData.cache();
        System.out.println("-----trainingData cache count: " + trainingData.count());
        JavaRDD<LabeledPoint> testData = labeledData.subtract(trainingData);
        System.out.println("-----testData count: " + testData.count());
        // Run training
        int numIterations = 100;
        final SVMModel svmTrainingModel = SVMWithSGD.train(trainingData.rdd(), numIterations);
        System.out.println("-----svmTrainingModel: " + svmTrainingModel);
        // Clear the default threshold so predict() returns raw scores
        // (required by BinaryClassificationMetrics) instead of 0/1 labels.
        svmTrainingModel.clearThreshold();
        // Compute raw scores on the test set
        JavaRDD<Tuple2<Object, Object>> scoreAndLabels = testData.map(labeledPoint -> {
            Double score = svmTrainingModel.predict(labeledPoint.features());
            Tuple2<Object, Object> tuple = new Tuple2<Object, Object>(score, labeledPoint.label());
            return tuple;
        });
        System.out.println("-----scoreAndLabels count: " + scoreAndLabels.count());
        BinaryClassificationMetrics binaryClassificationMetrics =
                new BinaryClassificationMetrics(JavaRDD.toRDD(scoreAndLabels));
        System.out.println("-----binaryClassificationMetrics " + binaryClassificationMetrics);
        double areaUnderROC = binaryClassificationMetrics.areaUnderROC();
        System.out.println("-----areaUnderROC: " + areaUnderROC);
        // TODO
        // svmTrainingModel.save(javaSparkContext.sc(), savePath);
        // SVMModel reloadSVMModel = SVMModel.load(javaSparkContext.sc(), savePath);
        // System.out.println("-----reloadSVMModel: " + reloadSVMModel);
        // Second model: same algorithm but with L1 regularization (L1Updater)
        // and a larger iteration budget.
        SVMWithSGD svmWithSGD = new SVMWithSGD();
        svmWithSGD.optimizer().setNumIterations(200).setRegParam(0.1).setUpdater(new L1Updater());
        final SVMModel svmModelL1 = svmWithSGD.run(trainingData.rdd());
        System.out.println("-----svmModelL1: " + svmModelL1);
        javaSparkContext.sc().stop();
    }
}
| apache-2.0 |
fuyuanwu/springside4 | examples/quickstart/src/main/java/org/springside/examples/quickstart/web/task/TaskController.java | 4992 | /*******************************************************************************
* Copyright (c) 2005, 2014 springside.github.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
*******************************************************************************/
package org.springside.examples.quickstart.web.task;
import java.util.Map;
import javax.servlet.ServletRequest;
import javax.validation.Valid;
import org.apache.shiro.SecurityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import org.springside.examples.quickstart.entity.Task;
import org.springside.examples.quickstart.entity.User;
import org.springside.examples.quickstart.service.account.ShiroDbRealm.ShiroUser;
import org.springside.examples.quickstart.service.task.TaskService;
import org.springside.modules.web.Servlets;
import com.google.common.collect.Maps;
/**
* Task管理的Controller, 使用Restful风格的Urls:
*
* List page : GET /task/ Create page : GET /task/create Create action : POST
* /task/create Update page : GET /task/update/{id} Update action : POST
* /task/update Delete action : GET /task/delete/{id}
*
* @author fuyuanwu
*/
@Controller
@RequestMapping(value = "/task")
public class TaskController {

    private static final String PAGE_SIZE = "3";

    // Sort options shown in the UI; LinkedHashMap preserves display order.
    private static final Map<String, String> sortTypes = Maps.newLinkedHashMap();
    static {
        sortTypes.put("auto", "自动");
        sortTypes.put("title", "标题");
    }

    @Autowired
    private TaskService taskService;

    /**
     * Lists the current user's tasks with paging, sorting and "search_"-prefixed
     * filter parameters taken from the request.
     */
    @RequestMapping(method = RequestMethod.GET)
    public String list(@RequestParam(value = "page", defaultValue = "1") int pageNumber,
            @RequestParam(value = "page.size", defaultValue = PAGE_SIZE) int pageSize,
            @RequestParam(value = "sortType", defaultValue = "auto") String sortType, Model model,
            ServletRequest request) {
        Map<String, Object> searchParams = Servlets.getParametersStartingWith(request, "search_");
        Long userId = getCurrentUserId();
        Page<Task> tasks = taskService.getUserTask(userId, searchParams, pageNumber, pageSize, sortType);
        model.addAttribute("tasks", tasks);
        model.addAttribute("sortType", sortType);
        model.addAttribute("sortTypes", sortTypes);
        // Re-encode the search conditions into a query string so the sorting
        // and paging links can carry them along.
        model.addAttribute("searchParams", Servlets.encodeParameterStringWithPrefix(searchParams, "search_"));
        return "task/taskList";
    }

    /** Shows the empty creation form. */
    @RequestMapping(value = "create", method = RequestMethod.GET)
    public String createForm(Model model) {
        model.addAttribute("task", new Task());
        model.addAttribute("action", "create");
        return "task/taskForm";
    }

    /** Creates a new task owned by the current user, then redirects to the list. */
    @RequestMapping(value = "create", method = RequestMethod.POST)
    public String create(@Valid Task newTask, RedirectAttributes redirectAttributes) {
        User user = new User(getCurrentUserId());
        newTask.setUser(user);
        taskService.saveTask(newTask);
        // Fix: use a flash attribute (consistent with update/delete) so the
        // message survives the redirect without leaking into the URL as a
        // query parameter, which addAttribute() would do.
        redirectAttributes.addFlashAttribute("message", "创建任务成功");
        return "redirect:/task/";
    }

    /** Shows the edit form pre-populated with the task identified by {@code id}. */
    @RequestMapping(value = "update/{id}", method = RequestMethod.GET)
    public String updateForm(@PathVariable("id") Long id, Model model) {
        model.addAttribute("task", taskService.getTask(id));
        model.addAttribute("action", "update");
        return "task/taskForm";
    }

    /** Persists an edited task (bound via {@link #getTask}), then redirects to the list. */
    @RequestMapping(value = "update", method = RequestMethod.POST)
    public String update(@Valid @ModelAttribute("task") Task task, RedirectAttributes redirectAttributes) {
        taskService.saveTask(task);
        redirectAttributes.addFlashAttribute("message", "更新任务成功");
        return "redirect:/task/";
    }

    /** Deletes the task identified by {@code id}, then redirects to the list. */
    @RequestMapping(value = "delete/{id}")
    public String delete(@PathVariable("id") Long id, RedirectAttributes redirectAttributes) {
        taskService.deleteTask(id);
        redirectAttributes.addFlashAttribute("message", "删除任务成功");
        return "redirect:/task/";
    }

    /**
     * Model-preparation method invoked before every @RequestMapping handler.
     * Emulates Struts2 Preparable two-phase binding: the Task is first loaded
     * from the database by its form-supplied id, then the submitted form
     * fields are bound onto that instance. Only the update() form actually
     * carries an id, so the lookup effectively runs only for updates.
     */
    @ModelAttribute
    public void getTask(@RequestParam(value = "id", defaultValue = "-1") Long id, Model model) {
        if (id != -1) {
            model.addAttribute("task", taskService.getTask(id));
        }
    }

    /**
     * Returns the id of the user currently authenticated with Shiro.
     */
    private Long getCurrentUserId() {
        ShiroUser user = (ShiroUser) SecurityUtils.getSubject().getPrincipal();
        return user.id;
    }
}
| apache-2.0 |
brutusin/wava | wava-client/src/main/java/org/brutusin/wava/WavaClient.java | 3881 | /*
* Copyright 2016 Ignacio del Valle Alles idelvall@brutusin.org.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.brutusin.wava;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.brutusin.wava.env.EnvEntry;
import org.brutusin.wava.input.CancelInput;
import org.brutusin.wava.input.ExtendedSubmitInput;
import org.brutusin.wava.input.GroupInput;
import org.brutusin.wava.input.Input;
import org.brutusin.wava.input.SubmitInput;
import org.brutusin.wava.io.EventListener;
import org.brutusin.wava.io.LineListener;
import org.brutusin.wava.io.OpName;
import org.brutusin.wava.io.RequestExecutor;
import org.brutusin.wava.io.RetCode;
/**
*
* @author Ignacio del Valle Alles idelvall@brutusin.org
*/
public class WavaClient {

    // Shared executor that performs the actual request/response exchange
    // with the wava core process.
    private final RequestExecutor executor = new RequestExecutor();

    /**
     * Submits a job to the wava scheduler, wiring the supplied streams and
     * listeners to the job's stdin/stdout/stderr and scheduler events. If the
     * WAVA_JOB_ID environment variable is set, the submission is registered
     * as a child of that job.
     *
     * @throws WavaNotRunningException if the wava core is not running
     */
    public void submit(SubmitInput input, final InputStream stdinStream, final OutputStream stdoutStream, final LineListener stderrListener, final EventListener eventListener) throws WavaNotRunningException {
        try {
            ExtendedSubmitInput esi = new ExtendedSubmitInput(input);
            String parentId = System.getenv(EnvEntry.WAVA_JOB_ID.name());
            if (parentId != null) {
                esi.setParentId(Integer.valueOf(parentId));
            }
            Integer retCode = executor.executeRequest(OpName.submit, esi, stdinStream, stdoutStream, stderrListener, eventListener);
            if (retCode == RetCode.CORE_NOT_RUNNING.getCode()) {
                throw new WavaNotRunningException();
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Executes a simple (non-streaming) operation, buffering stdout and
     * stderr in memory. Returns the operation's stdout on success; throws a
     * RuntimeException carrying the accumulated stderr on a non-zero return
     * code, or WavaNotRunningException if the core is down.
     */
    private static String executeCommand(RequestExecutor executor, OpName opName, Input input) throws WavaNotRunningException {
        ByteArrayOutputStream stdoutOs = new ByteArrayOutputStream();
        final StringBuilder sb = new StringBuilder();
        // Collects stderr lines into a single newline-separated message.
        LineListener stderrListener = new LineListener() {
            @Override
            public void onNewLine(String line) {
                if (sb.length() > 0) {
                    sb.append("\n");
                }
                sb.append(line);
            }
        };
        try {
            Integer retCode = executor.executeRequest(opName, input, null, stdoutOs, stderrListener, null);
            if (retCode == RetCode.CORE_NOT_RUNNING.getCode()) {
                throw new WavaNotRunningException();
            } else if (retCode != 0) {
                throw new RuntimeException(sb.toString());
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
        return stdoutOs.toString();
    }

    /** Runs a group operation and returns its stdout. */
    public String executeGroupCommand(GroupInput input) throws WavaNotRunningException {
        return executeCommand(executor, OpName.group, input);
    }

    /** Cancels a job and returns the operation's stdout. */
    public String cancelJobCommand(CancelInput input) throws WavaNotRunningException {
        return executeCommand(executor, OpName.cancel, input);
    }

    /** Returns whether the wava core scheduler process is currently running. */
    public static boolean isSchedulerRunning() {
        try {
            return Utils.isCoreRunning();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
}
| apache-2.0 |
droolsjbpm/drools | drools-mvel/src/test/java/org/drools/mvel/integrationtests/DynamicRuleLoadTest.java | 8525 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.drools.mvel.integrationtests;
import java.lang.reflect.Field;
import org.drools.mvel.CommonTestMethodBase;
import org.drools.mvel.compiler.Message;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.ReleaseId;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.internal.io.ResourceFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class DynamicRuleLoadTest extends CommonTestMethodBase {

    // Rule fired in both KJar versions: just prints the message.
    private final String drl1 =
            "package org.drools.mvel.compiler\n" +
            "rule R1 when\n" +
            " Message( $m : message )\n" +
            "then\n" +
            " System.out.println($m);\n" +
            "end\n";

    // Version 1.0.0 rule: triggers the in-flight container upgrade.
    private final String drl2_1 =
            "package org.drools.mvel.compiler\n" +
            "global " + DynamicRuleLoadTest.class.getCanonicalName() + " test;\n" +
            "rule R2_1 when\n" +
            " $m : Message( message == \"Hi Universe\" )\n" +
            "then\n" +
            " test.updateToVersion();" +
            "end\n";

    // Version 1.1.0 rule: marks the test as done after the upgrade.
    private final String drl2_2 =
            "package org.drools.mvel.compiler\n" +
            "global " + DynamicRuleLoadTest.class.getCanonicalName() + " test;\n" +
            "rule R2_2 when\n" +
            " $m : Message( message == \"Hello World\" )\n" +
            "then\n" +
            " test.done();" +
            "end\n";

    // Java fact class compiled into KJar version 1.0.0.
    private final String javaSrc =
            "package org.drools.mvel.compiler.test;\n" +
            "public class PersonObject {\n" +
            " private String id;\n" +
            " public String getId() {\n" +
            " return id;\n" +
            " }\n" +
            " public void setId(String id) {\n" +
            " this.id = id;\n" +
            " }\n" +
            " public void updateId() {\n" +
            " this.id = \"Person from version 1\";\n" +
            " }\n" +
            "}";

    // Same fact class but compiled into KJar version 1.1.0.
    private final String javaSrc_2 =
            "package org.drools.mvel.compiler.test;\n" +
            "public class PersonObject {\n" +
            " private String id;\n" +
            " public String getId() {\n" +
            " return id;\n" +
            " }\n" +
            " public void setId(String id) {\n" +
            " this.id = id;\n" +
            " }\n" +
            " public void updateId() {\n" +
            " this.id = \"Person from version 2\";\n" +
            " }\n" +
            "}";

    // Rule operating on the in-KJar PersonObject class.
    private final String person_drl =
            "package org.drools.mvel.compiler.test\n" +
            "import org.drools.mvel.compiler.test.PersonObject;\n" +
            "\n" +
            "rule \"Update person's id\"\n" +
            "when\n" +
            " $person : PersonObject()\n" +
            "then\n" +
            " $person.updateId();\n" +
            " delete($person);\n" +
            "end";

    private KieContainer kieContainer;
    private KieSession ksession;
    // Flipped by done(), which is invoked from the post-upgrade rule R2_2.
    private boolean done = false;

    /**
     * DROOLS-919: upgrading a KieContainer from within a firing rule
     * (updateToVersion() is called as a rule consequence) must work, and the
     * rules of the new version must fire in the same session.
     */
    @Test
    public void testKJarUpgrade() throws Exception {
        // DROOLS-919
        KieServices ks = KieServices.Factory.get();
        // Create an in-memory jar for version 1.0.0
        ReleaseId releaseId1 = ks.newReleaseId( "org.kie", "test-upgrade", "1.0.0" );
        KieModule km = createAndDeployJar( ks, releaseId1, drl1, drl2_1 );
        // Create a session and fire rules
        kieContainer = ks.newKieContainer( km.getReleaseId() );
        ksession = kieContainer.newKieSession();
        ksession.setGlobal( "test", this );
        ksession.insert( new Message( "Hi Universe" ) );
        ksession.fireAllRules();
        assertTrue( done );
    }

    /**
     * Verifies that a Java class packaged inside the KJar is replaced on
     * updateToVersion(): a PersonObject instantiated from the container's
     * classloader must pick up the behaviour of the deployed version.
     */
    @Test
    public void testKJarUpgradeWithJavaClass() throws Exception {
        KieServices ks = KieServices.Factory.get();
        String kmodule = "<kmodule xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n" +
                "         xmlns=\"http://www.drools.org/xsd/kmodule\">\n" +
                "  <kbase name=\"kbase1\">\n" +
                "    <ksession name=\"ksession1\" default=\"true\"/>\n" +
                "  </kbase>\n" +
                "</kmodule>";
        // Create an in-memory jar for version 1.0.0
        ReleaseId releaseId1 = ks.newReleaseId( "org.kie", "test-upgrade-java", "1.0.0" );
        Resource javaResource = ResourceFactory.newByteArrayResource(javaSrc.getBytes()).setResourceType( ResourceType.JAVA )
                .setSourcePath( "org/drools/mvel/compiler/test/PersonObject.java" );
        Resource drlResource = ResourceFactory.newByteArrayResource( person_drl.getBytes() ).setResourceType( ResourceType.DRL )
                .setSourcePath( "kbase1/person.drl" );
        KieModule km = createAndDeployJar( ks, kmodule, releaseId1, javaResource, drlResource );
        // Create a session and fire rules
        kieContainer = ks.newKieContainer( km.getReleaseId() );
        ksession = kieContainer.newKieSession();
        Class<?> clazz = kieContainer.getClassLoader().loadClass("org.drools.mvel.compiler.test.PersonObject");
        // Class.newInstance() is deprecated (it rethrows checked constructor
        // exceptions transparently); use the canonical replacement instead.
        Object person = clazz.getDeclaredConstructor().newInstance();
        ksession.insert( person );
        ksession.fireAllRules();
        assertNotNull(person);
        Object personId = valueOf(person, "id");
        assertNotNull(personId);
        assertEquals("Person from version 1", personId);
        ReleaseId releaseId2 = ks.newReleaseId( "org.kie", "test-upgrade-java", "1.1.0" );
        Resource javaResource2 = ResourceFactory.newByteArrayResource(javaSrc_2.getBytes()).setResourceType( ResourceType.JAVA )
                .setSourcePath( "org/drools/mvel/compiler/test/PersonObject.java" );
        Resource drlResource2 = ResourceFactory.newByteArrayResource( person_drl.getBytes() ).setResourceType( ResourceType.DRL )
                .setSourcePath( "kbase1/person.drl" );
        createAndDeployJar( ks, kmodule, releaseId2, javaResource2, drlResource2 );
        // update container
        kieContainer.updateToVersion(releaseId2);
        assertEquals(releaseId2, kieContainer.getReleaseId());
        // now let's run the rules
        ksession = kieContainer.newKieSession();
        person = kieContainer.getClassLoader().loadClass("org.drools.mvel.compiler.test.PersonObject")
                .getDeclaredConstructor().newInstance();
        ksession.insert( person );
        ksession.fireAllRules();
        assertNotNull(person);
        personId = valueOf(person, "id");
        assertNotNull(personId);
        assertEquals("Person from version 2", personId);
    }

    /**
     * Rule-consequence callback: builds and deploys version 1.1.0, upgrades
     * the running container to it, and inserts the fact that triggers the
     * new version's rule.
     */
    public void updateToVersion() {
        KieServices ks = KieServices.Factory.get();
        // Create a new jar for version 1.1.0
        ReleaseId releaseId2 = ks.newReleaseId( "org.kie", "test-upgrade", "1.1.0" );
        KieModule km = createAndDeployJar( ks, releaseId2, drl1, drl2_2 );
        // try to update the container to version 1.1.0
        kieContainer.updateToVersion( releaseId2 );
        // create and use a new session
        ksession.insert( new Message( "Hello World" ) );
    }

    /** Rule-consequence callback recording that the post-upgrade rule fired. */
    public void done() {
        done = true;
    }

    /**
     * Reads a private field by reflection; returns null when the field is
     * missing or inaccessible (callers assert non-null afterwards).
     */
    protected Object valueOf(Object object, String fieldName) {
        try {
            Field field = object.getClass().getDeclaredField(fieldName);
            field.setAccessible(true);
            return field.get(object);
        } catch (Exception e) {
            return null;
        }
    }
}
| apache-2.0 |
NLeSC/Aether | src/nl/esciencecenter/aether/ConnectionFailedException.java | 4470 | package nl.esciencecenter.aether;
import java.io.IOException;
/**
* Container class for a single connection failure.
*/
/**
 * Container class for a single connection failure. A failure can be
 * described either by a full {@code ReceivePortIdentifier} or by the pair
 * (ibis identifier, receive-port name); the accessors fall back from one
 * representation to the other transparently.
 */
public class ConnectionFailedException extends IOException {

    private static final long serialVersionUID = 1L;

    private final ReceivePortIdentifier receivePortIdentifier;
    private final AetherIdentifier ibisIdentifier;
    private final String receivePortName;

    /**
     * Creates an exception for a failed attempt to connect to a named
     * receive port on a specific ibis instance.
     *
     * @param message  the detail message.
     * @param ibis     the Ibis identifier of the target instance.
     * @param portName the name of the target receive port.
     */
    public ConnectionFailedException(String message, AetherIdentifier ibis, String portName) {
        super(message);
        receivePortIdentifier = null;
        ibisIdentifier = ibis;
        receivePortName = portName;
    }

    /**
     * Creates an exception for a failed attempt to connect to a named
     * receive port on a specific ibis instance, with a cause.
     *
     * @param message  the detail message.
     * @param ibis     the Ibis identifier of the target instance.
     * @param portName the name of the target receive port.
     * @param cause    the underlying failure.
     */
    public ConnectionFailedException(String message, AetherIdentifier ibis, String portName,
            Throwable cause) {
        super(message);
        initCause(cause);
        receivePortIdentifier = null;
        ibisIdentifier = ibis;
        receivePortName = portName;
    }

    /**
     * Creates an exception for a failed attempt to connect to a specific
     * receive port, with a cause.
     *
     * @param message the detail message.
     * @param port    the identifier of the target receive port.
     * @param cause   the underlying failure.
     */
    public ConnectionFailedException(String message, ReceivePortIdentifier port, Throwable cause) {
        super(message);
        initCause(cause);
        receivePortIdentifier = port;
        ibisIdentifier = port.ibisIdentifier();
        receivePortName = port.name();
    }

    /**
     * Creates an exception for a failed attempt to connect to a specific
     * receive port.
     *
     * @param message the detail message.
     * @param port    the identifier of the target receive port.
     */
    public ConnectionFailedException(String message, ReceivePortIdentifier port) {
        super(message);
        receivePortIdentifier = port;
        ibisIdentifier = port.ibisIdentifier();
        receivePortName = port.name();
    }

    /**
     * Returns the identifier of the ibis instance hosting the receive port
     * that was the target of the failed connection attempt.
     *
     * @return the ibis identifier.
     */
    public AetherIdentifier ibisIdentifier() {
        // Fall back to the port identifier when only that form was supplied.
        return ibisIdentifier != null ? ibisIdentifier : receivePortIdentifier.ibisIdentifier();
    }

    /**
     * Returns the receive-port identifier of the failed connection attempt,
     * or {@code null} when the attempt was specified by ibis identifier and
     * port name only.
     *
     * @return the receive-port identifier, or {@code null}.
     */
    public ReceivePortIdentifier receivePortIdentifier() {
        return receivePortIdentifier;
    }

    /**
     * Returns the name of the receive port that was the target of the
     * failed connection attempt.
     *
     * @return the receive-port name.
     */
    public String receivePortName() {
        // Fall back to the port identifier when only that form was supplied.
        return receivePortName != null ? receivePortName : receivePortIdentifier.name();
    }
}
| apache-2.0 |
crashlytics/aurora | src/test/java/org/apache/aurora/scheduler/http/AbstractJettyTest.java | 8140 | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aurora.scheduler.http;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.servlet.ServletContextListener;
import javax.ws.rs.core.MediaType;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.net.HostAndPort;
import com.google.common.util.concurrent.RateLimiter;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.json.JSONConfiguration;
import org.apache.aurora.GuavaUtils.ServiceManagerIface;
import org.apache.aurora.common.quantity.Amount;
import org.apache.aurora.common.quantity.Time;
import org.apache.aurora.common.stats.StatsProvider;
import org.apache.aurora.common.testing.easymock.EasyMockTest;
import org.apache.aurora.common.util.BackoffStrategy;
import org.apache.aurora.gen.ServerInfo;
import org.apache.aurora.scheduler.AppStartup;
import org.apache.aurora.scheduler.SchedulerServicesModule;
import org.apache.aurora.scheduler.TierManager;
import org.apache.aurora.scheduler.app.LifecycleModule;
import org.apache.aurora.scheduler.app.ServiceGroupMonitor;
import org.apache.aurora.scheduler.async.AsyncModule;
import org.apache.aurora.scheduler.config.CliOptions;
import org.apache.aurora.scheduler.cron.CronJobManager;
import org.apache.aurora.scheduler.discovery.ServiceInstance;
import org.apache.aurora.scheduler.discovery.ServiceInstance.Endpoint;
import org.apache.aurora.scheduler.http.api.GsonMessageBodyHandler;
import org.apache.aurora.scheduler.offers.OfferManager;
import org.apache.aurora.scheduler.scheduling.RescheduleCalculator;
import org.apache.aurora.scheduler.scheduling.TaskGroups;
import org.apache.aurora.scheduler.scheduling.TaskGroups.TaskGroupsSettings;
import org.apache.aurora.scheduler.scheduling.TaskScheduler;
import org.apache.aurora.scheduler.state.LockManager;
import org.apache.aurora.scheduler.stats.StatsModule;
import org.apache.aurora.scheduler.storage.Storage;
import org.apache.aurora.scheduler.storage.entities.IServerInfo;
import org.apache.aurora.scheduler.storage.testing.StorageTestUtil;
import org.apache.aurora.scheduler.testing.FakeStatsProvider;
import org.junit.Before;
import static org.apache.aurora.scheduler.http.JettyServerModule.makeServletContextListener;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.junit.Assert.assertNotNull;
/**
* TODO(wfarner): Break apart ServletModule so test setup isn't so involved.
* TODO(wfarner): Come up with an approach for these tests that doesn't require starting an actual
* HTTP server for each test case.
*
*/
public abstract class AbstractJettyTest extends EasyMockTest {
  private Injector injector;
  protected StorageTestUtil storage;
  protected HostAndPort httpServer;
  // Scheduler instances reported by the mocked ServiceGroupMonitor; mutated by
  // setLeadingScheduler()/unsetLeadingScheduler() to simulate leader changes.
  private AtomicReference<ImmutableSet<ServiceInstance>> schedulers;
  /**
   * Subclasses should override with a module that configures the servlets they are testing.
   *
   * @return A module used in the creation of the servlet container's child injector.
   */
  protected Module getChildServletModule() {
    return Modules.EMPTY_MODULE;
  }
  @Before
  public void setUpBase() throws Exception {
    storage = new StorageTestUtil(this);
    ServiceGroupMonitor serviceGroupMonitor = createMock(ServiceGroupMonitor.class);
    CliOptions options = new CliOptions();
    injector = Guice.createInjector(
        new StatsModule(options.stats),
        new LifecycleModule(),
        new SchedulerServicesModule(),
        new AsyncModule(options.async),
        new AbstractModule() {
          // Creates a mock, binds it, and hands it back so it can also be
          // passed around during module configuration.
          <T> T bindMock(Class<T> clazz) {
            T mock = createMock(clazz);
            bind(clazz).toInstance(mock);
            return mock;
          }
          @Override
          protected void configure() {
            bind(StatsProvider.class).toInstance(new FakeStatsProvider());
            bind(Storage.class).toInstance(storage.storage);
            bind(IServerInfo.class).toInstance(IServerInfo.build(new ServerInfo()
                .setClusterName("unittest")
                .setStatsUrlPrefix("none")));
            bind(TaskGroupsSettings.class).toInstance(
                new TaskGroupsSettings(
                    Amount.of(1L, Time.MILLISECONDS),
                    bindMock(BackoffStrategy.class),
                    RateLimiter.create(1000),
                    5));
            bind(ServiceGroupMonitor.class).toInstance(serviceGroupMonitor);
            bindMock(CronJobManager.class);
            bindMock(LockManager.class);
            bindMock(OfferManager.class);
            bindMock(RescheduleCalculator.class);
            bindMock(TaskScheduler.class);
            bindMock(TierManager.class);
            bindMock(Thread.UncaughtExceptionHandler.class);
            bindMock(TaskGroups.TaskGroupBatchWorker.class);
            bind(ServletContextListener.class).toProvider(() -> {
              return makeServletContextListener(injector, getChildServletModule());
            });
          }
        },
        new JettyServerModule(options, false));
    schedulers = new AtomicReference<>(ImmutableSet.of());
    serviceGroupMonitor.start();
    expectLastCall();
    // Every poll of the group monitor reflects the current contents of
    // `schedulers`, so tests can change leadership mid-test.
    expect(serviceGroupMonitor.get()).andAnswer(schedulers::get).anyTimes();
  }
  /**
   * Registers a single scheduler at the given endpoint, making it the leader.
   *
   * @param host Leader host name.
   * @param port Leader port.
   */
  protected void setLeadingScheduler(String host, int port) {
    schedulers.set(ImmutableSet.of(
        new ServiceInstance(new Endpoint(host, port), ImmutableMap.of())));
  }
  /**
   * Clears the scheduler set, simulating the absence of an elected leader.
   */
  protected void unsetLeadingScheduler() {
    schedulers.set(ImmutableSet.of());
  }
  /**
   * @deprecated Misspelled method name retained for compatibility with
   *             existing subclasses. Use {@link #unsetLeadingScheduler()}.
   */
  @Deprecated
  protected void unsetLeadingSchduler() {
    unsetLeadingScheduler();
  }
  /**
   * Switches mocks into replay mode and starts the scheduler services,
   * including the HTTP server, registering a teardown that stops them. By
   * default this instance is then registered as the leading scheduler.
   */
  protected void replayAndStart() {
    control.replay();
    try {
      ServiceManagerIface service =
          injector.getInstance(Key.get(ServiceManagerIface.class, AppStartup.class));
      service.startAsync().awaitHealthy();
      addTearDown(() -> {
        service.stopAsync().awaitStopped(5L, TimeUnit.SECONDS);
      });
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
    httpServer = injector.getInstance(HttpService.class).getAddress();
    // By default we'll set this instance to be the leader.
    setLeadingScheduler(httpServer.getHost(), httpServer.getPort());
  }
  /** Builds an absolute URL to {@code path} on the test HTTP server. */
  protected String makeUrl(String path) {
    return String.format("http://%s:%s%s", httpServer.getHost(), httpServer.getPort(), path);
  }
  /** Returns a request builder with no custom client configuration applied. */
  protected WebResource.Builder getPlainRequestBuilder(String path) {
    assertNotNull("HTTP server must be started first", httpServer);
    Client client = Client.create(new DefaultClientConfig());
    return client.resource(makeUrl(path)).getRequestBuilder();
  }
  /**
   * Returns a JSON-enabled request builder (POJO mapping plus the Gson message
   * body handler) that does not follow redirects, so redirect behavior can be
   * asserted directly.
   */
  protected WebResource.Builder getRequestBuilder(String path) {
    assertNotNull("HTTP server must be started first", httpServer);
    ClientConfig config = new DefaultClientConfig();
    config.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
    config.getClasses().add(GsonMessageBodyHandler.class);
    Client client = Client.create(config);
    // Disable redirects so we can unit test them.
    client.setFollowRedirects(false);
    return client.resource(makeUrl(path)).getRequestBuilder().accept(MediaType.APPLICATION_JSON);
  }
}
| apache-2.0 |
ContaAzul/redisson | redisson/src/main/java/org/redisson/reactive/RedissonBatchReactive.java | 7240 | /**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.reactive;
import java.util.List;
import java.util.UUID;
import org.reactivestreams.Publisher;
import org.redisson.api.RAtomicLongReactive;
import org.redisson.api.RBatchReactive;
import org.redisson.api.RBitSetReactive;
import org.redisson.api.RBlockingQueueReactive;
import org.redisson.api.RBucketReactive;
import org.redisson.api.RDequeReactive;
import org.redisson.api.RHyperLogLogReactive;
import org.redisson.api.RKeysReactive;
import org.redisson.api.RLexSortedSetReactive;
import org.redisson.api.RListReactive;
import org.redisson.api.RMapCacheReactive;
import org.redisson.api.RMapReactive;
import org.redisson.api.RQueueReactive;
import org.redisson.api.RScoredSortedSetReactive;
import org.redisson.api.RScriptReactive;
import org.redisson.api.RSetCacheReactive;
import org.redisson.api.RSetReactive;
import org.redisson.api.RTopicReactive;
import org.redisson.api.RedissonReactiveClient;
import org.redisson.client.codec.Codec;
import org.redisson.command.CommandBatchService;
import org.redisson.connection.ConnectionManager;
import org.redisson.eviction.EvictionScheduler;
/**
 * Reactive Redis batch. Every object handed out by this class shares a single
 * {@link CommandBatchService}, so commands issued through those objects are
 * accumulated instead of being sent immediately; {@link #execute()} flushes
 * the accumulated commands to Redis and publishes the list of results.
 */
public class RedissonBatchReactive implements RBatchReactive {
    // Shared with cache-backed structures (map/set caches) for entry eviction.
    private final EvictionScheduler evictionScheduler;
    // Collects every command issued through objects created by this batch.
    private final CommandBatchService executorService;
    // Identifier passed through to map-cache structures.
    private final UUID id;
    /**
     * Creates a batch whose objects all write into one shared
     * {@link CommandBatchService}.
     *
     * @param id identifier forwarded to map caches
     * @param evictionScheduler eviction scheduler shared by cache structures
     * @param connectionManager connection manager backing the batch service
     */
    public RedissonBatchReactive(UUID id, EvictionScheduler evictionScheduler, ConnectionManager connectionManager) {
        this.id = id;
        this.evictionScheduler = evictionScheduler;
        this.executorService = new CommandBatchService(connectionManager);
    }
    // -----------------------------------------------------------------------
    // Factory methods. Each simply constructs the corresponding reactive
    // object bound to the shared batch executor (and, where applicable, the
    // supplied codec, the eviction scheduler, or the instance id). Contract
    // documentation for these lives on the RBatchReactive interface.
    // -----------------------------------------------------------------------
    @Override
    public <V> RBucketReactive<V> getBucket(String name) {
        return new RedissonBucketReactive<V>(executorService, name);
    }
    @Override
    public <V> RBucketReactive<V> getBucket(String name, Codec codec) {
        return new RedissonBucketReactive<V>(codec, executorService, name);
    }
    @Override
    public <V> RHyperLogLogReactive<V> getHyperLogLog(String name) {
        return new RedissonHyperLogLogReactive<V>(executorService, name);
    }
    @Override
    public <V> RHyperLogLogReactive<V> getHyperLogLog(String name, Codec codec) {
        return new RedissonHyperLogLogReactive<V>(codec, executorService, name);
    }
    @Override
    public <V> RListReactive<V> getList(String name) {
        return new RedissonListReactive<V>(executorService, name);
    }
    @Override
    public <V> RListReactive<V> getList(String name, Codec codec) {
        return new RedissonListReactive<V>(codec, executorService, name);
    }
    @Override
    public <K, V> RMapReactive<K, V> getMap(String name) {
        return new RedissonMapReactive<K, V>(executorService, name);
    }
    @Override
    public <K, V> RMapReactive<K, V> getMap(String name, Codec codec) {
        return new RedissonMapReactive<K, V>(codec, executorService, name);
    }
    @Override
    public <K, V> RMapCacheReactive<K, V> getMapCache(String name, Codec codec) {
        return new RedissonMapCacheReactive<K, V>(id, evictionScheduler, codec, executorService, name);
    }
    @Override
    public <K, V> RMapCacheReactive<K, V> getMapCache(String name) {
        return new RedissonMapCacheReactive<K, V>(id, evictionScheduler, executorService, name);
    }
    @Override
    public <V> RSetReactive<V> getSet(String name) {
        return new RedissonSetReactive<V>(executorService, name);
    }
    @Override
    public <V> RSetReactive<V> getSet(String name, Codec codec) {
        return new RedissonSetReactive<V>(codec, executorService, name);
    }
    @Override
    public <M> RTopicReactive<M> getTopic(String name) {
        return new RedissonTopicReactive<M>(executorService, name);
    }
    @Override
    public <M> RTopicReactive<M> getTopic(String name, Codec codec) {
        return new RedissonTopicReactive<M>(codec, executorService, name);
    }
    @Override
    public <V> RQueueReactive<V> getQueue(String name) {
        return new RedissonQueueReactive<V>(executorService, name);
    }
    @Override
    public <V> RQueueReactive<V> getQueue(String name, Codec codec) {
        return new RedissonQueueReactive<V>(codec, executorService, name);
    }
    @Override
    public <V> RBlockingQueueReactive<V> getBlockingQueue(String name) {
        return new RedissonBlockingQueueReactive<V>(executorService, name);
    }
    @Override
    public <V> RBlockingQueueReactive<V> getBlockingQueue(String name, Codec codec) {
        return new RedissonBlockingQueueReactive<V>(codec, executorService, name);
    }
    @Override
    public <V> RDequeReactive<V> getDequeReactive(String name) {
        return new RedissonDequeReactive<V>(executorService, name);
    }
    @Override
    public <V> RDequeReactive<V> getDequeReactive(String name, Codec codec) {
        return new RedissonDequeReactive<V>(codec, executorService, name);
    }
    @Override
    public RAtomicLongReactive getAtomicLongReactive(String name) {
        return new RedissonAtomicLongReactive(executorService, name);
    }
    @Override
    public <V> RSetCacheReactive<V> getSetCache(String name) {
        return new RedissonSetCacheReactive<V>(evictionScheduler, executorService, name);
    }
    @Override
    public <V> RSetCacheReactive<V> getSetCache(String name, Codec codec) {
        return new RedissonSetCacheReactive<V>(codec, evictionScheduler, executorService, name);
    }
    @Override
    public <V> RScoredSortedSetReactive<V> getScoredSortedSet(String name) {
        return new RedissonScoredSortedSetReactive<V>(executorService, name);
    }
    @Override
    public <V> RScoredSortedSetReactive<V> getScoredSortedSet(String name, Codec codec) {
        return new RedissonScoredSortedSetReactive<V>(codec, executorService, name);
    }
    @Override
    public RLexSortedSetReactive getLexSortedSet(String name) {
        return new RedissonLexSortedSetReactive(executorService, name);
    }
    @Override
    public RBitSetReactive getBitSet(String name) {
        return new RedissonBitSetReactive(executorService, name);
    }
    @Override
    public RScriptReactive getScript() {
        return new RedissonScriptReactive(executorService);
    }
    @Override
    public RKeysReactive getKeys() {
        return new RedissonKeysReactive(executorService);
    }
    /**
     * Sends all accumulated commands to Redis.
     *
     * @return publisher emitting the results of the batched commands
     */
    @Override
    public Publisher<List<?>> execute() {
        return new NettyFuturePublisher<List<?>>(executorService.executeAsync());
    }
    // Enables RedissonReference support on the underlying batch executor.
    public void enableRedissonReferenceSupport(RedissonReactiveClient redissonReactive) {
        this.executorService.enableRedissonReferenceSupport(redissonReactive);
    }
}
| apache-2.0 |
Netflix/Priam | priam/src/main/java/com/netflix/priam/aws/SDBInstanceFactory.java | 4573 | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.priam.aws;
import com.amazonaws.AmazonServiceException;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.priam.identity.IPriamInstanceFactory;
import com.netflix.priam.identity.PriamInstance;
import com.netflix.priam.identity.config.InstanceInfo;
import java.util.*;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * SimpleDB based instance factory. Requires the 'InstanceIdentity' SimpleDB
 * domain to be created ahead of time.
 */
@Singleton
public class SDBInstanceFactory implements IPriamInstanceFactory {
    private static final Logger logger = LoggerFactory.getLogger(SDBInstanceFactory.class);
    // Age (in millis) beyond which an existing "-dead" record is considered
    // stale and may be purged before a replacement record is registered.
    private static final long DEAD_RECORD_STALENESS_MILLIS = 3 * 60 * 1000;
    private final SDBInstanceData dao;
    private final InstanceInfo instanceInfo;
    @Inject
    public SDBInstanceFactory(SDBInstanceData dao, InstanceInfo instanceInfo) {
        this.dao = dao;
        this.instanceInfo = instanceInfo;
    }
    /** Returns all known instances of the app, ordered by instance id. */
    @Override
    public ImmutableSet<PriamInstance> getAllIds(String appName) {
        return ImmutableSet.copyOf(
                dao.getAllIds(appName)
                        .stream()
                        .sorted(Comparator.comparingInt(PriamInstance::getId))
                        .collect(Collectors.toList()));
    }
    /** Looks up a single instance by app name, data center and id. */
    @Override
    public PriamInstance getInstance(String appName, String dc, int id) {
        return dao.getInstance(appName, dc, id);
    }
    /**
     * Registers a new instance record. For "-dead" apps, a sufficiently stale
     * pre-existing record with the same id is purged first (best effort).
     *
     * @return the newly registered instance
     * @throws RuntimeException if registration fails
     */
    @Override
    public PriamInstance create(
            String app,
            int id,
            String instanceID,
            String hostname,
            String ip,
            String rac,
            Map<String, Object> volumes,
            String token) {
        try {
            PriamInstance ins =
                    makePriamInstance(app, id, instanceID, hostname, ip, rac, volumes, token);
            // remove old data node which are dead.
            if (app.endsWith("-dead")) {
                try {
                    PriamInstance oldData = dao.getInstance(app, instanceInfo.getRegion(), id);
                    // clean up a very old data...
                    if (null != oldData
                            && oldData.getUpdatetime()
                                    < (System.currentTimeMillis() - DEAD_RECORD_STALENESS_MILLIS))
                        dao.deregisterInstance(oldData);
                } catch (Exception ex) {
                    // Best effort only; a failure to purge must not block registration.
                    logger.error(ex.getMessage(), ex);
                }
            }
            dao.registerInstance(ins);
            return ins;
        } catch (Exception e) {
            // Log the throwable too so the stack trace is preserved (the
            // original logged only the message).
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }
    /** Removes the instance record from SimpleDB. */
    @Override
    public void delete(PriamInstance inst) {
        try {
            dao.deregisterInstance(inst);
        } catch (AmazonServiceException e) {
            throw new RuntimeException("Unable to deregister priam instance", e);
        }
    }
    /** Replaces the stored record {@code orig} with {@code inst}. */
    @Override
    public void update(PriamInstance orig, PriamInstance inst) {
        try {
            dao.updateInstance(orig, inst);
        } catch (AmazonServiceException e) {
            throw new RuntimeException("Unable to update/create priam instance", e);
        }
    }
    // Builds a PriamInstance in the local region, defaulting volumes to empty.
    private PriamInstance makePriamInstance(
            String app,
            int id,
            String instanceID,
            String hostname,
            String ip,
            String rac,
            Map<String, Object> volumes,
            String token) {
        Map<String, Object> v = (volumes == null) ? new HashMap<>() : volumes;
        PriamInstance ins = new PriamInstance();
        ins.setApp(app);
        ins.setRac(rac);
        ins.setHost(hostname);
        ins.setHostIP(ip);
        ins.setId(id);
        ins.setInstanceId(instanceID);
        ins.setDC(instanceInfo.getRegion());
        ins.setToken(token);
        ins.setVolumes(v);
        return ins;
    }
}
| apache-2.0 |
BrentDouglas/chainlink | core/src/main/java/io/machinecode/chainlink/core/jsl/fluent/execution/FluentExecution.java | 1004 | /*
* Copyright 2015 Brent Douglas and other contributors
* as indicated by the @author tags. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.machinecode.chainlink.core.jsl.fluent.execution;
import io.machinecode.chainlink.spi.jsl.inherit.execution.InheritableExecution;
/**
 * Fluent-API counterpart of a JSL execution element. This is a marker
 * interface: it declares no members of its own and exists so that fluent
 * execution types share a common self-referential supertype carrying the
 * inheritance behaviour of {@link InheritableExecution}.
 *
 * @param <T> the concrete fluent execution type (self-referential generic)
 *
 * @author <a href="mailto:brent.n.douglas@gmail.com">Brent Douglas</a>
 * @since 1.0
 */
public interface FluentExecution<T extends FluentExecution<T>> extends InheritableExecution<T> {
}
| apache-2.0 |
ryslanzaharov/rzaharov | chapter_005/src/test/java/ru/job4j/set/simplehashset/SimpleHashSetTest.java | 1375 | package ru.job4j.set.simplehashset;
import org.junit.Test;
import static org.junit.Assert.assertThat;
import static org.hamcrest.core.Is.is;
public class SimpleHashSetTest {
    /**
     * A set already holding the values 1..10 must reject a duplicate add.
     */
    @Test
    public void whenAddDifferentsElementsByHashCode() {
        final SimpleHashSet<Integer> set = new SimpleHashSet<>(10);
        for (int value = 1; value <= 10; value++) {
            set.add(value);
        }
        // 2 is already present, so the add must report failure.
        assertThat(set.add(2), is(false));
    }
    /**
     * Removing a value must leave its bucket empty.
     */
    @Test
    public void whenDeleteElementsByHashCode() {
        final SimpleHashSet<Integer> set = new SimpleHashSet<>(7);
        for (final int value : new int[]{1, 2, 3, 4, 1111, 5, 6, 1111}) {
            set.add(value);
        }
        set.remove(2);
        // After removal the backing slot for 2 should be cleared.
        final Object absent = null;
        assertThat(set.keys[2], is(absent));
    }
} | apache-2.0 |
sajavadi/pinot | thirdeye/thirdeye-hadoop/src/main/java/com/linkedin/thirdeye/hadoop/derivedcolumn/transformation/DerivedColumnTransformationPhaseJob.java | 15693 | /**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.thirdeye.hadoop.derivedcolumn.transformation;
import static com.linkedin.thirdeye.hadoop.derivedcolumn.transformation.DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_INPUT_PATH;
import static com.linkedin.thirdeye.hadoop.derivedcolumn.transformation.DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_PATH;
import static com.linkedin.thirdeye.hadoop.derivedcolumn.transformation.DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_SCHEMA;
import static com.linkedin.thirdeye.hadoop.derivedcolumn.transformation.DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_THIRDEYE_CONFIG;
import static com.linkedin.thirdeye.hadoop.derivedcolumn.transformation.DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_TOPK_PATH;
import java.io.DataInput;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import com.linkedin.thirdeye.hadoop.config.MetricSpec;
import com.linkedin.thirdeye.hadoop.config.MetricType;
import com.linkedin.thirdeye.hadoop.config.ThirdEyeConfigProperties;
import com.linkedin.thirdeye.hadoop.config.ThirdEyeConstants;
import com.linkedin.thirdeye.hadoop.config.TopKDimensionToMetricsSpec;
import com.linkedin.thirdeye.hadoop.config.TopkWhitelistSpec;
import com.linkedin.thirdeye.hadoop.config.ThirdEyeConfig;
import com.linkedin.thirdeye.hadoop.topk.TopKDimensionValues;
import com.linkedin.thirdeye.hadoop.util.ThirdeyeAvroUtils;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.SchemaBuilder.BaseFieldTypeBuilder;
import org.apache.avro.SchemaBuilder.FieldAssembler;
import org.apache.avro.SchemaBuilder.RecordBuilder;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.avro.mapreduce.AvroKeyOutputFormat;
import org.apache.avro.mapreduce.AvroMultipleOutputs;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* This phase will add a new column for every column that has topk config
* The new column added will be called "column_topk" (containing only topk values plus any whitelist)
* and "column" will contain all values with whitelist applied
*/
public class DerivedColumnTransformationPhaseJob extends Configured {
private static final Logger LOGGER = LoggerFactory.getLogger(DerivedColumnTransformationPhaseJob.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private String name;
private Properties props;
/**
* @param name
* @param props
*/
public DerivedColumnTransformationPhaseJob(String name, Properties props) {
super(new Configuration());
this.name = name;
this.props = props;
}
public static class DerivedColumnTransformationPhaseMapper
extends Mapper<AvroKey<GenericRecord>, NullWritable, AvroKey<GenericRecord>, NullWritable> {
private Schema outputSchema;
private ThirdEyeConfig thirdeyeConfig;
private DerivedColumnTransformationPhaseConfig config;
private List<String> dimensionsNames;
private List<String> metricNames;
private List<MetricType> metricTypes;
private TopKDimensionValues topKDimensionValues;
private Map<String, Set<String>> topKDimensionsMap;
private Map<String, Set<String>> whitelist;
private String timeColumnName;
private AvroMultipleOutputs avroMultipleOutputs;
String inputFileName;
@Override
public void setup(Context context) throws IOException, InterruptedException {
LOGGER.info("DerivedColumnTransformationPhaseJob.DerivedColumnTransformationPhaseMapper.setup()");
Configuration configuration = context.getConfiguration();
FileSystem fs = FileSystem.get(configuration);
FileSplit fileSplit = (FileSplit) context.getInputSplit();
inputFileName = fileSplit.getPath().getName();
inputFileName = inputFileName.substring(0, inputFileName.lastIndexOf(ThirdEyeConstants.AVRO_SUFFIX));
LOGGER.info("split name:" + inputFileName);
thirdeyeConfig = OBJECT_MAPPER.readValue(configuration.get(DERIVED_COLUMN_TRANSFORMATION_PHASE_THIRDEYE_CONFIG.toString()), ThirdEyeConfig.class);
config = DerivedColumnTransformationPhaseConfig.fromThirdEyeConfig(thirdeyeConfig);
dimensionsNames = config.getDimensionNames();
metricNames = config.getMetricNames();
metricTypes = config.getMetricTypes();
timeColumnName = config.getTimeColumnName();
whitelist = config.getWhitelist();
outputSchema = new Schema.Parser().parse(configuration.get(DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_SCHEMA.toString()));
Path topKPath = new Path(configuration.get(DERIVED_COLUMN_TRANSFORMATION_PHASE_TOPK_PATH.toString())
+ File.separator + ThirdEyeConstants.TOPK_VALUES_FILE);
topKDimensionValues = new TopKDimensionValues();
if (fs.exists(topKPath)) {
FSDataInputStream topkValuesStream = fs.open(topKPath);
topKDimensionValues = OBJECT_MAPPER.readValue((DataInput) topkValuesStream, TopKDimensionValues.class);
topkValuesStream.close();
}
topKDimensionsMap = topKDimensionValues.getTopKDimensions();
avroMultipleOutputs = new AvroMultipleOutputs(context);
}
@Override
public void map(AvroKey<GenericRecord> key, NullWritable value, Context context)
throws IOException, InterruptedException {
// input record
GenericRecord inputRecord = key.datum();
// output record
GenericRecord outputRecord = new Record(outputSchema);
// dimensions
for (String dimension : dimensionsNames) {
String dimensionName = dimension;
String dimensionValue = ThirdeyeAvroUtils.getDimensionFromRecord(inputRecord, dimension);
// add original dimension value with whitelist applied
String whitelistDimensionValue = dimensionValue;
if (whitelist != null) {
Set<String> whitelistDimensions = whitelist.get(dimensionName);
if (CollectionUtils.isNotEmpty(whitelistDimensions)) {
// whitelist config exists for this dimension but value not present in whitelist
if (!whitelistDimensions.contains(dimensionValue)) {
whitelistDimensionValue = ThirdEyeConstants.OTHER;
}
}
}
outputRecord.put(dimensionName, whitelistDimensionValue);
// add column for topk, if topk config exists for that column, plus any whitelist values
if (topKDimensionsMap.containsKey(dimensionName)) {
Set<String> topKDimensionValues = topKDimensionsMap.get(dimensionName);
// if topk config exists for that dimension
if (CollectionUtils.isNotEmpty(topKDimensionValues)) {
String topkDimensionName = dimensionName + ThirdEyeConstants.TOPK_DIMENSION_SUFFIX;
String topkDimensionValue = dimensionValue;
// topk config exists for this dimension, but value not present in topk or whitelist
if (!topKDimensionValues.contains(dimensionValue) &&
(whitelist == null || whitelist.get(dimensionName) == null
|| !whitelist.get(dimensionName).contains(dimensionValue))) {
topkDimensionValue = ThirdEyeConstants.OTHER;
}
outputRecord.put(topkDimensionName, topkDimensionValue);
}
}
}
// metrics
for (int i = 0; i < metricNames.size(); i ++) {
String metricName = metricNames.get(i);
MetricType metricType = metricTypes.get(i);
outputRecord.put(metricName, ThirdeyeAvroUtils.getMetricFromRecord(inputRecord, metricName, metricType));
}
// time
outputRecord.put(timeColumnName, ThirdeyeAvroUtils.getMetricFromRecord(inputRecord, timeColumnName));
AvroKey<GenericRecord> outputKey = new AvroKey<GenericRecord>(outputRecord);
avroMultipleOutputs.write(outputKey, NullWritable.get(), inputFileName);
}
@Override
public void cleanup(Context context) throws IOException, InterruptedException {
avroMultipleOutputs.close();
}
}
public Job run() throws Exception {
Job job = Job.getInstance(getConf());
job.setJobName(name);
job.setJarByClass(DerivedColumnTransformationPhaseJob.class);
Configuration configuration = job.getConfiguration();
FileSystem fs = FileSystem.get(configuration);
// Input Path
String inputPathDir = getAndSetConfiguration(configuration, DERIVED_COLUMN_TRANSFORMATION_PHASE_INPUT_PATH);
LOGGER.info("Input path dir: " + inputPathDir);
for (String inputPath : inputPathDir.split(",")) {
LOGGER.info("Adding input:" + inputPath);
Path input = new Path(inputPath);
FileInputFormat.addInputPath(job, input);
}
// Topk path
String topkPath = getAndSetConfiguration(configuration, DERIVED_COLUMN_TRANSFORMATION_PHASE_TOPK_PATH);
LOGGER.info("Topk path : " + topkPath);
// Output path
Path outputPath = new Path(getAndSetConfiguration(configuration, DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_PATH));
LOGGER.info("Output path dir: " + outputPath.toString());
if (fs.exists(outputPath)) {
fs.delete(outputPath, true);
}
FileOutputFormat.setOutputPath(job, outputPath);
// Schema
Schema avroSchema = ThirdeyeAvroUtils.getSchema(inputPathDir);
LOGGER.info("Schema : {}", avroSchema.toString(true));
// ThirdEyeConfig
String metricTypesProperty = ThirdeyeAvroUtils.getMetricTypesProperty(
props.getProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_NAMES.toString()),
props.getProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_TYPES.toString()), avroSchema);
props.setProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_TYPES.toString(), metricTypesProperty);
ThirdEyeConfig thirdeyeConfig = ThirdEyeConfig.fromProperties(props);
job.getConfiguration().set(DERIVED_COLUMN_TRANSFORMATION_PHASE_THIRDEYE_CONFIG.toString(),
OBJECT_MAPPER.writeValueAsString(thirdeyeConfig));
LOGGER.info("ThirdEyeConfig {}", thirdeyeConfig.encode());
// New schema
Schema outputSchema = newSchema(thirdeyeConfig);
job.getConfiguration().set(DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_SCHEMA.toString(), outputSchema.toString());
// Map config
job.setMapperClass(DerivedColumnTransformationPhaseMapper.class);
job.setInputFormatClass(AvroKeyInputFormat.class);
job.setMapOutputKeyClass(AvroKey.class);
job.setMapOutputValueClass(NullWritable.class);
AvroJob.setOutputKeySchema(job, outputSchema);
LazyOutputFormat.setOutputFormatClass(job, AvroKeyOutputFormat.class);
AvroMultipleOutputs.addNamedOutput(job, "avro", AvroKeyOutputFormat.class, outputSchema);
job.setNumReduceTasks(0);
job.waitForCompletion(true);
return job;
}
public Schema newSchema(ThirdEyeConfig thirdeyeConfig) {
Schema outputSchema = null;
Set<String> topKTransformDimensionSet = new HashSet<>();
TopkWhitelistSpec topkWhitelist = thirdeyeConfig.getTopKWhitelist();
// gather topk columns
if (topkWhitelist != null) {
List<TopKDimensionToMetricsSpec> topKDimensionToMetricsSpecs = topkWhitelist.getTopKDimensionToMetricsSpec();
if (topKDimensionToMetricsSpecs != null) {
for (TopKDimensionToMetricsSpec topKDimensionToMetricsSpec : topKDimensionToMetricsSpecs) {
topKTransformDimensionSet.add(topKDimensionToMetricsSpec.getDimensionName());
}
}
}
RecordBuilder<Schema> recordBuilder = SchemaBuilder.record(thirdeyeConfig.getCollection());
FieldAssembler<Schema> fieldAssembler = recordBuilder.fields();
// add new column for topk columns
for (String dimension : thirdeyeConfig.getDimensionNames()) {
fieldAssembler = fieldAssembler.name(dimension).type().nullable().stringType().noDefault();
if (topKTransformDimensionSet.contains(dimension)) {
fieldAssembler = fieldAssembler.name(dimension + ThirdEyeConstants.TOPK_DIMENSION_SUFFIX).type().nullable().stringType().noDefault();
}
}
for (MetricSpec metricSpec : thirdeyeConfig.getMetrics()) {
String metric = metricSpec.getName();
MetricType metricType = metricSpec.getType();
BaseFieldTypeBuilder<Schema> baseFieldTypeBuilder = fieldAssembler.name(metric).type().nullable();
switch (metricType) {
case SHORT:
case INT:
fieldAssembler = baseFieldTypeBuilder.intType().noDefault();
break;
case FLOAT:
fieldAssembler = baseFieldTypeBuilder.floatType().noDefault();
break;
case DOUBLE:
fieldAssembler = baseFieldTypeBuilder.doubleType().noDefault();
break;
case LONG:
default:
fieldAssembler = baseFieldTypeBuilder.longType().noDefault();
}
}
String timeColumnName = thirdeyeConfig.getTime().getColumnName();
fieldAssembler = fieldAssembler.name(timeColumnName).type().longType().noDefault();
outputSchema = fieldAssembler.endRecord();
LOGGER.info("New schema {}", outputSchema.toString(true));
return outputSchema;
}
private String getAndSetConfiguration(Configuration configuration,
DerivedColumnTransformationPhaseConstants constant) {
String value = getAndCheck(constant.toString());
configuration.set(constant.toString(), value);
return value;
}
private String getAndCheck(String propName) {
String propValue = props.getProperty(propName);
if (propValue == null) {
throw new IllegalArgumentException(propName + " required property");
}
return propValue;
}
public static void main(String[] args) throws Exception {
if (args.length != 1) {
throw new IllegalArgumentException("usage: config.properties");
}
Properties props = new Properties();
props.load(new FileInputStream(args[0]));
DerivedColumnTransformationPhaseJob job = new DerivedColumnTransformationPhaseJob("derived_column_transformation_job", props);
job.run();
}
}
| apache-2.0 |
draekko/tray | library/src/androidTest/java/net/grandcentrix/tray/mock/TestTrayModulePreferences.java | 1600 | /*
* Copyright (C) 2015 grandcentrix GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.grandcentrix.tray.mock;
import net.grandcentrix.tray.TrayPreferences;
import net.grandcentrix.tray.core.PreferenceStorage;
import net.grandcentrix.tray.core.TrayItem;
import net.grandcentrix.tray.core.TrayStorage;
import android.content.Context;
import androidx.annotation.NonNull;
/**
 * Test double for {@link TrayPreferences} that pins the module version to 1 and
 * exposes the otherwise-protected backing storage so tests can inspect persisted
 * items directly.
 *
 * Created by pascalwelsch on 2/26/15.
 */
public class TestTrayModulePreferences extends TrayPreferences {
    /** Creates module preferences (version 1) backed by the default storage type. */
    public TestTrayModulePreferences(final Context context, final String module) {
        super(context, module, 1);
    }
    /** Creates module preferences (version 1) with an explicit {@link TrayStorage.Type}. */
    public TestTrayModulePreferences(@NonNull final Context context,
            @NonNull final String module, final TrayStorage.Type type) {
        super(context, module, 1, type);
    }
    /** Exposes the backing {@link PreferenceStorage} for assertions in tests. */
    public PreferenceStorage<TrayItem> getInternalStorage() {
        return getStorage();
    }
    /** Intentionally a no-op: nothing to initialize for this mock. */
    @Override
    protected void onCreate(final int newVersion) {
    }
    /** Intentionally a no-op: version migrations are not exercised by this mock. */
    @Override
    protected void onUpgrade(final int oldVersion, final int newVersion) {
    }
}
| apache-2.0 |
Epi-Info/Epi-Info-Android | src/main/java/gov/cdc/epiinfo/interpreter/EnterRule.java | 42951 | package gov.cdc.epiinfo.interpreter;
import gov.cdc.epiinfo.interpreter.CSymbol.DataType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import com.creativewidgetworks.goldparser.engine.Reduction;
import com.creativewidgetworks.goldparser.engine.Token;
import com.creativewidgetworks.goldparser.engine.enums.SymbolType;
import com.creativewidgetworks.goldparser.parser.GOLDParser;
import com.creativewidgetworks.goldparser.parser.ProcessRule;
/*
@ProcessRule(rule={
"<Expression> ::= <Expression> > <Add Exp>",
"<Expression> ::= <Expression> < <Add Exp>",
"<Expression> ::= <Expression> <= <Add Exp>",
"<Expression> ::= <Expression> >= <Add Exp>",
"<Expression> ::= <Expression> '==' <Add Exp>",
"<Expression> ::= <Expression> <> <Add Exp>",
"<Expression> ::= <Add Exp>",
"<Add Exp> ::= <Add Exp> '+' <Mult Exp>",
"<Add Exp> ::= <Add Exp> - <Mult Exp>",
"<Add Exp> ::= <Add Exp> & <Mult Exp>",
"<Add Exp> ::= <Mult Exp>",
"<Mult Exp> ::= <Mult Exp> '*' <Negate Exp>",
"<Mult Exp> ::= <Mult Exp> '/' <Negate Exp>",
"<Mult Exp> ::= <Negate Exp>",
"<Negate Exp> ::= <Value>"
})*/
public abstract class EnterRule
{
public Rule_Context Context;
	/** No-arg constructor: {@code Context} is intentionally left unset here. */
	public EnterRule()
	{
		//this.Context = new Rule_Context();
	}
	/** Binds this rule to the shared interpreter context used while executing check code. */
	public EnterRule(Rule_Context pContext)
	{
		this.Context = pContext;
	}
public abstract Object Execute();
	/** Default textual form of a rule: empty string; subclasses override to emit their own text. */
	public String toString()
	{
		return "";
	}
	/** Whether this rule evaluates to a null value; false by default, overridden by value rules. */
	public boolean IsNull()
	{
		return false;
	}
public static boolean isNullOrEmpty(String param)
{
return param == null || param.trim().length() == 0;
}
public static boolean isEmpty(Object obj)
{
if (obj == null) return true;
else return isNullOrEmpty(obj.toString());
}
protected Object ConvertStringToBoolean(String pValue)
{
Object result = null;
if(
pValue.equalsIgnoreCase("(+)") ||
pValue.equalsIgnoreCase("YES") ||
pValue.equalsIgnoreCase("Y") ||
pValue.equalsIgnoreCase("TRUE") ||
pValue.equalsIgnoreCase("T")
)
result = true;
if(
pValue.equalsIgnoreCase("(-)") ||
pValue.equalsIgnoreCase("NO") ||
pValue.equalsIgnoreCase("N") ||
pValue.equalsIgnoreCase("FALSE") ||
pValue.equalsIgnoreCase("F")
)
result = false;
return result;
}
	/**
	 * Recursively flattens a parser token into a space-separated identifier string
	 * (multi-word identifiers arrive as nested reductions).
	 * NOTE(review): assumes a multi-child reduction has its continuation at index 1 —
	 * confirm against the grammar.
	 */
	public String ExtractIdentifier(Token pToken)
	{
		//return ((Reduction)pToken.getData()).getParentRule().definition();
		Object o = pToken.getData();
		String result = null;
		if(o instanceof Reduction)
		{
			// Single-child reduction: the identifier text is the child's data.
			if (((Reduction)pToken.getData()).size() == 1)
				result = ((Reduction)pToken.getData()).get(0).getData().toString();
			else
				// Multi-word identifier: first word plus the recursively flattened remainder.
				result = ((Reduction)pToken.getData()).get(0).getData().toString() + " " + ExtractIdentifier(((Reduction)pToken.getData()).get(1));
		}
		else
		{
			// Terminal token: its data already is the identifier text.
			result = pToken.getData().toString();
		}
		return result;
	}
public static EnterRule BuildStatements(Rule_Context pContext, Reduction pToken)
{
EnterRule result = null;
//if (pToken.getParent().containsOneNonTerminal() == true)
//{
Rule_Enum Test = Rule_Enum.Convert(pToken.getParent().getHead().getName());
if(Test == null)
{
System.out.print("nuff said!");
}
else
{
switch (Test)
{
case CheckCodeBlock:
result = new Rule_CheckCodeBlock(pContext, pToken);
break;
case CheckCodeBlocks:
result = new Rule_CheckCodeBlocks(pContext, pToken);
break;
case Program:
result = new Rule_Program(pContext, pToken);
break;
case Always_Statement:
result = new Rule_Always(pContext, pToken);
break;
case Simple_Assign_Statement:
case Let_Statement:
case Assign_Statement:
result = new Rule_Assign(pContext, pToken);
break;
case If_Statement:
case If_Else_Statement:
result = new Rule_If_Then_Else_End(pContext, pToken);
break;
case Define_Variable_Statement:
result = new Rule_Define(pContext, pToken);
break;
case DefineVariables_Statement:
result = new Rule_DefineVariables_Statement(pContext, pToken);
break;
case Field_Checkcode_Statement:
result = new Rule_Field_Checkcode_Statement(pContext, pToken);
break;
case View_Checkcode_Statement:
result = new Rule_View_Checkcode_Statement(pContext, pToken);
break;
case Record_Checkcode_Statement:
result = new Rule_Record_Checkcode_Statement(pContext, pToken);
break;
case Page_Checkcode_Statement:
result = new Rule_Page_Checkcode_Statement(pContext, pToken);
break;
case Begin_Before_statement:
result = new Rule_Begin_Before_Statement(pContext, pToken);
break;
case Begin_After_statement:
result = new Rule_Begin_After_Statement(pContext, pToken);
break;
case Begin_Click_statement:
result = new Rule_Begin_Click_Statement(pContext, pToken);
break;
case Subroutine_Statement:
result = new Rule_Subroutine_Statement(pContext, pToken);
break;
case Call_Statement:
result = new Rule_Call(pContext, pToken);
break;
case Expr_List:
result = new Rule_ExprList(pContext, pToken);
break;
case Expression:
result = new Rule_Expression(pContext, pToken);
break;
case And_Exp:
result = new Rule_AndExp(pContext, pToken);
break;
case Not_Exp:
result = new Rule_NotExp(pContext, pToken);
break;
case Compare_Exp:
result = new Rule_CompareExp(pContext, pToken);
break;
case Concat_Exp:
result = new Rule_ConcatExp(pContext, pToken);
break;
case Add_Exp:
result = new Rule_AddExp(pContext, pToken);
break;
case Mult_Exp:
result = new Rule_MultExp(pContext, pToken);
break;
case Pow_Exp:
result = new Rule_PowExp(pContext, pToken);
break;
case Negate_Exp:
result = new Rule_NegateExp(pContext, pToken);
break;
case Statements:
result = new Rule_Statements(pContext, pToken);
break;
case Statement:
result = new Rule_Statement(pContext, pToken);
break;
case Hide_Some_Statement:
case Hide_Except_Statement:
result = new Rule_Hide(pContext, pToken);
break;
case Go_To_Variable_Statement:
case Go_To_Page_Statement:
result = new Rule_GoTo(pContext, pToken);
break;
case Unhide_Some_Statement:
case Unhide_Except_Statement:
result = new Rule_UnHide(pContext, pToken);
break;
/* case Assign_DLL_Statement:
result = new Rule_Assign_DLL_Statement(pContext, pToken);
break;
case Else_If_Statement:
result = new Rule_Else_If_Statement(pContext, pToken);
break;
case Define_Dll_Statement:
result = new Rule_DLL_Statement(pContext, pToken);
break;
case FuncName2:
case FunctionCall:
result = new Rule_FunctionCall(pContext, pToken);
break;
case Go_To_Variable_Statement:
case Go_To_Page_Statement:
result = new Rule_GoTo(pContext, pToken);
break;
*/
case Simple_Dialog_Statement:
result = new Rule_Dialog(pContext, pToken);
break;
/*
case Numeric_Dialog_Implicit_Statement:
case Numeric_Dialog_Explicit_Statement:
case TextBox_Dialog_Statement:
case Db_Values_Dialog_Statement:
case YN_Dialog_Statement:
case Db_Views_Dialog_Statement:
case Databases_Dialog_Statement:
case Db_Variables_Dialog_Statement:
case Multiple_Choice_Dialog_Statement:
case Dialog_Read_Statement:
case Dialog_Write_Statement:
case Dialog_Read_Filter_Statement:
case Dialog_Write_Filter_Statement:
case Dialog_Date_Statement:
case Dialog_Date_Mask_Statement:
result = new Rule_Dialog(pContext, pToken);
break;
case Comment_Line:
result = new Rule_CommentLine(pContext, pToken);
break;
*/ case Simple_Execute_Statement:
case Execute_File_Statement:
case Execute_Url_Statement:
case Execute_Wait_For_Exit_File_Statement:
case Execute_Wait_For_Exit_String_Statement:
case Execute_Wait_For_Exit_Url_Statement:
case Execute_No_Wait_For_Exit_File_Statement:
case Execute_No_Wait_For_Exit_String_Statement:
case Execute_No_Wait_For_Exit_Url_Statement:
result = new Rule_Execute(pContext, pToken);
break;/*
case Beep_Statement:
result = new Rule_Beep(pContext, pToken);
break;
case Auto_Search_Statement:
result = new Rule_AutoSearch(pContext, pToken);
break;
case Quit_Statement:
result = new Rule_Quit(pContext);
break;
*/ case Clear_Statement:
result = new Rule_Clear(pContext, pToken);
break; /*
case New_Record_Statement:
result = new Rule_NewRecord(pContext, pToken);
break;
case Simple_Undefine_Statement:
result = new Rule_Undefine(pContext, pToken);
break;
*/ case Geocode_Statement:
result = new Rule_Geocode(pContext, pToken);
break;
/*
case Begin_Before_statement:
result = new Rule_Begin_Before_Statement(pContext, pToken);
break;
case Begin_After_statement:
result = new Rule_Begin_After_Statement(pContext, pToken);
break;
case Begin_Click_statement:
result = new Rule_Begin_Click_Statement(pContext, pToken);
break;
case CheckCodeBlock:
result = new Rule_CheckCodeBlock(pContext, pToken);
break;
case CheckCodeBlocks:
result = new Rule_CheckCodeBlocks(pContext, pToken);
break;
case Simple_Run_Statement:
break;*/
case Define_Statement_Group:
result = new Rule_DefineVariables_Statement(pContext, pToken);
break;
case Define_Statement_Type:
result = new Rule_Define(pContext, pToken);
break;
case Highlight_Statement:
result = new Rule_Highlight(pContext, pToken);
break;
case UnHighlight_Statement:
result = new Rule_UnHighlight(pContext, pToken);
break;
case Enable_Statement:
result = new Rule_Enable(pContext, pToken);
break;
case Disable_Statement:
result = new Rule_Disable(pContext, pToken);
break;
case FunctionCall:
result = new Rule_FunctionCall(pContext, pToken);
break;
case Value:
case Decimal_Number:
case Qualified_ID:
case Literal_String:
case Literal:
case Number:
case Literal_Date:
case Boolean:
case Identifier:
case Real_Number:
case Hex_Number:
case RealLiteral:
case DecLiteral:
case HexLiteral:
case Date:
case Time:
case String:
result = new Rule_Value(pContext, pToken);
break;
default:
result = new Rule_Value(pContext, pToken);
//result = null;
break;
//result = new Rule_Value(pContext, pToken);
//throw new Exception("Missing rule in EnterRule.BuildStatments " + NT.Symbol.ToString());
}
}
/*}
else // terminal token
{
//TerminalToken TT = (TerminalToken)pToken;
switch (Rule_Enum.Convert(pToken.getParent().getHead().getName()))
{
case Value:
default:
result = new Rule_Value(pContext, pToken);
break;
}
}*/
return result;
}
public static String GetIdentifier(String pValue)
{
String result = pValue.replaceAll("\\]$", "").replaceAll("^\\[", "");
return result;
}
static public List<EnterRule> GetFunctionParameters(Rule_Context pContext, Reduction pToken)
{
List<EnterRule> result = new ArrayList<EnterRule>();
if (pToken.getParent().getHead().getType() == SymbolType.NON_TERMINAL)
{
//NonterminalToken NT = (NonterminalToken)pToken;
//switch (NT.Symbol.ToString())
Rule_Enum Test = Rule_Enum.Convert(pToken.getParent().getHead().getName());
if(Test == null)
{
System.out.print("check code issue ");
}
else
{
switch (Test)
{
case NonEmptyFunctionParameterList:
//this.paramList.Push(new Rule_NonEmptyFunctionParameterList(T, this.paramList));
result.addAll(EnterRule.GetFunctionParameters(pContext, pToken));
break;
case SingleFunctionParameterList:
result.addAll(EnterRule.GetFunctionParameters(pContext, pToken));
break;
case EmptyFunctionParameterList:
//this.paramList = new Rule_EmptyFunctionParameterList(T);
// do nothing the parameterlist is empty
break;
case MultipleFunctionParameterList:
//this.MultipleParameterList = new Rule_MultipleFunctionParameterList(pToken);
//<NonEmptyFunctionParameterList> ',' <Expression>
//result.Add(AnalysisRule.BuildStatments(pContext, NT.Tokens[0]));
result.addAll(EnterRule.GetFunctionParameters(pContext, pToken.get(0).asReduction()));
result.add(EnterRule.BuildStatements(pContext, pToken.get(2).asReduction()));
break;
case FuncName2:
case Expression:
case FunctionCall:
default:
result.add(EnterRule.BuildStatements(pContext, pToken));
break;
}
}
}
else
{
//TerminalToken TT = (TerminalToken)pToken;
if (pToken.get(0).getData().toString()!= ",")
{
result.add(new Rule_Value(pContext, pToken));
}
}
/*
<FunctionCall> ::= Identifier '(' <FunctionParameterList> ')'
| FORMAT '(' <FunctionParameterList> ')'
| <FuncName2>
! | <FuncName1> '(' <FunctionCall> ')'
<FunctionParameterList> ::= <EmptyFunctionParameterList> | <NonEmptyFunctionParameterList>
<NonEmptyFunctionParameterList> ::= <MultipleFunctionParameterList> | <SingleFunctionParameterList>
<MultipleFunctionParameterList> ::= <NonEmptyFunctionParameterList> ',' <Expression>
<SingleFunctionParameterList> ::= <expression>
<EmptyFunctionParameterList> ::=
*/
return result;
}
public enum Rule_Enum
{
Program(0),
Always_Statement(1),
Simple_Assign_Statement(2),
Let_Statement(3),
Assign_Statement(4),
Assign_DLL_Statement(5),
If_Statement(6),
If_Else_Statement(7),
Else_If_Statement(8),
Define_Variable_Statement(9),
Define_Dll_Statement(10),
FuncName2(11),
FunctionCall(12),
Hide_Some_Statement(13),
Hide_Except_Statement(14),
Unhide_Some_Statement(15),
Unhide_Except_Statement(16),
Go_To_Variable_Statement(17),
Go_To_Page_Statement(18),
Simple_Dialog_Statement(19),
Numeric_Dialog_Implicit_Statement(20),
Numeric_Dialog_Explicit_Statement(21),
TextBox_Dialog_Statement(22),
Db_Values_Dialog_Statement(23),
YN_Dialog_Statement(24),
Db_Views_Dialog_Statement(25),
Databases_Dialog_Statement(26),
Db_Variables_Dialog_Statement(27),
Multiple_Choice_Dialog_Statement(28),
Dialog_Read_Statement(29),
Dialog_Write_Statement(30),
Dialog_Read_Filter_Statement(31),
Dialog_Write_Filter_Statement(32),
Dialog_Date_Statement(33),
Dialog_Date_Mask_Statement(34),
Comment_Line(35),
Simple_Execute_Statement(36),
Execute_File_Statement(37),
Execute_Url_Statement(38),
Execute_Wait_For_Exit_File_Statement(39),
Execute_Wait_For_Exit_String_Statement(40),
Execute_Wait_For_Exit_Url_Statement(41),
Execute_No_Wait_For_Exit_File_Statement(42),
Execute_No_Wait_For_Exit_String_Statement(43),
Execute_No_Wait_For_Exit_Url_Statement(44),
Beep_Statement(45),
Auto_Search_Statement(46),
Quit_Statement(47),
Clear_Statement(48),
New_Record_Statement(49),
Simple_Undefine_Statement(50),
Geocode_Statement(51),
DefineVariables_Statement(52),
Field_Checkcode_Statement(53),
View_Checkcode_Statement(54),
Record_Checkcode_Statement(55),
Page_Checkcode_Statement(56),
Subroutine_Statement(57),
Call_Statement(58),
Expr_List(59),
Expression(60),
And_Exp(61),
Not_Exp(62),
Compare_Exp(63),
Concat_Exp(64),
Add_Exp(65),
Mult_Exp(66),
Pow_Exp(67),
Negate_Exp(68),
Begin_Before_statement(69),
Begin_After_statement(70),
Begin_Click_statement(71),
CheckCodeBlock(72),
CheckCodeBlocks(73),
Simple_Run_Statement(74),
Statements(75),
Statement(76),
Define_Statement_Group(77),
Define_Statement_Type(78),
Highlight_Statement(79),
UnHighlight_Statement(80),
Enable_Statement(81),
Disable_Statement(82),
Value(83),
Decimal_Number(84),
Qualified_ID(85),
Identifier(86),
Literal_Date(87),
Literal(88),
Literal_String(89),
Number(90),
Real_Number(91),
Hex_Number(92),
Boolean(93),
RealLiteral(94),
DecLiteral(95),
HexLiteral(96),
Date(97),
Time(98),
String(99),
Literal_Char(100),
CharLiteral(101),
NonEmptyFunctionParameterList(102),
SingleFunctionParameterList(103),
EmptyFunctionParameterList(104),
MultipleFunctionParameterList(105);
private int value;
public int getValue()
{
return value;
}
Rule_Enum() {}
Rule_Enum(int pValue)
{
this.value = pValue;
}
static HashMap<String,Rule_Enum> StringEnum;
static
{
StringEnum = new HashMap<String,Rule_Enum>();
StringEnum.put("Program".toLowerCase(), Program);
StringEnum.put("Always_Statement".toLowerCase(),Always_Statement);
StringEnum.put("Simple_Assign_Statement".toLowerCase(),Simple_Assign_Statement);
StringEnum.put("Let_Statement".toLowerCase(),Let_Statement);
StringEnum.put("Assign_Statement".toLowerCase(),
Assign_Statement);
StringEnum.put("Assign_DLL_Statement".toLowerCase(),
Assign_DLL_Statement);
StringEnum.put("If_Statement".toLowerCase(),
If_Statement);
StringEnum.put("If_Else_Statement".toLowerCase(),
If_Else_Statement);
StringEnum.put("Else_If_Statement".toLowerCase(),
Else_If_Statement);
StringEnum.put("Define_Variable_Statement".toLowerCase(),
Define_Variable_Statement);
StringEnum.put("Define_Dll_Statement".toLowerCase(),
Define_Dll_Statement);
StringEnum.put("FuncName2".toLowerCase(),
FuncName2);
StringEnum.put("FunctionCall".toLowerCase(),
FunctionCall);
StringEnum.put("Hide_Some_Statement".toLowerCase(),
Hide_Some_Statement);
StringEnum.put("Hide_Except_Statement".toLowerCase(),
Hide_Except_Statement);
StringEnum.put("Unhide_Some_Statement".toLowerCase(),
Unhide_Some_Statement);
StringEnum.put("Unhide_Except_Statement".toLowerCase(),
Unhide_Except_Statement);
StringEnum.put("Go_To_Variable_Statement".toLowerCase(),
Go_To_Variable_Statement);
StringEnum.put("Go_To_Page_Statement".toLowerCase(),
Go_To_Page_Statement);
StringEnum.put("Simple_Dialog_Statement".toLowerCase(),
Simple_Dialog_Statement);
StringEnum.put("Numeric_Dialog_Implicit_Statement".toLowerCase(),
Numeric_Dialog_Implicit_Statement);
StringEnum.put("Numeric_Dialog_Explicit_Statement".toLowerCase(),
Numeric_Dialog_Explicit_Statement);
StringEnum.put("TextBox_Dialog_Statement".toLowerCase(),
TextBox_Dialog_Statement);
StringEnum.put("Db_Values_Dialog_Statement".toLowerCase(),
Db_Values_Dialog_Statement);
StringEnum.put("YN_Dialog_Statement".toLowerCase(),
YN_Dialog_Statement);
StringEnum.put("Db_Views_Dialog_Statement".toLowerCase(),
Db_Views_Dialog_Statement);
StringEnum.put("Databases_Dialog_Statement".toLowerCase(),
Databases_Dialog_Statement);
StringEnum.put("Db_Variables_Dialog_Statement".toLowerCase(),
Db_Variables_Dialog_Statement);
StringEnum.put("Multiple_Choice_Dialog_Statement".toLowerCase(),
Multiple_Choice_Dialog_Statement);
StringEnum.put("Dialog_Read_Statement".toLowerCase(),
Dialog_Read_Statement);
StringEnum.put("Dialog_Write_Statement".toLowerCase(),
Dialog_Write_Statement);
StringEnum.put("Dialog_Read_Filter_Statement".toLowerCase(),
Dialog_Read_Filter_Statement);
StringEnum.put("Dialog_Write_Filter_Statement".toLowerCase(),
Dialog_Write_Filter_Statement);
StringEnum.put("Dialog_Date_Statement".toLowerCase(),
Dialog_Date_Statement);
StringEnum.put("Dialog_Date_Mask_Statement".toLowerCase(),
Dialog_Date_Mask_Statement);
StringEnum.put("Comment_Line".toLowerCase(),
Comment_Line);
StringEnum.put("Simple_Execute_Statement".toLowerCase(),
Simple_Execute_Statement);
StringEnum.put("Execute_File_Statement".toLowerCase(),
Execute_File_Statement);
StringEnum.put("Execute_Url_Statement".toLowerCase(),
Execute_Url_Statement);
StringEnum.put("Execute_Wait_For_Exit_File_Statement".toLowerCase(),
Execute_Wait_For_Exit_File_Statement);
StringEnum.put("Execute_Wait_For_Exit_String_Statement".toLowerCase(),
Execute_Wait_For_Exit_String_Statement);
StringEnum.put("Execute_Wait_For_Exit_Url_Statement".toLowerCase(),
Execute_Wait_For_Exit_Url_Statement);
StringEnum.put("Execute_No_Wait_For_Exit_File_Statement".toLowerCase(),
Execute_No_Wait_For_Exit_File_Statement);
StringEnum.put("Execute_No_Wait_For_Exit_String_Statement".toLowerCase(),
Execute_No_Wait_For_Exit_String_Statement);
StringEnum.put("Execute_No_Wait_For_Exit_Url_Statement".toLowerCase(),
Execute_No_Wait_For_Exit_Url_Statement);
StringEnum.put("Beep_Statement".toLowerCase(),
Beep_Statement);
StringEnum.put("Auto_Search_Statement".toLowerCase(),
Auto_Search_Statement);
StringEnum.put("Quit_Statement".toLowerCase(),
Quit_Statement);
StringEnum.put("Clear_Statement".toLowerCase(),
Clear_Statement);
StringEnum.put("New_Record_Statement".toLowerCase(),
New_Record_Statement);
StringEnum.put("Simple_Undefine_Statement".toLowerCase(),
Simple_Undefine_Statement);
StringEnum.put("Geocode_Statement".toLowerCase(),
Geocode_Statement);
StringEnum.put("DefineVariables_Statement".toLowerCase(),
DefineVariables_Statement);
StringEnum.put("Field_Checkcode_Statement".toLowerCase(),
Field_Checkcode_Statement);
StringEnum.put("View_Checkcode_Statement".toLowerCase(),
View_Checkcode_Statement);
StringEnum.put("Record_Checkcode_Statement".toLowerCase(),
Record_Checkcode_Statement);
StringEnum.put("Page_Checkcode_Statement".toLowerCase(),
Page_Checkcode_Statement);
StringEnum.put("Subroutine_Statement".toLowerCase(),
Subroutine_Statement);
StringEnum.put("Call_Statement".toLowerCase(),
Call_Statement);
StringEnum.put("Expr_List".toLowerCase(),
Expr_List);
StringEnum.put("Expression".toLowerCase(),
Expression);
StringEnum.put("And Exp".toLowerCase(),
And_Exp);
StringEnum.put("Not Exp".toLowerCase(),
Not_Exp);
StringEnum.put("Compare Exp".toLowerCase(),
Compare_Exp);
StringEnum.put("Concat Exp".toLowerCase(),
Concat_Exp);
StringEnum.put("Add Exp".toLowerCase(),
Add_Exp);
StringEnum.put("Mult Exp".toLowerCase(),
Mult_Exp);
StringEnum.put("Pow Exp".toLowerCase(),
Pow_Exp);
StringEnum.put("Negate Exp".toLowerCase(),
Negate_Exp);
StringEnum.put("Begin_Before_statement".toLowerCase(),
Begin_Before_statement);
StringEnum.put("Begin_After_statement".toLowerCase(),
Begin_After_statement);
StringEnum.put("Begin_Click_statement".toLowerCase(),
Begin_Click_statement);
StringEnum.put("CheckCodeBlock".toLowerCase(),
CheckCodeBlock);
StringEnum.put("CheckCodeBlocks".toLowerCase(),
CheckCodeBlocks);
StringEnum.put("Simple_Run_Statement".toLowerCase(),
Simple_Run_Statement);
StringEnum.put("Statements".toLowerCase(),
Statements);
StringEnum.put("Statement".toLowerCase(),
Statement);
StringEnum.put("Define_Statement_Group".toLowerCase(),
Define_Statement_Group);
StringEnum.put("Define_Statement_Type".toLowerCase(),
Define_Statement_Type);
StringEnum.put("Highlight_Statement".toLowerCase(),
Highlight_Statement);
StringEnum.put("UnHighlight_Statement".toLowerCase(),
UnHighlight_Statement);
StringEnum.put("Enable_Statement".toLowerCase(),
Enable_Statement);
StringEnum.put("Disable_Statement".toLowerCase(),
Disable_Statement);
StringEnum.put("Value".toLowerCase(),
Value);
StringEnum.put("Decimal_Number".toLowerCase(),
Decimal_Number);
StringEnum.put("Qualified_ID".toLowerCase(), Qualified_ID);
StringEnum.put("Qualified ID".toLowerCase(), Qualified_ID);
StringEnum.put("Identifier".toLowerCase(), Identifier);
StringEnum.put("FunctionCall".toLowerCase(), FunctionCall);
StringEnum.put("Literal_Date".toLowerCase(), Literal_Date);
StringEnum.put("Literal".toLowerCase(), Literal);
StringEnum.put("Literal_String".toLowerCase(), Literal_String);
StringEnum.put("Number".toLowerCase(), Number);
StringEnum.put("Real_Number".toLowerCase(), Real_Number);
StringEnum.put("Decimal_Number".toLowerCase(), Decimal_Number);
StringEnum.put("Hex_Number".toLowerCase(), Hex_Number);
StringEnum.put("Boolean".toLowerCase(), Boolean);
StringEnum.put("RealLiteral".toLowerCase(), RealLiteral);
StringEnum.put("DecLiteral".toLowerCase(), DecLiteral);
StringEnum.put("HexLiteral".toLowerCase(), HexLiteral);
StringEnum.put("Date".toLowerCase(), Date);
StringEnum.put("Time".toLowerCase(), Time);
StringEnum.put("String".toLowerCase(), String);
StringEnum.put("Literal_Char".toLowerCase(), Literal_Char);
StringEnum.put("CharLiteral".toLowerCase(),CharLiteral);
StringEnum.put("NonEmptyFunctionParameterList".toLowerCase(),NonEmptyFunctionParameterList);
StringEnum.put("SingleFunctionParameterList".toLowerCase(),SingleFunctionParameterList);
StringEnum.put("EmptyFunctionParameterList".toLowerCase(),EmptyFunctionParameterList);
StringEnum.put("MultipleFunctionParameterList".toLowerCase(),MultipleFunctionParameterList);
}
static Rule_Enum Convert(String pValue)
{
Rule_Enum result = null;
String value = pValue.replace("<", "");
value = value.replace(">", "");
value = value.trim();
if(StringEnum.containsKey(value.toLowerCase()))
{
return StringEnum.get(value.toLowerCase());
}
else
{
return result;
}
/*
if(value.equalsIgnoreCase("Program"))
return Program;
if(value.equalsIgnoreCase("Always_Statement"))
return Always_Statement;
if(value.equalsIgnoreCase("Simple_Assign_Statement"))
return Simple_Assign_Statement;
if(value.equalsIgnoreCase("Let_Statement"))
return Let_Statement;
if(value.equalsIgnoreCase("Assign_Statement"))
return Assign_Statement;
if(value.equalsIgnoreCase("Assign_DLL_Statement"))
return Assign_DLL_Statement;
if(value.equalsIgnoreCase("If_Statement"))
return If_Statement;
if(value.equalsIgnoreCase("If_Else_Statement"))
return If_Else_Statement;
if(value.equalsIgnoreCase("Else_If_Statement"))
return Else_If_Statement;
if(value.equalsIgnoreCase("Define_Variable_Statement"))
return Define_Variable_Statement;
if(value.equalsIgnoreCase("Define_Dll_Statement"))
return Define_Dll_Statement;
if(value.equalsIgnoreCase("FuncName2"))
return FuncName2;
if(value.equalsIgnoreCase("FunctionCall"))
return FunctionCall;
if(value.equalsIgnoreCase("Hide_Some_Statement"))
return Hide_Some_Statement;
if(value.equalsIgnoreCase("Hide_Except_Statement"))
return Hide_Except_Statement;
if(value.equalsIgnoreCase("Unhide_Some_Statement"))
return Unhide_Some_Statement;
if(value.equalsIgnoreCase("Unhide_Except_Statement"))
return Unhide_Except_Statement;
if(value.equalsIgnoreCase("Go_To_Variable_Statement"))
return Go_To_Variable_Statement;
if(value.equalsIgnoreCase("Go_To_Page_Statement"))
return Go_To_Page_Statement;
if(value.equalsIgnoreCase("Simple_Dialog_Statement"))
return Simple_Dialog_Statement;
if(value.equalsIgnoreCase("Numeric_Dialog_Implicit_Statement"))
return Numeric_Dialog_Implicit_Statement;
if(value.equalsIgnoreCase("Numeric_Dialog_Explicit_Statement"))
return Numeric_Dialog_Explicit_Statement;
if(value.equalsIgnoreCase("TextBox_Dialog_Statement"))
return TextBox_Dialog_Statement;
if(value.equalsIgnoreCase("Db_Values_Dialog_Statement"))
return Db_Values_Dialog_Statement;
if(value.equalsIgnoreCase("YN_Dialog_Statement"))
return YN_Dialog_Statement;
if(value.equalsIgnoreCase("Db_Views_Dialog_Statement"))
return Db_Views_Dialog_Statement;
if(value.equalsIgnoreCase("Databases_Dialog_Statement"))
return Databases_Dialog_Statement;
if(value.equalsIgnoreCase("Db_Variables_Dialog_Statement"))
return Db_Variables_Dialog_Statement;
if(value.equalsIgnoreCase("Multiple_Choice_Dialog_Statement"))
return Multiple_Choice_Dialog_Statement;
if(value.equalsIgnoreCase("Dialog_Read_Statement"))
return Dialog_Read_Statement;
if(value.equalsIgnoreCase("Dialog_Write_Statement"))
return Dialog_Write_Statement;
if(value.equalsIgnoreCase("Dialog_Read_Filter_Statement"))
return Dialog_Read_Filter_Statement;
if(value.equalsIgnoreCase("Dialog_Write_Filter_Statement"))
return Dialog_Write_Filter_Statement;
if(value.equalsIgnoreCase("Dialog_Date_Statement"))
return Dialog_Date_Statement;
if(value.equalsIgnoreCase("Dialog_Date_Mask_Statement"))
return Dialog_Date_Mask_Statement;
if(value.equalsIgnoreCase("Comment_Line"))
return Comment_Line;
if(value.equalsIgnoreCase("Simple_Execute_Statement"))
return Simple_Execute_Statement;
if(value.equalsIgnoreCase("Execute_File_Statement"))
return Execute_File_Statement;
if(value.equalsIgnoreCase("Execute_Url_Statement"))
return Execute_Url_Statement;
if(value.equalsIgnoreCase("Execute_Wait_For_Exit_File_Statement"))
return Execute_Wait_For_Exit_File_Statement;
if(value.equalsIgnoreCase("Execute_Wait_For_Exit_String_Statement"))
return Execute_Wait_For_Exit_String_Statement;
if(value.equalsIgnoreCase("Execute_Wait_For_Exit_Url_Statement"))
return Execute_Wait_For_Exit_Url_Statement;
if(value.equalsIgnoreCase("Execute_No_Wait_For_Exit_File_Statement"))
return Execute_No_Wait_For_Exit_File_Statement;
if(value.equalsIgnoreCase("Execute_No_Wait_For_Exit_String_Statement"))
return Execute_No_Wait_For_Exit_String_Statement;
if(value.equalsIgnoreCase("Execute_No_Wait_For_Exit_Url_Statement"))
return Execute_No_Wait_For_Exit_Url_Statement;
if(value.equalsIgnoreCase("Beep_Statement"))
return Beep_Statement;
if(value.equalsIgnoreCase("Auto_Search_Statement"))
return Auto_Search_Statement;
if(value.equalsIgnoreCase("Quit_Statement"))
return Quit_Statement;
if(value.equalsIgnoreCase("Clear_Statement"))
return Clear_Statement;
if(value.equalsIgnoreCase("New_Record_Statement"))
return New_Record_Statement;
if(value.equalsIgnoreCase("Simple_Undefine_Statement"))
return Simple_Undefine_Statement;
if(value.equalsIgnoreCase("Geocode_Statement"))
return Geocode_Statement;
if(value.equalsIgnoreCase("DefineVariables_Statement"))
return DefineVariables_Statement;
if(value.equalsIgnoreCase("Field_Checkcode_Statement"))
return Field_Checkcode_Statement;
if(value.equalsIgnoreCase("View_Checkcode_Statement"))
return View_Checkcode_Statement;
if(value.equalsIgnoreCase("Record_Checkcode_Statement"))
return Record_Checkcode_Statement;
if(value.equalsIgnoreCase("Page_Checkcode_Statement"))
return Page_Checkcode_Statement;
if(value.equalsIgnoreCase("Subroutine_Statement"))
return Subroutine_Statement;
if(value.equalsIgnoreCase("Call_Statement"))
return Call_Statement;
if(value.equalsIgnoreCase("Expr_List"))
return Expr_List;
if(value.equalsIgnoreCase("Expression"))
return Expression;
if(value.equalsIgnoreCase("And Exp"))
return And_Exp;
if(value.equalsIgnoreCase("Not Exp"))
return Not_Exp;
if(value.equalsIgnoreCase("Compare Exp"))
return Compare_Exp;
if(value.equalsIgnoreCase("Concat Exp"))
return Concat_Exp;
if(value.equalsIgnoreCase("Add Exp"))
return Add_Exp;
if(value.equalsIgnoreCase("Mult Exp"))
return Mult_Exp;
if(value.equalsIgnoreCase("Pow Exp"))
return Pow_Exp;
if(value.equalsIgnoreCase("Negate Exp"))
return Negate_Exp;
if(value.equalsIgnoreCase("Begin_Before_statement"))
return Begin_Before_statement;
if(value.equalsIgnoreCase("Begin_After_statement"))
return Begin_After_statement;
if(value.equalsIgnoreCase("Begin_Click_statement"))
return Begin_Click_statement;
if(value.equalsIgnoreCase("CheckCodeBlock"))
return CheckCodeBlock;
if(value.equalsIgnoreCase("CheckCodeBlocks"))
return CheckCodeBlocks;
if(value.equalsIgnoreCase("Simple_Run_Statement"))
return Simple_Run_Statement;
if(value.equalsIgnoreCase("Statements"))
return Statements;
if(value.equalsIgnoreCase("Statement"))
return Statement;
if(value.equalsIgnoreCase("Define_Statement_Group"))
return Define_Statement_Group;
if(value.equalsIgnoreCase("Define_Statement_Type"))
return Define_Statement_Type;
if(value.equalsIgnoreCase("Highlight_Statement"))
return Highlight_Statement;
if(value.equalsIgnoreCase("UnHighlight_Statement"))
return UnHighlight_Statement;
if(value.equalsIgnoreCase("Enable_Statement"))
return Enable_Statement;
if(value.equalsIgnoreCase("Disable_Statement"))
return Disable_Statement;
if(value.equalsIgnoreCase("Value"))
return Value;
if(value.equalsIgnoreCase("Decimal_Number"))
return Decimal_Number;
if(value.equalsIgnoreCase("Qualified_ID"))
return Qualified_ID;
if(value.equalsIgnoreCase("Qualified ID"))
return Qualified_ID;
if(value.equalsIgnoreCase("Identifier"))
return Identifier;
if(value.equalsIgnoreCase("FunctionCall"))
return FunctionCall;
if(value.equalsIgnoreCase("Literal_Date"))
return Literal_Date;
if(value.equalsIgnoreCase("Literal"))
return Literal;
if(value.equalsIgnoreCase("Literal_String"))
return Literal_String;
if(value.equalsIgnoreCase("Number"))
return Number;
if(value.equalsIgnoreCase("Real_Number"))
return Real_Number;
if(value.equalsIgnoreCase("Decimal_Number"))
return Decimal_Number;
if(value.equalsIgnoreCase("Hex_Number"))
return Hex_Number;
if(value.equalsIgnoreCase("Boolean"))
return Boolean;
if(value.equalsIgnoreCase("RealLiteral"))
return RealLiteral;
if(value.equalsIgnoreCase("DecLiteral"))
return DecLiteral;
if(value.equalsIgnoreCase("HexLiteral"))
return HexLiteral;
if(value.equalsIgnoreCase("Date"))
return Date;
if(value.equalsIgnoreCase("Time"))
return Time;
if(value.equalsIgnoreCase("String"))
return String;
if(value.equalsIgnoreCase("Literal_Char"))
return Literal_Char;
if(value.equalsIgnoreCase("CharLiteral"))
return CharLiteral;
return result;*/
}
}
}
| apache-2.0 |
iamlrf/leetcode | algorithm/547_friend_circles/solution.java | 682 | class Solution {
/**
 * LeetCode 547 "Friend Circles": M is an n x n friendship matrix where
 * M[i][j] == 1 means students i and j are direct friends. Returns the number
 * of connected components ("circles") found by depth-first search.
 *
 * @param M square friendship adjacency matrix (by the problem definition,
 *          symmetric with M[i][i] == 1)
 * @return the number of friend circles
 */
public int findCircleNum(int[][] M) {
    // Visited markers: 0 = unvisited, 1 = visited. Java zero-initializes int
    // arrays, so no clearing loop is needed. (The original for-each loop
    // iterated over the array's VALUES, not its indices, and was dead code.)
    int[] stats = new int[M.length];
    int count = 0;
    for (int i = 0; i < M.length; i++) {
        if (stats[i] == 0) {
            // An unvisited student starts a new circle; mark it and
            // flood-fill everyone reachable from it.
            count++;
            stats[i] = 1;
            findFriend(M, i, stats);
        }
    }
    return count;
}
/**
 * Depth-first traversal helper: marks every student reachable from student
 * {@code i} through the friendship matrix as visited in {@code stats}.
 */
private void findFriend(int[][] M, int i, int[] stats) {
    for (int other = 0; other < M.length; other++) {
        if (stats[other] != 0) {
            continue; // already assigned to a counted circle
        }
        if (M[i][other] == 1) {
            stats[other] = 1;
            findFriend(M, other, stats);
        }
    }
}
}
| apache-2.0 |
GaloisInc/JavaFE | Javafe/java/javafe/ast/IdPragma.java | 70 | package javafe.ast;
/**
 * An AST pragma node that is associated with a single identifier.
 */
public interface IdPragma {
/** @return the identifier this pragma carries */
Identifier id();
}
| apache-2.0 |
Evil-Co-Legacy/XMLConfiguration | src/main/java/com/evilco/configuration/xml/exception/ConfigurationProcessorException.java | 815 | package com.evilco.configuration.xml.exception;
/**
* @author Johannes Donath <johannesd@evil-co.com>
* @copyright Copyright (C) 2014 Evil-Co <http://www.evil-co.org>
*/
/**
 * Exception raised while processing an XML configuration. All constructors
 * simply delegate to {@link ConfigurationException}.
 */
public class ConfigurationProcessorException extends ConfigurationException {
/** Creates an exception with no message or cause. */
public ConfigurationProcessorException () {
super ();
}
/** @param message human-readable description of the processing failure */
public ConfigurationProcessorException (String message) {
super (message);
}
/**
 * @param message human-readable description of the processing failure
 * @param cause the underlying failure that triggered this exception
 */
public ConfigurationProcessorException (String message, Throwable cause) {
super (message, cause);
}
/** @param cause the underlying failure that triggered this exception */
public ConfigurationProcessorException (Throwable cause) {
super (cause);
}
/**
 * Full-control constructor mirroring
 * {@link Exception#Exception(String, Throwable, boolean, boolean)}.
 */
protected ConfigurationProcessorException (String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super (message, cause, enableSuppression, writableStackTrace);
}
}
| apache-2.0 |
Rikkola/guvnor | guvnor-webapp/src/main/java/org/drools/guvnor/client/categorynav/CategorySelectHandler.java | 931 | /*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.categorynav;
/**
* This represents an event of a category being selected.
* This means the category widget can be used in several different places.
*/
public interface CategorySelectHandler {
/**
 * Called when a category is selected.
 *
 * @param selectedPath the path of the category that was selected
 */
public void selected(String selectedPath);
}
| apache-2.0 |
Claudenw/jdbc4sparql | src/main/java/org/xenei/jdbc4sparql/iface/ModelFactory.java | 193 | package org.xenei.jdbc4sparql.iface;
import java.util.Properties;
import com.hp.hpl.jena.rdf.model.Model;
/**
 * Factory for Jena RDF {@link Model} instances.
 */
public interface ModelFactory {
/**
 * Creates a model configured from the given properties.
 *
 * @param properties configuration for the model (keys are
 *        implementation-defined — confirm against concrete factories)
 * @return the created model
 */
Model createModel(final Properties properties);
}
| apache-2.0 |
simplify20/RetrofitDemos | app/src/main/java/com/creact/steve/retrofitsample/network/interceptors/HttpConfigInterceptor.java | 699 | package com.creact.steve.retrofitsample.network.interceptors;
import com.creact.steve.retrofitsample.network.HttpConfig;
import okhttp3.Request;
import okhttp3.Response;
/**
* Created by Steve on 2016/5/12.
*/
/**
 * Interceptor intended to apply an {@link HttpConfig} to requests and
 * responses. Both hooks currently just delegate to the base class; the
 * configuration is held but not yet consulted.
 */
public class HttpConfigInterceptor extends BaseInterceptor {

    /** Configuration to be consulted when traffic passes through. */
    private HttpConfig config;

    public void setConfig(HttpConfig config) {
        this.config = config;
    }

    @Override
    public Request manipulateRequest(Request request) {
        // read config
        return super.manipulateRequest(request);
    }

    @Override
    public Response manipulateResponse(Response response) {
        // read config
        return super.manipulateResponse(response);
    }
}
| apache-2.0 |
Orange-OpenSource/matos-profiles | matos-android/src/main/java/org/apache/commons/codec/language/RefinedSoundex.java | 1558 | package org.apache.commons.codec.language;
/*
* #%L
* Matos
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Orange SA
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
// Auto-generated Matos stub of commons-codec's RefinedSoundex (see the
// @ClassDone stub annotation): every member is inert — methods return
// null/0 and fields are null. Not intended for runtime use.
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public class RefinedSoundex
implements org.apache.commons.codec.StringEncoder
{
// Fields
public static final RefinedSoundex US_ENGLISH = null;
public static final char [] US_ENGLISH_MAPPING = null;
// Constructors
public RefinedSoundex(){
}
public RefinedSoundex(char [] arg1){
}
// Methods (all stubbed: bodies return constant null/0)
public java.lang.Object encode(java.lang.Object arg1) throws org.apache.commons.codec.EncoderException{
return (java.lang.Object) null;
}
public java.lang.String encode(java.lang.String arg1){
return (java.lang.String) null;
}
public int difference(java.lang.String arg1, java.lang.String arg2) throws org.apache.commons.codec.EncoderException{
return 0;
}
public java.lang.String soundex(java.lang.String arg1){
return (java.lang.String) null;
}
}
| apache-2.0 |
smanvi-pivotal/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/LocateEntryCommand.java | 4080 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.apache.geode.management.internal.cli.commands.DataCommandsUtils.callFunctionForRegion;
import static org.apache.geode.management.internal.cli.commands.DataCommandsUtils.makePresentationResult;
import java.util.Set;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.domain.DataCommandRequest;
import org.apache.geode.management.internal.cli.domain.DataCommandResult;
import org.apache.geode.management.internal.cli.functions.DataCommandFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
/**
 * Gfsh "locate entry" command: reports where the entry for a given key lives
 * in a region by fanning a {@link DataCommandFunction} out to the members
 * hosting that region.
 */
public class LocateEntryCommand implements GfshCommand {
  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_DATA, CliStrings.TOPIC_GEODE_REGION})
  @CliCommand(value = {CliStrings.LOCATE_ENTRY}, help = CliStrings.LOCATE_ENTRY__HELP)
  public Result locateEntry(
      @CliOption(key = {CliStrings.LOCATE_ENTRY__KEY}, mandatory = true,
          help = CliStrings.LOCATE_ENTRY__KEY__HELP) String key,
      @CliOption(key = {CliStrings.LOCATE_ENTRY__REGIONNAME}, mandatory = true,
          help = CliStrings.LOCATE_ENTRY__REGIONNAME__HELP,
          optionContext = ConverterHint.REGION_PATH) String regionPath,
      @CliOption(key = {CliStrings.LOCATE_ENTRY__KEYCLASS},
          help = CliStrings.LOCATE_ENTRY__KEYCLASS__HELP) String keyClass,
      @CliOption(key = {CliStrings.LOCATE_ENTRY__VALUEKLASS},
          help = CliStrings.LOCATE_ENTRY__VALUEKLASS__HELP) String valueClass,
      @CliOption(key = {CliStrings.LOCATE_ENTRY__RECURSIVE},
          help = CliStrings.LOCATE_ENTRY__RECURSIVE__HELP, specifiedDefaultValue = "true",
          unspecifiedDefaultValue = "false") boolean recursive) {
    // Authorization first: the caller needs DATA:READ on this region/key.
    getSecurityService().authorize(Resource.DATA, Operation.READ, regionPath, key);

    Set<DistributedMember> members = findMembersForRegion(getCache(), regionPath);
    DataCommandFunction function = new DataCommandFunction();

    DataCommandResult commandResult;
    if (CollectionUtils.isNotEmpty(members)) {
      // The region is hosted somewhere: build the request and fan it out.
      DataCommandRequest request = new DataCommandRequest();
      request.setCommand(CliStrings.LOCATE_ENTRY);
      request.setKey(key);
      request.setKeyClass(keyClass);
      request.setRegionName(regionPath);
      request.setValueClass(valueClass);
      request.setRecursive(recursive);
      commandResult = callFunctionForRegion(request, function, members);
    } else {
      // No member hosts the region: report that rather than failing outright.
      commandResult = DataCommandResult.createLocateEntryInfoResult(key, null, null, CliStrings.format(
          CliStrings.LOCATE_ENTRY__MSG__REGION_NOT_FOUND_ON_ALL_MEMBERS, regionPath), false);
    }

    commandResult.setKeyClass(keyClass);
    if (valueClass != null) {
      commandResult.setValueClass(valueClass);
    }
    return makePresentationResult(commandResult);
  }
}
| apache-2.0 |
Qi4j/qi4j-extensions | entitystore-neo4j/src/test/java/org/qi4j/entitystore/neo4j/test/SimpleNeoStoreTest.java | 1254 | package org.qi4j.entitystore.neo4j.test;
import org.qi4j.api.common.Visibility;
import org.qi4j.bootstrap.AssemblyException;
import org.qi4j.bootstrap.ModuleAssembly;
import org.qi4j.entitystore.memory.MemoryEntityStoreService;
import org.qi4j.entitystore.neo4j.NeoConfiguration;
import org.qi4j.entitystore.neo4j.NeoEntityStoreService;
import org.qi4j.library.fileconfig.FileConfiguration;
import org.qi4j.spi.uuid.UuidIdentityGeneratorService;
import org.qi4j.test.entity.AbstractEntityStoreTest;
/**
 * Runs the generic entity-store test suite against the Neo4j entity store,
 * with its configuration entities held in an in-memory store.
 */
public class SimpleNeoStoreTest
    extends AbstractEntityStoreTest
{
    /**
     * Assembles the test application: the Neo4j entity store plus a separate
     * "config" module providing its configuration.
     *
     * @param module the module under assembly
     * @throws AssemblyException if the assembly is invalid
     */
    @Override // added: this overrides the superclass method (it calls super.assemble)
    public void assemble(ModuleAssembly module)
        throws AssemblyException
    {
        module.layer().application().setName("SimpleNeoTest");
        super.assemble(module);
        module.services(FileConfiguration.class);
        module.services(NeoEntityStoreService.class);
        // Configuration lives in its own module, visible to the whole layer.
        ModuleAssembly configModule = module.layer().module("config");
        configModule.entities(NeoConfiguration.class).visibleIn(Visibility.layer);
        configModule.services(MemoryEntityStoreService.class);
        configModule.services(UuidIdentityGeneratorService.class);
    }

    /**
     * Empty override disables the inherited concurrent-modification test —
     * presumably it does not apply to this store; confirm before re-enabling.
     */
    @Override
    public void givenConcurrentUnitOfWorksWhenUoWCompletesThenCheckConcurrentModification()
    {
    }
}
| apache-2.0 |
windwardadmin/android-awt | src/main/java/net/windward/android/awt/AWTError.java | 1058 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Michael Danilov
*/
package net.windward.android.awt;
/**
 * Error thrown for serious AWT failures; carries only a detail message.
 */
public class AWTError extends Error {
/** Serialization version, kept fixed for cross-release compatibility. */
private static final long serialVersionUID = -1819846354050686206L;
/**
 * Creates an AWTError with the specified detail message.
 *
 * @param msg the detail message
 */
public AWTError(String msg) {
super(msg);
}
}
| apache-2.0 |
springfox/springfox | springfox-core/src/main/java/springfox/documentation/builders/ResponseBuilder.java | 3091 | package springfox.documentation.builders;
import org.springframework.http.MediaType;
import springfox.documentation.schema.Example;
import springfox.documentation.service.Header;
import springfox.documentation.service.Response;
import springfox.documentation.service.VendorExtension;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import static springfox.documentation.builders.BuilderDefaults.*;
/**
 * Mutable builder for {@link Response} objects. Scalar setters use
 * defaultIfAbsent, so a null argument keeps any previously set value;
 * collection setters accumulate across calls.
 */
public class ResponseBuilder {
private String code;
private String description;
private Boolean isDefault = false;
private final List<Header> headers = new ArrayList<>();
// Keyed by media type so repeated representation(...) calls for the same
// type reuse one RepresentationBuilder.
private final Map<MediaType, RepresentationBuilder> representations = new HashMap<>();
private final List<VendorExtension> vendorExtensions = new ArrayList<>();
private final List<Example> examples = new ArrayList<>();
/** Sets the response code; a null argument leaves the current value. */
public ResponseBuilder code(String code) {
this.code = defaultIfAbsent(code, this.code);
return this;
}
/** Sets the description; a null argument leaves the current value. */
public ResponseBuilder description(String description) {
this.description = defaultIfAbsent(description, this.description);
return this;
}
/** Marks whether this is the default response. */
public ResponseBuilder isDefault(boolean isDefault) {
this.isDefault = isDefault;
return this;
}
/** Appends the given headers to those already collected. */
public ResponseBuilder headers(Collection<Header> headers) {
this.headers.addAll(headers);
return this;
}
// Returns the (possibly newly created) builder for this media type.
private RepresentationBuilder representationBuilderFor(org.springframework.http.MediaType mediaType) {
return this.representations.computeIfAbsent(mediaType,
m -> new RepresentationBuilder()
.mediaType(m));
}
/**
 * Returns a function that applies the given consumer to the representation
 * builder for {@code mediaType}, then yields this builder for chaining.
 */
public Function<Consumer<RepresentationBuilder>, ResponseBuilder> representation(
org.springframework.http.MediaType mediaType) {
return content -> {
content.accept(representationBuilderFor(mediaType));
return this;
};
}
/** Appends the given vendor extensions to those already collected. */
public ResponseBuilder vendorExtensions(Collection<VendorExtension> vendorExtensions) {
this.vendorExtensions.addAll(vendorExtensions);
return this;
}
/** Appends the given examples to those already collected. */
public ResponseBuilder examples(Collection<Example> examples) {
this.examples.addAll(examples);
return this;
}
/**
 * Copies every attribute of {@code source} into this builder; a null
 * source is a no-op.
 */
public ResponseBuilder copyOf(Response source) {
if (source == null) {
return this;
}
source.getRepresentations().forEach(each ->
this.representation(each.getMediaType()).apply(r -> r.copyOf(each)));
this.code(source.getCode())
.description(source.getDescription())
.examples(source.getExamples())
.headers(source.getHeaders())
.isDefault(source.isDefault())
.vendorExtensions(source.getVendorExtensions());
return this;
}
/** Builds the immutable {@link Response} from the collected state. */
public Response build() {
return new Response(
code,
description,
isDefault,
headers,
representations.values()
.stream()
.map(RepresentationBuilder::build)
.collect(Collectors.toSet()),
examples,
vendorExtensions);
}
} | apache-2.0 |
dbflute-test/dbflute-test-active-hangar | src/main/java/org/docksidestage/hangar/dbflute/dtomapper/bs/BsSummaryWithdrawalDtoMapper.java | 14447 | package org.docksidestage.hangar.dbflute.dtomapper.bs;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.InstanceKeyEntity;
import org.dbflute.dbmeta.dtomap.DtoMapper;
import org.dbflute.dbmeta.dtomap.InstanceKeyDto;
import org.dbflute.helper.beans.DfBeanDesc;
import org.dbflute.helper.beans.DfPropertyDesc;
import org.dbflute.helper.beans.factory.DfBeanDescFactory;
import org.dbflute.jdbc.Classification;
import org.docksidestage.hangar.dbflute.exentity.*;
import org.docksidestage.hangar.simpleflute.dto.*;
import org.docksidestage.hangar.dbflute.dtomapper.*;
/**
* The DTO mapper of SUMMARY_WITHDRAWAL as VIEW. <br>
* <pre>
* [primary-key]
*
*
* [column]
* MEMBER_ID, MEMBER_NAME, WITHDRAWAL_REASON_CODE, WITHDRAWAL_REASON_TEXT, WITHDRAWAL_REASON_INPUT_TEXT, WITHDRAWAL_DATETIME, MEMBER_STATUS_CODE, MEMBER_STATUS_NAME, MAX_PURCHASE_PRICE
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign-table]
*
*
* [referrer-table]
*
*
* [foreign-property]
*
*
* [referrer-property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsSummaryWithdrawalDtoMapper implements DtoMapper<SummaryWithdrawal, SummaryWithdrawalDto>, Serializable {
// NOTE(review): DBFlute auto-generated mapper (see class javadoc). Prefer
// regenerating over hand-editing so changes are not lost on the next build.
// ===================================================================================
// Definition
// ==========
/** The serial version UID for object serialization. (Default) */
private static final long serialVersionUID = 1L;
// ===================================================================================
// Attribute
// =========
protected final Map<Entity, Object> _relationDtoMap;
protected final Map<Object, Entity> _relationEntityMap;
protected boolean _exceptCommonColumn;
protected boolean _reverseReference; // default: one-way reference
protected boolean _instanceCache = true; // default: cached
// ===================================================================================
// Constructor
// ===========
public BsSummaryWithdrawalDtoMapper() {
_relationDtoMap = new HashMap<Entity, Object>();
_relationEntityMap = new HashMap<Object, Entity>();
}
public BsSummaryWithdrawalDtoMapper(Map<Entity, Object> relationDtoMap, Map<Object, Entity> relationEntityMap) {
_relationDtoMap = relationDtoMap;
_relationEntityMap = relationEntityMap;
}
// ===================================================================================
// Mapping
// =======
// -----------------------------------------------------
// to DTO
// ------
/**
 * {@inheritDoc}
 */
public SummaryWithdrawalDto mappingToDto(SummaryWithdrawal entity) {
if (entity == null) {
return null;
}
// Straight column-by-column copy; derived (non-column) properties are
// handled afterwards by reflection.
SummaryWithdrawalDto dto = new SummaryWithdrawalDto();
dto.setMemberId(entity.getMemberId());
dto.setMemberName(entity.getMemberName());
dto.setWithdrawalReasonCode(entity.getWithdrawalReasonCode());
dto.setWithdrawalReasonText(entity.getWithdrawalReasonText());
dto.setWithdrawalReasonInputText(entity.getWithdrawalReasonInputText());
dto.setWithdrawalDatetime(entity.getWithdrawalDatetime());
dto.setMemberStatusCode(entity.getMemberStatusCode());
dto.setMemberStatusName(entity.getMemberStatusName());
dto.setMaxPurchasePrice(entity.getMaxPurchasePrice());
reflectDerivedProperty(entity, dto, true);
return dto;
}
/**
 * {@inheritDoc}
 */
public List<SummaryWithdrawalDto> mappingToDtoList(List<SummaryWithdrawal> entityList) {
if (entityList == null) {
throw new IllegalArgumentException("The argument 'entityList' should not be null.");
}
List<SummaryWithdrawalDto> dtoList = new ArrayList<SummaryWithdrawalDto>();
for (SummaryWithdrawal entity : entityList) {
SummaryWithdrawalDto dto = mappingToDto(entity);
if (dto != null) {
dtoList.add(dto);
} else {
// Null entities map to null DTOs; keep them only if configured to.
if (isAcceptNullElementOnList()) {
dtoList.add(null);
}
}
}
return dtoList;
}
// -----------------------------------------------------
// to Entity
// ---------
/**
 * {@inheritDoc}
 */
public SummaryWithdrawal mappingToEntity(SummaryWithdrawalDto dto) {
if (dto == null) {
return null;
}
// Each column is copied only when needsMapping allows it (modified-property
// tracking on the DTO decides; see needsMapping below).
SummaryWithdrawal entity = new SummaryWithdrawal();
if (needsMapping(dto, dto.getMemberId(), "memberId")) {
entity.setMemberId(dto.getMemberId());
}
if (needsMapping(dto, dto.getMemberName(), "memberName")) {
entity.setMemberName(dto.getMemberName());
}
if (needsMapping(dto, dto.getWithdrawalReasonCode(), "withdrawalReasonCode")) {
entity.setWithdrawalReasonCode(dto.getWithdrawalReasonCode());
}
if (needsMapping(dto, dto.getWithdrawalReasonText(), "withdrawalReasonText")) {
entity.setWithdrawalReasonText(dto.getWithdrawalReasonText());
}
if (needsMapping(dto, dto.getWithdrawalReasonInputText(), "withdrawalReasonInputText")) {
entity.setWithdrawalReasonInputText(dto.getWithdrawalReasonInputText());
}
if (needsMapping(dto, dto.getWithdrawalDatetime(), "withdrawalDatetime")) {
entity.setWithdrawalDatetime(dto.getWithdrawalDatetime());
}
if (needsMapping(dto, dto.getMemberStatusCode(), "memberStatusCode")) {
entity.setMemberStatusCode(dto.getMemberStatusCode());
}
if (needsMapping(dto, dto.getMemberStatusName(), "memberStatusName")) {
entity.setMemberStatusName(dto.getMemberStatusName());
}
if (needsMapping(dto, dto.getMaxPurchasePrice(), "maxPurchasePrice")) {
entity.setMaxPurchasePrice(dto.getMaxPurchasePrice());
}
reflectDerivedProperty(entity, dto, false);
return entity;
}
/**
 * Does the property need to be mapped to an entity? <br>
 * If modified info of DTO has at least one property, only modified properties are mapped.
 * And if no property is modified, all properties are mapped (but the other option exists).
 * @param dto The instance of DTO. (NotNull)
 * @param value The value of DTO's property. (NotNull)
 * @param propName The property name of DTO. (NotNull)
 * @return The determination, true or false.
 */
protected boolean needsMapping(SummaryWithdrawalDto dto, Object value, String propName) {
Set<String> modifiedProperties = dto.mymodifiedProperties();
if (modifiedProperties.isEmpty()) {
return isMappingToEntityContainsNull() || value != null;
}
return modifiedProperties.contains(propName);
}
/**
 * Does the mapping to an entity contain null values? (when no property is modified) <br>
 * Default is true that means a setter is called if the value is null.
 * But this method is valid only when no property is modified.
 * @return The determination, true or false.
 */
protected boolean isMappingToEntityContainsNull() { // for extension
return true; // as default
}
/**
 * {@inheritDoc}
 */
public List<SummaryWithdrawal> mappingToEntityList(List<SummaryWithdrawalDto> dtoList) {
if (dtoList == null) {
throw new IllegalArgumentException("The argument 'dtoList' should not be null.");
}
List<SummaryWithdrawal> entityList = new ArrayList<SummaryWithdrawal>();
for (SummaryWithdrawalDto dto : dtoList) {
SummaryWithdrawal entity = mappingToEntity(dto);
if (entity != null) {
entityList.add(entity);
} else {
if (isAcceptNullElementOnList()) {
entityList.add(null);
}
}
}
return entityList;
}
/** Whether null mapping results are kept as null elements in mapped lists. */
protected boolean isAcceptNullElementOnList() {
return true; // as default
}
// -----------------------------------------------------
// Instance Key
// ------------
// Wrapper helpers for the relation maps (see InstanceKeyDto/InstanceKeyEntity).
protected Object createInstanceKeyDto(final Object dto, final int instanceHash) {
return new InstanceKeyDto(dto, instanceHash);
}
protected InstanceKeyEntity createInstanceKeyEntity(Entity entity) {
return new InstanceKeyEntity(entity);
}
public void disableInstanceCache() { // internal option
_instanceCache = false;
}
// -----------------------------------------------------
// Derived Property
// ----------------
/**
 * Copies derived properties (bean properties that are neither table columns
 * nor relations) between entity and DTO via reflection; {@code toDto}
 * selects the copy direction.
 */
protected void reflectDerivedProperty(Entity entity, Object dto, boolean toDto) {
DfBeanDesc entityDesc = DfBeanDescFactory.getBeanDesc(entity.getClass());
DfBeanDesc dtoDesc = DfBeanDescFactory.getBeanDesc(dto.getClass());
DBMeta dbmeta = entity.asDBMeta();
for (String propertyName : entityDesc.getProppertyNameList()) {
if (isOutOfDerivedPropertyName(entity, dto, toDto, dbmeta, entityDesc, dtoDesc, propertyName)) {
continue;
}
DfPropertyDesc entityProp = entityDesc.getPropertyDesc(propertyName);
Class<?> propertyType = entityProp.getPropertyType();
if (isOutOfDerivedPropertyType(entity, dto, toDto, propertyName, propertyType)) {
continue;
}
if (entityProp.isReadable() && entityProp.isWritable()) {
DfPropertyDesc dtoProp = dtoDesc.getPropertyDesc(propertyName);
if (dtoProp.isReadable() && dtoProp.isWritable()) {
if (toDto) {
dtoProp.setValue(dto, entityProp.getValue(entity));
} else {
entityProp.setValue(entity, dtoProp.getValue(dto));
}
}
}
}
}
// A property name is "out of scope" when it is a column, a relation, or has
// no counterpart on the DTO.
protected boolean isOutOfDerivedPropertyName(Entity entity, Object dto, boolean toDto
, DBMeta dbmeta, DfBeanDesc entityDesc, DfBeanDesc dtoDesc
, String propertyName) {
return dbmeta.hasColumn(propertyName)
|| dbmeta.hasForeign(propertyName) || dbmeta.hasReferrer(propertyName)
|| !dtoDesc.hasPropertyDesc(propertyName);
}
// List/Entity/Classification-typed properties are relation-like, not derived
// scalars, so they are excluded from the reflective copy.
protected boolean isOutOfDerivedPropertyType(Entity entity, Object dto, boolean toDto
, String propertyName, Class<?> propertyType) {
return List.class.isAssignableFrom(propertyType)
|| Entity.class.isAssignableFrom(propertyType)
|| Classification.class.isAssignableFrom(propertyType);
}
// ===================================================================================
// Suppress Relation
// =================
// (basically) to suppress infinity loop
protected void doSuppressAll() { // internal
}
protected void doSuppressClear() { // internal
}
// ===================================================================================
// Mapping Option
// ==============
/**
 * {@inheritDoc}
 */
public void setBaseOnlyMapping(boolean baseOnlyMapping) {
if (baseOnlyMapping) {
doSuppressAll();
} else {
doSuppressClear();
}
}
protected boolean isExceptCommonColumn() {
return _exceptCommonColumn;
}
/**
 * {@inheritDoc}
 */
public void setExceptCommonColumn(boolean exceptCommonColumn) {
_exceptCommonColumn = exceptCommonColumn;
}
protected boolean isReverseReference() {
return _reverseReference;
}
/**
 * {@inheritDoc}
 */
public void setReverseReference(boolean reverseReference) {
_reverseReference = reverseReference;
}
// -----------------------------------------------------
// Easy-to-Use
// -----------
/**
 * Enable base-only mapping that means the mapping ignores all references.
 * @return this. (NotNull)
 */
public SummaryWithdrawalDtoMapper baseOnlyMapping() {
setBaseOnlyMapping(true);
return (SummaryWithdrawalDtoMapper)this;
}
/**
 * Enable except common column that means the mapping excepts common column.
 * @return this. (NotNull)
 */
public SummaryWithdrawalDtoMapper exceptCommonColumn() {
setExceptCommonColumn(true);
return (SummaryWithdrawalDtoMapper)this;
}
/**
 * Enable reverse reference that means the mapping contains reverse references.
 * @return this. (NotNull)
 */
public SummaryWithdrawalDtoMapper reverseReference() {
setReverseReference(true);
return (SummaryWithdrawalDtoMapper)this;
}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/ListPoliciesResultStaxUnmarshaller.java | 3148 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model.transform;
import java.util.ArrayList;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.identitymanagement.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* ListPoliciesResult StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListPoliciesResultStaxUnmarshaller implements Unmarshaller<ListPoliciesResult, StaxUnmarshallerContext> {
// Code-generated StAX unmarshaller: walks XML events, matching element
// expressions at the expected depth and filling the result object.
public ListPoliciesResult unmarshall(StaxUnmarshallerContext context) throws Exception {
ListPoliciesResult listPoliciesResult = new ListPoliciesResult();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
// At document start, skip past the response/result wrapper elements.
if (context.isStartOfDocument())
targetDepth += 2;
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument())
return listPoliciesResult;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("Policies", targetDepth)) {
listPoliciesResult.withPolicies(new ArrayList<Policy>());
continue;
}
if (context.testExpression("Policies/member", targetDepth)) {
listPoliciesResult.withPolicies(PolicyStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("IsTruncated", targetDepth)) {
listPoliciesResult.setIsTruncated(BooleanStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Marker", targetDepth)) {
listPoliciesResult.setMarker(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
// Rose back above the element we started in: result is complete.
if (context.getCurrentDepth() < originalDepth) {
return listPoliciesResult;
}
}
}
}
private static ListPoliciesResultStaxUnmarshaller instance;
// Lazy singleton without synchronization; the unmarshaller holds no mutable
// state in unmarshall(), so a rare duplicate instance is presumably harmless
// (generated-code pattern) — confirm before relying on strict singleton-ness.
public static ListPoliciesResultStaxUnmarshaller getInstance() {
if (instance == null)
instance = new ListPoliciesResultStaxUnmarshaller();
return instance;
}
}
alibaba/nacos | common/src/main/java/com/alibaba/nacos/common/http/client/response/JdkHttpClientResponse.java | 2988 | /*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.common.http.client.response;
import com.alibaba.nacos.common.constant.HttpHeaderConsts;
import com.alibaba.nacos.common.http.param.Header;
import com.alibaba.nacos.common.utils.IoUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.List;
import java.util.Map;
/**
* JDk http client response implement.
*
* @author mai.jh
*/
/**
 * JDK http client response implement.
 *
 * <p>Wraps an {@link HttpURLConnection}, exposing its headers, status and
 * (transparently gunzipped) body through the {@code HttpClientResponse}
 * contract.
 *
 * @author mai.jh
 */
public class JdkHttpClientResponse implements HttpClientResponse {

    private final HttpURLConnection conn;

    private InputStream responseStream;

    private Header responseHeader;

    /** Content-coding token that triggers transparent decompression. */
    private static final String CONTENT_ENCODING = "gzip";

    public JdkHttpClientResponse(HttpURLConnection conn) {
        this.conn = conn;
    }

    @Override
    public Header getHeaders() {
        if (this.responseHeader == null) {
            this.responseHeader = Header.newInstance();
            // Populate only on first access: the original code copied the
            // connection's headers on EVERY call, so repeated getHeaders()
            // (e.g. via getBody()) appended duplicate values.
            for (Map.Entry<String, List<String>> entry : conn.getHeaderFields().entrySet()) {
                this.responseHeader.addOriginalResponseHeader(entry.getKey(), entry.getValue());
            }
        }
        return this.responseHeader;
    }

    @Override
    public InputStream getBody() throws IOException {
        Header headers = getHeaders();
        // Prefer the error stream so error bodies are still readable.
        InputStream errorStream = this.conn.getErrorStream();
        this.responseStream = (errorStream != null ? errorStream : this.conn.getInputStream());
        String contentEncoding = headers.getValue(HttpHeaderConsts.CONTENT_ENCODING);
        // Content-coding tokens are case-insensitive (RFC 7231 section 3.1.2.1),
        // so accept "GZIP"/"gzip" alike; when present, use GZIP decompression.
        if (CONTENT_ENCODING.equalsIgnoreCase(contentEncoding)) {
            byte[] bytes = IoUtils.tryDecompress(this.responseStream);
            if (bytes == null) {
                throw new IOException("decompress http response error");
            }
            return new ByteArrayInputStream(bytes);
        }
        return this.responseStream;
    }

    @Override
    public int getStatusCode() throws IOException {
        return this.conn.getResponseCode();
    }

    @Override
    public String getStatusText() throws IOException {
        return this.conn.getResponseMessage();
    }

    @Override
    public void close() {
        // Best-effort close of whatever body stream was handed out.
        IoUtils.closeQuietly(this.responseStream);
    }
}
| apache-2.0 |
RobAustin/byte-buddy | byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/loading/ClassInjector.java | 21190 | package net.bytebuddy.dynamic.loading;
import net.bytebuddy.instrumentation.type.TypeDescription;
import net.bytebuddy.utility.RandomString;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.instrument.Instrumentation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
/**
* A class injector is capable of injecting classes into a {@link java.lang.ClassLoader} without
* requiring the class loader to being able to explicitly look up these classes.
*/
public interface ClassInjector {

    /**
     * A convenience reference to the default protection domain which is {@code null}.
     */
    ProtectionDomain DEFAULT_PROTECTION_DOMAIN = null;

    /**
     * Injects the given types into the represented class loader.
     *
     * @param types The types to load via injection.
     * @return The loaded types that were passed as arguments.
     */
    Map<TypeDescription, Class<?>> inject(Map<? extends TypeDescription, byte[]> types);

    /**
     * A class injector that uses reflective method calls.
     */
    class UsingReflection implements ClassInjector {

        /**
         * A storage for the reflection method representations that are obtained on loading this classes.
         */
        private static final ReflectionStore REFLECTION_STORE;

        /*
         * Obtains the reflective instances used by this injector or a no-op instance that throws the exception
         * that occurred when attempting to obtain the reflective member instances.
         */
        static {
            ReflectionStore reflectionStore;
            try {
                // Both ClassLoader methods are protected, so reflective access must be forced.
                Method findLoadedClassMethod = ClassLoader.class.getDeclaredMethod("findLoadedClass", String.class);
                findLoadedClassMethod.setAccessible(true);
                Method loadByteArrayMethod = ClassLoader.class.getDeclaredMethod("defineClass",
                        String.class,
                        byte[].class,
                        int.class,
                        int.class,
                        ProtectionDomain.class);
                loadByteArrayMethod.setAccessible(true);
                reflectionStore = new ReflectionStore.Resolved(findLoadedClassMethod, loadByteArrayMethod);
            } catch (Exception e) {
                // Defer the failure: it only surfaces when an injection is actually attempted.
                reflectionStore = new ReflectionStore.Faulty(e);
            }
            REFLECTION_STORE = reflectionStore;
        }

        /**
         * The class loader into which the classes are to be injected.
         */
        private final ClassLoader classLoader;

        /**
         * The protection domain that is used when loading classes.
         */
        private final ProtectionDomain protectionDomain;

        /**
         * The access control context of this class loader's instantiation.
         */
        private final AccessControlContext accessControlContext;

        /**
         * Creates a new injector for the given {@link java.lang.ClassLoader} and a default
         * {@link java.security.ProtectionDomain}.
         *
         * @param classLoader The {@link java.lang.ClassLoader} into which new class definitions are to be injected.
         */
        public UsingReflection(ClassLoader classLoader) {
            this(classLoader, DEFAULT_PROTECTION_DOMAIN);
        }

        /**
         * Creates a new injector for the given {@link java.lang.ClassLoader} and {@link java.security.ProtectionDomain}.
         *
         * @param classLoader      The {@link java.lang.ClassLoader} into which new class definitions are to be injected.
         * @param protectionDomain The protection domain to apply during class definition.
         */
        public UsingReflection(ClassLoader classLoader, ProtectionDomain protectionDomain) {
            if (classLoader == null) {
                throw new IllegalArgumentException("Cannot inject classes into the bootstrap class loader");
            }
            this.classLoader = classLoader;
            this.protectionDomain = protectionDomain;
            // Capture the caller's access control context at construction time; it is
            // replayed for every privileged class definition in inject().
            accessControlContext = AccessController.getContext();
        }

        @Override
        public Map<TypeDescription, Class<?>> inject(Map<? extends TypeDescription, byte[]> types) {
            try {
                Map<TypeDescription, Class<?>> loaded = new HashMap<TypeDescription, Class<?>>(types.size());
                // Lock the class loader to avoid racing concurrent definitions of the same type.
                synchronized (classLoader) {
                    for (Map.Entry<? extends TypeDescription, byte[]> entry : types.entrySet()) {
                        // Skip definition when the class loader already knows this type.
                        Class<?> type = (Class<?>) REFLECTION_STORE.getFindLoadedClassMethod().invoke(classLoader, entry.getKey().getName());
                        if (type == null) {
                            try {
                                type = AccessController.doPrivileged(new ClassLoadingAction(entry.getKey().getName(), entry.getValue()), accessControlContext);
                            } catch (PrivilegedActionException e) {
                                // Unwrap the checked exception types declared by ClassLoadingAction#run.
                                if (e.getCause() instanceof IllegalAccessException) {
                                    throw (IllegalAccessException) e.getCause();
                                } else if (e.getCause() instanceof InvocationTargetException) {
                                    throw (InvocationTargetException) e.getCause();
                                } else {
                                    // NOTE(review): assumes any remaining cause is a RuntimeException;
                                    // an Error cause would produce a ClassCastException here - confirm intended.
                                    throw (RuntimeException) e.getCause();
                                }
                            }
                        }
                        loaded.put(entry.getKey(), type);
                    }
                }
                return loaded;
            } catch (IllegalAccessException e) {
                throw new IllegalStateException("Could not access injection method", e);
            } catch (InvocationTargetException e) {
                throw new IllegalStateException("Exception on invoking loader method", e.getCause());
            }
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            UsingReflection that = (UsingReflection) other;
            return accessControlContext.equals(that.accessControlContext)
                    && classLoader.equals(that.classLoader)
                    && !(protectionDomain != null ? !protectionDomain.equals(that.protectionDomain) : that.protectionDomain != null);
        }

        @Override
        public int hashCode() {
            int result = classLoader.hashCode();
            result = 31 * result + (protectionDomain != null ? protectionDomain.hashCode() : 0);
            result = 31 * result + accessControlContext.hashCode();
            return result;
        }

        @Override
        public String toString() {
            return "ClassInjector.UsingReflection{" +
                    "classLoader=" + classLoader +
                    ", protectionDomain=" + protectionDomain +
                    ", accessControlContext=" + accessControlContext +
                    '}';
        }

        /**
         * A storage for method representations in order to access a class loader reflectively.
         */
        protected interface ReflectionStore {

            /**
             * Returns the method for finding a class on a class loader.
             *
             * @return The method for finding a class on a class loader.
             */
            Method getFindLoadedClassMethod();

            /**
             * Returns the method for loading a class into a class loader.
             *
             * @return The method for loading a class into a class loader.
             */
            Method getLoadByteArrayMethod();

            /**
             * Represents a successfully loaded method lookup.
             */
            class Resolved implements ReflectionStore {

                /**
                 * The method for finding a class on a class loader.
                 */
                private final Method findLoadedClassMethod;

                /**
                 * The method for loading a class into a class loader.
                 */
                private final Method loadByteArrayMethod;

                /**
                 * Creates a new resolved reflection store.
                 *
                 * @param findLoadedClassMethod The method for finding a class on a class loader.
                 * @param loadByteArrayMethod   The method for loading a class into a class loader.
                 */
                protected Resolved(Method findLoadedClassMethod, Method loadByteArrayMethod) {
                    this.findLoadedClassMethod = findLoadedClassMethod;
                    this.loadByteArrayMethod = loadByteArrayMethod;
                }

                @Override
                public Method getFindLoadedClassMethod() {
                    return findLoadedClassMethod;
                }

                @Override
                public Method getLoadByteArrayMethod() {
                    return loadByteArrayMethod;
                }

                @Override
                public boolean equals(Object other) {
                    if (this == other) return true;
                    if (other == null || getClass() != other.getClass()) return false;
                    Resolved resolved = (Resolved) other;
                    return findLoadedClassMethod.equals(resolved.findLoadedClassMethod)
                            && loadByteArrayMethod.equals(resolved.loadByteArrayMethod);
                }

                @Override
                public int hashCode() {
                    int result = findLoadedClassMethod.hashCode();
                    result = 31 * result + loadByteArrayMethod.hashCode();
                    return result;
                }

                @Override
                public String toString() {
                    return "ClassInjector.UsingReflection.ReflectionStore.Resolved{" +
                            "findLoadedClassMethod=" + findLoadedClassMethod +
                            ", loadByteArrayMethod=" + loadByteArrayMethod +
                            '}';
                }
            }

            /**
             * Represents an unsuccessfully loaded method lookup.
             */
            class Faulty implements ReflectionStore {

                /**
                 * The message to display in an exception.
                 */
                private static final String MESSAGE = "Cannot access reflection API for class loading";

                /**
                 * The exception that occurred when looking up the reflection methods.
                 */
                private final Exception exception;

                /**
                 * Creates a new faulty reflection store.
                 *
                 * @param exception The exception that was thrown when attempting to lookup the method.
                 */
                protected Faulty(Exception exception) {
                    this.exception = exception;
                }

                @Override
                public Method getFindLoadedClassMethod() {
                    // Re-throws the original lookup failure at first use.
                    throw new RuntimeException(MESSAGE, exception);
                }

                @Override
                public Method getLoadByteArrayMethod() {
                    throw new RuntimeException(MESSAGE, exception);
                }

                @Override
                public boolean equals(Object other) {
                    return this == other || !(other == null || getClass() != other.getClass())
                            && exception.equals(((Faulty) other).exception);
                }

                @Override
                public int hashCode() {
                    return exception.hashCode();
                }

                @Override
                public String toString() {
                    return "ClassInjector.UsingReflection.ReflectionStore.Faulty{exception=" + exception + '}';
                }
            }
        }

        /**
         * A privileged action for loading a class reflectively.
         * <p>
         * Deliberately a non-static inner class: it reads the outer injector's
         * {@code classLoader} and {@code protectionDomain} when run.
         */
        protected class ClassLoadingAction implements PrivilegedExceptionAction<Class<?>> {

            /**
             * A convenience variable representing the first index of an array, to make the code more readable.
             */
            private static final int FROM_BEGINNING = 0;

            /**
             * The name of the class that is being loaded.
             */
            private final String name;

            /**
             * The binary representation of the class that is being loaded.
             */
            private final byte[] binaryRepresentation;

            /**
             * Creates a new class loading action.
             *
             * @param name                 The name of the class that is being loaded.
             * @param binaryRepresentation The binary representation of the class that is being loaded.
             */
            protected ClassLoadingAction(String name, byte[] binaryRepresentation) {
                this.name = name;
                this.binaryRepresentation = binaryRepresentation;
            }

            @Override
            public Class<?> run() throws IllegalAccessException, InvocationTargetException {
                // Invokes ClassLoader#defineClass(String, byte[], int, int, ProtectionDomain).
                return (Class<?>) REFLECTION_STORE.getLoadByteArrayMethod().invoke(classLoader,
                        name,
                        binaryRepresentation,
                        FROM_BEGINNING,
                        binaryRepresentation.length,
                        protectionDomain);
            }

            @Override
            public boolean equals(Object other) {
                if (this == other) return true;
                if (other == null || getClass() != other.getClass()) return false;
                ClassLoadingAction that = (ClassLoadingAction) other;
                return Arrays.equals(binaryRepresentation, that.binaryRepresentation)
                        && UsingReflection.this.equals(that.getOuter())
                        && name.equals(that.name);
            }

            /**
             * Returns the outer instance.
             *
             * @return The outer instance.
             */
            private UsingReflection getOuter() {
                return UsingReflection.this;
            }

            @Override
            public int hashCode() {
                int result = name.hashCode();
                result = 31 * result + UsingReflection.this.hashCode();
                result = 31 * result + Arrays.hashCode(binaryRepresentation);
                return result;
            }

            @Override
            public String toString() {
                return "ClassInjector.UsingReflection.ClassLoadingAction{" +
                        "injector=" + UsingReflection.this +
                        ", name='" + name + '\'' +
                        ", binaryRepresentation=<" + binaryRepresentation.length + " bytes>" +
                        '}';
            }
        }
    }

    /**
     * A class injector using a {@link java.lang.instrument.Instrumentation} to append to either the boot classpath
     * or the system class path.
     */
    class UsingInstrumentation implements ClassInjector {

        /**
         * A prefix to use of generated files.
         */
        private static final String PREFIX = "jar";

        /**
         * The class file extension.
         */
        private static final String CLASS_FILE_EXTENSION = ".class";

        /**
         * The instrumentation to use for appending to the class path or the boot path.
         */
        private final Instrumentation instrumentation;

        /**
         * A representation of the target path to which classes are to be appended.
         */
        private final Target target;

        /**
         * The folder to be used for storing jar files.
         */
        private final File folder;

        /**
         * A random string generator for creating file names.
         */
        private final RandomString randomString;

        /**
         * Creates an instrumentation-based class injector.
         *
         * @param folder          The folder to be used for storing jar files.
         * @param target          A representation of the target path to which classes are to be appended.
         * @param instrumentation The instrumentation to use for appending to the class path or the boot path.
         */
        public UsingInstrumentation(File folder, Target target, Instrumentation instrumentation) {
            this.folder = folder;
            this.target = target;
            this.instrumentation = instrumentation;
            randomString = new RandomString();
        }

        @Override
        public Map<TypeDescription, Class<?>> inject(Map<? extends TypeDescription, byte[]> types) {
            // Package all types into a freshly named jar file; the file is not deleted here
            // since the instrumentation may keep reading from it for the JVM's lifetime.
            File jarFile = new File(folder, String.format("%s%s.jar", PREFIX, randomString.nextString()));
            try {
                if (!jarFile.createNewFile()) {
                    throw new IllegalStateException("Cannot create file " + jarFile);
                }
                JarOutputStream jarOutputStream = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(jarFile)));
                try {
                    for (Map.Entry<? extends TypeDescription, byte[]> entry : types.entrySet()) {
                        // Entry names use the JVM-internal (slash-separated) type name.
                        jarOutputStream.putNextEntry(new JarEntry(entry.getKey().getInternalName() + CLASS_FILE_EXTENSION));
                        jarOutputStream.write(entry.getValue());
                    }
                } finally {
                    jarOutputStream.close();
                }
                target.inject(instrumentation, new JarFile(jarFile));
                // Resolve the injected classes via the system class loader, which now sees the jar.
                Map<TypeDescription, Class<?>> loaded = new HashMap<TypeDescription, Class<?>>(types.size());
                ClassLoader classLoader = ClassLoader.getSystemClassLoader();
                for (TypeDescription typeDescription : types.keySet()) {
                    loaded.put(typeDescription, classLoader.loadClass(typeDescription.getName()));
                }
                return loaded;
            } catch (IOException e) {
                throw new IllegalStateException("Cannot write jar file to disk", e);
            } catch (ClassNotFoundException e) {
                throw new IllegalStateException("Cannot load injected class", e);
            }
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (other == null || getClass() != other.getClass()) return false;
            UsingInstrumentation that = (UsingInstrumentation) other;
            // randomString is deliberately excluded: it carries no identity.
            return folder.equals(that.folder)
                    && instrumentation.equals(that.instrumentation)
                    && target == that.target;
        }

        @Override
        public int hashCode() {
            int result = instrumentation.hashCode();
            result = 31 * result + target.hashCode();
            result = 31 * result + folder.hashCode();
            return result;
        }

        @Override
        public String toString() {
            return "ClassInjector.UsingInstrumentation{" +
                    "instrumentation=" + instrumentation +
                    ", target=" + target +
                    ", folder=" + folder +
                    ", randomString=" + randomString +
                    '}';
        }

        /**
         * A representation of the target to which Java classes should be appended to.
         */
        public enum Target {

            /**
             * Representation of the bootstrap class loader.
             */
            BOOTSTRAP {
                @Override
                protected void inject(Instrumentation instrumentation, JarFile jarFile) {
                    instrumentation.appendToBootstrapClassLoaderSearch(jarFile);
                }
            },

            /**
             * Representation of the system class loader.
             */
            SYSTEM {
                @Override
                protected void inject(Instrumentation instrumentation, JarFile jarFile) {
                    instrumentation.appendToSystemClassLoaderSearch(jarFile);
                }
            };

            /**
             * Adds the given classes to the represented class loader.
             *
             * @param instrumentation The instrumentation instance to use.
             * @param jarFile         The jar file to append.
             */
            protected abstract void inject(Instrumentation instrumentation, JarFile jarFile);

            @Override
            public String toString() {
                return "ClassInjector.UsingInstrumentation.Target." + name();
            }
        }
    }
}
| apache-2.0 |
viralpatel/spring-boot-tutorials | spring-boot-freemarker-example/src/main/java/net/viralpatel/springbootfreemarkerexample/Customer.java | 1157 | package net.viralpatel.springbootfreemarkerexample;
import java.time.LocalDate;
import org.springframework.format.annotation.DateTimeFormat;
/**
 * Simple mutable bean describing a customer, bound to and from web forms.
 */
public class Customer {

    private int customerId;

    private String customerName;

    /** Bound using the ISO-8601 date format (yyyy-MM-dd). */
    @DateTimeFormat(iso = DateTimeFormat.ISO.DATE)
    private LocalDate dateOfBirth;

    private String email;

    /**
     * Creates an empty customer for bean-style population via setters.
     */
    public Customer() {
    }

    /**
     * Creates a fully populated customer.
     *
     * @param customerId   unique identifier of the customer
     * @param customerName display name of the customer
     * @param email        contact e-mail address
     * @param dateOfBirth  the customer's date of birth
     */
    public Customer(int customerId, String customerName, String email, LocalDate dateOfBirth) {
        this.customerId = customerId;
        this.customerName = customerName;
        this.dateOfBirth = dateOfBirth;
        this.email = email;
    }

    public int getCustomerId() {
        return this.customerId;
    }

    public void setCustomerId(int customerId) {
        this.customerId = customerId;
    }

    public String getCustomerName() {
        return this.customerName;
    }

    public void setCustomerName(String customerName) {
        this.customerName = customerName;
    }

    public LocalDate getDateOfBirth() {
        return this.dateOfBirth;
    }

    public void setDateOfBirth(LocalDate dateOfBirth) {
        this.dateOfBirth = dateOfBirth;
    }

    public String getEmail() {
        return this.email;
    }

    public void setEmail(String email) {
        this.email = email;
    }
}
| apache-2.0 |
ben-manes/cassandra | src/java/org/apache/cassandra/service/StorageService.java | 203140 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.MatchResult;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.management.JMX;
import javax.management.MBeanServer;
import javax.management.NotificationBroadcasterSupport;
import javax.management.ObjectName;
import javax.management.openmbean.TabularData;
import javax.management.openmbean.TabularDataSupport;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Collections2;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.jmx.JMXConfiguratorMBean;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;
import org.apache.cassandra.auth.AuthKeyspace;
import org.apache.cassandra.auth.AuthMigrationListener;
import org.apache.cassandra.batchlog.BatchRemoveVerbHandler;
import org.apache.cassandra.batchlog.BatchStoreVerbHandler;
import org.apache.cassandra.batchlog.BatchlogManager;
import org.apache.cassandra.concurrent.ScheduledExecutors;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.CounterMutationVerbHandler;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.DefinitionsUpdateVerbHandler;
import org.apache.cassandra.db.Directories;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.MigrationRequestVerbHandler;
import org.apache.cassandra.db.MutationVerbHandler;
import org.apache.cassandra.db.PartitionPosition;
import org.apache.cassandra.db.RangeSliceVerbHandler;
import org.apache.cassandra.db.ReadCommandVerbHandler;
import org.apache.cassandra.db.ReadRepairVerbHandler;
import org.apache.cassandra.db.SchemaCheckVerbHandler;
import org.apache.cassandra.db.SizeEstimatesRecorder;
import org.apache.cassandra.db.SnapshotDetailsTabularData;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.TruncateVerbHandler;
import org.apache.cassandra.db.commitlog.CommitLog;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.lifecycle.LifecycleTransaction;
import org.apache.cassandra.dht.BootStrapper;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.RangeStreamer;
import org.apache.cassandra.dht.RingPosition;
import org.apache.cassandra.dht.Splitter;
import org.apache.cassandra.dht.StreamStateStore;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.dht.Token.TokenFactory;
import org.apache.cassandra.exceptions.AlreadyExistsException;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.exceptions.UnavailableException;
import org.apache.cassandra.gms.ApplicationState;
import org.apache.cassandra.gms.EndpointState;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.GossipDigestAck2VerbHandler;
import org.apache.cassandra.gms.GossipDigestAckVerbHandler;
import org.apache.cassandra.gms.GossipDigestSynVerbHandler;
import org.apache.cassandra.gms.GossipShutdownVerbHandler;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.gms.IEndpointStateChangeSubscriber;
import org.apache.cassandra.gms.IFailureDetector;
import org.apache.cassandra.gms.TokenSerializer;
import org.apache.cassandra.gms.VersionedValue;
import org.apache.cassandra.hints.HintVerbHandler;
import org.apache.cassandra.hints.HintsService;
import org.apache.cassandra.io.sstable.SSTableLoader;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.AbstractReplicationStrategy;
import org.apache.cassandra.locator.DynamicEndpointSnitch;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.LocalStrategy;
import org.apache.cassandra.locator.TokenMetadata;
import org.apache.cassandra.metrics.StorageMetrics;
import org.apache.cassandra.net.AsyncOneResponse;
import org.apache.cassandra.net.MessageOut;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.net.ResponseVerbHandler;
import org.apache.cassandra.repair.RepairMessageVerbHandler;
import org.apache.cassandra.repair.RepairParallelism;
import org.apache.cassandra.repair.RepairRunnable;
import org.apache.cassandra.repair.SystemDistributedKeyspace;
import org.apache.cassandra.repair.messages.RepairOption;
import org.apache.cassandra.schema.KeyspaceMetadata;
import org.apache.cassandra.service.paxos.CommitVerbHandler;
import org.apache.cassandra.service.paxos.PrepareVerbHandler;
import org.apache.cassandra.service.paxos.ProposeVerbHandler;
import org.apache.cassandra.streaming.ReplicationFinishedVerbHandler;
import org.apache.cassandra.streaming.StreamManager;
import org.apache.cassandra.streaming.StreamPlan;
import org.apache.cassandra.streaming.StreamResultFuture;
import org.apache.cassandra.streaming.StreamState;
import org.apache.cassandra.thrift.EndpointDetails;
import org.apache.cassandra.thrift.TokenRange;
import org.apache.cassandra.thrift.cassandraConstants;
import org.apache.cassandra.tracing.TraceKeyspace;
import org.apache.cassandra.utils.BackgroundActivityMonitor;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.OutputHandler;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.WindowsTimer;
import org.apache.cassandra.utils.WrappedRunnable;
import org.apache.cassandra.utils.progress.ProgressEvent;
import org.apache.cassandra.utils.progress.ProgressEventType;
import org.apache.cassandra.utils.progress.jmx.JMXProgressSupport;
import org.apache.cassandra.utils.progress.jmx.LegacyJMXProgressSupport;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toList;
import static org.apache.cassandra.index.SecondaryIndexManager.getIndexName;
import static org.apache.cassandra.index.SecondaryIndexManager.isIndexColumnFamily;
/**
* This abstraction contains the token/identifier of this node
* on the identifier space. This token gets gossiped around.
* This class will also maintain histograms of the load information
* of other nodes in the cluster.
*/
public class StorageService extends NotificationBroadcasterSupport implements IEndpointStateChangeSubscriber, StorageServiceMBean
{
private static final Logger logger = LoggerFactory.getLogger(StorageService.class);

// Overridable via the "cassandra.ring_delay_ms" system property; see getRingDelay().
public static final int RING_DELAY = getRingDelay(); // delay after which we assume ring has stablized

// Bridges internal progress events (e.g. repair progress) to JMX notifications.
private final JMXProgressSupport progressSupport = new JMXProgressSupport(this);

/**
 * @deprecated backward support to previous notification interface
 * Will be removed on 4.0
 */
@Deprecated
private final LegacyJMXProgressSupport legacyProgressSupport;
/**
 * Resolves the ring-stabilization delay in milliseconds: the value of the
 * "cassandra.ring_delay_ms" system property when set, otherwise 30 seconds.
 */
private static int getRingDelay()
{
    final String overridden = System.getProperty("cassandra.ring_delay_ms");
    if (overridden == null)
        return 30 * 1000;
    logger.info("Overriding RING_DELAY to {}ms", overridden);
    return Integer.parseInt(overridden);
}
/* This abstraction maintains the token/endpoint metadata information */
private TokenMetadata tokenMetadata = new TokenMetadata();

// Factory for gossip application-state values, tied to this node's partitioner.
public volatile VersionedValue.VersionedValueFactory valueFactory = new VersionedValue.VersionedValueFactory(tokenMetadata.partitioner);

// Shutdown hook thread; presumably registered during initialization (outside this view) - confirm.
private Thread drainOnShutdown = null;

// Set while the shutdown hook is executing; read via isInShutdownHook().
private boolean inShutdownHook = false;

public static final StorageService instance = new StorageService();
/**
 * Returns whether this node is currently executing its shutdown hook.
 * (The flag is set elsewhere in this class, outside this view.)
 */
public boolean isInShutdownHook()
{
    return inShutdownHook;
}
/**
 * Returns the token ranges this node is responsible for in the given keyspace.
 *
 * @param keyspaceName the keyspace whose replication determines the ranges
 */
public Collection<Range<Token>> getLocalRanges(String keyspaceName)
{
    final InetAddress localEndpoint = FBUtilities.getBroadcastAddress();
    return getRangesForEndpoint(keyspaceName, localEndpoint);
}
/**
 * Returns the ranges for which this node is the primary replica in the given keyspace.
 *
 * @param keyspace the keyspace whose replication determines the ranges
 */
public Collection<Range<Token>> getPrimaryRanges(String keyspace)
{
    final InetAddress localEndpoint = FBUtilities.getBroadcastAddress();
    return getPrimaryRangesForEndpoint(keyspace, localEndpoint);
}
/**
 * Returns this node's primary ranges, considering only replicas within the local datacenter.
 *
 * @param keyspace the keyspace whose replication determines the ranges
 */
public Collection<Range<Token>> getPrimaryRangesWithinDC(String keyspace)
{
    final InetAddress localEndpoint = FBUtilities.getBroadcastAddress();
    return getPrimaryRangeForEndpointWithinDC(keyspace, localEndpoint);
}
// Nodes currently streaming replicas as part of a removenode operation.
private final Set<InetAddress> replicatingNodes = Collections.synchronizedSet(new HashSet<InetAddress>());

// The daemon hosting this service; null until registerDaemon() is called.
private CassandraDaemon daemon;

// Endpoint being removed by an in-flight removenode operation, if any.
private InetAddress removingNode;

/* Are we starting this node in bootstrap mode? */
private volatile boolean isBootstrapMode;

/* we bootstrap but do NOT join the ring unless told to do so */
private boolean isSurveyMode = Boolean.parseBoolean(System.getProperty("cassandra.write_survey", "false"));

/* true if node is rebuilding and receiving data */
private final AtomicBoolean isRebuilding = new AtomicBoolean();

// True while gossip is active; toggled by startGossiping()/stopGossiping().
private boolean initialized;

private volatile boolean joined = false;

/* the probability for tracing any particular request, 0 disables tracing and 1 enables for all */
private double traceProbability = 0.0;

// Lifecycle states reported via JMX and logged on transitions.
private static enum Mode { STARTING, NORMAL, JOINING, LEAVING, DECOMMISSIONED, MOVING, DRAINING, DRAINED }
private Mode operationMode = Mode.STARTING;

/* Used for tracking drain progress */
private volatile int totalCFs, remainingCFs;

private static final AtomicInteger nextRepairCommand = new AtomicInteger();

private final List<IEndpointLifecycleSubscriber> lifecycleSubscribers = new CopyOnWriteArrayList<>();

private static final BackgroundActivityMonitor bgMonitor = new BackgroundActivityMonitor();

// JMX name this service is registered under; assigned in the constructor.
private final ObjectName jmxObjectName;

private Collection<Token> bootstrapTokens = null;

// true when keeping strict consistency while bootstrapping
private boolean useStrictConsistency = Boolean.parseBoolean(System.getProperty("cassandra.consistent.rangemovement", "true"));
private static final boolean allowSimultaneousMoves = Boolean.valueOf(System.getProperty("cassandra.consistent.simultaneousmoves.allow","false"));

// True when this node replaces a dead peer (replace_address).
private boolean replacing;

// Tracks streamed ranges so interrupted bootstraps can resume.
private final StreamStateStore streamStateStore = new StreamStateStore();
public void setTokens(Collection<Token> tokens)
{
if (logger.isDebugEnabled())
logger.debug("Setting tokens to {}", tokens);
SystemKeyspace.updateTokens(tokens);
tokenMetadata.updateNormalTokens(tokens, FBUtilities.getBroadcastAddress());
Collection<Token> localTokens = getLocalTokens();
setGossipTokens(localTokens);
setMode(Mode.NORMAL, false);
}
/**
 * Publishes the given token set plus a NORMAL status through gossip, applying
 * both application states in a single update.
 *
 * @param tokens the tokens to announce to the cluster
 */
public void setGossipTokens(Collection<Token> tokens)
{
    List<Pair<ApplicationState, VersionedValue>> states = new ArrayList<>();
    states.add(Pair.create(ApplicationState.TOKENS, valueFactory.tokens(tokens)));
    states.add(Pair.create(ApplicationState.STATUS, valueFactory.normal(tokens)));
    Gossiper.instance.addLocalApplicationStates(states);
}
public StorageService()
{
    // Register this service and the stream manager with JMX before installing
    // any message verb handlers.
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    try
    {
        jmxObjectName = new ObjectName("org.apache.cassandra.db:type=StorageService");
        mbs.registerMBean(this, jmxObjectName);
        mbs.registerMBean(StreamManager.instance, new ObjectName(StreamManager.OBJECT_NAME));
    }
    catch (Exception e)
    {
        // JMX registration failure is fatal: the node cannot be managed without it.
        throw new RuntimeException(e);
    }
    legacyProgressSupport = new LegacyJMXProgressSupport(this, jmxObjectName);
    /* register the verb handlers */
    // Write path / read path handlers.
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.MUTATION, new MutationVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.READ_REPAIR, new ReadRepairVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.READ, new ReadCommandVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.RANGE_SLICE, new RangeSliceVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.PAGED_RANGE, new RangeSliceVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.COUNTER_MUTATION, new CounterMutationVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.TRUNCATE, new TruncateVerbHandler());
    // Lightweight-transaction (Paxos) handlers.
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.PAXOS_PREPARE, new PrepareVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.PAXOS_PROPOSE, new ProposeVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.PAXOS_COMMIT, new CommitVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.HINT, new HintVerbHandler());
    // see BootStrapper for a summary of how the bootstrap verbs interact
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.REPLICATION_FINISHED, new ReplicationFinishedVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.REQUEST_RESPONSE, new ResponseVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.INTERNAL_RESPONSE, new ResponseVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.REPAIR_MESSAGE, new RepairMessageVerbHandler());
    // Gossip protocol handlers.
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.GOSSIP_SHUTDOWN, new GossipShutdownVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.GOSSIP_DIGEST_SYN, new GossipDigestSynVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.GOSSIP_DIGEST_ACK, new GossipDigestAckVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.GOSSIP_DIGEST_ACK2, new GossipDigestAck2VerbHandler());
    // Schema propagation handlers.
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.DEFINITIONS_UPDATE, new DefinitionsUpdateVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.SCHEMA_CHECK, new SchemaCheckVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.MIGRATION_REQUEST, new MigrationRequestVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.SNAPSHOT, new SnapshotVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.ECHO, new EchoVerbHandler());
    // Batchlog handlers.
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.BATCH_STORE, new BatchStoreVerbHandler());
    MessagingService.instance().registerVerbHandlers(MessagingService.Verb.BATCH_REMOVE, new BatchRemoveVerbHandler());
}
    /**
     * Records the daemon that owns this service; required before the
     * RPC/native-transport control methods below can be used.
     */
    public void registerDaemon(CassandraDaemon daemon)
    {
        this.daemon = daemon;
    }
    /** Subscribes a listener to endpoint lifecycle (join/leave/move) events. */
    public void register(IEndpointLifecycleSubscriber subscriber)
    {
        lifecycleSubscribers.add(subscriber);
    }
    /** Removes a previously registered endpoint lifecycle listener. */
    public void unregister(IEndpointLifecycleSubscriber subscriber)
    {
        lifecycleSubscribers.remove(subscriber);
    }
// should only be called via JMX
public void stopGossiping()
{
if (initialized)
{
logger.warn("Stopping gossip by operator request");
Gossiper.instance.stop();
initialized = false;
}
}
// should only be called via JMX
public void startGossiping()
{
if (!initialized)
{
logger.warn("Starting gossip by operator request");
setGossipTokens(getLocalTokens());
Gossiper.instance.forceNewerGeneration();
Gossiper.instance.start((int) (System.currentTimeMillis() / 1000));
initialized = true;
}
}
    // should only be called via JMX
    /** @return whether the Gossiper is currently enabled */
    public boolean isGossipRunning()
    {
        return Gossiper.instance.isEnabled();
    }
// should only be called via JMX
public void startRPCServer()
{
if (daemon == null)
{
throw new IllegalStateException("No configured daemon");
}
daemon.thriftServer.start();
}
public void stopRPCServer()
{
if (daemon == null)
{
throw new IllegalStateException("No configured daemon");
}
if (daemon.thriftServer != null)
daemon.thriftServer.stop();
}
public boolean isRPCServerRunning()
{
if ((daemon == null) || (daemon.thriftServer == null))
{
return false;
}
return daemon.thriftServer.isRunning();
}
public void startNativeTransport()
{
if (daemon == null)
{
throw new IllegalStateException("No configured daemon");
}
try
{
daemon.startNativeTransport();
}
catch (Exception e)
{
throw new RuntimeException("Error starting native transport: " + e.getMessage());
}
}
public void stopNativeTransport()
{
if (daemon == null)
{
throw new IllegalStateException("No configured daemon");
}
daemon.stopNativeTransport();
}
public boolean isNativeTransportRunning()
{
if (daemon == null)
{
return false;
}
return daemon.isNativeTransportRunning();
}
    /**
     * Stops gossip, the Thrift RPC server and the native transport, in that
     * order, skipping any that are not currently running.
     * NOTE(review): messages are logged at ERROR level — presumably because
     * this is invoked on unrecoverable-error shutdown paths; confirm before
     * downgrading the level.
     */
    public void stopTransports()
    {
        if (isInitialized())
        {
            logger.error("Stopping gossiper");
            stopGossiping();
        }
        if (isRPCServerRunning())
        {
            logger.error("Stopping RPC server");
            stopRPCServer();
        }
        if (isNativeTransportRunning())
        {
            logger.error("Stopping native transport");
            stopNativeTransport();
        }
    }
    /** Stops both client-facing transports: Thrift RPC then the native transport. */
    private void shutdownClientServers()
    {
        stopRPCServer();
        stopNativeTransport();
    }
    /**
     * Shuts down gossip, messaging and the stages for a client-mode node:
     * unregister from the Gossiper first so no further callbacks arrive, then
     * stop gossip and messaging before tearing down the stages.
     */
    public void stopClient()
    {
        Gossiper.instance.unregister(this);
        Gossiper.instance.stop();
        MessagingService.instance().shutdown();
        // give it a second so that task accepted before the MessagingService shutdown gets submitted to the stage (to avoid RejectedExecutionException)
        Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
        StageManager.shutdownNow();
    }
    /**
     * @return whether this service has been initialized (set by initServer()
     *         and startGossiping(), cleared by stopGossiping())
     */
    public boolean isInitialized()
    {
        return initialized;
    }
public boolean isSetupCompleted()
{
return daemon == null
? false
: daemon.setupCompleted();
}
    /**
     * Deactivates the owning daemon, shutting the node down.
     *
     * @throws IllegalStateException if no daemon has been registered
     */
    public void stopDaemon()
    {
        if (daemon == null)
            throw new IllegalStateException("No configured daemon");
        daemon.deactivate();
    }
    /**
     * Runs a shadow gossip round to collect the tokens and host id of the node
     * being replaced (cassandra.replace_address), adopts the replacee's host
     * id locally, and returns its tokens.
     *
     * @return the tokens previously owned by the node being replaced
     * @throws ConfigurationException declared for callers; failures here are
     *         surfaced as RuntimeException (node absent from gossip, missing
     *         TOKENS state, or token deserialization errors)
     */
    public synchronized Collection<Token> prepareReplacementInfo() throws ConfigurationException
    {
        logger.info("Gathering node replacement information for {}", DatabaseDescriptor.getReplaceAddress());
        if (!MessagingService.instance().isListening())
            MessagingService.instance().listen();
        // make magic happen
        Gossiper.instance.doShadowRound();
        UUID hostId = null;
        // now that we've gossiped at least once, we should be able to find the node we're replacing
        if (Gossiper.instance.getEndpointStateForEndpoint(DatabaseDescriptor.getReplaceAddress())== null)
            throw new RuntimeException("Cannot replace_address " + DatabaseDescriptor.getReplaceAddress() + " because it doesn't exist in gossip");
        hostId = Gossiper.instance.getHostId(DatabaseDescriptor.getReplaceAddress());
        try
        {
            // the TOKENS application state carries the replacee's serialized token set
            VersionedValue tokensVersionedValue = Gossiper.instance.getEndpointStateForEndpoint(DatabaseDescriptor.getReplaceAddress()).getApplicationState(ApplicationState.TOKENS);
            if (tokensVersionedValue == null)
                throw new RuntimeException("Could not find tokens for " + DatabaseDescriptor.getReplaceAddress() + " to replace");
            Collection<Token> tokens = TokenSerializer.deserialize(tokenMetadata.partitioner, new DataInputStream(new ByteArrayInputStream(tokensVersionedValue.toBytes())));
            SystemKeyspace.setLocalHostId(hostId); // use the replacee's host Id as our own so we receive hints, etc
            Gossiper.instance.resetEndpointStateMap(); // clean up since we have what we need
            return tokens;
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }
    /**
     * Runs a shadow gossip round and aborts the join if another node already
     * uses this node's broadcast address, or — under strict consistency with
     * simultaneous moves disallowed — if any other node is currently
     * bootstrapping, leaving or moving.
     *
     * @throws ConfigurationException declared for callers; collisions are
     *         surfaced as RuntimeException / UnsupportedOperationException
     */
    public synchronized void checkForEndpointCollision() throws ConfigurationException
    {
        logger.debug("Starting shadow gossip round to check for endpoint collision");
        if (!MessagingService.instance().isListening())
            MessagingService.instance().listen();
        Gossiper.instance.doShadowRound();
        if (!Gossiper.instance.isSafeForBootstrap(FBUtilities.getBroadcastAddress()))
        {
            throw new RuntimeException(String.format("A node with address %s already exists, cancelling join. " +
                                                     "Use cassandra.replace_address if you want to replace this node.",
                                                     FBUtilities.getBroadcastAddress()));
        }
        if (useStrictConsistency && !allowSimultaneousMoves())
        {
            for (Map.Entry<InetAddress, EndpointState> entry : Gossiper.instance.getEndpointStates())
            {
                // ignore local node or empty status
                if (entry.getKey().equals(FBUtilities.getBroadcastAddress()) || entry.getValue().getApplicationState(ApplicationState.STATUS) == null)
                    continue;
                // STATUS values are DELIMITER-separated; the first piece is the state name
                String[] pieces = entry.getValue().getApplicationState(ApplicationState.STATUS).value.split(VersionedValue.DELIMITER_STR, -1);
                assert (pieces.length > 0);
                String state = pieces[0];
                if (state.equals(VersionedValue.STATUS_BOOTSTRAPPING) || state.equals(VersionedValue.STATUS_LEAVING) || state.equals(VersionedValue.STATUS_MOVING))
                    throw new UnsupportedOperationException("Other bootstrapping/leaving/moving nodes detected, cannot bootstrap while cassandra.consistent.rangemovement is true");
            }
        }
        Gossiper.instance.resetEndpointStateMap();
    }
    /**
     * @return whether simultaneous ring movements are permitted: requires both
     *         the allowSimultaneousMoves flag and a single token per node
     */
    private boolean allowSimultaneousMoves()
    {
        return allowSimultaneousMoves && DatabaseDescriptor.getNumTokens() == 1;
    }
    // for testing only
    /**
     * Minimal startup for tests: registers with and starts the Gossiper,
     * publishes the messaging net version, and begins listening — skipping all
     * of the join/bootstrap machinery performed by initServer().
     */
    public void unsafeInitialize() throws ConfigurationException
    {
        initialized = true;
        Gossiper.instance.register(this);
        Gossiper.instance.start((int) (System.currentTimeMillis() / 1000)); // needed for node-ring gathering.
        Gossiper.instance.addLocalApplicationState(ApplicationState.NET_VERSION, valueFactory.networkVersion());
        if (!MessagingService.instance().isListening())
            MessagingService.instance().listen();
    }
public void populateTokenMetadata()
{
if (Boolean.parseBoolean(System.getProperty("cassandra.load_ring_state", "true")))
{
logger.info("Populating token metadata from system tables");
Multimap<InetAddress, Token> loadedTokens = SystemKeyspace.loadTokens();
if (!shouldBootstrap()) // if we have not completed bootstrapping, we should not add ourselves as a normal token
loadedTokens.putAll(FBUtilities.getBroadcastAddress(), SystemKeyspace.getSavedTokens());
for (InetAddress ep : loadedTokens.keySet())
tokenMetadata.updateNormalTokens(loadedTokens.get(ep), ep);
logger.info("Token metadata: {}", tokenMetadata);
}
}
    /** Initializes the server with the default RING_DELAY settle time. */
    public synchronized void initServer() throws ConfigurationException
    {
        initServer(RING_DELAY);
    }
    /**
     * Brings the storage service up: forces initialization of key classes,
     * loads persisted ring state, installs the drain-on-shutdown hook, starts
     * gossip (unless cassandra.start_gossip=false) and joins the token ring
     * (unless cassandra.join_ring=false).
     *
     * @param delay milliseconds to wait for gossip information before
     *              proceeding to bootstrap (see joinTokenRing)
     * @throws ConfigurationException if join preconditions are violated
     */
    public synchronized void initServer(int delay) throws ConfigurationException
    {
        logger.info("Cassandra version: {}", FBUtilities.getReleaseVersionString());
        logger.info("Thrift API version: {}", cassandraConstants.VERSION);
        logger.info("CQL supported versions: {} (default: {})",
                StringUtils.join(ClientState.getCQLSupportedVersion(), ","), ClientState.DEFAULT_CQL_VERSION);
        initialized = true;
        try
        {
            // Ensure StorageProxy is initialized on start-up; see CASSANDRA-3797.
            Class.forName("org.apache.cassandra.service.StorageProxy");
            // also IndexSummaryManager, which is otherwise unreferenced
            Class.forName("org.apache.cassandra.io.sstable.IndexSummaryManager");
        }
        catch (ClassNotFoundException e)
        {
            throw new AssertionError(e);
        }
        if (Boolean.parseBoolean(System.getProperty("cassandra.load_ring_state", "true")))
        {
            logger.info("Loading persisted ring state");
            Multimap<InetAddress, Token> loadedTokens = SystemKeyspace.loadTokens();
            Map<InetAddress, UUID> loadedHostIds = SystemKeyspace.loadHostIds();
            for (InetAddress ep : loadedTokens.keySet())
            {
                if (ep.equals(FBUtilities.getBroadcastAddress()))
                {
                    // entry has been mistakenly added, delete it
                    SystemKeyspace.removeEndpoint(ep);
                }
                else
                {
                    if (loadedHostIds.containsKey(ep))
                        tokenMetadata.updateHostId(loadedHostIds.get(ep), ep);
                    Gossiper.instance.addSavedEndpoint(ep);
                }
            }
        }
        // daemon threads, like our executors', continue to run while shutdown hooks are invoked
        drainOnShutdown = new Thread(new WrappedRunnable()
        {
            @Override
            public void runMayThrow() throws InterruptedException
            {
                inShutdownHook = true;
                ExecutorService viewMutationStage = StageManager.getStage(Stage.VIEW_MUTATION);
                ExecutorService counterMutationStage = StageManager.getStage(Stage.COUNTER_MUTATION);
                ExecutorService mutationStage = StageManager.getStage(Stage.MUTATION);
                if (mutationStage.isShutdown()
                    && counterMutationStage.isShutdown()
                    && viewMutationStage.isShutdown())
                    return; // drained already
                if (daemon != null)
                    shutdownClientServers();
                ScheduledExecutors.optionalTasks.shutdown();
                Gossiper.instance.stop();
                // In-progress writes originating here could generate hints to be written, so shut down MessagingService
                // before mutation stage, so we can get all the hints saved before shutting down
                MessagingService.instance().shutdown();
                viewMutationStage.shutdown();
                BatchlogManager.instance.shutdown();
                HintsService.instance.pauseDispatch();
                counterMutationStage.shutdown();
                mutationStage.shutdown();
                viewMutationStage.awaitTermination(3600, TimeUnit.SECONDS);
                counterMutationStage.awaitTermination(3600, TimeUnit.SECONDS);
                mutationStage.awaitTermination(3600, TimeUnit.SECONDS);
                StorageProxy.instance.verifyNoHintsInProgress();
                // force-flush every table whose keyspace has durable_writes disabled
                List<Future<?>> flushes = new ArrayList<>();
                for (Keyspace keyspace : Keyspace.all())
                {
                    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(keyspace.getName());
                    if (!ksm.params.durableWrites)
                        for (ColumnFamilyStore cfs : keyspace.getColumnFamilyStores())
                            flushes.add(cfs.forceFlush());
                }
                try
                {
                    FBUtilities.waitOnFutures(flushes);
                }
                catch (Throwable t)
                {
                    JVMStabilityInspector.inspectThrowable(t);
                    // don't let this stop us from shutting down the commitlog and other thread pools
                    logger.warn("Caught exception while waiting for memtable flushes during shutdown hook", t);
                }
                CommitLog.instance.shutdownBlocking();
                if (FBUtilities.isWindows())
                    WindowsTimer.endTimerPeriod(DatabaseDescriptor.getWindowsTimerInterval());
                HintsService.instance.shutdownBlocking();
                // wait for miscellaneous tasks like sstable and commitlog segment deletion
                ScheduledExecutors.nonPeriodicTasks.shutdown();
                if (!ScheduledExecutors.nonPeriodicTasks.awaitTermination(1, TimeUnit.MINUTES))
                    logger.warn("Miscellaneous task executor still busy after one minute; proceeding with shutdown");
            }
        }, "StorageServiceShutdownHook");
        Runtime.getRuntime().addShutdownHook(drainOnShutdown);
        replacing = DatabaseDescriptor.isReplacing();
        if (!Boolean.parseBoolean(System.getProperty("cassandra.start_gossip", "true")))
        {
            logger.info("Not starting gossip as requested.");
            return;
        }
        prepareToJoin();
        // Has to be called after the host id has potentially changed in prepareToJoin().
        try
        {
            CacheService.instance.counterCache.loadSavedAsync().get();
        }
        catch (Throwable t)
        {
            JVMStabilityInspector.inspectThrowable(t);
            logger.warn("Error loading counter cache", t);
        }
        if (Boolean.parseBoolean(System.getProperty("cassandra.join_ring", "true")))
        {
            joinTokenRing(delay);
        }
        else
        {
            // not joining: publish saved tokens (if any) in hibernate status so peers know about us
            Collection<Token> tokens = SystemKeyspace.getSavedTokens();
            if (!tokens.isEmpty())
            {
                tokenMetadata.updateNormalTokens(tokens, FBUtilities.getBroadcastAddress());
                // order is important here, the gossiper can fire in between adding these two states. It's ok to send TOKENS without STATUS, but *not* vice versa.
                List<Pair<ApplicationState, VersionedValue>> states = new ArrayList<Pair<ApplicationState, VersionedValue>>();
                states.add(Pair.create(ApplicationState.TOKENS, valueFactory.tokens(tokens)));
                states.add(Pair.create(ApplicationState.STATUS, valueFactory.hibernate(true)));
                Gossiper.instance.addLocalApplicationStates(states);
            }
            logger.info("Not joining ring as requested. Use JMX (StorageService->joinRing()) to initiate ring joining");
        }
    }
/**
* In the event of forceful termination we need to remove the shutdown hook to prevent hanging (OOM for instance)
*/
public void removeShutdownHook()
{
if (drainOnShutdown != null)
Runtime.getRuntime().removeShutdownHook(drainOnShutdown);
if (FBUtilities.isWindows())
WindowsTimer.endTimerPeriod(DatabaseDescriptor.getWindowsTimerInterval());
}
    /**
     * @return true when auto_bootstrap is enabled, bootstrap has not already
     *         completed, and this node is not in its own seed list (seed nodes
     *         skip bootstrap)
     */
    private boolean shouldBootstrap()
    {
        return DatabaseDescriptor.isAutoBootstrap() && !SystemKeyspace.bootstrapComplete() && !DatabaseDescriptor.getSeeds().contains(FBUtilities.getBroadcastAddress());
    }
    /**
     * Performs everything that must happen before joining the ring: validates
     * decommission/replace preconditions, gathers replacement info or checks
     * for endpoint collisions, then starts gossip with the local application
     * states and brings up messaging, load broadcast, hints and the batchlog.
     * No-op once the node has joined.
     *
     * @throws ConfigurationException on invalid decommission/replace settings
     */
    private void prepareToJoin() throws ConfigurationException
    {
        if (!joined)
        {
            Map<ApplicationState, VersionedValue> appStates = new EnumMap<>(ApplicationState.class);
            if (SystemKeyspace.wasDecommissioned())
            {
                if (Boolean.getBoolean("cassandra.override_decommission"))
                {
                    logger.warn("This node was decommissioned, but overriding by operator request.");
                    SystemKeyspace.setBootstrapState(SystemKeyspace.BootstrapState.COMPLETED);
                }
                else
                    throw new ConfigurationException("This node was decommissioned and will not rejoin the ring unless cassandra.override_decommission=true has been set, or all existing data is removed and the node is bootstrapped again");
            }
            if (replacing && !(Boolean.parseBoolean(System.getProperty("cassandra.join_ring", "true"))))
                throw new ConfigurationException("Cannot set both join_ring=false and attempt to replace a node");
            if (DatabaseDescriptor.getReplaceTokens().size() > 0 || DatabaseDescriptor.getReplaceNode() != null)
                throw new RuntimeException("Replace method removed; use cassandra.replace_address instead");
            if (replacing)
            {
                if (SystemKeyspace.bootstrapComplete())
                    throw new RuntimeException("Cannot replace address with a node that is already bootstrapped");
                if (!DatabaseDescriptor.isAutoBootstrap())
                    throw new RuntimeException("Trying to replace_address with auto_bootstrap disabled will not work, check your configuration");
                bootstrapTokens = prepareReplacementInfo();
                // advertise the replacee's tokens while hibernating, so peers route hints but no reads
                appStates.put(ApplicationState.TOKENS, valueFactory.tokens(bootstrapTokens));
                appStates.put(ApplicationState.STATUS, valueFactory.hibernate(true));
            }
            else if (shouldBootstrap())
            {
                checkForEndpointCollision();
            }
            // have to start the gossip service before we can see any info on other nodes.  this is necessary
            // for bootstrap to get the load info it needs.
            // (we won't be part of the storage ring though until we add a counterId to our state, below.)
            // Seed the host ID-to-endpoint map with our own ID.
            UUID localHostId = SystemKeyspace.getLocalHostId();
            getTokenMetadata().updateHostId(localHostId, FBUtilities.getBroadcastAddress());
            appStates.put(ApplicationState.NET_VERSION, valueFactory.networkVersion());
            appStates.put(ApplicationState.HOST_ID, valueFactory.hostId(localHostId));
            appStates.put(ApplicationState.RPC_ADDRESS, valueFactory.rpcaddress(DatabaseDescriptor.getBroadcastRpcAddress()));
            appStates.put(ApplicationState.RELEASE_VERSION, valueFactory.releaseVersion());
            logger.info("Starting up server gossip");
            Gossiper.instance.register(this);
            Gossiper.instance.start(SystemKeyspace.incrementAndGetGeneration(), appStates); // needed for node-ring gathering.
            // gossip snitch infos (local DC and rack)
            gossipSnitchInfo();
            // gossip Schema.emptyVersion forcing immediate check for schema updates (see MigrationManager#maybeScheduleSchemaPull)
            Schema.instance.updateVersionAndAnnounce(); // Ensure we know our own actual Schema UUID in preparation for updates
            if (!MessagingService.instance().isListening())
                MessagingService.instance().listen();
            LoadBroadcaster.instance.startBroadcasting();
            HintsService.instance.startDispatch();
            BatchlogManager.instance.start();
        }
    }
    /**
     * Joins the token ring: waits for gossip/schema, bootstraps (streaming
     * data from peers) or picks up saved/configured tokens, creates the
     * distributed system keyspaces, and — unless in write-survey mode —
     * becomes an active ring member.
     *
     * @param delay milliseconds to wait for ring information before
     *              bootstrapping
     * @throws ConfigurationException on token-count mismatches and similar
     *         misconfiguration
     */
    private void joinTokenRing(int delay) throws ConfigurationException
    {
        joined = true;
        // We bootstrap if we haven't successfully bootstrapped before, as long as we are not a seed.
        // If we are a seed, or if the user manually sets auto_bootstrap to false,
        // we'll skip streaming data from other nodes and jump directly into the ring.
        //
        // The seed check allows us to skip the RING_DELAY sleep for the single-node cluster case,
        // which is useful for both new users and testing.
        //
        // We attempted to replace this with a schema-presence check, but you need a meaningful sleep
        // to get schema info from gossip which defeats the purpose.  See CASSANDRA-4427 for the gory details.
        Set<InetAddress> current = new HashSet<>();
        if (logger.isDebugEnabled())
        {
            logger.debug("Bootstrap variables: {} {} {} {}",
                         DatabaseDescriptor.isAutoBootstrap(),
                         SystemKeyspace.bootstrapInProgress(),
                         SystemKeyspace.bootstrapComplete(),
                         DatabaseDescriptor.getSeeds().contains(FBUtilities.getBroadcastAddress()));
        }
        if (DatabaseDescriptor.isAutoBootstrap() && !SystemKeyspace.bootstrapComplete() && DatabaseDescriptor.getSeeds().contains(FBUtilities.getBroadcastAddress()))
        {
            logger.info("This node will not auto bootstrap because it is configured to be a seed node.");
        }
        boolean dataAvailable = true; // make this to false when bootstrap streaming failed
        if (shouldBootstrap())
        {
            if (SystemKeyspace.bootstrapInProgress())
                logger.warn("Detected previous bootstrap failure; retrying");
            else
                SystemKeyspace.setBootstrapState(SystemKeyspace.BootstrapState.IN_PROGRESS);
            setMode(Mode.JOINING, "waiting for ring information", true);
            // first sleep the delay to make sure we see all our peers
            for (int i = 0; i < delay; i += 1000)
            {
                // if we see schema, we can proceed to the next check directly
                if (!Schema.instance.getVersion().equals(Schema.emptyVersion))
                {
                    logger.debug("got schema: {}", Schema.instance.getVersion());
                    break;
                }
                Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
            }
            // if our schema hasn't matched yet, wait until it has
            // we do this by waiting for all in-flight migration requests and responses to complete
            // (post CASSANDRA-1391 we don't expect this to be necessary very often, but it doesn't hurt to be careful)
            if (!MigrationManager.isReadyForBootstrap())
            {
                setMode(Mode.JOINING, "waiting for schema information to complete", true);
                MigrationManager.waitUntilReadyForBootstrap();
            }
            setMode(Mode.JOINING, "schema complete, ready to bootstrap", true);
            setMode(Mode.JOINING, "waiting for pending range calculation", true);
            PendingRangeCalculatorService.instance.blockUntilFinished();
            setMode(Mode.JOINING, "calculation complete, ready to bootstrap", true);
            logger.debug("... got ring + schema info");
            if (useStrictConsistency &&
                    (
                        tokenMetadata.getBootstrapTokens().valueSet().size() > 0 ||
                        tokenMetadata.getLeavingEndpoints().size() > 0 ||
                        tokenMetadata.getMovingEndpoints().size() > 0
                    ))
            {
                throw new UnsupportedOperationException("Other bootstrapping/leaving/moving nodes detected, cannot bootstrap while cassandra.consistent.rangemovement is true");
            }
            // get bootstrap tokens
            if (!replacing)
            {
                if (tokenMetadata.isMember(FBUtilities.getBroadcastAddress()))
                {
                    String s = "This node is already a member of the token ring; bootstrap aborted. (If replacing a dead node, remove the old one from the ring first.)";
                    throw new UnsupportedOperationException(s);
                }
                setMode(Mode.JOINING, "getting bootstrap token", true);
                bootstrapTokens = BootStrapper.getBootstrapTokens(tokenMetadata, FBUtilities.getBroadcastAddress());
            }
            else
            {
                if (!DatabaseDescriptor.getReplaceAddress().equals(FBUtilities.getBroadcastAddress()))
                {
                    try
                    {
                        // Sleep additionally to make sure that the server actually is not alive
                        // and giving it more time to gossip if alive.
                        Thread.sleep(LoadBroadcaster.BROADCAST_INTERVAL);
                    }
                    catch (InterruptedException e)
                    {
                        throw new AssertionError(e);
                    }
                    // check for operator errors...
                    for (Token token : bootstrapTokens)
                    {
                        InetAddress existing = tokenMetadata.getEndpoint(token);
                        if (existing != null)
                        {
                            // the replaced node must have stopped gossiping for at least `delay` ms
                            long nanoDelay = delay * 1000000L;
                            if (Gossiper.instance.getEndpointStateForEndpoint(existing).getUpdateTimestamp() > (System.nanoTime() - nanoDelay))
                                throw new UnsupportedOperationException("Cannot replace a live node... ");
                            current.add(existing);
                        }
                        else
                        {
                            throw new UnsupportedOperationException("Cannot replace token " + token + " which does not exist!");
                        }
                    }
                }
                else
                {
                    try
                    {
                        Thread.sleep(RING_DELAY);
                    }
                    catch (InterruptedException e)
                    {
                        throw new AssertionError(e);
                    }
                }
                setMode(Mode.JOINING, "Replacing a node with token(s): " + bootstrapTokens, true);
            }
            dataAvailable = bootstrap(bootstrapTokens);
        }
        else
        {
            // not bootstrapping: use saved tokens, configured initial tokens, or random ones (in that order)
            bootstrapTokens = SystemKeyspace.getSavedTokens();
            if (bootstrapTokens.isEmpty())
            {
                Collection<String> initialTokens = DatabaseDescriptor.getInitialTokens();
                if (initialTokens.size() < 1)
                {
                    bootstrapTokens = BootStrapper.getRandomTokens(tokenMetadata, DatabaseDescriptor.getNumTokens());
                    if (DatabaseDescriptor.getNumTokens() == 1)
                        logger.warn("Generated random token {}. Random tokens will result in an unbalanced ring; see http://wiki.apache.org/cassandra/Operations", bootstrapTokens);
                    else
                        logger.info("Generated random tokens. tokens are {}", bootstrapTokens);
                }
                else
                {
                    bootstrapTokens = new ArrayList<>(initialTokens.size());
                    for (String token : initialTokens)
                        bootstrapTokens.add(getTokenFactory().fromString(token));
                    logger.info("Saved tokens not found. Using configuration value: {}", bootstrapTokens);
                }
            }
            else
            {
                if (bootstrapTokens.size() != DatabaseDescriptor.getNumTokens())
                    throw new ConfigurationException("Cannot change the number of tokens from " + bootstrapTokens.size() + " to " + DatabaseDescriptor.getNumTokens());
                else
                    logger.info("Using saved tokens {}", bootstrapTokens);
            }
        }
        // if we don't have system_traces keyspace at this point, then create it manually
        maybeAddOrUpdateKeyspace(TraceKeyspace.metadata());
        maybeAddOrUpdateKeyspace(SystemDistributedKeyspace.metadata());
        if (!isSurveyMode)
        {
            if (dataAvailable)
            {
                // start participating in the ring.
                SystemKeyspace.setBootstrapState(SystemKeyspace.BootstrapState.COMPLETED);
                setTokens(bootstrapTokens);
                // remove the existing info about the replaced node.
                if (!current.isEmpty())
                {
                    for (InetAddress existing : current)
                        Gossiper.instance.replacedEndpoint(existing);
                }
                assert tokenMetadata.sortedTokens().size() > 0;
                doAuthSetup();
            }
            else
            {
                logger.warn("Some data streaming failed. Use nodetool to check bootstrap state and resume. For more, see `nodetool help bootstrap`. {}", SystemKeyspace.getBootstrapState());
            }
        }
        else
        {
            logger.info("Startup complete, but write survey mode is active, not becoming an active ring member. Use JMX (StorageService->joinRing()) to finalize ring joining.");
        }
    }
public void gossipSnitchInfo()
{
IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
String dc = snitch.getDatacenter(FBUtilities.getBroadcastAddress());
String rack = snitch.getRack(FBUtilities.getBroadcastAddress());
Gossiper.instance.addLocalApplicationState(ApplicationState.DC, StorageService.instance.valueFactory.datacenter(dc));
Gossiper.instance.addLocalApplicationState(ApplicationState.RACK, StorageService.instance.valueFactory.rack(rack));
}
    /**
     * Operator-requested ring join: either performs the deferred token-ring
     * join (when started with join_ring=false) or finalizes write-survey mode
     * by adopting the saved tokens and completing bootstrap.
     *
     * @throws IOException wrapping any ConfigurationException from the join
     */
    public synchronized void joinRing() throws IOException
    {
        if (!joined)
        {
            logger.info("Joining ring by operator request");
            try
            {
                // no additional delay: gossip has been running since startup
                joinTokenRing(0);
            }
            catch (ConfigurationException e)
            {
                throw new IOException(e.getMessage());
            }
        }
        else if (isSurveyMode)
        {
            setTokens(SystemKeyspace.getSavedTokens());
            SystemKeyspace.setBootstrapState(SystemKeyspace.BootstrapState.COMPLETED);
            isSurveyMode = false;
            logger.info("Leaving write survey mode and joining ring at operator request");
            assert tokenMetadata.sortedTokens().size() > 0;
            doAuthSetup();
        }
    }
    /**
     * Sets up authentication/authorization: ensures the auth keyspace exists,
     * initializes the role manager, authenticator and authorizer, and
     * registers a listener for auth-related schema migrations.
     */
    private void doAuthSetup()
    {
        maybeAddOrUpdateKeyspace(AuthKeyspace.metadata());
        DatabaseDescriptor.getRoleManager().setup();
        DatabaseDescriptor.getAuthenticator().setup();
        DatabaseDescriptor.getAuthorizer().setup();
        MigrationManager.instance.register(new AuthMigrationListener());
    }
    /**
     * Announces a new keyspace to the cluster, tolerating the race where
     * another node created it first (AlreadyExistsException is logged and
     * swallowed deliberately).
     */
    private void maybeAddKeyspace(KeyspaceMetadata ksm)
    {
        try
        {
            // timestamp 0 so any user change to the keyspace wins over this announcement
            MigrationManager.announceNewKeyspace(ksm, 0, false);
        }
        catch (AlreadyExistsException e)
        {
            logger.debug("Attempted to create new keyspace {}, but it already exists", ksm.name);
        }
    }
    /**
     * Ensure the schema of a pseudo-system keyspace (a distributed system keyspace: traces, auth and the so-called distributedKeyspace),
     * is up to date with what we expected (creating it if it doesn't exist and updating tables that may have been upgraded).
     */
    private void maybeAddOrUpdateKeyspace(KeyspaceMetadata expected)
    {
        // Note that want to deal with the keyspace and its table a bit differently: for the keyspace definition
        // itself, we want to create it if it doesn't exist yet, but if it does exist, we don't want to modify it,
        // because user can modify the definition to change the replication factor (#6016) and we don't want to
        // override it. For the tables however, we have to deal with the fact that new version can add new columns
        // (#8162 being an example), so even if the table definition exists, we still need to force the "current"
        // version of the schema, the one the node will be expecting.
        KeyspaceMetadata defined = Schema.instance.getKSMetaData(expected.name);
        // If the keyspace doesn't exist, create it
        if (defined == null)
        {
            maybeAddKeyspace(expected);
            // NOTE(review): if the announce failed for a reason other than AlreadyExists,
            // `defined` could still be null here and the loop below would NPE — confirm
            // whether that path is possible in practice.
            defined = Schema.instance.getKSMetaData(expected.name);
        }
        // While the keyspace exists, it might miss table or have outdated one
        // There is also the potential for a race, as schema migrations add the bare
        // keyspace into Schema.instance before adding its tables, so double check that
        // all the expected tables are present
        for (CFMetaData expectedTable : expected.tables)
        {
            CFMetaData definedTable = defined.tables.get(expectedTable.cfName).orElse(null);
            if (definedTable == null || !definedTable.equals(expectedTable))
                MigrationManager.forceAnnounceNewColumnFamily(expectedTable);
        }
    }
    /** @return whether this node is currently a member of the token ring */
    public boolean isJoined()
    {
        return tokenMetadata.isMember(FBUtilities.getBroadcastAddress());
    }
    /** Rebuilds from the given datacenter (or any DC when null), all keyspaces, all tokens. */
    public void rebuild(String sourceDc)
    {
        rebuild(sourceDc, null, null);
    }
    /**
     * Re-streams data for this node's ranges from other replicas. Used
     * operationally (nodetool rebuild) e.g. after adding a datacenter.
     * Only one rebuild may run at a time.
     *
     * @param sourceDc restrict streaming sources to this datacenter; null for any
     * @param keyspace keyspace to rebuild; null for all non-local-strategy keyspaces
     * @param tokens   token ranges to rebuild, formatted as "(start,end]" pairs;
     *                 null for all local ranges; requires a keyspace
     * @throws IllegalStateException if a rebuild is already in progress
     * @throws IllegalArgumentException if tokens are given without a keyspace,
     *         or the token string cannot be parsed
     */
    public void rebuild(String sourceDc, String keyspace, String tokens)
    {
        // check on going rebuild
        if (!isRebuilding.compareAndSet(false, true))
        {
            throw new IllegalStateException("Node is still rebuilding. Check nodetool netstats.");
        }
        // check the arguments
        if (keyspace == null && tokens != null)
        {
            throw new IllegalArgumentException("Cannot specify tokens without keyspace.");
        }
        logger.info("rebuild from dc: {}, {}, {}", sourceDc == null ? "(any dc)" : sourceDc,
                    keyspace == null ? "(All keyspaces)" : keyspace,
                    tokens == null ? "(All tokens)" : tokens);
        try
        {
            RangeStreamer streamer = new RangeStreamer(tokenMetadata,
                                                       null,
                                                       FBUtilities.getBroadcastAddress(),
                                                       "Rebuild",
                                                       !replacing && useStrictConsistency,
                                                       DatabaseDescriptor.getEndpointSnitch(),
                                                       streamStateStore,
                                                       false);
            // never stream from nodes the failure detector considers down
            streamer.addSourceFilter(new RangeStreamer.FailureDetectorSourceFilter(FailureDetector.instance));
            if (sourceDc != null)
                streamer.addSourceFilter(new RangeStreamer.SingleDatacenterFilter(DatabaseDescriptor.getEndpointSnitch(), sourceDc));
            if (keyspace == null)
            {
                for (String keyspaceName : Schema.instance.getNonLocalStrategyKeyspaces())
                    streamer.addRanges(keyspaceName, getLocalRanges(keyspaceName));
            }
            else if (tokens == null)
            {
                streamer.addRanges(keyspace, getLocalRanges(keyspace));
            }
            else
            {
                // parse explicit "(start,end]" range specifications from the tokens string
                Token.TokenFactory factory = getTokenFactory();
                List<Range<Token>> ranges = new ArrayList<>();
                Pattern rangePattern = Pattern.compile("\\(\\s*(-?\\w+)\\s*,\\s*(-?\\w+)\\s*\\]");
                try (Scanner tokenScanner = new Scanner(tokens))
                {
                    while (tokenScanner.findInLine(rangePattern) != null)
                    {
                        MatchResult range = tokenScanner.match();
                        Token startToken = factory.fromString(range.group(1));
                        Token endToken = factory.fromString(range.group(2));
                        logger.info(String.format("adding range: (%s,%s]", startToken, endToken));
                        ranges.add(new Range<>(startToken, endToken));
                    }
                    if (tokenScanner.hasNext())
                        throw new IllegalArgumentException("Unexpected string: " + tokenScanner.next());
                }
                streamer.addRanges(keyspace, ranges);
            }
            StreamResultFuture resultFuture = streamer.fetchAsync();
            // wait for result
            resultFuture.get();
        }
        catch (InterruptedException e)
        {
            throw new RuntimeException("Interrupted while waiting on rebuild streaming");
        }
        catch (ExecutionException e)
        {
            // This is used exclusively through JMX, so log the full trace but only throw a simple RTE
            logger.error("Error while rebuilding node", e.getCause());
            throw new RuntimeException("Error while rebuilding node: " + e.getCause().getMessage());
        }
        finally
        {
            // rebuild is done (successfully or not)
            isRebuilding.set(false);
        }
    }
    // ------------------------------------------------------------------
    // JMX-exposed timeout accessors. All values are in milliseconds and
    // delegate directly to DatabaseDescriptor; setters log the new value.
    // ------------------------------------------------------------------
    public void setRpcTimeout(long value)
    {
        DatabaseDescriptor.setRpcTimeout(value);
        logger.info("set rpc timeout to {} ms", value);
    }
    public long getRpcTimeout()
    {
        return DatabaseDescriptor.getRpcTimeout();
    }
    public void setReadRpcTimeout(long value)
    {
        DatabaseDescriptor.setReadRpcTimeout(value);
        logger.info("set read rpc timeout to {} ms", value);
    }
    public long getReadRpcTimeout()
    {
        return DatabaseDescriptor.getReadRpcTimeout();
    }
    public void setRangeRpcTimeout(long value)
    {
        DatabaseDescriptor.setRangeRpcTimeout(value);
        logger.info("set range rpc timeout to {} ms", value);
    }
    public long getRangeRpcTimeout()
    {
        return DatabaseDescriptor.getRangeRpcTimeout();
    }
    public void setWriteRpcTimeout(long value)
    {
        DatabaseDescriptor.setWriteRpcTimeout(value);
        logger.info("set write rpc timeout to {} ms", value);
    }
    public long getWriteRpcTimeout()
    {
        return DatabaseDescriptor.getWriteRpcTimeout();
    }
    public void setCounterWriteRpcTimeout(long value)
    {
        DatabaseDescriptor.setCounterWriteRpcTimeout(value);
        logger.info("set counter write rpc timeout to {} ms", value);
    }
    public long getCounterWriteRpcTimeout()
    {
        return DatabaseDescriptor.getCounterWriteRpcTimeout();
    }
    public void setCasContentionTimeout(long value)
    {
        DatabaseDescriptor.setCasContentionTimeout(value);
        logger.info("set cas contention rpc timeout to {} ms", value);
    }
    public long getCasContentionTimeout()
    {
        return DatabaseDescriptor.getCasContentionTimeout();
    }
    public void setTruncateRpcTimeout(long value)
    {
        DatabaseDescriptor.setTruncateRpcTimeout(value);
        logger.info("set truncate rpc timeout to {} ms", value);
    }
    public long getTruncateRpcTimeout()
    {
        return DatabaseDescriptor.getTruncateRpcTimeout();
    }
    public void setStreamingSocketTimeout(int value)
    {
        DatabaseDescriptor.setStreamingSocketTimeout(value);
        logger.info("set streaming socket timeout to {} ms", value);
    }
    public int getStreamingSocketTimeout()
    {
        return DatabaseDescriptor.getStreamingSocketTimeout();
    }
    // ------------------------------------------------------------------
    // JMX-exposed throughput throttles (outbound megabits/sec for streaming,
    // MB/sec for compaction) and the incremental-backups flag; all delegate
    // to DatabaseDescriptor.
    // ------------------------------------------------------------------
    public void setStreamThroughputMbPerSec(int value)
    {
        DatabaseDescriptor.setStreamThroughputOutboundMegabitsPerSec(value);
        logger.info("setstreamthroughput: throttle set to {}", value);
    }
    public int getStreamThroughputMbPerSec()
    {
        return DatabaseDescriptor.getStreamThroughputOutboundMegabitsPerSec();
    }
    public void setInterDCStreamThroughputMbPerSec(int value)
    {
        DatabaseDescriptor.setInterDCStreamThroughputOutboundMegabitsPerSec(value);
        logger.info("setinterdcstreamthroughput: throttle set to {}", value);
    }
    public int getInterDCStreamThroughputMbPerSec()
    {
        return DatabaseDescriptor.getInterDCStreamThroughputOutboundMegabitsPerSec();
    }
    public int getCompactionThroughputMbPerSec()
    {
        return DatabaseDescriptor.getCompactionThroughputMbPerSec();
    }
    public void setCompactionThroughputMbPerSec(int value)
    {
        DatabaseDescriptor.setCompactionThroughputMbPerSec(value);
        // propagate the new rate to the running compaction manager immediately
        CompactionManager.instance.setRate(value);
    }
    public boolean isIncrementalBackupsEnabled()
    {
        return DatabaseDescriptor.isIncrementalBackupsEnabled();
    }
    public void setIncrementalBackupsEnabled(boolean value)
    {
        DatabaseDescriptor.setIncrementalBackupsEnabled(value);
    }
private void setMode(Mode m, boolean log)
{
setMode(m, null, log);
}
private void setMode(Mode m, String msg, boolean log)
{
operationMode = m;
String logMsg = msg == null ? m.toString() : String.format("%s: %s", m, msg);
if (log)
logger.info(logMsg);
else
logger.debug(logMsg);
}
/**
* Bootstrap node by fetching data from other nodes.
* If node is bootstrapping as a new node, then this also announces bootstrapping to the cluster.
*
* This blocks until streaming is done.
*
* @param tokens bootstrapping tokens
* @return true if bootstrap succeeds.
*/
private boolean bootstrap(final Collection<Token> tokens)
{
isBootstrapMode = true;
SystemKeyspace.updateTokens(tokens); // DON'T use setToken, that makes us part of the ring locally which is incorrect until we are done bootstrapping
if (!replacing)
{
// if not an existing token then bootstrap
List<Pair<ApplicationState, VersionedValue>> states = new ArrayList<>();
states.add(Pair.create(ApplicationState.TOKENS, valueFactory.tokens(tokens)));
states.add(Pair.create(ApplicationState.STATUS, valueFactory.bootstrapping(tokens)));
Gossiper.instance.addLocalApplicationStates(states);
setMode(Mode.JOINING, "sleeping " + RING_DELAY + " ms for pending range setup", true);
Uninterruptibles.sleepUninterruptibly(RING_DELAY, TimeUnit.MILLISECONDS);
}
else
{
// Dont set any state for the node which is bootstrapping the existing token...
tokenMetadata.updateNormalTokens(tokens, FBUtilities.getBroadcastAddress());
SystemKeyspace.removeEndpoint(DatabaseDescriptor.getReplaceAddress());
}
if (!Gossiper.instance.seenAnySeed())
throw new IllegalStateException("Unable to contact any seeds!");
if (Boolean.getBoolean("cassandra.reset_bootstrap_progress"))
{
logger.info("Resetting bootstrap progress to start fresh");
SystemKeyspace.resetAvailableRanges();
}
setMode(Mode.JOINING, "Starting to bootstrap...", true);
BootStrapper bootstrapper = new BootStrapper(FBUtilities.getBroadcastAddress(), tokens, tokenMetadata);
bootstrapper.addProgressListener(progressSupport);
ListenableFuture<StreamState> bootstrapStream = bootstrapper.bootstrap(streamStateStore, !replacing && useStrictConsistency); // handles token update
Futures.addCallback(bootstrapStream, new FutureCallback<StreamState>()
{
@Override
public void onSuccess(StreamState streamState)
{
isBootstrapMode = false;
logger.info("Bootstrap completed! for the tokens {}", tokens);
}
@Override
public void onFailure(Throwable e)
{
logger.warn("Error during bootstrap: " + e.getCause().getMessage(), e.getCause());
}
});
try
{
bootstrapStream.get();
return true;
}
catch (Throwable e)
{
logger.error("Error while waiting on bootstrap to complete. Bootstrap will have to be restarted.", e);
return false;
}
}
public boolean resumeBootstrap()
{
if (isBootstrapMode && SystemKeyspace.bootstrapInProgress())
{
logger.info("Resuming bootstrap...");
// get bootstrap tokens saved in system keyspace
final Collection<Token> tokens = SystemKeyspace.getSavedTokens();
// already bootstrapped ranges are filtered during bootstrap
BootStrapper bootstrapper = new BootStrapper(FBUtilities.getBroadcastAddress(), tokens, tokenMetadata);
bootstrapper.addProgressListener(progressSupport);
ListenableFuture<StreamState> bootstrapStream = bootstrapper.bootstrap(streamStateStore, !replacing && useStrictConsistency); // handles token update
Futures.addCallback(bootstrapStream, new FutureCallback<StreamState>()
{
@Override
public void onSuccess(StreamState streamState)
{
isBootstrapMode = false;
// start participating in the ring.
// pretend we are in survey mode so we can use joinRing() here
isSurveyMode = true;
try
{
progressSupport.progress("bootstrap", ProgressEvent.createNotification("Joining ring..."));
joinRing();
}
catch (IOException ignore)
{
// joinRing with survey mode does not throw IOException
}
progressSupport.progress("bootstrap", new ProgressEvent(ProgressEventType.COMPLETE, 1, 1, "Resume bootstrap complete"));
logger.info("Resume complete");
}
@Override
public void onFailure(Throwable e)
{
String message = "Error during bootstrap: " + e.getCause().getMessage();
logger.error(message, e.getCause());
progressSupport.progress("bootstrap", new ProgressEvent(ProgressEventType.ERROR, 1, 1, message));
progressSupport.progress("bootstrap", new ProgressEvent(ProgressEventType.COMPLETE, 1, 1, "Resume bootstrap complete"));
}
});
return true;
}
else
{
logger.info("Resuming bootstrap is requested, but the node is already bootstrapped.");
return false;
}
}
    /** @return true while this node is still streaming its bootstrap data */
    public boolean isBootstrapMode()
    {
        return isBootstrapMode;
    }

    /** @return this node's live view of the token ring */
    public TokenMetadata getTokenMetadata()
    {
        return tokenMetadata;
    }
    /**
     * Increment about the known Compaction severity of the events in this node
     *
     * @param incr amount to add to the tracked compaction severity
     */
    public void reportSeverity(double incr)
    {
        bgMonitor.incrCompactionSeverity(incr);
    }

    /** Adds to the manually-reported severity tracked by the background monitor. */
    public void reportManualSeverity(double incr)
    {
        bgMonitor.incrManualSeverity(incr);
    }

    /** @return the severity score the background monitor tracks for {@code endpoint} */
    public double getSeverity(InetAddress endpoint)
    {
        return bgMonitor.getSeverity(endpoint);
    }
/**
* for a keyspace, return the ranges and corresponding listen addresses.
* @param keyspace
* @return the endpoint map
*/
public Map<List<String>, List<String>> getRangeToEndpointMap(String keyspace)
{
/* All the ranges for the tokens */
Map<List<String>, List<String>> map = new HashMap<>();
for (Map.Entry<Range<Token>,List<InetAddress>> entry : getRangeToAddressMap(keyspace).entrySet())
{
map.put(entry.getKey().asList(), stringify(entry.getValue()));
}
return map;
}
/**
* Return the rpc address associated with an endpoint as a string.
* @param endpoint The endpoint to get rpc address for
* @return the rpc address
*/
public String getRpcaddress(InetAddress endpoint)
{
if (endpoint.equals(FBUtilities.getBroadcastAddress()))
return DatabaseDescriptor.getBroadcastRpcAddress().getHostAddress();
else if (Gossiper.instance.getEndpointStateForEndpoint(endpoint).getApplicationState(ApplicationState.RPC_ADDRESS) == null)
return endpoint.getHostAddress();
else
return Gossiper.instance.getEndpointStateForEndpoint(endpoint).getApplicationState(ApplicationState.RPC_ADDRESS).value;
}
/**
* for a keyspace, return the ranges and corresponding RPC addresses for a given keyspace.
* @param keyspace
* @return the endpoint map
*/
public Map<List<String>, List<String>> getRangeToRpcaddressMap(String keyspace)
{
/* All the ranges for the tokens */
Map<List<String>, List<String>> map = new HashMap<>();
for (Map.Entry<Range<Token>, List<InetAddress>> entry : getRangeToAddressMap(keyspace).entrySet())
{
List<String> rpcaddrs = new ArrayList<>(entry.getValue().size());
for (InetAddress endpoint: entry.getValue())
{
rpcaddrs.add(getRpcaddress(endpoint));
}
map.put(entry.getKey().asList(), rpcaddrs);
}
return map;
}
public Map<List<String>, List<String>> getPendingRangeToEndpointMap(String keyspace)
{
// some people just want to get a visual representation of things. Allow null and set it to the first
// non-system keyspace.
if (keyspace == null)
keyspace = Schema.instance.getNonLocalStrategyKeyspaces().get(0);
Map<List<String>, List<String>> map = new HashMap<>();
for (Map.Entry<Range<Token>, Collection<InetAddress>> entry : tokenMetadata.getPendingRangesMM(keyspace).asMap().entrySet())
{
List<InetAddress> l = new ArrayList<>(entry.getValue());
map.put(entry.getKey().asList(), stringify(l));
}
return map;
}
    /** Range-to-replica map for {@code keyspace} over the full sorted token ring. */
    public Map<Range<Token>, List<InetAddress>> getRangeToAddressMap(String keyspace)
    {
        return getRangeToAddressMap(keyspace, tokenMetadata.sortedTokens());
    }
public Map<Range<Token>, List<InetAddress>> getRangeToAddressMapInLocalDC(String keyspace)
{
Predicate<InetAddress> isLocalDC = new Predicate<InetAddress>()
{
public boolean apply(InetAddress address)
{
return isLocalDC(address);
}
};
Map<Range<Token>, List<InetAddress>> origMap = getRangeToAddressMap(keyspace, getTokensInLocalDC());
Map<Range<Token>, List<InetAddress>> filteredMap = Maps.newHashMap();
for (Map.Entry<Range<Token>, List<InetAddress>> entry : origMap.entrySet())
{
List<InetAddress> endpointsInLocalDC = Lists.newArrayList(Collections2.filter(entry.getValue(), isLocalDC));
filteredMap.put(entry.getKey(), endpointsInLocalDC);
}
return filteredMap;
}
private List<Token> getTokensInLocalDC()
{
List<Token> filteredTokens = Lists.newArrayList();
for (Token token : tokenMetadata.sortedTokens())
{
InetAddress endpoint = tokenMetadata.getEndpoint(token);
if (isLocalDC(endpoint))
filteredTokens.add(token);
}
return filteredTokens;
}
private boolean isLocalDC(InetAddress targetHost)
{
String remoteDC = DatabaseDescriptor.getEndpointSnitch().getDatacenter(targetHost);
String localDC = DatabaseDescriptor.getEndpointSnitch().getDatacenter(FBUtilities.getBroadcastAddress());
return remoteDC.equals(localDC);
}
    /**
     * Builds the range-to-replica map for {@code keyspace} over the ranges
     * derived from {@code sortedTokens}.
     */
    private Map<Range<Token>, List<InetAddress>> getRangeToAddressMap(String keyspace, List<Token> sortedTokens)
    {
        // some people just want to get a visual representation of things. Allow null and set it to the first
        // non-system keyspace.
        if (keyspace == null)
            keyspace = Schema.instance.getNonLocalStrategyKeyspaces().get(0);
        List<Range<Token>> ranges = getAllRanges(sortedTokens);
        return constructRangeToEndpointMap(keyspace, ranges);
    }
/**
* The same as {@code describeRing(String)} but converts TokenRange to the String for JMX compatibility
*
* @param keyspace The keyspace to fetch information about
*
* @return a List of TokenRange(s) converted to String for the given keyspace
*/
public List<String> describeRingJMX(String keyspace) throws IOException
{
List<TokenRange> tokenRanges;
try
{
tokenRanges = describeRing(keyspace);
}
catch (InvalidRequestException e)
{
throw new IOException(e.getMessage());
}
List<String> result = new ArrayList<>(tokenRanges.size());
for (TokenRange tokenRange : tokenRanges)
result.add(tokenRange.toString());
return result;
}
    /**
     * The TokenRange for a given keyspace.
     *
     * @param keyspace The keyspace to fetch information about
     *
     * @return a List of TokenRange(s) for the given keyspace
     *
     * @throws InvalidRequestException if there is no ring information available about keyspace
     */
    public List<TokenRange> describeRing(String keyspace) throws InvalidRequestException
    {
        return describeRing(keyspace, false);
    }

    /**
     * The same as {@code describeRing(String)} but considers only the part of the ring formed by nodes in the local DC.
     *
     * @param keyspace The keyspace to fetch information about
     * @throws InvalidRequestException if there is no ring information available about keyspace
     */
    public List<TokenRange> describeLocalRing(String keyspace) throws InvalidRequestException
    {
        return describeRing(keyspace, true);
    }
private List<TokenRange> describeRing(String keyspace, boolean includeOnlyLocalDC) throws InvalidRequestException
{
if (!Schema.instance.getKeyspaces().contains(keyspace))
throw new InvalidRequestException("No such keyspace: " + keyspace);
if (keyspace == null || Keyspace.open(keyspace).getReplicationStrategy() instanceof LocalStrategy)
throw new InvalidRequestException("There is no ring for the keyspace: " + keyspace);
List<TokenRange> ranges = new ArrayList<>();
Token.TokenFactory tf = getTokenFactory();
Map<Range<Token>, List<InetAddress>> rangeToAddressMap =
includeOnlyLocalDC
? getRangeToAddressMapInLocalDC(keyspace)
: getRangeToAddressMap(keyspace);
for (Map.Entry<Range<Token>, List<InetAddress>> entry : rangeToAddressMap.entrySet())
{
Range<Token> range = entry.getKey();
List<InetAddress> addresses = entry.getValue();
List<String> endpoints = new ArrayList<>(addresses.size());
List<String> rpc_endpoints = new ArrayList<>(addresses.size());
List<EndpointDetails> epDetails = new ArrayList<>(addresses.size());
for (InetAddress endpoint : addresses)
{
EndpointDetails details = new EndpointDetails();
details.host = endpoint.getHostAddress();
details.datacenter = DatabaseDescriptor.getEndpointSnitch().getDatacenter(endpoint);
details.rack = DatabaseDescriptor.getEndpointSnitch().getRack(endpoint);
endpoints.add(details.host);
rpc_endpoints.add(getRpcaddress(endpoint));
epDetails.add(details);
}
TokenRange tr = new TokenRange(tf.toString(range.left.getToken()), tf.toString(range.right.getToken()), endpoints)
.setEndpoint_details(epDetails)
.setRpc_endpoints(rpc_endpoints);
ranges.add(tr);
}
return ranges;
}
public Map<String, String> getTokenToEndpointMap()
{
Map<Token, InetAddress> mapInetAddress = tokenMetadata.getNormalAndBootstrappingTokenToEndpointMap();
// in order to preserve tokens in ascending order, we use LinkedHashMap here
Map<String, String> mapString = new LinkedHashMap<>(mapInetAddress.size());
List<Token> tokens = new ArrayList<>(mapInetAddress.keySet());
Collections.sort(tokens);
for (Token token : tokens)
{
mapString.put(token.toString(), mapInetAddress.get(token).getHostAddress());
}
return mapString;
}
    /** @return this node's host id rendered as a string */
    public String getLocalHostId()
    {
        return getTokenMetadata().getHostId(FBUtilities.getBroadcastAddress()).toString();
    }

    /** @return this node's host id */
    public UUID getLocalHostUUID()
    {
        return getTokenMetadata().getHostId(FBUtilities.getBroadcastAddress());
    }

    /** Alias for {@link #getEndpointToHostId()} kept for JMX compatibility. */
    public Map<String, String> getHostIdMap()
    {
        return getEndpointToHostId();
    }
public Map<String, String> getEndpointToHostId()
{
Map<String, String> mapOut = new HashMap<>();
for (Map.Entry<InetAddress, UUID> entry : getTokenMetadata().getEndpointToHostIdMapForReading().entrySet())
mapOut.put(entry.getKey().getHostAddress(), entry.getValue().toString());
return mapOut;
}
public Map<String, String> getHostIdToEndpoint()
{
Map<String, String> mapOut = new HashMap<>();
for (Map.Entry<InetAddress, UUID> entry : getTokenMetadata().getEndpointToHostIdMapForReading().entrySet())
mapOut.put(entry.getValue().toString(), entry.getKey().getHostAddress());
return mapOut;
}
/**
* Construct the range to endpoint mapping based on the true view
* of the world.
* @param ranges
* @return mapping of ranges to the replicas responsible for them.
*/
private Map<Range<Token>, List<InetAddress>> constructRangeToEndpointMap(String keyspace, List<Range<Token>> ranges)
{
Map<Range<Token>, List<InetAddress>> rangeToEndpointMap = new HashMap<>(ranges.size());
for (Range<Token> range : ranges)
{
rangeToEndpointMap.put(range, Keyspace.open(keyspace).getReplicationStrategy().getNaturalEndpoints(range.right));
}
return rangeToEndpointMap;
}
    /**
     * Gossip callback invoked before an endpoint's application state changes.
     * Intentionally a no-op for this service.
     */
    public void beforeChange(InetAddress endpoint, EndpointState currentState, ApplicationState newStateKey, VersionedValue newValue)
    {
        // no-op
    }
    /*
     * Handle the reception of a new particular ApplicationState for a particular endpoint. Note that the value of the
     * ApplicationState has not necessarily "changed" since the last known value, if we already received the same update
     * from somewhere else.
     *
     * onChange only ever sees one ApplicationState piece change at a time (even if many ApplicationState updates were
     * received at the same time), so we perform a kind of state machine here. We are concerned with two events: knowing
     * the token associated with an endpoint, and knowing its operation mode. Nodes can start in either bootstrap or
     * normal mode, and from bootstrap mode can change mode to normal. A node in bootstrap mode needs to have
     * pendingranges set in TokenMetadata; a node in normal mode should instead be part of the token ring.
     *
     * Normal progression of ApplicationState.STATUS values for a node should be like this:
     * STATUS_BOOTSTRAPPING,token
     *   if bootstrapping. stays this way until all files are received.
     * STATUS_NORMAL,token
     *   ready to serve reads and writes.
     * STATUS_LEAVING,token
     *   get ready to leave the cluster as part of a decommission
     * STATUS_LEFT,token
     *   set after decommission is completed.
     *
     * Other STATUS values that may be seen (possibly anywhere in the normal progression):
     * STATUS_MOVING,newtoken
     *   set if node is currently moving to a new token in the ring
     * REMOVING_TOKEN,deadtoken
     *   set if the node is dead and is being removed by its REMOVAL_COORDINATOR
     * REMOVED_TOKEN,deadtoken
     *   set if the node is dead and has been removed by its REMOVAL_COORDINATOR
     *
     * Note: Any time a node state changes from STATUS_NORMAL, it will not be visible to new nodes. So it follows that
     * you should never bootstrap a new node during a removenode, decommission or move.
     */
    public void onChange(InetAddress endpoint, ApplicationState state, VersionedValue value)
    {
        if (state == ApplicationState.STATUS)
        {
            // STATUS values are "<status>[,<arg>...]" joined with VersionedValue's delimiter
            String apStateValue = value.value;
            String[] pieces = apStateValue.split(VersionedValue.DELIMITER_STR, -1);
            assert (pieces.length > 0);
            String moveName = pieces[0];
            switch (moveName)
            {
                case VersionedValue.STATUS_BOOTSTRAPPING:
                    handleStateBootstrap(endpoint);
                    break;
                case VersionedValue.STATUS_NORMAL:
                    handleStateNormal(endpoint, VersionedValue.STATUS_NORMAL);
                    break;
                case VersionedValue.SHUTDOWN:
                    handleStateNormal(endpoint, VersionedValue.SHUTDOWN);
                    break;
                case VersionedValue.REMOVING_TOKEN:
                case VersionedValue.REMOVED_TOKEN:
                    handleStateRemoving(endpoint, pieces);
                    break;
                case VersionedValue.STATUS_LEAVING:
                    handleStateLeaving(endpoint);
                    break;
                case VersionedValue.STATUS_LEFT:
                    handleStateLeft(endpoint, pieces);
                    break;
                case VersionedValue.STATUS_MOVING:
                    handleStateMoving(endpoint, pieces);
                    break;
            }
        }
        else
        {
            // Non-STATUS application states are only persisted for live, known ring members.
            EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
            if (epState == null || Gossiper.instance.isDeadState(epState))
            {
                logger.debug("Ignoring state change for dead or unknown endpoint: {}", endpoint);
                return;
            }
            if (getTokenMetadata().isMember(endpoint))
            {
                switch (state)
                {
                    case RELEASE_VERSION:
                        SystemKeyspace.updatePeerInfo(endpoint, "release_version", value.value);
                        break;
                    case DC:
                        updateTopology(endpoint);
                        SystemKeyspace.updatePeerInfo(endpoint, "data_center", value.value);
                        break;
                    case RACK:
                        updateTopology(endpoint);
                        SystemKeyspace.updatePeerInfo(endpoint, "rack", value.value);
                        break;
                    case RPC_ADDRESS:
                        try
                        {
                            SystemKeyspace.updatePeerInfo(endpoint, "rpc_address", InetAddress.getByName(value.value));
                        }
                        catch (UnknownHostException e)
                        {
                            throw new RuntimeException(e);
                        }
                        break;
                    case SCHEMA:
                        SystemKeyspace.updatePeerInfo(endpoint, "schema_version", UUID.fromString(value.value));
                        MigrationManager.instance.scheduleSchemaPull(endpoint, epState);
                        break;
                    case HOST_ID:
                        SystemKeyspace.updatePeerInfo(endpoint, "host_id", UUID.fromString(value.value));
                        break;
                    case RPC_READY:
                        notifyRpcChange(endpoint, epState.isRpcReady());
                        break;
                    case NET_VERSION:
                        updateNetVersion(endpoint, value);
                        break;
                }
            }
        }
    }
private void updateNetVersion(InetAddress endpoint, VersionedValue value)
{
try
{
MessagingService.instance().setVersion(endpoint, Integer.valueOf(value.value));
}
catch (NumberFormatException e)
{
throw new AssertionError("Got invalid value for NET_VERSION application state: " + value.value);
}
}
public void updateTopology(InetAddress endpoint)
{
if (getTokenMetadata().isMember(endpoint))
{
getTokenMetadata().updateTopology(endpoint);
}
}
public void updateTopology()
{
getTokenMetadata().updateTopology();
}
    /**
     * Copies every gossiped application state of {@code endpoint} that has a
     * corresponding system peers column into the system keyspace.
     */
    private void updatePeerInfo(InetAddress endpoint)
    {
        EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
        for (Map.Entry<ApplicationState, VersionedValue> entry : epState.states())
        {
            switch (entry.getKey())
            {
                case RELEASE_VERSION:
                    SystemKeyspace.updatePeerInfo(endpoint, "release_version", entry.getValue().value);
                    break;
                case DC:
                    SystemKeyspace.updatePeerInfo(endpoint, "data_center", entry.getValue().value);
                    break;
                case RACK:
                    SystemKeyspace.updatePeerInfo(endpoint, "rack", entry.getValue().value);
                    break;
                case RPC_ADDRESS:
                    try
                    {
                        SystemKeyspace.updatePeerInfo(endpoint, "rpc_address", InetAddress.getByName(entry.getValue().value));
                    }
                    catch (UnknownHostException e)
                    {
                        // the peer gossiped an unparseable rpc address; nothing sane to store
                        throw new RuntimeException(e);
                    }
                    break;
                case SCHEMA:
                    SystemKeyspace.updatePeerInfo(endpoint, "schema_version", UUID.fromString(entry.getValue().value));
                    break;
                case HOST_ID:
                    SystemKeyspace.updatePeerInfo(endpoint, "host_id", UUID.fromString(entry.getValue().value));
                    break;
            }
        }
    }
private void notifyRpcChange(InetAddress endpoint, boolean ready)
{
if (ready)
{
notifyUp(endpoint);
notifyJoined(endpoint);
}
else
{
notifyDown(endpoint);
}
}
private void notifyUp(InetAddress endpoint)
{
if (!isRpcReady(endpoint) || !Gossiper.instance.isAlive(endpoint))
return;
for (IEndpointLifecycleSubscriber subscriber : lifecycleSubscribers)
subscriber.onUp(endpoint);
}
private void notifyDown(InetAddress endpoint)
{
for (IEndpointLifecycleSubscriber subscriber : lifecycleSubscribers)
subscriber.onDown(endpoint);
}
private void notifyJoined(InetAddress endpoint)
{
if (!isRpcReady(endpoint) || !isStatus(endpoint, VersionedValue.STATUS_NORMAL))
return;
for (IEndpointLifecycleSubscriber subscriber : lifecycleSubscribers)
subscriber.onJoinCluster(endpoint);
}
private void notifyMoved(InetAddress endpoint)
{
for (IEndpointLifecycleSubscriber subscriber : lifecycleSubscribers)
subscriber.onMove(endpoint);
}
private void notifyLeft(InetAddress endpoint)
{
for (IEndpointLifecycleSubscriber subscriber : lifecycleSubscribers)
subscriber.onLeaveCluster(endpoint);
}
private boolean isStatus(InetAddress endpoint, String status)
{
return Gossiper.instance.getEndpointStateForEndpoint(endpoint).getStatus().equals(status);
}
private boolean isRpcReady(InetAddress endpoint)
{
return MessagingService.instance().getVersion(endpoint) < MessagingService.VERSION_22 ||
Gossiper.instance.getEndpointStateForEndpoint(endpoint).isRpcReady();
}
public void setRpcReady(boolean value)
{
Gossiper.instance.addLocalApplicationState(ApplicationState.RPC_READY, valueFactory.rpcReady(value));
}
private Collection<Token> getTokensFor(InetAddress endpoint)
{
try
{
EndpointState state = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
if (state == null)
return Collections.emptyList();
VersionedValue versionedValue = state.getApplicationState(ApplicationState.TOKENS);
if (versionedValue == null)
return Collections.emptyList();
return TokenSerializer.deserialize(tokenMetadata.partitioner, new DataInputStream(new ByteArrayInputStream(versionedValue.toBytes())));
}
catch (IOException e)
{
throw new RuntimeException(e);
}
}
    /**
     * Handle node bootstrap
     *
     * @param endpoint bootstrapping node
     */
    private void handleStateBootstrap(InetAddress endpoint)
    {
        Collection<Token> tokens;
        // explicitly check for TOKENS, because a bootstrapping node might be bootstrapping in legacy mode; that is, not using vnodes and no token specified
        tokens = getTokensFor(endpoint);

        if (logger.isDebugEnabled())
            logger.debug("Node {} state bootstrapping, token {}", endpoint, tokens);

        // if this node is present in token metadata, either we have missed intermediate states
        // or the node had crashed. Print warning if needed, clear obsolete stuff and
        // continue.
        if (tokenMetadata.isMember(endpoint))
        {
            // If isLeaving is false, we have missed both LEAVING and LEFT. However, if
            // isLeaving is true, we have only missed LEFT. Waiting time between completing
            // leave operation and rebootstrapping is relatively short, so the latter is quite
            // common (not enough time for gossip to spread). Therefore we report only the
            // former in the log.
            if (!tokenMetadata.isLeaving(endpoint))
                logger.info("Node {} state jump to bootstrap", endpoint);
            tokenMetadata.removeEndpoint(endpoint);
        }

        // register the tokens as pending and recompute pending ranges
        tokenMetadata.addBootstrapTokens(tokens, endpoint);
        PendingRangeCalculatorService.instance.update();
        tokenMetadata.updateHostId(Gossiper.instance.getHostId(endpoint), endpoint);
    }
/**
* Handle node move to normal state. That is, node is entering token ring and participating
* in reads.
*
* @param endpoint node
*/
private void handleStateNormal(final InetAddress endpoint, final String status)
{
Collection<Token> tokens = getTokensFor(endpoint);
Set<Token> tokensToUpdateInMetadata = new HashSet<>();
Set<Token> tokensToUpdateInSystemKeyspace = new HashSet<>();
Set<InetAddress> endpointsToRemove = new HashSet<>();
if (logger.isDebugEnabled())
logger.debug("Node {} state {}, token {}", endpoint, status, tokens);
if (tokenMetadata.isMember(endpoint))
logger.info("Node {} state jump to {}", endpoint, status);
if (tokens.isEmpty() && status.equals(VersionedValue.STATUS_NORMAL))
logger.error("Node {} is in state normal but it has no tokens, state: {}",
endpoint,
Gossiper.instance.getEndpointStateForEndpoint(endpoint));
updatePeerInfo(endpoint);
// Order Matters, TM.updateHostID() should be called before TM.updateNormalToken(), (see CASSANDRA-4300).
UUID hostId = Gossiper.instance.getHostId(endpoint);
InetAddress existing = tokenMetadata.getEndpointForHostId(hostId);
if (replacing && Gossiper.instance.getEndpointStateForEndpoint(DatabaseDescriptor.getReplaceAddress()) != null && (hostId.equals(Gossiper.instance.getHostId(DatabaseDescriptor.getReplaceAddress()))))
logger.warn("Not updating token metadata for {} because I am replacing it", endpoint);
else
{
if (existing != null && !existing.equals(endpoint))
{
if (existing.equals(FBUtilities.getBroadcastAddress()))
{
logger.warn("Not updating host ID {} for {} because it's mine", hostId, endpoint);
tokenMetadata.removeEndpoint(endpoint);
endpointsToRemove.add(endpoint);
}
else if (Gossiper.instance.compareEndpointStartup(endpoint, existing) > 0)
{
logger.warn("Host ID collision for {} between {} and {}; {} is the new owner", hostId, existing, endpoint, endpoint);
tokenMetadata.removeEndpoint(existing);
endpointsToRemove.add(existing);
tokenMetadata.updateHostId(hostId, endpoint);
}
else
{
logger.warn("Host ID collision for {} between {} and {}; ignored {}", hostId, existing, endpoint, endpoint);
tokenMetadata.removeEndpoint(endpoint);
endpointsToRemove.add(endpoint);
}
}
else
tokenMetadata.updateHostId(hostId, endpoint);
}
for (final Token token : tokens)
{
// we don't want to update if this node is responsible for the token and it has a later startup time than endpoint.
InetAddress currentOwner = tokenMetadata.getEndpoint(token);
if (currentOwner == null)
{
logger.debug("New node {} at token {}", endpoint, token);
tokensToUpdateInMetadata.add(token);
tokensToUpdateInSystemKeyspace.add(token);
}
else if (endpoint.equals(currentOwner))
{
// set state back to normal, since the node may have tried to leave, but failed and is now back up
tokensToUpdateInMetadata.add(token);
tokensToUpdateInSystemKeyspace.add(token);
}
else if (Gossiper.instance.compareEndpointStartup(endpoint, currentOwner) > 0)
{
tokensToUpdateInMetadata.add(token);
tokensToUpdateInSystemKeyspace.add(token);
// currentOwner is no longer current, endpoint is. Keep track of these moves, because when
// a host no longer has any tokens, we'll want to remove it.
Multimap<InetAddress, Token> epToTokenCopy = getTokenMetadata().getEndpointToTokenMapForReading();
epToTokenCopy.get(currentOwner).remove(token);
if (epToTokenCopy.get(currentOwner).size() < 1)
endpointsToRemove.add(currentOwner);
logger.info(String.format("Nodes %s and %s have the same token %s. %s is the new owner",
endpoint,
currentOwner,
token,
endpoint));
}
else
{
logger.info(String.format("Nodes %s and %s have the same token %s. Ignoring %s",
endpoint,
currentOwner,
token,
endpoint));
}
}
boolean isMoving = tokenMetadata.isMoving(endpoint); // capture because updateNormalTokens clears moving status
tokenMetadata.updateNormalTokens(tokensToUpdateInMetadata, endpoint);
for (InetAddress ep : endpointsToRemove)
{
removeEndpoint(ep);
if (replacing && DatabaseDescriptor.getReplaceAddress().equals(ep))
Gossiper.instance.replacementQuarantine(ep); // quarantine locally longer than normally; see CASSANDRA-8260
}
if (!tokensToUpdateInSystemKeyspace.isEmpty())
SystemKeyspace.updateTokens(endpoint, tokensToUpdateInSystemKeyspace);;
if (isMoving || operationMode == Mode.MOVING)
{
tokenMetadata.removeFromMoving(endpoint);
notifyMoved(endpoint);
}
else
{
notifyJoined(endpoint);
}
PendingRangeCalculatorService.instance.update();
}
    /**
     * Handle node preparing to leave the ring
     *
     * @param endpoint node
     */
    private void handleStateLeaving(InetAddress endpoint)
    {
        Collection<Token> tokens = getTokensFor(endpoint);

        if (logger.isDebugEnabled())
            logger.debug("Node {} state leaving, tokens {}", endpoint, tokens);

        // If the node is previously unknown or tokens do not match, update tokenmetadata to
        // have this node as 'normal' (it must have been using this token before the
        // leave). This way we'll get pending ranges right.
        if (!tokenMetadata.isMember(endpoint))
        {
            logger.info("Node {} state jump to leaving", endpoint);
            tokenMetadata.updateNormalTokens(tokens, endpoint);
        }
        else if (!tokenMetadata.getTokens(endpoint).containsAll(tokens))
        {
            logger.warn("Node {} 'leaving' token mismatch. Long network partition?", endpoint);
            tokenMetadata.updateNormalTokens(tokens, endpoint);
        }

        // at this point the endpoint is certainly a member with this token, so let's proceed
        // normally
        tokenMetadata.addLeavingEndpoint(endpoint);
        PendingRangeCalculatorService.instance.update();
    }
/**
* Handle node leaving the ring. This will happen when a node is decommissioned
*
* @param endpoint If reason for leaving is decommission, endpoint is the leaving node.
* @param pieces STATE_LEFT,token
*/
private void handleStateLeft(InetAddress endpoint, String[] pieces)
{
assert pieces.length >= 2;
Collection<Token> tokens = getTokensFor(endpoint);
if (logger.isDebugEnabled())
logger.debug("Node {} state left, tokens {}", endpoint, tokens);
excise(tokens, endpoint, extractExpireTime(pieces));
}
/**
* Handle node moving inside the ring.
*
* @param endpoint moving endpoint address
* @param pieces STATE_MOVING, token
*/
private void handleStateMoving(InetAddress endpoint, String[] pieces)
{
assert pieces.length >= 2;
Token token = getTokenFactory().fromString(pieces[1]);
if (logger.isDebugEnabled())
logger.debug("Node {} state moving, new token {}", endpoint, token);
tokenMetadata.addMovingEndpoint(token, endpoint);
PendingRangeCalculatorService.instance.update();
}
    /**
     * Handle notification that a node being actively removed from the ring via 'removenode'
     *
     * @param endpoint node
     * @param pieces either REMOVED_TOKEN (node is gone) or REMOVING_TOKEN (replicas need to be restored)
     */
    private void handleStateRemoving(InetAddress endpoint, String[] pieces)
    {
        assert (pieces.length > 0);

        if (endpoint.equals(FBUtilities.getBroadcastAddress()))
        {
            logger.info("Received removenode gossip about myself. Is this node rejoining after an explicit removenode?");
            try
            {
                // stop serving; a removed node must not keep participating
                drain();
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
            return;
        }
        if (tokenMetadata.isMember(endpoint))
        {
            String state = pieces[0];
            Collection<Token> removeTokens = tokenMetadata.getTokens(endpoint);

            if (VersionedValue.REMOVED_TOKEN.equals(state))
            {
                excise(removeTokens, endpoint, extractExpireTime(pieces));
            }
            else if (VersionedValue.REMOVING_TOKEN.equals(state))
            {
                if (logger.isDebugEnabled())
                    logger.debug("Tokens {} removed manually (endpoint was {})", removeTokens, endpoint);

                // Note that the endpoint is being removed
                tokenMetadata.addLeavingEndpoint(endpoint);
                PendingRangeCalculatorService.instance.update();

                // find the endpoint coordinating this removal that we need to notify when we're done
                String[] coordinator = Gossiper.instance.getEndpointStateForEndpoint(endpoint).getApplicationState(ApplicationState.REMOVAL_COORDINATOR).value.split(VersionedValue.DELIMITER_STR, -1);
                UUID hostId = UUID.fromString(coordinator[1]);
                // grab any data we are now responsible for and notify responsible node
                restoreReplicaCount(endpoint, tokenMetadata.getEndpointForHostId(hostId));
            }
        }
        else // now that the gossiper has told us about this nonexistent member, notify the gossiper to remove it
        {
            if (VersionedValue.REMOVED_TOKEN.equals(pieces[0]))
                addExpireTimeIfFound(endpoint, extractExpireTime(pieces));
            removeEndpoint(endpoint);
        }
    }
    /**
     * Removes an endpoint and its tokens from gossip, hint storage, the system
     * keyspace and token metadata, then notifies lifecycle subscribers and
     * recalculates pending ranges.
     */
    private void excise(Collection<Token> tokens, InetAddress endpoint)
    {
        logger.info("Removing tokens {} for {}", tokens, endpoint);

        // drop any hints still queued for the departing node
        if (tokenMetadata.isMember(endpoint))
            HintsService.instance.excise(tokenMetadata.getHostId(endpoint));

        removeEndpoint(endpoint);
        tokenMetadata.removeEndpoint(endpoint);
        if (!tokens.isEmpty())
            tokenMetadata.removeBootstrapTokens(tokens);
        notifyLeft(endpoint);
        PendingRangeCalculatorService.instance.update();
    }

    /**
     * Same as {@link #excise(Collection, InetAddress)} but first records the
     * gossip expire time (when non-zero) for the removed endpoint.
     */
    private void excise(Collection<Token> tokens, InetAddress endpoint, long expireTime)
    {
        addExpireTimeIfFound(endpoint, expireTime);
        excise(tokens, endpoint);
    }
    /** Unlike excise, removes the endpoint without issuing any subscriber notifications. */
    private void removeEndpoint(InetAddress endpoint)
    {
        Gossiper.instance.removeEndpoint(endpoint);
        SystemKeyspace.removeEndpoint(endpoint);
    }
protected void addExpireTimeIfFound(InetAddress endpoint, long expireTime)
{
if (expireTime != 0L)
{
Gossiper.instance.addExpireTimeForEndpoint(endpoint, expireTime);
}
}
protected long extractExpireTime(String[] pieces)
{
return Long.parseLong(pieces[2]);
}
/**
 * Finds living endpoints responsible for the given ranges.
 *
 * For each range, replicas are sorted by proximity to this node and the
 * closest live one is chosen as the single streaming source.
 *
 * @param keyspaceName the keyspace ranges belong to
 * @param ranges the ranges to find sources for
 * @return multimap of addresses to ranges the address is responsible for
 */
private Multimap<InetAddress, Range<Token>> getNewSourceRanges(String keyspaceName, Set<Range<Token>> ranges)
{
    InetAddress myAddress = FBUtilities.getBroadcastAddress();
    Multimap<Range<Token>, InetAddress> rangeAddresses = Keyspace.open(keyspaceName).getReplicationStrategy().getRangeAddresses(tokenMetadata.cloneOnlyTokenMap());
    Multimap<InetAddress, Range<Token>> sourceRanges = HashMultimap.create();
    IFailureDetector failureDetector = FailureDetector.instance;
    // the snitch does not change per range, so look it up once instead of inside the loop
    IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
    // find alive sources for our new ranges
    for (Range<Token> range : ranges)
    {
        Collection<InetAddress> possibleRanges = rangeAddresses.get(range);
        List<InetAddress> sources = snitch.getSortedListByProximity(myAddress, possibleRanges);
        // we should never be listed as a replica for a range we are fetching
        assert (!sources.contains(myAddress));
        for (InetAddress source : sources)
        {
            // take the closest live replica as the single source for this range
            if (failureDetector.isAlive(source))
            {
                sourceRanges.put(source, range);
                break;
            }
        }
    }
    return sourceRanges;
}
/**
 * Sends a notification to a node indicating we have finished replicating data.
 *
 * Retries with the RPC timeout until either the remote acknowledges or the
 * failure detector declares it dead; if it dies the notification is dropped
 * silently.
 *
 * @param remote node to send notification to
 */
private void sendReplicationNotification(InetAddress remote)
{
    // notify the remote token
    MessageOut msg = new MessageOut(MessagingService.Verb.REPLICATION_FINISHED);
    IFailureDetector failureDetector = FailureDetector.instance;
    if (logger.isDebugEnabled())
        logger.debug("Notifying {} of replication completion\n", remote);
    // keep retrying as long as the remote is considered alive
    while (failureDetector.isAlive(remote))
    {
        AsyncOneResponse iar = MessagingService.instance().sendRR(msg, remote);
        try
        {
            iar.get(DatabaseDescriptor.getRpcTimeout(), TimeUnit.MILLISECONDS);
            return; // done
        }
        catch(TimeoutException e)
        {
            // try again
        }
    }
}
/**
 * Called when an endpoint is removed from the ring. This function checks
 * whether this node becomes responsible for new ranges as a
 * consequence and streams data if needed.
 *
 * This is rather ineffective, but it does not matter so much
 * since this is called very seldom
 *
 * When streaming completes (successfully or not) the removal coordinator is
 * notified via {@link #sendReplicationNotification}.
 *
 * @param endpoint the node that left
 * @param notifyEndpoint the removal coordinator to notify when done
 */
private void restoreReplicaCount(InetAddress endpoint, final InetAddress notifyEndpoint)
{
    Multimap<String, Map.Entry<InetAddress, Collection<Range<Token>>>> rangesToFetch = HashMultimap.create();
    InetAddress myAddress = FBUtilities.getBroadcastAddress();
    // for each keyspace, work out which of the departed node's ranges this
    // node now replicates, and choose a live source for each
    for (String keyspaceName : Schema.instance.getNonLocalStrategyKeyspaces())
    {
        Multimap<Range<Token>, InetAddress> changedRanges = getChangedRangesForLeaving(keyspaceName, endpoint);
        Set<Range<Token>> myNewRanges = new HashSet<>();
        for (Map.Entry<Range<Token>, InetAddress> entry : changedRanges.entries())
        {
            if (entry.getValue().equals(myAddress))
                myNewRanges.add(entry.getKey());
        }
        Multimap<InetAddress, Range<Token>> sourceRanges = getNewSourceRanges(keyspaceName, myNewRanges);
        for (Map.Entry<InetAddress, Collection<Range<Token>>> entry : sourceRanges.asMap().entrySet())
        {
            rangesToFetch.put(keyspaceName, entry);
        }
    }
    // request all the ranges from their chosen sources in one stream plan
    StreamPlan stream = new StreamPlan("Restore replica count");
    for (String keyspaceName : rangesToFetch.keySet())
    {
        for (Map.Entry<InetAddress, Collection<Range<Token>>> entry : rangesToFetch.get(keyspaceName))
        {
            InetAddress source = entry.getKey();
            InetAddress preferred = SystemKeyspace.getPreferredIP(source);
            Collection<Range<Token>> ranges = entry.getValue();
            if (logger.isDebugEnabled())
                logger.debug("Requesting from {} ranges {}", source, StringUtils.join(ranges, ", "));
            stream.requestRanges(source, preferred, keyspaceName, ranges);
        }
    }
    StreamResultFuture future = stream.execute();
    Futures.addCallback(future, new FutureCallback<StreamState>()
    {
        @Override
        public void onSuccess(StreamState finalState)
        {
            sendReplicationNotification(notifyEndpoint);
        }
        @Override
        public void onFailure(Throwable t)
        {
            logger.warn("Streaming to restore replica count failed", t);
            // We still want to send the notification
            sendReplicationNotification(notifyEndpoint);
        }
    });
}
// needs to be modified to accept either a keyspace or ARS.
/**
 * Computes, for each range the leaving endpoint is responsible for, the set of
 * endpoints that will newly replicate that range once the endpoint is gone.
 *
 * @param keyspaceName keyspace whose replication strategy is consulted
 * @param endpoint the node that is leaving (or being removed)
 * @return multimap of range -> endpoints that gain responsibility for it
 */
private Multimap<Range<Token>, InetAddress> getChangedRangesForLeaving(String keyspaceName, InetAddress endpoint)
{
    // First get all ranges the leaving endpoint is responsible for
    Collection<Range<Token>> ranges = getRangesForEndpoint(keyspaceName, endpoint);
    if (logger.isDebugEnabled())
        logger.debug("Node {} ranges [{}]", endpoint, StringUtils.join(ranges, ", "));
    Map<Range<Token>, List<InetAddress>> currentReplicaEndpoints = new HashMap<>(ranges.size());
    // Find (for each range) all nodes that store replicas for these ranges as well
    TokenMetadata metadata = tokenMetadata.cloneOnlyTokenMap(); // don't do this in the loop! #7758
    // the strategy is per-keyspace, so resolve it once instead of per range
    AbstractReplicationStrategy strategy = Keyspace.open(keyspaceName).getReplicationStrategy();
    for (Range<Token> range : ranges)
        currentReplicaEndpoints.put(range, strategy.calculateNaturalEndpoints(range.right, metadata));
    TokenMetadata temp = tokenMetadata.cloneAfterAllLeft();
    // endpoint might or might not be 'leaving'. If it was not leaving (that is, removenode
    // command was used), it is still present in temp and must be removed.
    if (temp.isMember(endpoint))
        temp.removeEndpoint(endpoint);
    Multimap<Range<Token>, InetAddress> changedRanges = HashMultimap.create();
    // Go through the ranges and for each range check who will be
    // storing replicas for these ranges when the leaving endpoint
    // is gone. Whoever is present in newReplicaEndpoints list, but
    // not in the currentReplicaEndpoints list, will be needing the
    // range.
    for (Range<Token> range : ranges)
    {
        Collection<InetAddress> newReplicaEndpoints = strategy.calculateNaturalEndpoints(range.right, temp);
        newReplicaEndpoints.removeAll(currentReplicaEndpoints.get(range));
        if (logger.isDebugEnabled())
            if (newReplicaEndpoints.isEmpty())
                logger.debug("Range {} already in all replicas", range);
            else
                logger.debug("Range {} will be responsibility of {}", range, StringUtils.join(newReplicaEndpoints, ", "));
        changedRanges.putAll(range, newReplicaEndpoints);
    }
    return changedRanges;
}
/**
 * Gossip callback for a newly joined endpoint: replays each of its
 * application states through {@code onChange}, then schedules a schema pull.
 */
public void onJoin(InetAddress endpoint, EndpointState epState)
{
    for (Map.Entry<ApplicationState, VersionedValue> stateEntry : epState.states())
        onChange(endpoint, stateEntry.getKey(), stateEntry.getValue());
    MigrationManager.instance.scheduleSchemaPull(endpoint, epState);
}
/**
 * Gossip callback for an endpoint marked alive: schedule a schema pull and,
 * if the endpoint is a ring member, notify subscribers that it is up.
 */
public void onAlive(InetAddress endpoint, EndpointState state)
{
    MigrationManager.instance.scheduleSchemaPull(endpoint, state);
    if (tokenMetadata.isMember(endpoint))
        notifyUp(endpoint);
}
/**
 * Gossip callback for an endpoint removed from the cluster: drop it from
 * token metadata and recompute pending ranges.
 */
public void onRemove(InetAddress endpoint)
{
    tokenMetadata.removeEndpoint(endpoint);
    PendingRangeCalculatorService.instance.update();
}
/**
 * Gossip callback for an endpoint marked dead: close its connections and
 * notify subscribers that it is down.
 */
public void onDead(InetAddress endpoint, EndpointState state)
{
    MessagingService.instance().convict(endpoint);
    notifyDown(endpoint);
}
/**
 * Gossip callback for an endpoint that restarted (new generation).
 * Ordering is deliberate: reset connections first, then refresh the
 * messaging version before the node is marked live again.
 */
public void onRestart(InetAddress endpoint, EndpointState state)
{
    // If we have restarted before the node was even marked down, we need to reset the connection pool
    if (state.isAlive())
        onDead(endpoint, state);
    // Then, the node may have been upgraded and changed its messaging protocol version. If so, we
    // want to update that before we mark the node live again to avoid problems like CASSANDRA-11128.
    VersionedValue netVersion = state.getApplicationState(ApplicationState.NET_VERSION);
    if (netVersion != null)
        updateNetVersion(endpoint, netVersion);
}
/** Returns this node's current data load as a human-readable size string. */
public String getLoadString()
{
    long loadBytes = StorageMetrics.load.getCount();
    return FileUtils.stringifyFileSize(loadBytes);
}
/**
 * Returns a map of node address to human-readable data load for every node
 * the load broadcaster knows about, plus this node itself.
 */
public Map<String, String> getLoadMap()
{
    Map<String, String> loadMap = new HashMap<>();
    for (Map.Entry<InetAddress, Double> info : LoadBroadcaster.instance.getLoadInfo().entrySet())
        loadMap.put(info.getKey().getHostAddress(), FileUtils.stringifyFileSize(info.getValue()));
    // gossiper doesn't see its own updates, so we need to special-case the local node
    loadMap.put(FBUtilities.getBroadcastAddress().getHostAddress(), getLoadString());
    return loadMap;
}
// TODO
/**
 * Hint delivery by host is not supported by this implementation; kept only to
 * satisfy the MBean interface.
 *
 * @throws UnsupportedOperationException always
 */
public final void deliverHints(String host) throws UnknownHostException
{
    throw new UnsupportedOperationException();
}
/**
 * Returns the tokens persisted for this node in the system keyspace.
 * Must not be called before initServer has saved them.
 */
public Collection<Token> getLocalTokens()
{
    Collection<Token> tokens = SystemKeyspace.getSavedTokens();
    assert tokens != null && !tokens.isEmpty(); // should not be called before initServer sets this
    return tokens;
}
/** Returns the endpoint currently mapped to the given host id, or null if unknown. */
@Nullable
public InetAddress getEndpointForHostId(UUID hostId)
{
    return tokenMetadata.getEndpointForHostId(hostId);
}
/** Returns the host id currently mapped to the given endpoint, or null if unknown. */
@Nullable
public UUID getHostIdForEndpoint(InetAddress address)
{
    return tokenMetadata.getHostId(address);
}
/* These methods belong to the MBean interface */
/** Returns this node's own tokens as strings. */
public List<String> getTokens()
{
    return getTokens(FBUtilities.getBroadcastAddress());
}
/**
 * Returns the tokens of the node at the given address (hostname or IP).
 *
 * @throws UnknownHostException if the endpoint cannot be resolved
 */
public List<String> getTokens(String endpoint) throws UnknownHostException
{
    return getTokens(InetAddress.getByName(endpoint));
}
/** Renders the given endpoint's tokens from token metadata as strings. */
private List<String> getTokens(InetAddress endpoint)
{
    List<String> result = new ArrayList<>();
    for (Token token : getTokenMetadata().getTokens(endpoint))
        result.add(token.toString());
    return result;
}
/** Returns the Cassandra release version string of this node. */
public String getReleaseVersion()
{
    return FBUtilities.getReleaseVersionString();
}
/** Returns the local schema version UUID as a string. */
public String getSchemaVersion()
{
    return Schema.instance.getVersion().toString();
}
/** Returns the addresses of nodes currently leaving the ring. */
public List<String> getLeavingNodes()
{
    return stringify(tokenMetadata.getLeavingEndpoints());
}
/** Returns the addresses of nodes currently moving to a new token. */
public List<String> getMovingNodes()
{
    List<String> moving = new ArrayList<>();
    for (Pair<Token, InetAddress> node : tokenMetadata.getMovingEndpoints())
        moving.add(node.right.getHostAddress());
    return moving;
}
/** Returns the addresses of nodes currently bootstrapping into the ring. */
public List<String> getJoiningNodes()
{
    return stringify(tokenMetadata.getBootstrapTokens().valueSet());
}
/** Returns the addresses of all gossip-live members (not restricted to ring members). */
public List<String> getLiveNodes()
{
    return stringify(Gossiper.instance.getLiveMembers());
}
/** Returns live ring members, including those in dead gossip states. */
public Set<InetAddress> getLiveRingMembers()
{
    return getLiveRingMembers(false);
}
/**
 * Returns the gossip-live endpoints that are also ring members.
 *
 * @param excludeDeadStates if true, endpoints whose gossip state is missing
 *                          or considered a dead state are filtered out
 */
public Set<InetAddress> getLiveRingMembers(boolean excludeDeadStates)
{
    Set<InetAddress> members = new HashSet<>();
    for (InetAddress ep : Gossiper.instance.getLiveMembers())
    {
        if (!tokenMetadata.isMember(ep))
            continue;
        if (excludeDeadStates)
        {
            EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(ep);
            if (epState == null || Gossiper.instance.isDeadState(epState))
                continue;
        }
        members.add(ep);
    }
    return members;
}
/** Returns the addresses of members the gossiper currently considers unreachable. */
public List<String> getUnreachableNodes()
{
    return stringify(Gossiper.instance.getUnreachableMembers());
}
public String[] getAllDataFileLocations()
{
String[] locations = DatabaseDescriptor.getAllDataFileLocations();
for (int i = 0; i < locations.length; i++)
locations[i] = FileUtils.getCanonicalPath(locations[i]);
return locations;
}
/** Returns the configured commit log directory as a canonical path. */
public String getCommitLogLocation()
{
    return FileUtils.getCanonicalPath(DatabaseDescriptor.getCommitLogLocation());
}
/** Returns the configured saved-caches directory as a canonical path. */
public String getSavedCachesLocation()
{
    return FileUtils.getCanonicalPath(DatabaseDescriptor.getSavedCachesLocation());
}
/** Converts a collection of endpoints to their textual host addresses, preserving order. */
private List<String> stringify(Iterable<InetAddress> endpoints)
{
    List<String> addresses = new ArrayList<>();
    for (InetAddress endpoint : endpoints)
        addresses.add(endpoint.getHostAddress());
    return addresses;
}
/** Returns this node's current gossip generation number. */
public int getCurrentGenerationNumber()
{
    return Gossiper.instance.getCurrentGenerationNumber(FBUtilities.getBroadcastAddress());
}
/** Cleanup with the default concurrency (0 = use compaction settings). */
public int forceKeyspaceCleanup(String keyspaceName, String... tables) throws IOException, ExecutionException, InterruptedException
{
    return forceKeyspaceCleanup(0, keyspaceName, tables);
}
/**
 * Runs cleanup on the given tables (all tables if none given), rejecting the
 * system keyspace. Returns the status code of the last non-successful table
 * operation, or the success code if every table succeeded.
 */
public int forceKeyspaceCleanup(int jobs, String keyspaceName, String... tables) throws IOException, ExecutionException, InterruptedException
{
    if (Schema.isSystemKeyspace(keyspaceName))
        throw new RuntimeException("Cleanup of the system keyspace is neither necessary nor wise");
    CompactionManager.AllSSTableOpStatus result = CompactionManager.AllSSTableOpStatus.SUCCESSFUL;
    for (ColumnFamilyStore cfs : getValidColumnFamilies(false, false, keyspaceName, tables))
    {
        CompactionManager.AllSSTableOpStatus current = cfs.forceCleanup(jobs);
        if (current != CompactionManager.AllSSTableOpStatus.SUCCESSFUL)
            result = current;
    }
    return result.statusCode;
}
/** Scrub with data checking enabled and default concurrency. */
public int scrub(boolean disableSnapshot, boolean skipCorrupted, String keyspaceName, String... tables) throws IOException, ExecutionException, InterruptedException
{
    return scrub(disableSnapshot, skipCorrupted, true, 0, keyspaceName, tables);
}
/** Scrub with default concurrency (0 = use compaction settings). */
public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, String keyspaceName, String... tables) throws IOException, ExecutionException, InterruptedException
{
    return scrub(disableSnapshot, skipCorrupted, checkData, 0, keyspaceName, tables);
}
/**
 * Scrubs the given tables (all tables if none given). Returns the status code
 * of the last non-successful table operation, or success if all succeeded.
 */
public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, int jobs, String keyspaceName, String... tables) throws IOException, ExecutionException, InterruptedException
{
    CompactionManager.AllSSTableOpStatus result = CompactionManager.AllSSTableOpStatus.SUCCESSFUL;
    for (ColumnFamilyStore cfs : getValidColumnFamilies(true, false, keyspaceName, tables))
    {
        CompactionManager.AllSSTableOpStatus current = cfs.scrub(disableSnapshot, skipCorrupted, checkData, jobs);
        if (current != CompactionManager.AllSSTableOpStatus.SUCCESSFUL)
            result = current;
    }
    return result.statusCode;
}
/**
 * Verifies sstables of the given tables (all tables if none given). Returns
 * the status code of the last non-successful verification, or success.
 */
public int verify(boolean extendedVerify, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException
{
    CompactionManager.AllSSTableOpStatus result = CompactionManager.AllSSTableOpStatus.SUCCESSFUL;
    for (ColumnFamilyStore cfs : getValidColumnFamilies(false, false, keyspaceName, tableNames))
    {
        CompactionManager.AllSSTableOpStatus current = cfs.verify(extendedVerify);
        if (current != CompactionManager.AllSSTableOpStatus.SUCCESSFUL)
            result = current;
    }
    return result.statusCode;
}
/** Upgrade sstables with default concurrency (0 = use compaction settings). */
public int upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, String... tableNames) throws IOException, ExecutionException, InterruptedException
{
    return upgradeSSTables(keyspaceName, excludeCurrentVersion, 0, tableNames);
}
/**
 * Rewrites sstables of the given tables to the current format. Returns the
 * status code of the last non-successful rewrite, or success.
 */
public int upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, int jobs, String... tableNames) throws IOException, ExecutionException, InterruptedException
{
    CompactionManager.AllSSTableOpStatus result = CompactionManager.AllSSTableOpStatus.SUCCESSFUL;
    for (ColumnFamilyStore cfs : getValidColumnFamilies(true, true, keyspaceName, tableNames))
    {
        CompactionManager.AllSSTableOpStatus current = cfs.sstablesRewrite(excludeCurrentVersion, jobs);
        if (current != CompactionManager.AllSSTableOpStatus.SUCCESSFUL)
            result = current;
    }
    return result.statusCode;
}
/** Forces a major compaction on the given tables (all tables if none given). */
public void forceKeyspaceCompaction(boolean splitOutput, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException
{
    for (ColumnFamilyStore cfs : getValidColumnFamilies(true, false, keyspaceName, tableNames))
        cfs.forceMajorCompaction(splitOutput);
}
/** Relocate sstables with default concurrency (0 = use compaction settings). */
public int relocateSSTables(String keyspaceName, String ... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
    return relocateSSTables(0, keyspaceName, columnFamilies);
}
/**
 * Relocates sstables of the given tables to their correct disk. Returns the
 * status code of the last non-successful relocation, or success.
 */
public int relocateSSTables(int jobs, String keyspaceName, String ... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
    CompactionManager.AllSSTableOpStatus result = CompactionManager.AllSSTableOpStatus.SUCCESSFUL;
    for (ColumnFamilyStore store : getValidColumnFamilies(false, false, keyspaceName, columnFamilies))
    {
        CompactionManager.AllSSTableOpStatus current = store.relocateSSTables(jobs);
        if (current != CompactionManager.AllSSTableOpStatus.SUCCESSFUL)
            result = current;
    }
    return result.statusCode;
}
/**
 * Takes the snapshot of multiple column families from different keyspaces.
 * A snapshot name must be specified.
 *
 * @param tag
 *            the tag given to the snapshot; may not be null or empty
 * @param options
 *            Map of options (skipFlush is the only supported option for now)
 * @param entities
 *            list of keyspaces / tables in the form of empty | ks1 ks2 ... | ks1.cf1,ks2.cf2,...
 */
@Override
public void takeSnapshot(String tag, Map<String, String> options, String... entities) throws IOException
{
    boolean skipFlush = Boolean.parseBoolean(options.getOrDefault("skipFlush", "false"));
    // a dot in the first entity means the list is keyspace.table pairs
    boolean qualifiedTables = entities != null && entities.length > 0 && entities[0].contains(".");
    if (qualifiedTables)
        takeMultipleTableSnapshot(tag, skipFlush, entities);
    else
        takeSnapshot(tag, skipFlush, entities);
}
/**
 * Takes the snapshot of a specific table. A snapshot name must be
 * specified.
 *
 * @param keyspaceName
 *            the keyspace which holds the specified table
 * @param tableName
 *            the table to snapshot
 * @param tag
 *            the tag given to the snapshot; may not be null or empty
 * @throws IOException if validation fails or the snapshot already exists
 */
public void takeTableSnapshot(String keyspaceName, String tableName, String tag)
        throws IOException {
    // delegates with skipFlush=false, i.e. memtables are flushed first
    takeMultipleTableSnapshot(tag, false, keyspaceName + "." + tableName);
}
/**
 * Takes the snapshot for the given keyspaces. A snapshot name must be specified.
 * Memtables are flushed first (skipFlush=false).
 *
 * @param tag the tag given to the snapshot; may not be null or empty
 * @param keyspaceNames the names of the keyspaces to snapshot; empty means "all."
 */
public void takeSnapshot(String tag, String... keyspaceNames) throws IOException
{
    takeSnapshot(tag, false, keyspaceNames);
}
/**
 * Takes the snapshot of multiple column families from different keyspaces.
 * A snapshot name must be specified. Memtables are flushed first.
 *
 * @param tag
 *            the tag given to the snapshot; may not be null or empty
 * @param tableList
 *            list of tables from different keyspace in the form of ks1.cf1 ks2.cf2
 */
public void takeMultipleTableSnapshot(String tag, String... tableList)
        throws IOException
{
    takeMultipleTableSnapshot(tag, false, tableList);
}
/**
 * Takes the snapshot for the given keyspaces. A snapshot name must be specified.
 *
 * All keyspaces are validated (exist, no snapshot with this tag yet) before
 * any snapshot is taken, so the operation is all-or-nothing at the check stage.
 *
 * @param tag the tag given to the snapshot; may not be null or empty
 * @param skipFlush Skip blocking flush of memtable
 * @param keyspaceNames the names of the keyspaces to snapshot; empty means "all."
 * @throws IOException if bootstrapping, the tag is missing, or a snapshot with this tag exists
 */
private void takeSnapshot(String tag, boolean skipFlush, String... keyspaceNames) throws IOException
{
    if (operationMode == Mode.JOINING)
        throw new IOException("Cannot snapshot until bootstrap completes");
    if (tag == null || tag.isEmpty())
        throw new IOException("You must supply a snapshot name.");
    Iterable<Keyspace> keyspaces;
    if (keyspaceNames.length == 0)
    {
        keyspaces = Keyspace.all();
    }
    else
    {
        List<Keyspace> selected = new ArrayList<>(keyspaceNames.length);
        for (String keyspaceName : keyspaceNames)
            selected.add(getValidKeyspace(keyspaceName));
        keyspaces = selected;
    }
    // Do a check to see if this snapshot exists before we actually snapshot
    for (Keyspace keyspace : keyspaces)
        if (keyspace.snapshotExists(tag))
            throw new IOException("Snapshot " + tag + " already exists.");
    for (Keyspace keyspace : keyspaces)
        keyspace.snapshot(tag, null, skipFlush);
}
/**
 * Takes the snapshot of multiple column families from different keyspaces.
 * A snapshot name must be specified.
 *
 * Every keyspace.table entry is validated (syntax, keyspace/table existence,
 * no snapshot with this tag yet) before any snapshot is taken, so no snapshot
 * happens if any entry fails validation.
 *
 * @param tag
 *            the tag given to the snapshot; may not be null or empty
 * @param skipFlush
 *            Skip blocking flush of memtable
 * @param tableList
 *            list of tables from different keyspace in the form of ks1.cf1 ks2.cf2
 * @throws IOException if validation fails or a snapshot with this tag exists
 */
private void takeMultipleTableSnapshot(String tag, boolean skipFlush, String... tableList)
        throws IOException
{
    Map<Keyspace, List<String>> keyspaceColumnfamily = new HashMap<>();
    for (String table : tableList)
    {
        String[] parts = StringUtils.split(table, '.');
        if (parts.length != 2)
        {
            throw new IllegalArgumentException(
                    "Cannot take a snapshot on secondary index or invalid column family name. You must supply a column family name in the form of keyspace.columnfamily");
        }
        String keyspaceName = parts[0];
        String tableName = parts[1];
        if (keyspaceName == null)
            throw new IOException("You must supply a keyspace name");
        if (operationMode.equals(Mode.JOINING))
            throw new IOException("Cannot snapshot until bootstrap completes");
        if (tableName == null)
            throw new IOException("You must supply a table name");
        if (tag == null || tag.isEmpty())
            throw new IOException("You must supply a snapshot name.");
        Keyspace keyspace = getValidKeyspace(keyspaceName);
        ColumnFamilyStore columnFamilyStore = keyspace.getColumnFamilyStore(tableName);
        // As there can be multiple column families from the same keyspace, check if a
        // snapshot exists for that specific columnfamily and not for the whole keyspace
        if (columnFamilyStore.snapshotExists(tag))
            throw new IOException("Snapshot " + tag + " already exists.");
        // Collect all validated tables per keyspace so the snapshot phase only
        // runs once every entry has passed the checks above (atomicity).
        keyspaceColumnfamily.computeIfAbsent(keyspace, k -> new ArrayList<>()).add(tableName);
    }
    for (Entry<Keyspace, List<String>> entry : keyspaceColumnfamily.entrySet())
    {
        for (String table : entry.getValue())
            entry.getKey().snapshot(tag, table, skipFlush);
    }
}
/**
 * Opens the keyspace after verifying it exists in the schema.
 *
 * @throws IOException if the keyspace is not defined
 */
private Keyspace getValidKeyspace(String keyspaceName) throws IOException
{
    if (!Schema.instance.getKeyspaces().contains(keyspaceName))
    {
        throw new IOException("Keyspace " + keyspaceName + " does not exist");
    }
    return Keyspace.open(keyspaceName);
}
/**
 * Remove the snapshot with the given name from the given keyspaces.
 * If no tag is specified we will remove all snapshots.
 *
 * Keyspace names are discovered by listing the data directories on disk; a
 * data directory that is missing or unreadable is skipped instead of
 * triggering a NullPointerException (File.list() returns null in that case).
 */
public void clearSnapshot(String tag, String... keyspaceNames) throws IOException
{
    if (tag == null)
        tag = "";
    Set<String> keyspaces = new HashSet<>();
    for (String dataDir : DatabaseDescriptor.getAllDataFileLocations())
    {
        String[] keyspaceDirs = new File(dataDir).list();
        if (keyspaceDirs == null)
            continue; // directory missing or unreadable; nothing to clear there
        for (String keyspaceDir : keyspaceDirs)
        {
            // Only add a ks if it has been specified as a param, assuming params were actually provided.
            if (keyspaceNames.length > 0 && !Arrays.asList(keyspaceNames).contains(keyspaceDir))
                continue;
            keyspaces.add(keyspaceDir);
        }
    }
    for (String keyspace : keyspaces)
        Keyspace.clearSnapshot(tag, keyspace);
    if (logger.isDebugEnabled())
        logger.debug("Cleared out snapshot directories");
}
/**
 * Collects per-snapshot-tag tabular details across all non-system keyspaces
 * and their column families.
 *
 * @return map of snapshot tag to tabular details for every table holding that snapshot
 */
public Map<String, TabularData> getSnapshotDetails()
{
    Map<String, TabularData> snapshotMap = new HashMap<>();
    for (Keyspace keyspace : Keyspace.all())
    {
        // system keyspaces are excluded from user-visible snapshot listings
        if (Schema.isSystemKeyspace(keyspace.getName()))
            continue;
        for (ColumnFamilyStore cfStore : keyspace.getColumnFamilyStores())
        {
            for (Map.Entry<String, Pair<Long, Long>> snapshotDetail : cfStore.getSnapshotDetails().entrySet())
            {
                // one TabularDataSupport per snapshot tag, created on first sight
                TabularDataSupport data = (TabularDataSupport) snapshotMap.computeIfAbsent(
                        snapshotDetail.getKey(), tag -> new TabularDataSupport(SnapshotDetailsTabularData.TABULAR_TYPE));
                SnapshotDetailsTabularData.from(snapshotDetail.getKey(), keyspace.getName(), cfStore.getColumnFamilyName(), snapshotDetail, data);
            }
        }
    }
    return snapshotMap;
}
/** Returns the total on-disk size of snapshots across all non-system keyspaces. */
public long trueSnapshotsSize()
{
    long sum = 0;
    for (Keyspace keyspace : Keyspace.all())
    {
        // system keyspaces are excluded from the user-visible total
        if (Schema.isSystemKeyspace(keyspace.getName()))
            continue;
        for (ColumnFamilyStore cfs : keyspace.getColumnFamilyStores())
            sum += cfs.trueSnapshotsSize();
    }
    return sum;
}
/**
 * Synchronously re-runs the size-estimates recorder on the optional-tasks
 * executor and waits for it to finish.
 */
public void refreshSizeEstimates() throws ExecutionException
{
    FBUtilities.waitOnFuture(ScheduledExecutors.optionalTasks.submit(SizeEstimatesRecorder.instance));
}
/**
 * Resolves the given CF names within a keyspace, validating both.
 *
 * @param allowIndexes Allow index CF names to be passed in
 * @param autoAddIndexes Automatically add secondary indexes if a CF has them
 * @param keyspaceName keyspace
 * @param cfNames CFs; empty means all CFs of the keyspace
 * @throws java.lang.IllegalArgumentException when given CF name does not exist
 * @throws IOException when the keyspace does not exist
 */
public Iterable<ColumnFamilyStore> getValidColumnFamilies(boolean allowIndexes, boolean autoAddIndexes, String keyspaceName, String... cfNames) throws IOException
{
    Keyspace keyspace = getValidKeyspace(keyspaceName);
    return keyspace.getValidColumnFamilies(allowIndexes, autoAddIndexes, cfNames);
}
/**
 * Flush all memtables for a keyspace and column families, blocking until
 * each flush completes.
 *
 * @param keyspaceName keyspace to flush
 * @param tableNames tables to flush; empty means all tables of the keyspace
 * @throws IOException if the keyspace or a table does not exist
 */
public void forceKeyspaceFlush(String keyspaceName, String... tableNames) throws IOException
{
    for (ColumnFamilyStore cfs : getValidColumnFamilies(true, false, keyspaceName, tableNames))
    {
        logger.debug("Forcing flush on keyspace {}, CF {}", keyspaceName, cfs.name);
        cfs.forceBlockingFlush();
    }
}
/**
 * Parses a repair specification and triggers an asynchronous repair.
 * When no explicit ranges are given, ranges default to this node's primary
 * ranges (cluster-wide or DC-local) or to all local ranges.
 *
 * @param keyspace keyspace to repair
 * @param repairSpec option map understood by {@link RepairOption#parse}
 * @return the repair command number, or 0 if nothing needed repairing
 */
public int repairAsync(String keyspace, Map<String, String> repairSpec)
{
    RepairOption option = RepairOption.parse(repairSpec, tokenMetadata.partitioner);
    // if ranges are not specified
    if (option.getRanges().isEmpty())
    {
        if (option.isPrimaryRange())
        {
            // when repairing only primary range, neither dataCenters nor hosts can be set
            if (option.getDataCenters().isEmpty() && option.getHosts().isEmpty())
                option.getRanges().addAll(getPrimaryRanges(keyspace));
            // except dataCenters only contain local DC (i.e. -local)
            else if (option.getDataCenters().size() == 1 && option.getDataCenters().contains(DatabaseDescriptor.getLocalDataCenter()))
                option.getRanges().addAll(getPrimaryRangesWithinDC(keyspace));
            else
                throw new IllegalArgumentException("You need to run primary range repair on all nodes in the cluster.");
        }
        else
        {
            option.getRanges().addAll(getLocalRanges(keyspace));
        }
    }
    return forceRepairAsync(keyspace, option, false);
}
/**
 * Legacy entry point: maps the boolean sequential flag onto a
 * {@link RepairParallelism} ordinal and delegates.
 *
 * @deprecated use the RepairOption-based repair API instead
 */
@Deprecated
public int forceRepairAsync(String keyspace,
                            boolean isSequential,
                            Collection<String> dataCenters,
                            Collection<String> hosts,
                            boolean primaryRange,
                            boolean fullRepair,
                            String... tableNames)
{
    return forceRepairAsync(keyspace, isSequential ? RepairParallelism.SEQUENTIAL.ordinal() : RepairParallelism.PARALLEL.ordinal(), dataCenters, hosts, primaryRange, fullRepair, tableNames);
}
/**
 * Legacy entry point: builds a {@link RepairOption} from positional flags and
 * triggers an asynchronous repair.
 *
 * @param parallelismDegree ordinal into {@link RepairParallelism}; validated below
 * @deprecated use the RepairOption-based repair API instead
 */
@Deprecated
public int forceRepairAsync(String keyspace,
                            int parallelismDegree,
                            Collection<String> dataCenters,
                            Collection<String> hosts,
                            boolean primaryRange,
                            boolean fullRepair,
                            String... tableNames)
{
    if (parallelismDegree < 0 || parallelismDegree > RepairParallelism.values().length - 1)
    {
        throw new IllegalArgumentException("Invalid parallelism degree specified: " + parallelismDegree);
    }
    RepairParallelism parallelism = RepairParallelism.values()[parallelismDegree];
    // snapshot repair relies on hard links, which Windows lacks support for here
    if (FBUtilities.isWindows() && parallelism != RepairParallelism.PARALLEL)
    {
        logger.warn("Snapshot-based repair is not yet supported on Windows. Reverting to parallel repair.");
        parallelism = RepairParallelism.PARALLEL;
    }
    // positional args: (parallelism, primaryRange, incremental, trace, jobThreads, ranges, isSubrangeRepair)
    RepairOption options = new RepairOption(parallelism, primaryRange, !fullRepair, false, 1, Collections.<Range<Token>>emptyList(), false);
    if (dataCenters != null)
    {
        options.getDataCenters().addAll(dataCenters);
    }
    if (hosts != null)
    {
        options.getHosts().addAll(hosts);
    }
    if (primaryRange)
    {
        // when repairing only primary range, neither dataCenters nor hosts can be set
        if (options.getDataCenters().isEmpty() && options.getHosts().isEmpty())
            options.getRanges().addAll(getPrimaryRanges(keyspace));
        // except dataCenters only contain local DC (i.e. -local)
        else if (options.getDataCenters().size() == 1 && options.getDataCenters().contains(DatabaseDescriptor.getLocalDataCenter()))
            options.getRanges().addAll(getPrimaryRangesWithinDC(keyspace));
        else
            throw new IllegalArgumentException("You need to run primary range repair on all nodes in the cluster.");
    }
    else
    {
        options.getRanges().addAll(getLocalRanges(keyspace));
    }
    if (tableNames != null)
    {
        for (String table : tableNames)
        {
            options.getColumnFamilies().add(table);
        }
    }
    return forceRepairAsync(keyspace, options, true);
}
/**
 * Legacy entry point: translates the isLocal flag into a data-center
 * restriction (local DC only) and delegates.
 *
 * @deprecated use the RepairOption-based repair API instead
 */
@Deprecated
public int forceRepairAsync(String keyspace,
                            boolean isSequential,
                            boolean isLocal,
                            boolean primaryRange,
                            boolean fullRepair,
                            String... tableNames)
{
    // null data centers means "no DC restriction"
    Set<String> dataCenters = isLocal ? Sets.newHashSet(DatabaseDescriptor.getLocalDataCenter()) : null;
    return forceRepairAsync(keyspace, isSequential, dataCenters, null, primaryRange, fullRepair, tableNames);
}
/**
 * Legacy entry point: maps the boolean sequential flag onto a
 * {@link RepairParallelism} ordinal and delegates.
 *
 * @deprecated use the RepairOption-based repair API instead
 */
@Deprecated
public int forceRepairRangeAsync(String beginToken,
                                 String endToken,
                                 String keyspaceName,
                                 boolean isSequential,
                                 Collection<String> dataCenters,
                                 Collection<String> hosts,
                                 boolean fullRepair,
                                 String... tableNames)
{
    return forceRepairRangeAsync(beginToken, endToken, keyspaceName,
                                 isSequential ? RepairParallelism.SEQUENTIAL.ordinal() : RepairParallelism.PARALLEL.ordinal(),
                                 dataCenters, hosts, fullRepair, tableNames);
}
/**
 * Legacy entry point: repairs only the given token range, built into a
 * subrange {@link RepairOption}.
 *
 * @param parallelismDegree ordinal into {@link RepairParallelism}; validated below
 * @deprecated use the RepairOption-based repair API instead
 */
@Deprecated
public int forceRepairRangeAsync(String beginToken,
                                 String endToken,
                                 String keyspaceName,
                                 int parallelismDegree,
                                 Collection<String> dataCenters,
                                 Collection<String> hosts,
                                 boolean fullRepair,
                                 String... tableNames)
{
    if (parallelismDegree < 0 || parallelismDegree > RepairParallelism.values().length - 1)
    {
        throw new IllegalArgumentException("Invalid parallelism degree specified: " + parallelismDegree);
    }
    RepairParallelism parallelism = RepairParallelism.values()[parallelismDegree];
    // snapshot repair relies on hard links, which Windows lacks support for here
    if (FBUtilities.isWindows() && parallelism != RepairParallelism.PARALLEL)
    {
        logger.warn("Snapshot-based repair is not yet supported on Windows. Reverting to parallel repair.");
        parallelism = RepairParallelism.PARALLEL;
    }
    if (!fullRepair)
        logger.warn("Incremental repair can't be requested with subrange repair " +
                    "because each subrange repair would generate an anti-compacted table. " +
                    "The repair will occur but without anti-compaction.");
    Collection<Range<Token>> repairingRange = createRepairRangeFrom(beginToken, endToken);
    // positional args: (parallelism, primaryRange, incremental, trace, jobThreads, ranges, isSubrangeRepair)
    RepairOption options = new RepairOption(parallelism, false, !fullRepair, false, 1, repairingRange, true);
    if (dataCenters != null)
    {
        options.getDataCenters().addAll(dataCenters);
    }
    if (hosts != null)
    {
        options.getHosts().addAll(hosts);
    }
    if (tableNames != null)
    {
        for (String table : tableNames)
        {
            options.getColumnFamilies().add(table);
        }
    }
    logger.info("starting user-requested repair of range {} for keyspace {} and column families {}",
                repairingRange, keyspaceName, tableNames);
    return forceRepairAsync(keyspaceName, options, true);
}
/**
 * Legacy entry point: translates the isLocal flag into a data-center
 * restriction (local DC only) and delegates.
 *
 * @deprecated use the RepairOption-based repair API instead
 */
@Deprecated
public int forceRepairRangeAsync(String beginToken,
                                 String endToken,
                                 String keyspaceName,
                                 boolean isSequential,
                                 boolean isLocal,
                                 boolean fullRepair,
                                 String... tableNames)
{
    // null data centers means "no DC restriction"
    Set<String> dataCenters = isLocal ? Sets.newHashSet(DatabaseDescriptor.getLocalDataCenter()) : null;
    return forceRepairRangeAsync(beginToken, endToken, keyspaceName, isSequential, dataCenters, null, fullRepair, tableNames);
}
/**
 * Create collection of ranges that match ring layout from given tokens.
 *
 * The requested (begin, end] span is split at every ring token it crosses,
 * walking the sorted token list with wrap-around (modular index) so a range
 * crossing the ring's minimum token is handled too.
 *
 * @param beginToken beginning token of the range
 * @param endToken end token of the range
 * @return collection of ranges that match ring layout in TokenMetadata
 */
@VisibleForTesting
Collection<Range<Token>> createRepairRangeFrom(String beginToken, String endToken)
{
    Token parsedBeginToken = getTokenFactory().fromString(beginToken);
    Token parsedEndToken = getTokenFactory().fromString(endToken);
    // Break up given range to match ring layout in TokenMetadata
    ArrayList<Range<Token>> repairingRange = new ArrayList<>();
    ArrayList<Token> tokens = new ArrayList<>(tokenMetadata.sortedTokens());
    // ensure both requested endpoints are split points too
    if (!tokens.contains(parsedBeginToken))
    {
        tokens.add(parsedBeginToken);
    }
    if (!tokens.contains(parsedEndToken))
    {
        tokens.add(parsedEndToken);
    }
    // tokens now contain all tokens including our endpoints
    Collections.sort(tokens);
    int start = tokens.indexOf(parsedBeginToken), end = tokens.indexOf(parsedEndToken);
    // walk from begin to end with wrap-around, emitting one range per adjacent token pair;
    // note: if begin == end this emits no ranges
    for (int i = start; i != end; i = (i+1) % tokens.size())
    {
        Range<Token> range = new Range<>(tokens.get(i), tokens.get((i+1) % tokens.size()));
        repairingRange.add(range);
    }
    return repairingRange;
}
/** Returns the token factory of the cluster's configured partitioner. */
public TokenFactory getTokenFactory()
{
    return tokenMetadata.partitioner.getTokenFactory();
}
/**
 * Starts a repair on a dedicated thread and returns immediately.
 *
 * @param keyspace keyspace to repair
 * @param options fully-built repair options
 * @param legacy whether to also report progress through the legacy JMX listener
 * @return the repair command number, or 0 when there is nothing to repair
 *         (no ranges, or replication factor below 2)
 */
public int forceRepairAsync(String keyspace, RepairOption options, boolean legacy)
{
    if (options.getRanges().isEmpty() || Keyspace.open(keyspace).getReplicationStrategy().getReplicationFactor() < 2)
        return 0;
    int cmd = nextRepairCommand.incrementAndGet();
    new Thread(createRepairTask(cmd, keyspace, options, legacy)).start();
    return cmd;
}
/**
 * Builds the runnable that performs a repair command, wiring up progress
 * listeners. Rejects data-center restrictions that exclude the local DC.
 *
 * @throws IllegalArgumentException if the local DC is not part of the repair
 */
private FutureTask<Object> createRepairTask(final int cmd, final String keyspace, final RepairOption options, boolean legacy)
{
    if (!options.getDataCenters().isEmpty() && !options.getDataCenters().contains(DatabaseDescriptor.getLocalDataCenter()))
    {
        throw new IllegalArgumentException("the local data center must be part of the repair");
    }
    RepairRunnable task = new RepairRunnable(this, cmd, options, keyspace);
    task.addProgressListener(progressSupport);
    // legacy callers listen on the old JMX notification channel as well
    if (legacy)
        task.addProgressListener(legacyProgressSupport);
    return new FutureTask<>(task, null);
}
public void forceTerminateAllRepairSessions() {
ActiveRepairService.instance.terminateSessions();
}
/* End of MBean interface methods */
/**
* Get the "primary ranges" for the specified keyspace and endpoint.
* "Primary ranges" are the ranges that the node is responsible for storing replica primarily.
* The node that stores replica primarily is defined as the first node returned
* by {@link AbstractReplicationStrategy#calculateNaturalEndpoints}.
*
* @param keyspace Keyspace name to check primary ranges
* @param ep endpoint we are interested in.
* @return primary ranges for the specified endpoint.
*/
public Collection<Range<Token>> getPrimaryRangesForEndpoint(String keyspace, InetAddress ep)
{
AbstractReplicationStrategy strategy = Keyspace.open(keyspace).getReplicationStrategy();
Collection<Range<Token>> primaryRanges = new HashSet<>();
TokenMetadata metadata = tokenMetadata.cloneOnlyTokenMap();
for (Token token : metadata.sortedTokens())
{
List<InetAddress> endpoints = strategy.calculateNaturalEndpoints(token, metadata);
if (endpoints.size() > 0 && endpoints.get(0).equals(ep))
primaryRanges.add(new Range<>(metadata.getPredecessor(token), token));
}
return primaryRanges;
}
/**
* Get the "primary ranges" within local DC for the specified keyspace and endpoint.
*
* @see #getPrimaryRangesForEndpoint(String, java.net.InetAddress)
* @param keyspace Keyspace name to check primary ranges
* @param referenceEndpoint endpoint we are interested in.
* @return primary ranges within local DC for the specified endpoint.
*/
public Collection<Range<Token>> getPrimaryRangeForEndpointWithinDC(String keyspace, InetAddress referenceEndpoint)
{
TokenMetadata metadata = tokenMetadata.cloneOnlyTokenMap();
String localDC = DatabaseDescriptor.getEndpointSnitch().getDatacenter(referenceEndpoint);
Collection<InetAddress> localDcNodes = metadata.getTopology().getDatacenterEndpoints().get(localDC);
AbstractReplicationStrategy strategy = Keyspace.open(keyspace).getReplicationStrategy();
Collection<Range<Token>> localDCPrimaryRanges = new HashSet<>();
for (Token token : metadata.sortedTokens())
{
List<InetAddress> endpoints = strategy.calculateNaturalEndpoints(token, metadata);
for (InetAddress endpoint : endpoints)
{
if (localDcNodes.contains(endpoint))
{
if (endpoint.equals(referenceEndpoint))
{
localDCPrimaryRanges.add(new Range<>(metadata.getPredecessor(token), token));
}
break;
}
}
}
return localDCPrimaryRanges;
}
/**
* Get all ranges an endpoint is responsible for (by keyspace)
* @param ep endpoint we are interested in.
* @return ranges for the specified endpoint.
*/
Collection<Range<Token>> getRangesForEndpoint(String keyspaceName, InetAddress ep)
{
return Keyspace.open(keyspaceName).getReplicationStrategy().getAddressRanges().get(ep);
}
/**
* Get all ranges that span the ring given a set
* of tokens. All ranges are in sorted order of
* ranges.
* @return ranges in sorted order
*/
public List<Range<Token>> getAllRanges(List<Token> sortedTokens)
{
if (logger.isTraceEnabled())
logger.trace("computing ranges for {}", StringUtils.join(sortedTokens, ", "));
if (sortedTokens.isEmpty())
return Collections.emptyList();
int size = sortedTokens.size();
List<Range<Token>> ranges = new ArrayList<>(size + 1);
for (int i = 1; i < size; ++i)
{
Range<Token> range = new Range<>(sortedTokens.get(i - 1), sortedTokens.get(i));
ranges.add(range);
}
Range<Token> range = new Range<>(sortedTokens.get(size - 1), sortedTokens.get(0));
ranges.add(range);
return ranges;
}
/**
* This method returns the N endpoints that are responsible for storing the
* specified key i.e for replication.
*
* @param keyspaceName keyspace name also known as keyspace
* @param cf Column family name
* @param key key for which we need to find the endpoint
* @return the endpoint responsible for this key
*/
public List<InetAddress> getNaturalEndpoints(String keyspaceName, String cf, String key)
{
KeyspaceMetadata ksMetaData = Schema.instance.getKSMetaData(keyspaceName);
if (ksMetaData == null)
throw new IllegalArgumentException("Unknown keyspace '" + keyspaceName + "'");
CFMetaData cfMetaData = ksMetaData.getTableOrViewNullable(cf);
if (cfMetaData == null)
throw new IllegalArgumentException("Unknown table '" + cf + "' in keyspace '" + keyspaceName + "'");
return getNaturalEndpoints(keyspaceName, tokenMetadata.partitioner.getToken(cfMetaData.getKeyValidator().fromString(key)));
}
public List<InetAddress> getNaturalEndpoints(String keyspaceName, ByteBuffer key)
{
return getNaturalEndpoints(keyspaceName, tokenMetadata.partitioner.getToken(key));
}
/**
* This method returns the N endpoints that are responsible for storing the
* specified key i.e for replication.
*
* @param keyspaceName keyspace name also known as keyspace
* @param pos position for which we need to find the endpoint
* @return the endpoint responsible for this token
*/
public List<InetAddress> getNaturalEndpoints(String keyspaceName, RingPosition pos)
{
return Keyspace.open(keyspaceName).getReplicationStrategy().getNaturalEndpoints(pos);
}
/**
* Returns the endpoints currently responsible for storing the token plus pending ones
*/
public Iterable<InetAddress> getNaturalAndPendingEndpoints(String keyspaceName, Token token)
{
return Iterables.concat(getNaturalEndpoints(keyspaceName, token), tokenMetadata.pendingEndpointsFor(token, keyspaceName));
}
/**
* This method attempts to return N endpoints that are responsible for storing the
* specified key i.e for replication.
*
* @param keyspace keyspace name also known as keyspace
* @param key key for which we need to find the endpoint
* @return the endpoint responsible for this key
*/
public List<InetAddress> getLiveNaturalEndpoints(Keyspace keyspace, ByteBuffer key)
{
return getLiveNaturalEndpoints(keyspace, tokenMetadata.decorateKey(key));
}
public List<InetAddress> getLiveNaturalEndpoints(Keyspace keyspace, RingPosition pos)
{
List<InetAddress> endpoints = keyspace.getReplicationStrategy().getNaturalEndpoints(pos);
List<InetAddress> liveEps = new ArrayList<>(endpoints.size());
for (InetAddress endpoint : endpoints)
{
if (FailureDetector.instance.isAlive(endpoint))
liveEps.add(endpoint);
}
return liveEps;
}
    /**
     * Dynamically adjusts the logback level for the given class/package qualifier.
     * If both arguments are blank, the default logging configuration is reloaded via JMX.
     * If only the level is blank, any explicit level set on the qualifier's logger is cleared.
     * Otherwise the parsed level is applied (unparseable levels fall back to logback's default).
     *
     * @param classQualifier logger name (class or package); may be blank
     * @param rawLevel level name to apply; may be blank
     * @throws Exception on JMX lookup/ObjectName failures
     */
    public void setLoggingLevel(String classQualifier, String rawLevel) throws Exception
    {
        ch.qos.logback.classic.Logger logBackLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(classQualifier);
        // if both classQualifier and rawLevel are empty, reload from configuration
        if (StringUtils.isBlank(classQualifier) && StringUtils.isBlank(rawLevel) )
        {
            JMXConfiguratorMBean jmxConfiguratorMBean = JMX.newMBeanProxy(ManagementFactory.getPlatformMBeanServer(),
                    new ObjectName("ch.qos.logback.classic:Name=default,Type=ch.qos.logback.classic.jmx.JMXConfigurator"),
                    JMXConfiguratorMBean.class);
            jmxConfiguratorMBean.reloadDefaultConfiguration();
            return;
        }
        // classQualifier is set, but blank level given: clear the explicit level so the
        // logger falls back to its inherited (effective) level.
        else if (StringUtils.isNotBlank(classQualifier) && StringUtils.isBlank(rawLevel) )
        {
            if (logBackLogger.getLevel() != null || hasAppenders(logBackLogger))
                logBackLogger.setLevel(null);
            return;
        }
        ch.qos.logback.classic.Level level = ch.qos.logback.classic.Level.toLevel(rawLevel);
        logBackLogger.setLevel(level);
        logger.info("set log level to {} for classes under '{}' (if the level doesn't look like '{}' then the logger couldn't parse '{}')", level, classQualifier, rawLevel, rawLevel);
    }
/**
* @return the runtime logging levels for all the configured loggers
*/
@Override
public Map<String,String>getLoggingLevels() {
Map<String, String> logLevelMaps = Maps.newLinkedHashMap();
LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
for (ch.qos.logback.classic.Logger logger : lc.getLoggerList())
{
if(logger.getLevel() != null || hasAppenders(logger))
logLevelMaps.put(logger.getName(), logger.getLevel().toString());
}
return logLevelMaps;
}
private boolean hasAppenders(ch.qos.logback.classic.Logger logger) {
Iterator<Appender<ILoggingEvent>> it = logger.iteratorForAppenders();
return it.hasNext();
}
    /**
     * @return list of Token ranges (_not_ keys!) together with estimated key count,
     *      breaking up the data this node is responsible for into pieces of roughly keysPerSplit
     */
    public List<Pair<Range<Token>, Long>> getSplits(String keyspaceName, String cfName, Range<Token> range, int keysPerSplit)
    {
        Keyspace t = Keyspace.open(keyspaceName);
        ColumnFamilyStore cfs = t.getColumnFamilyStore(cfName);
        List<DecoratedKey> keys = keySamples(Collections.singleton(cfs), range);
        long totalRowCountEstimate = cfs.estimatedKeysForRange(range);
        // splitCount should be much smaller than number of key samples, to avoid huge sampling error
        int minSamplesPerSplit = 4;
        int maxSplitCount = keys.size() / minSamplesPerSplit + 1;
        // Clamp to [1, maxSplitCount] so we always produce at least one split and never
        // exceed what the available samples can support.
        int splitCount = Math.max(1, Math.min(maxSplitCount, (int)(totalRowCountEstimate / keysPerSplit)));
        List<Token> tokens = keysToTokens(range, keys);
        return getSplits(tokens, splitCount, cfs);
    }
    // Divides the sampled token list into splitCount contiguous ranges of roughly equal sample
    // count, pairing each range with its estimated key count (floored at minIndexInterval).
    private List<Pair<Range<Token>, Long>> getSplits(List<Token> tokens, int splitCount, ColumnFamilyStore cfs)
    {
        double step = (double) (tokens.size() - 1) / splitCount;
        Token prevToken = tokens.get(0);
        List<Pair<Range<Token>, Long>> splits = Lists.newArrayListWithExpectedSize(splitCount);
        for (int i = 1; i <= splitCount; i++)
        {
            // Rounding the fractional step spreads the samples evenly across all splits;
            // i == splitCount lands exactly on the last token, closing the range.
            int index = (int) Math.round(i * step);
            Token token = tokens.get(index);
            Range<Token> range = new Range<>(prevToken, token);
            // always return an estimate > 0 (see CASSANDRA-7322)
            splits.add(Pair.create(range, Math.max(cfs.metadata.params.minIndexInterval, cfs.estimatedKeysForRange(range))));
            prevToken = token;
        }
        return splits;
    }
private List<Token> keysToTokens(Range<Token> range, List<DecoratedKey> keys)
{
List<Token> tokens = Lists.newArrayListWithExpectedSize(keys.size() + 2);
tokens.add(range.left);
for (DecoratedKey key : keys)
tokens.add(key.getToken());
tokens.add(range.right);
return tokens;
}
private List<DecoratedKey> keySamples(Iterable<ColumnFamilyStore> cfses, Range<Token> range)
{
List<DecoratedKey> keys = new ArrayList<>();
for (ColumnFamilyStore cfs : cfses)
Iterables.addAll(keys, cfs.keySamples(range));
FBUtilities.sortSampledKeys(keys, range);
return keys;
}
    /**
     * Broadcast leaving status and update local tokenMetadata accordingly
     */
    private void startLeaving()
    {
        // Announce LEAVING via gossip first so peers see the transition, then mirror it locally.
        Gossiper.instance.addLocalApplicationState(ApplicationState.STATUS, valueFactory.leaving(getLocalTokens()));
        tokenMetadata.addLeavingEndpoint(FBUtilities.getBroadcastAddress());
        // Recompute pending ranges now that this endpoint is marked as leaving.
        PendingRangeCalculatorService.instance.update();
    }
    /**
     * Removes this live node from the ring: validates preconditions, announces LEAVING,
     * streams its data and hints to the new owners via unbootstrap, and finally shuts
     * down client, gossip and messaging services.
     *
     * @throws InterruptedException if interrupted while sleeping for batch processing
     * @throws UnsupportedOperationException if the node is not a normal ring member,
     *         is the last node, or data is currently streaming to it
     */
    public void decommission() throws InterruptedException
    {
        if (!tokenMetadata.isMember(FBUtilities.getBroadcastAddress()))
            throw new UnsupportedOperationException("local node is not a member of the token ring yet");
        // cloneAfterAllLeft simulates all currently-leaving nodes being gone; at least one
        // other node must remain to take over the data.
        if (tokenMetadata.cloneAfterAllLeft().sortedTokens().size() < 2)
            throw new UnsupportedOperationException("no other normal nodes in the ring; decommission would be pointless");
        if (operationMode != Mode.NORMAL)
            throw new UnsupportedOperationException("Node in " + operationMode + " state; wait for status to become normal or restart");
        PendingRangeCalculatorService.instance.blockUntilFinished();
        for (String keyspaceName : Schema.instance.getNonLocalStrategyKeyspaces())
        {
            // Refuse to leave while some other operation is still streaming data to us.
            if (tokenMetadata.getPendingRanges(keyspaceName, FBUtilities.getBroadcastAddress()).size() > 0)
                throw new UnsupportedOperationException("data is currently moving to this node; unable to leave the ring");
        }
        if (logger.isDebugEnabled())
            logger.debug("DECOMMISSIONING");
        startLeaving();
        // Give in-flight batches and the cluster-wide pending-range recalculation time to settle.
        long timeout = Math.max(RING_DELAY, BatchlogManager.instance.getBatchlogTimeout());
        setMode(Mode.LEAVING, "sleeping " + timeout + " ms for batch processing and pending range setup", true);
        Thread.sleep(timeout);
        // Runs after all data/hints have been streamed away (see unbootstrap).
        Runnable finishLeaving = new Runnable()
        {
            public void run()
            {
                shutdownClientServers();
                Gossiper.instance.stop();
                try {
                    MessagingService.instance().shutdown();
                } catch (IOError ioe) {
                    logger.info("failed to shutdown message service: {}", ioe);
                }
                StageManager.shutdownNow();
                SystemKeyspace.setBootstrapState(SystemKeyspace.BootstrapState.DECOMMISSIONED);
                setMode(Mode.DECOMMISSIONED, true);
                // let op be responsible for killing the process
            }
        };
        unbootstrap(finishLeaving);
    }
    // Removes this endpoint from the local ring view, gossips LEFT status, and sleeps long
    // enough (at least RING_DELAY) for the announcement to propagate through the cluster.
    private void leaveRing()
    {
        // Persist state so a restart will rejoin via bootstrap rather than resume membership.
        SystemKeyspace.setBootstrapState(SystemKeyspace.BootstrapState.NEEDS_BOOTSTRAP);
        tokenMetadata.removeEndpoint(FBUtilities.getBroadcastAddress());
        PendingRangeCalculatorService.instance.update();
        Gossiper.instance.addLocalApplicationState(ApplicationState.STATUS, valueFactory.left(getLocalTokens(),Gossiper.computeExpireTime()));
        int delay = Math.max(RING_DELAY, Gossiper.intervalInMillis * 2);
        logger.info("Announcing that I have left the ring for {}ms", delay);
        Uninterruptibles.sleepUninterruptibly(delay, TimeUnit.MILLISECONDS);
    }
private void unbootstrap(Runnable onFinish)
{
Map<String, Multimap<Range<Token>, InetAddress>> rangesToStream = new HashMap<>();
for (String keyspaceName : Schema.instance.getNonLocalStrategyKeyspaces())
{
Multimap<Range<Token>, InetAddress> rangesMM = getChangedRangesForLeaving(keyspaceName, FBUtilities.getBroadcastAddress());
if (logger.isDebugEnabled())
logger.debug("Ranges needing transfer are [{}]", StringUtils.join(rangesMM.keySet(), ","));
rangesToStream.put(keyspaceName, rangesMM);
}
setMode(Mode.LEAVING, "replaying batch log and streaming data to other nodes", true);
// Start with BatchLog replay, which may create hints but no writes since this is no longer a valid endpoint.
Future<?> batchlogReplay = BatchlogManager.instance.startBatchlogReplay();
Future<StreamState> streamSuccess = streamRanges(rangesToStream);
// Wait for batch log to complete before streaming hints.
logger.debug("waiting for batch log processing.");
try
{
batchlogReplay.get();
}
catch (ExecutionException | InterruptedException e)
{
throw new RuntimeException(e);
}
setMode(Mode.LEAVING, "streaming hints to other nodes", true);
Future hintsSuccess = streamHints();
// wait for the transfer runnables to signal the latch.
logger.debug("waiting for stream acks.");
try
{
streamSuccess.get();
hintsSuccess.get();
}
catch (ExecutionException | InterruptedException e)
{
throw new RuntimeException(e);
}
logger.debug("stream acks all received.");
leaveRing();
onFinish.run();
}
private Future streamHints()
{
return HintsService.instance.transferHints(this::getPreferredHintsStreamTarget);
}
/**
* Find the best target to stream hints to. Currently the closest peer according to the snitch
*/
private UUID getPreferredHintsStreamTarget()
{
List<InetAddress> candidates = new ArrayList<>(StorageService.instance.getTokenMetadata().cloneAfterAllLeft().getAllEndpoints());
candidates.remove(FBUtilities.getBroadcastAddress());
for (Iterator<InetAddress> iter = candidates.iterator(); iter.hasNext(); )
{
InetAddress address = iter.next();
if (!FailureDetector.instance.isAlive(address))
iter.remove();
}
if (candidates.isEmpty())
{
logger.warn("Unable to stream hints since no live endpoints seen");
throw new RuntimeException("Unable to stream hints since no live endpoints seen");
}
else
{
// stream to the closest peer as chosen by the snitch
DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), candidates);
InetAddress hintsDestinationHost = candidates.get(0);
InetAddress preferred = SystemKeyspace.getPreferredIP(hintsDestinationHost);
return tokenMetadata.getHostId(preferred);
}
}
public void move(String newToken) throws IOException
{
try
{
getTokenFactory().validate(newToken);
}
catch (ConfigurationException e)
{
throw new IOException(e.getMessage());
}
move(getTokenFactory().fromString(newToken));
}
    /**
     * move the node to new token or find a new token to boot to according to load
     *
     * @param newToken new token to boot to, or if null, find balanced token to boot to
     *
     * @throws IOException on any I/O operation error
     */
    private void move(Token newToken) throws IOException
    {
        if (newToken == null)
            throw new IOException("Can't move to the undefined (null) token.");
        if (tokenMetadata.sortedTokens().contains(newToken))
            throw new IOException("target token " + newToken + " is already owned by another node.");
        // address of the current node
        InetAddress localAddress = FBUtilities.getBroadcastAddress();
        // This doesn't make any sense in a vnodes environment.
        if (getTokenMetadata().getTokens(localAddress).size() > 1)
        {
            logger.error("Invalid request to move(Token); This node has more than one token and cannot be moved thusly.");
            throw new UnsupportedOperationException("This node has more than one token and cannot be moved thusly.");
        }
        List<String> keyspacesToProcess = Schema.instance.getNonLocalStrategyKeyspaces();
        PendingRangeCalculatorService.instance.blockUntilFinished();
        // checking if data is moving to this node
        for (String keyspaceName : keyspacesToProcess)
        {
            if (tokenMetadata.getPendingRanges(keyspaceName, localAddress).size() > 0)
                throw new UnsupportedOperationException("data is currently moving to this node; unable to leave the ring");
        }
        // Announce the MOVING state, then wait RING_DELAY so all peers see it before streaming.
        Gossiper.instance.addLocalApplicationState(ApplicationState.STATUS, valueFactory.moving(newToken));
        setMode(Mode.MOVING, String.format("Moving %s from %s to %s.", localAddress, getLocalTokens().iterator().next(), newToken), true);
        setMode(Mode.MOVING, String.format("Sleeping %s ms before start streaming/fetching ranges", RING_DELAY), true);
        Uninterruptibles.sleepUninterruptibly(RING_DELAY, TimeUnit.MILLISECONDS);
        // RangeRelocator computes which ranges to fetch and which to stream away for the new token.
        RangeRelocator relocator = new RangeRelocator(Collections.singleton(newToken), keyspacesToProcess);
        if (relocator.streamsNeeded())
        {
            setMode(Mode.MOVING, "fetching new ranges and streaming old ranges", true);
            try
            {
                relocator.stream().get();
            }
            catch (ExecutionException | InterruptedException e)
            {
                throw new RuntimeException("Interrupted while waiting for stream/fetch ranges to finish: " + e.getMessage());
            }
        }
        else
        {
            setMode(Mode.MOVING, "No ranges to fetch/stream", true);
        }
        setTokens(Collections.singleton(newToken)); // setting new token as we have everything settled
        if (logger.isDebugEnabled())
            logger.debug("Successfully moved to new token {}", getLocalTokens().iterator().next());
    }
    /**
     * Computes and executes the streaming plan needed when this node moves to new token(s):
     * requests the ranges it will newly own from their current replicas, and streams away
     * the ranges it will no longer own to their new replicas.
     */
    private class RangeRelocator
    {
        private final StreamPlan streamPlan = new StreamPlan("Relocation");
        private RangeRelocator(Collection<Token> tokens, List<String> keyspaceNames)
        {
            calculateToFromStreams(tokens, keyspaceNames);
        }
        // Populates streamPlan with all transfer and request operations for the move.
        private void calculateToFromStreams(Collection<Token> newTokens, List<String> keyspaceNames)
        {
            InetAddress localAddress = FBUtilities.getBroadcastAddress();
            IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
            TokenMetadata tokenMetaCloneAllSettled = tokenMetadata.cloneAfterAllSettled();
            // clone to avoid concurrent modification in calculateNaturalEndpoints
            TokenMetadata tokenMetaClone = tokenMetadata.cloneOnlyTokenMap();
            for (String keyspace : keyspaceNames)
            {
                // replication strategy of the current keyspace
                AbstractReplicationStrategy strategy = Keyspace.open(keyspace).getReplicationStrategy();
                Multimap<InetAddress, Range<Token>> endpointToRanges = strategy.getAddressRanges();
                logger.debug("Calculating ranges to stream and request for keyspace {}", keyspace);
                for (Token newToken : newTokens)
                {
                    // getting collection of the currently used ranges by this keyspace
                    Collection<Range<Token>> currentRanges = endpointToRanges.get(localAddress);
                    // collection of ranges which this node will serve after move to the new token
                    Collection<Range<Token>> updatedRanges = strategy.getPendingAddressRanges(tokenMetaClone, newToken, localAddress);
                    // ring ranges and endpoints associated with them
                    // this used to determine what nodes should we ping about range data
                    Multimap<Range<Token>, InetAddress> rangeAddresses = strategy.getRangeAddresses(tokenMetaClone);
                    // calculated parts of the ranges to request/stream from/to nodes in the ring
                    // (left = ranges to stream away, right = ranges to fetch)
                    Pair<Set<Range<Token>>, Set<Range<Token>>> rangesPerKeyspace = calculateStreamAndFetchRanges(currentRanges, updatedRanges);
                    /**
                     * In this loop we are going through all ranges "to fetch" and determining
                     * nodes in the ring responsible for data we are interested in
                     */
                    Multimap<Range<Token>, InetAddress> rangesToFetchWithPreferredEndpoints = ArrayListMultimap.create();
                    for (Range<Token> toFetch : rangesPerKeyspace.right)
                    {
                        for (Range<Token> range : rangeAddresses.keySet())
                        {
                            if (range.contains(toFetch))
                            {
                                List<InetAddress> endpoints = null;
                                if (useStrictConsistency)
                                {
                                    // Under strict consistency we must fetch from the single
                                    // replica that loses the range, not from the closest peer.
                                    Set<InetAddress> oldEndpoints = Sets.newHashSet(rangeAddresses.get(range));
                                    Set<InetAddress> newEndpoints = Sets.newHashSet(strategy.calculateNaturalEndpoints(toFetch.right, tokenMetaCloneAllSettled));
                                    //Due to CASSANDRA-5953 we can have a higher RF then we have endpoints.
                                    //So we need to be careful to only be strict when endpoints == RF
                                    if (oldEndpoints.size() == strategy.getReplicationFactor())
                                    {
                                        oldEndpoints.removeAll(newEndpoints);
                                        //No relocation required
                                        if (oldEndpoints.isEmpty())
                                            continue;
                                        assert oldEndpoints.size() == 1 : "Expected 1 endpoint but found " + oldEndpoints.size();
                                    }
                                    endpoints = Lists.newArrayList(oldEndpoints.iterator().next());
                                }
                                else
                                {
                                    endpoints = snitch.getSortedListByProximity(localAddress, rangeAddresses.get(range));
                                }
                                // storing range and preferred endpoint set
                                rangesToFetchWithPreferredEndpoints.putAll(toFetch, endpoints);
                            }
                        }
                        Collection<InetAddress> addressList = rangesToFetchWithPreferredEndpoints.get(toFetch);
                        if (addressList == null || addressList.isEmpty())
                            continue;
                        if (useStrictConsistency)
                        {
                            if (addressList.size() > 1)
                                throw new IllegalStateException("Multiple strict sources found for " + toFetch);
                            InetAddress sourceIp = addressList.iterator().next();
                            if (Gossiper.instance.isEnabled() && !Gossiper.instance.getEndpointStateForEndpoint(sourceIp).isAlive())
                                throw new RuntimeException("A node required to move the data consistently is down ("+sourceIp+"). If you wish to move the data from a potentially inconsistent replica, restart the node with -Dcassandra.consistent.rangemovement=false");
                        }
                    }
                    // calculating endpoints to stream current ranges to if needed
                    // in some situations node will handle current ranges as part of the new ranges
                    Multimap<InetAddress, Range<Token>> endpointRanges = HashMultimap.create();
                    for (Range<Token> toStream : rangesPerKeyspace.left)
                    {
                        Set<InetAddress> currentEndpoints = ImmutableSet.copyOf(strategy.calculateNaturalEndpoints(toStream.right, tokenMetaClone));
                        Set<InetAddress> newEndpoints = ImmutableSet.copyOf(strategy.calculateNaturalEndpoints(toStream.right, tokenMetaCloneAllSettled));
                        logger.debug("Range: {} Current endpoints: {} New endpoints: {}", toStream, currentEndpoints, newEndpoints);
                        // Only endpoints gaining the range need a copy streamed to them.
                        for (InetAddress address : Sets.difference(newEndpoints, currentEndpoints))
                        {
                            logger.debug("Range {} has new owner {}", toStream, address);
                            endpointRanges.put(address, toStream);
                        }
                    }
                    // stream ranges
                    for (InetAddress address : endpointRanges.keySet())
                    {
                        logger.debug("Will stream range {} of keyspace {} to endpoint {}", endpointRanges.get(address), keyspace, address);
                        InetAddress preferred = SystemKeyspace.getPreferredIP(address);
                        streamPlan.transferRanges(address, preferred, keyspace, endpointRanges.get(address));
                    }
                    // stream requests
                    Multimap<InetAddress, Range<Token>> workMap = RangeStreamer.getWorkMap(rangesToFetchWithPreferredEndpoints, keyspace, FailureDetector.instance);
                    for (InetAddress address : workMap.keySet())
                    {
                        logger.debug("Will request range {} of keyspace {} from endpoint {}", workMap.get(address), keyspace, address);
                        InetAddress preferred = SystemKeyspace.getPreferredIP(address);
                        streamPlan.requestRanges(address, preferred, keyspace, workMap.get(address));
                    }
                    logger.debug("Keyspace {}: work map {}.", keyspace, workMap);
                }
            }
        }
        // Executes the computed streaming plan.
        public Future<StreamState> stream()
        {
            return streamPlan.execute();
        }
        // @return true if any transfer or request operations were added to the plan.
        public boolean streamsNeeded()
        {
            return !streamPlan.isEmpty();
        }
    }
/**
* Get the status of a token removal.
*/
public String getRemovalStatus()
{
if (removingNode == null) {
return "No token removals in process.";
}
return String.format("Removing token (%s). Waiting for replication confirmation from [%s].",
tokenMetadata.getToken(removingNode),
StringUtils.join(replicatingNodes, ","));
}
/**
* Force a remove operation to complete. This may be necessary if a remove operation
* blocks forever due to node/stream failure. removeNode() must be called
* first, this is a last resort measure. No further attempt will be made to restore replicas.
*/
public void forceRemoveCompletion()
{
if (!replicatingNodes.isEmpty() || !tokenMetadata.getLeavingEndpoints().isEmpty())
{
logger.warn("Removal not confirmed for for {}", StringUtils.join(this.replicatingNodes, ","));
for (InetAddress endpoint : tokenMetadata.getLeavingEndpoints())
{
UUID hostId = tokenMetadata.getHostId(endpoint);
Gossiper.instance.advertiseTokenRemoved(endpoint, hostId);
excise(tokenMetadata.getTokens(endpoint), endpoint);
}
replicatingNodes.clear();
removingNode = null;
}
else
{
logger.warn("No nodes to force removal on, call 'removenode' first");
}
}
    /**
     * Remove a node that has died, attempting to restore the replica count.
     * If the node is alive, decommission should be attempted. If decommission
     * fails, then removeNode should be called. If we fail while trying to
     * restore the replica count, finally forceRemoveCompletion should be
     * called to forcibly remove the node without regard to replica count.
     *
     * @param hostIdString Host ID for the node
     */
    public void removeNode(String hostIdString)
    {
        InetAddress myAddress = FBUtilities.getBroadcastAddress();
        UUID localHostId = tokenMetadata.getHostId(myAddress);
        UUID hostId = UUID.fromString(hostIdString);
        InetAddress endpoint = tokenMetadata.getEndpointForHostId(hostId);
        // Precondition checks: the target must exist, be a ring member, not be this node,
        // and must be dead (live nodes should decommission instead).
        if (endpoint == null)
            throw new UnsupportedOperationException("Host ID not found.");
        if (!tokenMetadata.isMember(endpoint))
            throw new UnsupportedOperationException("Node to be removed is not a member of the token ring");
        if (endpoint.equals(myAddress))
            throw new UnsupportedOperationException("Cannot remove self");
        if (Gossiper.instance.getLiveMembers().contains(endpoint))
            throw new UnsupportedOperationException("Node " + endpoint + " is alive and owns this ID. Use decommission command to remove it from the ring");
        // A leaving endpoint that is dead is already being removed.
        if (tokenMetadata.isLeaving(endpoint))
            logger.warn("Node {} is already being removed, continuing removal anyway", endpoint);
        if (!replicatingNodes.isEmpty())
            throw new UnsupportedOperationException("This node is already processing a removal. Wait for it to complete, or use 'removenode force' if this has failed.");
        Collection<Token> tokens = tokenMetadata.getTokens(endpoint);
        // Find the endpoints that are going to become responsible for data
        for (String keyspaceName : Schema.instance.getNonLocalStrategyKeyspaces())
        {
            // if the replication factor is 1 the data is lost so we shouldn't wait for confirmation
            if (Keyspace.open(keyspaceName).getReplicationStrategy().getReplicationFactor() == 1)
                continue;
            // get all ranges that change ownership (that is, a node needs
            // to take responsibility for new range)
            Multimap<Range<Token>, InetAddress> changedRanges = getChangedRangesForLeaving(keyspaceName, endpoint);
            IFailureDetector failureDetector = FailureDetector.instance;
            for (InetAddress ep : changedRanges.values())
            {
                if (failureDetector.isAlive(ep))
                    replicatingNodes.add(ep);
                else
                    logger.warn("Endpoint {} is down and will not receive data for re-replication of {}", ep, endpoint);
            }
        }
        removingNode = endpoint;
        tokenMetadata.addLeavingEndpoint(endpoint);
        PendingRangeCalculatorService.instance.update();
        // the gossiper will handle spoofing this node's state to REMOVING_TOKEN for us
        // we add our own token so other nodes can let us know when they're done
        Gossiper.instance.advertiseRemoving(endpoint, hostId, localHostId);
        // kick off streaming commands
        restoreReplicaCount(endpoint, myAddress);
        // wait for ReplicationFinishedVerbHandler to signal we're done
        while (!replicatingNodes.isEmpty())
        {
            Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
        }
        excise(tokens, endpoint);
        // gossiper will indicate the token has left
        Gossiper.instance.advertiseTokenRemoved(endpoint, hostId);
        replicatingNodes.clear();
        removingNode = null;
    }
public void confirmReplication(InetAddress node)
{
// replicatingNodes can be empty in the case where this node used to be a removal coordinator,
// but restarted before all 'replication finished' messages arrived. In that case, we'll
// still go ahead and acknowledge it.
if (!replicatingNodes.isEmpty())
{
replicatingNodes.remove(node);
}
else
{
logger.info("Received unexpected REPLICATION_FINISHED message from {}. Was this node recently a removal coordinator?", node);
}
}
public String getOperationMode()
{
return operationMode.toString();
}
public boolean isStarting()
{
return operationMode == Mode.STARTING;
}
public boolean isMoving()
{
return operationMode == Mode.MOVING;
}
public boolean isJoining()
{
return operationMode == Mode.JOINING;
}
public String getDrainProgress()
{
return String.format("Drained %s/%s ColumnFamilies", remainingCFs, totalCFs);
}
/**
* Shuts node off to writes, empties memtables and the commit log.
* There are two differences between drain and the normal shutdown hook:
* - Drain waits for in-progress streaming to complete
* - Drain flushes *all* columnfamilies (shutdown hook only flushes non-durable CFs)
*/
public synchronized void drain() throws IOException, InterruptedException, ExecutionException
{
inShutdownHook = true;
BatchlogManager.instance.shutdown();
HintsService.instance.pauseDispatch();
ExecutorService counterMutationStage = StageManager.getStage(Stage.COUNTER_MUTATION);
ExecutorService viewMutationStage = StageManager.getStage(Stage.VIEW_MUTATION);
ExecutorService mutationStage = StageManager.getStage(Stage.MUTATION);
if (mutationStage.isTerminated()
&& counterMutationStage.isTerminated()
&& viewMutationStage.isTerminated())
{
logger.warn("Cannot drain node (did it already happen?)");
return;
}
setMode(Mode.DRAINING, "starting drain process", true);
shutdownClientServers();
ScheduledExecutors.optionalTasks.shutdown();
Gossiper.instance.stop();
setMode(Mode.DRAINING, "shutting down MessageService", false);
MessagingService.instance().shutdown();
setMode(Mode.DRAINING, "clearing mutation stage", false);
viewMutationStage.shutdown();
counterMutationStage.shutdown();
mutationStage.shutdown();
viewMutationStage.awaitTermination(3600, TimeUnit.SECONDS);
counterMutationStage.awaitTermination(3600, TimeUnit.SECONDS);
mutationStage.awaitTermination(3600, TimeUnit.SECONDS);
StorageProxy.instance.verifyNoHintsInProgress();
setMode(Mode.DRAINING, "flushing column families", false);
// count CFs first, since forceFlush could block for the flushWriter to get a queue slot empty
totalCFs = 0;
for (Keyspace keyspace : Keyspace.nonSystem())
totalCFs += keyspace.getColumnFamilyStores().size();
remainingCFs = totalCFs;
// flush
List<Future<?>> flushes = new ArrayList<>();
for (Keyspace keyspace : Keyspace.nonSystem())
{
for (ColumnFamilyStore cfs : keyspace.getColumnFamilyStores())
flushes.add(cfs.forceFlush());
}
// wait for the flushes.
// TODO this is a godawful way to track progress, since they flush in parallel. a long one could
// thus make several short ones "instant" if we wait for them later.
for (Future f : flushes)
{
FBUtilities.waitOnFuture(f);
remainingCFs--;
}
// flush the system ones after all the rest are done, just in case flushing modifies any system state
// like CASSANDRA-5151. don't bother with progress tracking since system data is tiny.
flushes.clear();
for (Keyspace keyspace : Keyspace.system())
{
for (ColumnFamilyStore cfs : keyspace.getColumnFamilyStores())
flushes.add(cfs.forceFlush());
}
FBUtilities.waitOnFutures(flushes);
BatchlogManager.instance.shutdown();
HintsService.instance.shutdownBlocking();
// Interrupt on going compaction and shutdown to prevent further compaction
CompactionManager.instance.forceShutdown();
// whilst we've flushed all the CFs, which will have recycled all completed segments, we want to ensure
// there are no segments to replay, so we force the recycling of any remaining (should be at most one)
CommitLog.instance.forceRecycleAllSegments();
ColumnFamilyStore.shutdownPostFlushExecutor();
CommitLog.instance.shutdownBlocking();
// wait for miscellaneous tasks like sstable and commitlog segment deletion
ScheduledExecutors.nonPeriodicTasks.shutdown();
if (!ScheduledExecutors.nonPeriodicTasks.awaitTermination(1, TimeUnit.MINUTES))
logger.warn("Miscellaneous task executor still busy after one minute; proceeding with shutdown");
setMode(Mode.DRAINED, true);
}
// Never ever do this at home. Used by tests.
@VisibleForTesting
public IPartitioner setPartitionerUnsafe(IPartitioner newPartitioner)
{
    // Remember the previous partitioner so tests can restore it afterwards.
    final IPartitioner previous = DatabaseDescriptor.setPartitionerUnsafe(newPartitioner);
    // Rebuild partitioner-dependent state to match the replacement.
    valueFactory = new VersionedValue.VersionedValueFactory(newPartitioner);
    tokenMetadata = tokenMetadata.cloneWithNewPartitioner(newPartitioner);
    return previous;
}
// Swaps in the supplied token metadata and hands back the previous instance. Test-only.
TokenMetadata setTokenMetadataUnsafe(TokenMetadata tmd)
{
    final TokenMetadata previous = tokenMetadata;
    tokenMetadata = tmd;
    return previous;
}
/**
 * Truncates the given table cluster-wide, blocking until complete.
 *
 * @param keyspace keyspace containing the table
 * @param table table to truncate
 * @throws TimeoutException if the truncation does not complete in time
 * @throws IOException if some replicas were unavailable (wraps the UnavailableException)
 */
public void truncate(String keyspace, String table) throws TimeoutException, IOException
{
    try
    {
        StorageProxy.truncateBlocking(keyspace, table);
    }
    catch (UnavailableException e)
    {
        // Preserve the original exception as the cause instead of discarding it —
        // the previous code kept only the message and lost the stack trace.
        throw new IOException(e.getMessage(), e);
    }
}
/**
 * Computes the fraction of the ring owned by each endpoint, ignoring replication.
 *
 * @return map from endpoint to the summed ownership of all tokens it holds,
 *         ordered by token
 */
public Map<InetAddress, Float> getOwnership()
{
    List<Token> sortedTokens = tokenMetadata.sortedTokens();
    // describeOwnership returns tokens in an unspecified order, let's re-order them
    Map<Token, Float> tokenMap = new TreeMap<Token, Float>(tokenMetadata.partitioner.describeOwnership(sortedTokens));
    Map<InetAddress, Float> nodeMap = new LinkedHashMap<>();
    for (Map.Entry<Token, Float> entry : tokenMap.entrySet())
    {
        InetAddress endpoint = tokenMetadata.getEndpoint(entry.getKey());
        // merge() accumulates ownership for endpoints holding several tokens (vnodes),
        // replacing the manual containsKey/get/put double lookup.
        nodeMap.merge(endpoint, entry.getValue(), Float::sum);
    }
    return nodeMap;
}
/**
 * Calculates ownership. If there are multiple DC's and the replication strategy is DC aware then ownership will be
 * calculated per dc, i.e. each DC will have total ring ownership divided amongst its nodes. Without replication
 * total ownership will be a multiple of the number of DC's and this value will then go up within each DC depending
 * on the number of replicas within itself. For DC unaware replication strategies, ownership without replication
 * will be 100%.
 *
 * @param keyspace keyspace to compute ownership for; may be null, in which case the first user keyspace
 *                 (or "system_traces" if there are none) is used, provided all user keyspaces share the
 *                 same replication settings
 * @return mapping from endpoint to its effective ownership fraction, grouped by datacenter
 * @throws IllegalStateException when node is not configured properly.
 */
public LinkedHashMap<InetAddress, Float> effectiveOwnership(String keyspace) throws IllegalStateException
{
    AbstractReplicationStrategy strategy;
    if (keyspace != null)
    {
        Keyspace keyspaceInstance = Schema.instance.getKeyspaceInstance(keyspace);
        if (keyspaceInstance == null)
            throw new IllegalArgumentException("The keyspace " + keyspace + ", does not exist");
        if (keyspaceInstance.getReplicationStrategy() instanceof LocalStrategy)
            throw new IllegalStateException("Ownership values for keyspaces with LocalStrategy are meaningless");
        strategy = keyspaceInstance.getReplicationStrategy();
    }
    else
    {
        // No keyspace given: fall back to the first user keyspace, but only if every user keyspace
        // uses the same replication settings — otherwise the numbers would be meaningless.
        List<String> userKeyspaces = Schema.instance.getUserKeyspaces();
        if (userKeyspaces.size() > 0)
        {
            keyspace = userKeyspaces.get(0);
            AbstractReplicationStrategy replicationStrategy = Schema.instance.getKeyspaceInstance(keyspace).getReplicationStrategy();
            for (String keyspaceName : userKeyspaces)
            {
                if (!Schema.instance.getKeyspaceInstance(keyspaceName).getReplicationStrategy().hasSameSettings(replicationStrategy))
                    throw new IllegalStateException("Non-system keyspaces don't have the same replication settings, effective ownership information is meaningless");
            }
        }
        else
        {
            keyspace = "system_traces";
        }
        Keyspace keyspaceInstance = Schema.instance.getKeyspaceInstance(keyspace);
        if (keyspaceInstance == null)
            throw new IllegalArgumentException("The node does not have " + keyspace + " yet, probably still bootstrapping");
        strategy = keyspaceInstance.getReplicationStrategy();
    }
    TokenMetadata metadata = tokenMetadata.cloneOnlyTokenMap();
    Collection<Collection<InetAddress>> endpointsGroupedByDc = new ArrayList<>();
    // mapping of dc's to nodes, use sorted map so that we get dcs sorted
    SortedMap<String, Collection<InetAddress>> sortedDcsToEndpoints = new TreeMap<>();
    sortedDcsToEndpoints.putAll(metadata.getTopology().getDatacenterEndpoints().asMap());
    for (Collection<InetAddress> endpoints : sortedDcsToEndpoints.values())
        endpointsGroupedByDc.add(endpoints);
    Map<Token, Float> tokenOwnership = tokenMetadata.partitioner.describeOwnership(tokenMetadata.sortedTokens());
    LinkedHashMap<InetAddress, Float> finalOwnership = Maps.newLinkedHashMap();
    Multimap<InetAddress, Range<Token>> endpointToRanges = strategy.getAddressRanges();
    // calculate ownership per dc
    for (Collection<InetAddress> endpoints : endpointsGroupedByDc)
    {
        // calculate the ownership with replication and add the endpoint to the final ownership map
        for (InetAddress endpoint : endpoints)
        {
            float ownership = 0.0f;
            // an endpoint's effective ownership is the sum of the ownership of every range it replicates
            for (Range<Token> range : endpointToRanges.get(endpoint))
            {
                if (tokenOwnership.containsKey(range.right))
                    ownership += tokenOwnership.get(range.right);
            }
            finalOwnership.put(endpoint, ownership);
        }
    }
    return finalOwnership;
}
/** @return an unmodifiable snapshot of all keyspace names known to the local schema. */
public List<String> getKeyspaces()
{
    List<String> keyspaceNamesList = new ArrayList<>(Schema.instance.getKeyspaces());
    return Collections.unmodifiableList(keyspaceNamesList);
}
/** @return an unmodifiable view of all non-system keyspace names. */
public List<String> getNonSystemKeyspaces()
{
    return Collections.unmodifiableList(Schema.instance.getNonSystemKeyspaces());
}
/** @return an unmodifiable view of all keyspace names not using LocalStrategy replication. */
public List<String> getNonLocalStrategyKeyspaces()
{
    return Collections.unmodifiableList(Schema.instance.getNonLocalStrategyKeyspaces());
}
/**
 * Returns the materialized-view build status for every known host.
 *
 * @param keyspace keyspace the view belongs to
 * @param view name of the view
 * @return unmodifiable map from endpoint address (string form) to build status,
 *         with "UNKNOWN" for hosts that have not reported a status
 */
public Map<String, String> getViewBuildStatuses(String keyspace, String view)
{
    Map<UUID, String> coreViewStatus = SystemDistributedKeyspace.viewStatus(keyspace, view);
    Map<InetAddress, UUID> hostIdToEndpoint = tokenMetadata.getEndpointToHostIdMapForReading();
    Map<String, String> result = new HashMap<>();
    for (Map.Entry<InetAddress, UUID> entry : hostIdToEndpoint.entrySet())
    {
        // getOrDefault avoids the containsKey + get double lookup of the original
        result.put(entry.getKey().toString(),
                   coreViewStatus.getOrDefault(entry.getValue(), "UNKNOWN"));
    }
    return Collections.unmodifiableMap(result);
}
/**
 * Replaces the cluster's endpoint snitch at runtime and repoints all keyspace
 * replication strategies at the new instance.
 *
 * @param epSnitchClassName snitch class name to construct
 * @param dynamic whether to wrap the new snitch in a DynamicEndpointSnitch.
 *                NOTE(review): auto-unboxed in the {@code if (dynamic)} check below — a null
 *                value would throw NullPointerException; confirm callers never pass null.
 * @param dynamicUpdateInterval update interval for the dynamic snitch (only read when dynamic is true)
 * @param dynamicResetInterval reset interval for the dynamic snitch (only read when dynamic is true)
 * @param dynamicBadnessThreshold badness threshold for the dynamic snitch (only read when dynamic is true)
 * @throws ClassNotFoundException if the snitch class cannot be constructed (wraps ConfigurationException)
 */
public void updateSnitch(String epSnitchClassName, Boolean dynamic, Integer dynamicUpdateInterval, Integer dynamicResetInterval, Double dynamicBadnessThreshold) throws ClassNotFoundException
{
    IEndpointSnitch oldSnitch = DatabaseDescriptor.getEndpointSnitch();
    // new snitch registers mbean during construction
    IEndpointSnitch newSnitch;
    try
    {
        newSnitch = FBUtilities.construct(epSnitchClassName, "snitch");
    }
    catch (ConfigurationException e)
    {
        throw new ClassNotFoundException(e.getMessage());
    }
    if (dynamic)
    {
        DatabaseDescriptor.setDynamicUpdateInterval(dynamicUpdateInterval);
        DatabaseDescriptor.setDynamicResetInterval(dynamicResetInterval);
        DatabaseDescriptor.setDynamicBadnessThreshold(dynamicBadnessThreshold);
        newSnitch = new DynamicEndpointSnitch(newSnitch);
    }
    // point snitch references to the new instance
    DatabaseDescriptor.setEndpointSnitch(newSnitch);
    for (String ks : Schema.instance.getKeyspaces())
    {
        Keyspace.open(ks).getReplicationStrategy().snitch = newSnitch;
    }
    // unregister the old dynamic snitch's mbean so the name is free for a future replacement
    if (oldSnitch instanceof DynamicEndpointSnitch)
        ((DynamicEndpointSnitch)oldSnitch).unregisterMBean();
    updateTopology();
}
/**
 * Seed data to the endpoints that will be responsible for it at the future
 *
 * @param rangesToStreamByKeyspace keyspaces and data ranges with endpoints included for each
 * @return async Future for whether stream was success
 */
private Future<StreamState> streamRanges(Map<String, Multimap<Range<Token>, InetAddress>> rangesToStreamByKeyspace)
{
    // First, we build a list of ranges to stream to each host, per table
    Map<String, Map<InetAddress, List<Range<Token>>>> sessionsToStreamByKeyspace = new HashMap<>();
    for (Map.Entry<String, Multimap<Range<Token>, InetAddress>> entry : rangesToStreamByKeyspace.entrySet())
    {
        String keyspace = entry.getKey();
        Multimap<Range<Token>, InetAddress> rangesWithEndpoints = entry.getValue();
        if (rangesWithEndpoints.isEmpty())
            continue;
        // Invert the multimap: collect, per endpoint, the list of ranges it should receive.
        Map<InetAddress, List<Range<Token>>> rangesPerEndpoint = new HashMap<>();
        for (Map.Entry<Range<Token>, InetAddress> endPointEntry : rangesWithEndpoints.entries())
        {
            // computeIfAbsent replaces the manual get/null-check/put dance; ArrayList is
            // preferred over LinkedList since the list is only appended to and iterated.
            rangesPerEndpoint.computeIfAbsent(endPointEntry.getValue(), unused -> new ArrayList<>())
                             .add(endPointEntry.getKey());
        }
        sessionsToStreamByKeyspace.put(keyspace, rangesPerEndpoint);
    }
    StreamPlan streamPlan = new StreamPlan("Unbootstrap");
    for (Map.Entry<String, Map<InetAddress, List<Range<Token>>>> entry : sessionsToStreamByKeyspace.entrySet())
    {
        String keyspaceName = entry.getKey();
        Map<InetAddress, List<Range<Token>>> rangesPerEndpoint = entry.getValue();
        for (Map.Entry<InetAddress, List<Range<Token>>> rangesEntry : rangesPerEndpoint.entrySet())
        {
            List<Range<Token>> ranges = rangesEntry.getValue();
            InetAddress newEndpoint = rangesEntry.getKey();
            InetAddress preferred = SystemKeyspace.getPreferredIP(newEndpoint);
            // TODO each call to transferRanges re-flushes, this is potentially a lot of waste
            streamPlan.transferRanges(newEndpoint, preferred, keyspaceName, ranges);
        }
    }
    return streamPlan.execute();
}
/**
 * Calculate pair of ranges to stream/fetch for given two range collections
 * (current ranges for keyspace and ranges after move to new token)
 *
 * @param current collection of the ranges by current token
 * @param updated collection of the ranges after token is changed
 * @return pair of ranges to stream/fetch for given current and updated range collections
 */
public Pair<Set<Range<Token>>, Set<Range<Token>>> calculateStreamAndFetchRanges(Collection<Range<Token>> current, Collection<Range<Token>> updated)
{
    // Ranges owned now but not after the move must be streamed away; ranges owned after
    // the move but not now must be fetched. The two directions are the same computation
    // with the arguments swapped, so the previously-duplicated loops are factored out.
    Set<Range<Token>> toStream = subtractRanges(current, updated); // should seed whole old range when nothing intersects
    Set<Range<Token>> toFetch = subtractRanges(updated, current);  // should fetch whole new range when nothing intersects
    return Pair.create(toStream, toFetch);
}
/**
 * For each range in {@code sources}, collects the parts not covered by each intersecting range
 * of {@code others}; a source range that intersects nothing is added whole.
 */
private static Set<Range<Token>> subtractRanges(Collection<Range<Token>> sources, Collection<Range<Token>> others)
{
    Set<Range<Token>> result = new HashSet<>();
    for (Range<Token> source : sources)
    {
        boolean intersect = false;
        for (Range<Token> other : others)
        {
            if (source.intersects(other))
            {
                // adding difference ranges for every intersecting pair (same semantics as before)
                result.addAll(source.subtract(other));
                intersect = true;
            }
        }
        if (!intersect)
        {
            result.add(source); // no overlap: keep the whole range
        }
    }
    return result;
}
/**
 * Bulk loads the SSTables in the given directory, blocking until the stream completes.
 *
 * @param directory SSTable directory to stream
 * @throws RuntimeException wrapping any failure of the underlying stream
 */
public void bulkLoad(String directory)
{
    try
    {
        bulkLoadInternal(directory).get();
    }
    catch (InterruptedException e)
    {
        // Restore the interrupt flag so callers up the stack can observe it;
        // the original code swallowed it into a plain RuntimeException.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
    catch (Exception e)
    {
        throw new RuntimeException(e);
    }
}
/**
 * Starts a bulk load of the given directory without waiting for completion.
 *
 * @param directory SSTable directory to stream
 * @return the plan id of the stream operation, usable for progress tracking
 */
public String bulkLoadAsync(String directory)
{
    return bulkLoadInternal(directory).planId.toString();
}
/**
 * Builds and kicks off an SSTableLoader stream for the given directory.
 *
 * @param directory existing directory containing SSTables
 * @return the in-flight stream result future
 * @throws IllegalArgumentException if the path does not exist or is not a directory
 */
private StreamResultFuture bulkLoadInternal(String directory)
{
    File dir = new File(directory);
    if (!dir.exists() || !dir.isDirectory())
        throw new IllegalArgumentException("Invalid directory " + directory);
    // Client that tells the loader which endpoints own which ranges for the keyspace
    // being loaded, based on this node's view of the ring.
    SSTableLoader.Client client = new SSTableLoader.Client()
    {
        private String keyspace;
        public void init(String keyspace)
        {
            this.keyspace = keyspace;
            try
            {
                // register every (range -> owning endpoints) mapping with the loader
                for (Map.Entry<Range<Token>, List<InetAddress>> entry : StorageService.instance.getRangeToAddressMap(keyspace).entrySet())
                {
                    Range<Token> range = entry.getKey();
                    for (InetAddress endpoint : entry.getValue())
                        addRangeForEndpoint(range, endpoint);
                }
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
        }
        public CFMetaData getTableMetadata(String tableName)
        {
            return Schema.instance.getCFMetaData(keyspace, tableName);
        }
    };
    return new SSTableLoader(dir, client, new OutputHandler.LogOutput()).stream();
}
/** Reschedules any sstable deletions that previously failed. */
public void rescheduleFailedDeletions()
{
    LifecycleTransaction.rescheduleFailedDeletions();
}
/**
 * {@inheritDoc}
 */
public void loadNewSSTables(String ksName, String cfName)
{
    ColumnFamilyStore.loadNewSSTables(ksName, cfName);
}
/**
 * Samples partition keys from this node's primary ranges and returns their tokens as strings.
 *
 * {@inheritDoc}
 */
public List<String> sampleKeyRange() // do not rename to getter - see CASSANDRA-4452 for details
{
    List<DecoratedKey> keys = new ArrayList<>();
    // sample keys only from the ranges this node is the primary replica for
    for (Keyspace keyspace : Keyspace.nonLocalStrategy())
    {
        for (Range<Token> range : getPrimaryRangesForEndpoint(keyspace.getName(), FBUtilities.getBroadcastAddress()))
            keys.addAll(keySamples(keyspace.getColumnFamilyStores(), range));
    }
    List<String> sampledKeys = new ArrayList<>(keys.size());
    for (DecoratedKey key : keys)
        sampledKeys.add(key.getToken().toString());
    return sampledKeys;
}
/**
 * Rebuilds the named secondary indexes of a table.
 *
 * @param ksName keyspace name
 * @param cfName table name
 * @param idxNames index names; entries given as index column-family names are mapped back to plain index names
 */
public void rebuildSecondaryIndex(String ksName, String cfName, String... idxNames)
{
    // Stream.toArray with an array generator replaces the collect-to-list-then-toArray round trip.
    String[] indices = asList(idxNames).stream()
                                       .map(p -> isIndexColumnFamily(p) ? getIndexName(p) : p)
                                       .toArray(String[]::new);
    ColumnFamilyStore.rebuildSecondaryIndex(ksName, cfName, indices);
}
/** Drops the node's local schema and resynchronizes it from other nodes. */
public void resetLocalSchema() throws IOException
{
    MigrationManager.resetLocalSchema();
}
/** Sets the probability that an incoming request will be traced. */
public void setTraceProbability(double probability)
{
    this.traceProbability = probability;
}
/** @return the current trace probability. */
public double getTraceProbability()
{
    return traceProbability;
}
/** Disables automatic compaction for the given tables (all tables of the keyspace when none given). */
public void disableAutoCompaction(String ks, String... tables) throws IOException
{
    for (ColumnFamilyStore cfs : getValidColumnFamilies(true, true, ks, tables))
    {
        cfs.disableAutoCompaction();
    }
}
/** Re-enables automatic compaction for the given tables (all tables of the keyspace when none given). */
public void enableAutoCompaction(String ks, String... tables) throws IOException
{
    for (ColumnFamilyStore cfs : getValidColumnFamilies(true, true, ks, tables))
    {
        cfs.enableAutoCompaction();
    }
}
/** Returns the name of the cluster */
public String getClusterName()
{
    return DatabaseDescriptor.getClusterName();
}
/** Returns the cluster partitioner */
public String getPartitionerName()
{
    return DatabaseDescriptor.getPartitionerName();
}
/** @return current tombstone warning threshold (per-read tombstone count that triggers a warning). */
public int getTombstoneWarnThreshold()
{
    return DatabaseDescriptor.getTombstoneWarnThreshold();
}
/** Sets the tombstone warning threshold. */
public void setTombstoneWarnThreshold(int threshold)
{
    DatabaseDescriptor.setTombstoneWarnThreshold(threshold);
}
/** @return current tombstone failure threshold (per-read tombstone count that aborts a read). */
public int getTombstoneFailureThreshold()
{
    return DatabaseDescriptor.getTombstoneFailureThreshold();
}
/** Sets the tombstone failure threshold. */
public void setTombstoneFailureThreshold(int threshold)
{
    DatabaseDescriptor.setTombstoneFailureThreshold(threshold);
}
/** @return batch size failure threshold, in KB. */
public int getBatchSizeFailureThreshold()
{
    return DatabaseDescriptor.getBatchSizeFailThresholdInKB();
}
/** Sets the batch size failure threshold, in KB. */
public void setBatchSizeFailureThreshold(int threshold)
{
    DatabaseDescriptor.setBatchSizeFailThresholdInKB(threshold);
}
/**
 * Updates the hinted handoff throttle.
 *
 * @param throttleInKB new throttle value, in KB per second
 */
public void setHintedHandoffThrottleInKB(int throttleInKB)
{
    DatabaseDescriptor.setHintedHandoffThrottleInKB(throttleInKB);
    // parameterized logging avoids the eager String.format call on every invocation
    logger.info("Updated hinted_handoff_throttle_in_kb to {}", throttleInKB);
}
/**
 * Computes per-disk partition boundaries for the given table across the supplied data directories.
 *
 * @param cfs the table to compute boundaries for
 * @param directories the data directories to split ownership across
 * @return boundary positions, or null when the partitioner cannot split ranges
 *         or this node has no local ranges for the table's keyspace
 */
public static List<PartitionPosition> getDiskBoundaries(ColumnFamilyStore cfs, Directories.DataDirectory[] directories)
{
    if (!cfs.getPartitioner().splitter().isPresent())
        return null;
    Collection<Range<Token>> lr;
    if (StorageService.instance.isBootstrapMode())
    {
        // while bootstrapping, the ranges this node will own are still pending
        lr = StorageService.instance.getTokenMetadata().getPendingRanges(cfs.keyspace.getName(), FBUtilities.getBroadcastAddress());
    }
    else
    {
        // Reason we use the future settled TMD is that if we decommission a node, we want to stream
        // from that node to the correct location on disk, if we didn't, we would put new files in the wrong places.
        // We do this to minimize the amount of data we need to move in rebalancedisks once everything settled
        TokenMetadata tmd = StorageService.instance.getTokenMetadata().cloneAfterAllSettled();
        lr = cfs.keyspace.getReplicationStrategy().getAddressRanges(tmd).get(FBUtilities.getBroadcastAddress());
    }
    if (lr == null || lr.isEmpty())
        return null;
    List<Range<Token>> localRanges = Range.sort(lr);
    return getDiskBoundaries(localRanges, cfs.getPartitioner(), directories);
}
/** Convenience overload using the table's currently writeable data directories. */
public static List<PartitionPosition> getDiskBoundaries(ColumnFamilyStore cfs)
{
    return getDiskBoundaries(cfs, cfs.getDirectories().getWriteableLocations());
}
/**
 * Returns a list of disk boundaries, the result will differ depending on whether vnodes are enabled or not.
 *
 * What is returned are upper bounds for the disks, meaning everything from partitioner.minToken up to
 * getDiskBoundaries(..).get(0) should be on the first disk, everything between 0 to 1 should be on the second disk
 * etc.
 *
 * The final entry in the returned list will always be the partitioner maximum tokens upper key bound
 *
 * @param localRanges the sorted token ranges owned locally
 * @param partitioner the partitioner; must have a splitter (asserted below)
 * @param dataDirectories the data directories whose count determines the number of splits
 * @return one upper-bound position per data directory, the last being the partitioner maximum
 */
public static List<PartitionPosition> getDiskBoundaries(List<Range<Token>> localRanges, IPartitioner partitioner, Directories.DataDirectory[] dataDirectories)
{
    assert partitioner.splitter().isPresent();
    Splitter splitter = partitioner.splitter().get();
    List<Token> boundaries = splitter.splitOwnedRanges(dataDirectories.length, localRanges, DatabaseDescriptor.getNumTokens() > 1);
    List<PartitionPosition> diskBoundaries = new ArrayList<>();
    // all but the last split become intermediate boundaries ...
    for (int i = 0; i < boundaries.size() - 1; i++)
        diskBoundaries.add(boundaries.get(i).maxKeyBound());
    // ... and the last disk always extends to the partitioner's maximum token
    diskBoundaries.add(partitioner.getMaximumToken().maxKeyBound());
    return diskBoundaries;
}
}
| apache-2.0 |
zpdian226/VerticalCoverflow | src/org/zpdian/coverflow/VerticalGallery.java | 46568 | /*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zpdian.coverflow;
//import com.android.internal.R;
//import android.annotation.Widget;
import org.zpdian.coverflow.R;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.HapticFeedbackConstants;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.SoundEffectConstants;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.animation.Transformation;
import android.widget.Scroller;
/**
* A view that shows items in a center-locked, horizontally scrolling list.
* <p>
* The default values for the Gallery assume you will be using
* {@link android.R.styleable#Theme_galleryItemBackground} as the background for
* each View given to the Gallery from the Adapter. If you are not doing this,
* you may need to adjust some Gallery properties, such as the spacing.
* <p>
* Views given to the Gallery should use {@link Gallery.LayoutParams} as their
* layout parameters type.
*
* @attr ref android.R.styleable#Gallery_animationDuration
* @attr ref android.R.styleable#Gallery_spacing
* @attr ref android.R.styleable#Gallery_gravity
*/
//@Widget
public class VerticalGallery extends AbsSpinner implements GestureDetector.OnGestureListener {
private static final String TAG = "Gallery";
private static final boolean localLOGV = false;
/**
 * Duration in milliseconds from the start of a scroll during which we're
 * unsure whether the user is scrolling or flinging.
 */
private static final int SCROLL_TO_FLING_UNCERTAINTY_TIMEOUT = 250;
/**
 * Vertical spacing between items, in pixels.
 */
private int mSpacing = 0;
/**
 * How long the transition animation should run when a child view changes
 * position, measured in milliseconds.
 */
private int mAnimationDuration = 400;
/**
 * The alpha of items that are not selected.
 */
private float mUnselectedAlpha;
/**
 * Top most edge of a child seen so far during layout.
 */
private int mTopMost;
/**
 * Bottom most edge of a child seen so far during layout.
 */
private int mBottomMost;
// Requested child gravity; never assigned here since the styled-attribute
// parsing in the constructor is commented out.
private int mGravity;
/**
 * Helper for detecting touch gestures.
 */
private GestureDetector mGestureDetector;
/**
 * The position of the item that received the user's down touch.
 */
private int mDownTouchPosition;
/**
 * The view of the item that received the user's down touch.
 */
private View mDownTouchView;
/**
 * Executes the delta scrolls from a fling or scroll movement.
 */
private FlingRunnable mFlingRunnable = new FlingRunnable();
private int mLastPosition = 0;
/**
 * Sets mSuppressSelectionChanged = false. This is used to set it to false
 * in the future. It will also trigger a selection changed.
 */
private Runnable mDisableSuppressSelectionChangedRunnable = new Runnable() {
    public void run() {
        mSuppressSelectionChanged = false;
        selectionChanged();
    }
};
/**
 * When fling runnable runs, it resets this to false. Any method along the
 * path until the end of its run() can set this to true to abort any
 * remaining fling. For example, if we've reached either the topmost or
 * bottommost item, we will set this to true.
 */
private boolean mShouldStopFling;
/**
 * The currently selected item's child.
 */
private View mSelectedChild;
/**
 * Whether to continuously callback on the item selected listener during a
 * fling.
 */
private boolean mShouldCallbackDuringFling = true;
/**
 * Whether to callback when an item that is not selected is clicked.
 */
private boolean mShouldCallbackOnUnselectedItemClick = true;
/**
 * If true, do not callback to item selected listener.
 */
private boolean mSuppressSelectionChanged;
/**
 * If true, we have received the "invoke" (center or enter buttons) key
 * down. This is checked before we action on the "invoke" key up, and is
 * subsequently cleared.
 */
private boolean mReceivedInvokeKeyDown;
private AdapterContextMenuInfo mContextMenuInfo;
/**
 * If true, this onScroll is the first for this user's drag (remember, a
 * drag sends many onScrolls).
 */
private boolean mIsFirstScroll;
/** Creates a gallery with default attributes and the platform gallery style. */
public VerticalGallery(Context context) {
    this(context, null);
}
/** Creates a gallery, delegating to the three-arg constructor with the platform gallery style. */
public VerticalGallery(Context context, AttributeSet attrs) {
    this(context, attrs, android.R.attr.galleryStyle);
}
/**
 * Full constructor. Sets up gesture detection with long-press enabled.
 * The styled-attribute handling from the original framework Gallery is kept
 * below but disabled, since the internal R symbols are not accessible here.
 */
public VerticalGallery(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    mGestureDetector = new GestureDetector(context, this);
    mGestureDetector.setIsLongpressEnabled(true);
    // TypedArray a = context.obtainStyledAttributes(
    // attrs, com.android.internal.R.styleable.Gallery, defStyle, 0);
    //
    // int index = a.getInt(com.android.internal.R.styleable.Gallery_gravity, -1);
    // if (index >= 0) {
    // setGravity(index);
    // }
    //
    // int animationDuration =
    // a.getInt(com.android.internal.R.styleable.Gallery_animationDuration, -1);
    // if (animationDuration > 0) {
    // setAnimationDuration(animationDuration);
    // }
    //
    // int spacing =
    // a.getDimensionPixelOffset(com.android.internal.R.styleable.Gallery_spacing, 0);
    // setSpacing(spacing);
    //
    // float unselectedAlpha = a.getFloat(
    // com.android.internal.R.styleable.Gallery_unselectedAlpha, 0.5f);
    // setUnselectedAlpha(unselectedAlpha);
    //
    // a.recycle();
    //
    // // We draw the selected item last (because otherwise the item to the
    // // right overlaps it)
    // this.setChildrenDrawingCacheEnabled(true);
    //
    // this.setStaticTransformationsEnabled(true);
    // mGroupFlags |= FLAG_USE_CHILD_DRAWING_ORDER;
    // mGroupFlags |= FLAG_SUPPORT_STATIC_TRANSFORMATIONS;
}
/**
 * Whether or not to callback on any {@link #getOnItemSelectedListener()}
 * while the items are being flinged. If false, only the final selected item
 * will cause the callback. If true, all items between the first and the
 * final will cause callbacks.
 *
 * @param shouldCallback Whether or not to callback on the listener while
 * the items are being flinged.
 */
public void setCallbackDuringFling(boolean shouldCallback) {
    mShouldCallbackDuringFling = shouldCallback;
}
/**
 * Whether or not to callback when an item that is not selected is clicked.
 * If false, the item will become selected (and re-centered). If true, the
 * {@link #getOnItemClickListener()} will get the callback.
 *
 * @param shouldCallback Whether or not to callback on the listener when a
 * item that is not selected is clicked.
 * @hide
 */
public void setCallbackOnUnselectedItemClick(boolean shouldCallback) {
    mShouldCallbackOnUnselectedItemClick = shouldCallback;
}
/**
 * Sets how long the transition animation should run when a child view
 * changes position. Only relevant if animation is turned on.
 *
 * @param animationDurationMillis The duration of the transition, in
 * milliseconds.
 *
 * @attr ref android.R.styleable#Gallery_animationDuration
 */
public void setAnimationDuration(int animationDurationMillis) {
    mAnimationDuration = animationDurationMillis;
}
/**
 * Sets the vertical spacing between items in the Gallery.
 *
 * @param spacing The spacing in pixels between items in the Gallery
 *
 * @attr ref android.R.styleable#Gallery_spacing
 */
public void setSpacing(int spacing) {
    mSpacing = spacing;
}
/**
 * Sets the alpha of items that are not selected in the Gallery
 * (0 = fully transparent, 1 = fully opaque).
 *
 * @param unselectedAlpha the alpha for the items that are not selected.
 *
 * @attr ref android.R.styleable#Gallery_unselectedAlpha
 */
public void setUnselectedAlpha(float unselectedAlpha) {
    mUnselectedAlpha = unselectedAlpha;
}
@Override
protected boolean getChildStaticTransformation(View child, Transformation t) {
    // Reset any previous transformation, then dim every child except the selection.
    t.clear();
    if (child == mSelectedChild) {
        t.setAlpha(1.0f);
    } else {
        t.setAlpha(mUnselectedAlpha);
    }
    return true;
}
/** One item counts as the visible extent for scrollbar purposes. */
@Override
protected int computeVerticalScrollExtent() {
    // Only 1 item is considered to be selected
    return 1;
}
/** Scroll offset reported as the selected position. */
@Override
protected int computeVerticalScrollOffset() {
    // Current scroll position is the same as the selected position
    return mSelectedPosition;
}
/** Scroll range reported as the adapter item count. */
@Override
protected int computeVerticalScrollRange() {
    // Scroll range is the same as the item count
    return mItemCount;
}
/** Only this gallery's own LayoutParams type is accepted. */
@Override
protected boolean checkLayoutParams(ViewGroup.LayoutParams p) {
    return p instanceof LayoutParams;
}
@Override
protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) {
    return new LayoutParams(p);
}
@Override
public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) {
    return new LayoutParams(getContext(), attrs);
}
@Override
protected ViewGroup.LayoutParams generateDefaultLayoutParams() {
    /*
     * Gallery expects Gallery.LayoutParams.
     */
    return new VerticalGallery.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
            ViewGroup.LayoutParams.WRAP_CONTENT);
}
/** Positions all children; the real work is delegated to {@link #layout(int, boolean)}. */
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    super.onLayout(changed, l, t, r, b);
    /*
     * Remember that we are in layout to prevent more layout request from
     * being generated.
     */
    mInLayout = true;
    layout(0, false);
    mInLayout = false;
}
// @Override
// int getChildHeight(View child) {
// return child.getMeasuredHeight();
// }
/** @return the measured width of the child (used by AbsSpinner's measurement pass). */
@Override
int getChildWidth(View child) {
    return child.getMeasuredWidth();
}
/**
 * Tracks a motion scroll. In reality, this is used to do just about any
 * movement to items (touch scroll, arrow-key scroll, set an item as selected).
 *
 * @param deltaY Change in Y from the previous event.
 */
void trackMotionScroll(int deltaY) {
    if (getChildCount() == 0) {
        return;
    }
    boolean toTop = deltaY < 0;
    // clamp the delta so the boundary items cannot scroll past the gallery center
    int limitedDeltaY = getLimitedMotionScrollAmount(toTop, deltaY);
    if (limitedDeltaY != deltaY) {
        // The above call returned a limited amount, so stop any scrolls/flings
        mFlingRunnable.endFling(false);
        onFinishedMovement();
    }
    offsetChildrenTopBottom(limitedDeltaY);
    detachOffScreenChildren(toTop);
    if (toTop) {
        // If moved up, there will be empty space at the bottom
        fillToGalleryBottom();
    } else {
        // Similarly, moving down leaves empty space at the top
        fillToGalleryTop();
    }
    // Clear unused views
    mRecycler.clear();
    setSelectionToCenterChild();
    invalidate();
}
/**
 * Limits a requested scroll so the first/last item cannot move past the gallery center.
 *
 * @param motionToTop true when the content is moving towards the top
 * @param deltaY requested scroll amount
 * @return the clamped amount: 0 if the boundary item is already past center, otherwise
 *         the smaller (in magnitude) of deltaY and the distance remaining to center
 */
int getLimitedMotionScrollAmount(boolean motionToTop, int deltaY) {
    int extremeItemPosition = motionToTop ? mItemCount - 1 : 0;
    View extremeChild = getChildAt(extremeItemPosition - mFirstPosition);
    if (extremeChild == null) {
        // the boundary item is not laid out yet, so no clamping is possible
        return deltaY;
    }
    int extremeChildCenter = getCenterOfView(extremeChild);
    int galleryCenter = getCenterOfGallery();
    if (motionToTop) {
        if (extremeChildCenter <= galleryCenter) {
            // The extreme child is past his boundary point!
            return 0;
        }
    } else {
        if (extremeChildCenter >= galleryCenter) {
            // The extreme child is past his boundary point!
            return 0;
        }
    }
    int centerDifference = galleryCenter - extremeChildCenter;
    return motionToTop
            ? Math.max(centerDifference, deltaY)
            : Math.min(centerDifference, deltaY);
}
/**
 * Offset the vertical location of all children of this view by the
 * specified number of pixels.
 *
 * @param offset the number of pixels to offset
 */
private void offsetChildrenTopBottom(int offset) {
    for (int i = getChildCount() - 1; i >= 0; i--) {
        getChildAt(i).offsetTopAndBottom(offset);
    }
}
/**
 * @return The vertical center of this Gallery, inside its padding.
 */
private int getCenterOfGallery() {
    return (getHeight() - getPaddingTop() - getPaddingBottom()) / 2 + getPaddingTop();
}
/**
 * @return The vertical center of the given view.
 */
private static int getCenterOfView(View view) {
    return view.getTop() + view.getHeight() / 2;
}
/**
 * Detaches children that are off the screen (i.e.: Gallery bounds) and
 * hands them to the recycler.
 *
 * @param toTop Whether to detach children above the Gallery, or below it.
 */
private void detachOffScreenChildren(boolean toTop) {
    int numChildren = getChildCount();
    int firstPosition = mFirstPosition;
    int start = 0;
    int count = 0;
    if (toTop) {
        // walk from the top: children wholly above the padding edge get recycled
        final int galleryTop = getPaddingTop();
        for (int i = 0; i < numChildren; i++) {
            final View child = getChildAt(i);
            if (child.getBottom() >= galleryTop) {
                break;
            } else {
                count++;
                mRecycler.put(firstPosition + i, child);
            }
        }
    } else {
        // walk from the bottom: children wholly below the visible area get recycled
        final int galleryBottom = getHeight() - getPaddingBottom();
        for (int i = numChildren - 1; i >= 0; i--) {
            final View child = getChildAt(i);
            if (child.getTop() <= galleryBottom) {
                break;
            } else {
                start = i;
                count++;
                mRecycler.put(firstPosition + i, child);
            }
        }
    }
    detachViewsFromParent(start, count);
    if (toTop) {
        // detaching from the top shifts the adapter position of the first attached child
        mFirstPosition += count;
    }
}
/**
 * Scrolls the items so that the selected item is in its 'slot' (its center
 * is the gallery's center).
 */
private void scrollIntoSlots() {
    if (getChildCount() == 0 || mSelectedChild == null) return;
    int selectedCenter = getCenterOfView(mSelectedChild);
    int targetCenter = getCenterOfGallery();
    int scrollAmount = targetCenter - selectedCenter;
    if (scrollAmount != 0) {
        mFlingRunnable.startUsingDistance(scrollAmount);
    } else {
        // already centered; just finish up
        onFinishedMovement();
    }
}
/** Called when scrolling/flinging settles; fires any suppressed selection callback. */
private void onFinishedMovement() {
    if (mSuppressSelectionChanged) {
        mSuppressSelectionChanged = false;
        // We haven't been callbacking during the fling, so do it now
        super.selectionChanged();
    }
    invalidate();
}
/** Forwards selection changes unless callbacks are currently suppressed (during a fling). */
@Override
void selectionChanged() {
    if (!mSuppressSelectionChanged) {
        super.selectionChanged();
    }
}
/**
 * Looks for the child that is closest to the center and sets it as the
 * selected child.
 */
private void setSelectionToCenterChild() {
    View selView = mSelectedChild;
    if (mSelectedChild == null) return;
    int galleryCenter = getCenterOfGallery();
    // Common case where the current selected position is correct
    if (selView.getTop() <= galleryCenter && selView.getBottom() >= galleryCenter) {
        return;
    }
    // TODO better search
    int closestEdgeDistance = Integer.MAX_VALUE;
    int newSelectedChildIndex = 0;
    for (int i = getChildCount() - 1; i >= 0; i--) {
        View child = getChildAt(i);
        if (child.getTop() <= galleryCenter && child.getBottom() >= galleryCenter) {
            // This child is in the center
            newSelectedChildIndex = i;
            break;
        }
        // otherwise track the child whose nearest edge is closest to center
        int childClosestEdgeDistance = Math.min(Math.abs(child.getTop() - galleryCenter),
                Math.abs(child.getBottom() - galleryCenter));
        if (childClosestEdgeDistance < closestEdgeDistance) {
            closestEdgeDistance = childClosestEdgeDistance;
            newSelectedChildIndex = i;
        }
    }
    int newPos = mFirstPosition + newSelectedChildIndex;
    if (newPos != mSelectedPosition) {
        setSelectedPositionInt(newPos);
        setNextSelectedPositionInt(newPos);
        checkSelectionChanged();
    }
}
/**
 * Creates and positions all views for this Gallery.
 * <p>
 * We layout rarely, most of the time {@link #trackMotionScroll(int)} takes
 * care of repositioning, adding, and removing children.
 *
 * @param delta Change in the selected position (positive = selection moving
 *              forward, negative = backward). Note: unused by this implementation.
 * @param animate whether to animate; also unused by this implementation.
 */
@Override
void layout(int delta, boolean animate) {
    int childrenTop= mSpinnerPadding.top;
    int childrenHeight = getBottom() - getTop() - mSpinnerPadding.top - mSpinnerPadding.bottom;
    if (mDataChanged) {
        handleDataChanged();
    }
    // Handle an empty gallery by removing all views.
    if (mItemCount == 0) {
        resetList();
        return;
    }
    // Update to the new selected position.
    if (mNextSelectedPosition >= 0) {
        setSelectedPositionInt(mNextSelectedPosition);
    }
    // All views go in recycler while we are in layout
    recycleAllViews();
    // Clear out old views
    //removeAllViewsInLayout();
    detachAllViewsFromParent();
    /*
     * These will be used to give initial positions to views entering the
     * gallery as we scroll
     */
    mTopMost = 0;
    mBottomMost = 0;
    // Make selected view and center it
    /*
     * mFirstPosition will be decreased as we add views above it later
     * on. The 0 for the offset will be corrected a couple lines down.
     */
    mFirstPosition = mSelectedPosition;
    View sel = makeAndAddView(mSelectedPosition, 0, 0, true);
    // Put the selected child in the center
    int selectedOffset = childrenTop+ (childrenHeight / 2) - (sel.getHeight() / 2);
    sel.offsetTopAndBottom(selectedOffset);
    // fill remaining space below, then above the selection
    fillToGalleryBottom();
    fillToGalleryTop();
    // Flush any cached views that did not get reused above
    mRecycler.clear();
    invalidate();
    checkSelectionChanged();
    mDataChanged = false;
    mNeedSync = false;
    setNextSelectedPositionInt(mSelectedPosition);
    updateSelectedItemMetadata();
}
/**
 * Adds views above the first child until the gallery's top edge (the
 * padding boundary) is covered or position 0 is reached. Each added view
 * becomes the new first child and {@code mFirstPosition} is updated.
 */
private void fillToGalleryTop() {
    int itemSpacing = mSpacing;
    int galleryTop = getPaddingTop();
    // Set state for initial iteration
    View prevIterationView = getChildAt(0);
    int curPosition;
    int curBottomEdge;
    if (prevIterationView != null) {
        // Start directly above the current first child.
        curPosition = mFirstPosition - 1;
        curBottomEdge = prevIterationView.getTop() - itemSpacing;
    } else {
        // No children available! Start from position 0 at the bottom edge.
        curPosition = 0;
        curBottomEdge = getBottom() - getTop() - getPaddingBottom();
        mShouldStopFling = true;
    }
    while (curBottomEdge > galleryTop && curPosition >= 0) {
        // fromTop == false: curBottomEdge is the BOTTOM edge of the new view.
        prevIterationView = makeAndAddView(curPosition, curPosition - mSelectedPosition,
                curBottomEdge, false);
        // Remember some state
        mFirstPosition = curPosition;
        // Set state for next iteration
        curBottomEdge = prevIterationView.getTop() - itemSpacing;
        curPosition--;
    }
}
/**
 * Adds views below the last child until the gallery's bottom edge (the
 * padding boundary) is covered or the last adapter position is placed.
 */
private void fillToGalleryBottom() {
    int itemSpacing = mSpacing;
    int galleryBottom = getBottom() - getTop() - getPaddingBottom();
    int numChildren = getChildCount();
    int numItems = mItemCount;
    // Set state for initial iteration
    View prevIterationView = getChildAt(numChildren - 1);
    int curPosition;
    int curTopEdge;
    if (prevIterationView != null) {
        // Start directly below the current last child.
        curPosition = mFirstPosition + numChildren;
        curTopEdge = prevIterationView.getBottom() + itemSpacing;
    } else {
        // No children: start from the last adapter item at the top edge.
        mFirstPosition = curPosition = mItemCount - 1;
        curTopEdge = getPaddingTop();
        mShouldStopFling = true;
    }
    while (curTopEdge < galleryBottom && curPosition < numItems) {
        // fromTop == true: curTopEdge is the TOP edge of the new view.
        prevIterationView = makeAndAddView(curPosition, curPosition - mSelectedPosition,
                curTopEdge, true);
        // Set state for next iteration
        curTopEdge = prevIterationView.getBottom() + itemSpacing;
        curPosition++;
    }
}
/**
 * Obtain a view, either by pulling an existing view from the recycler or by
 * getting a new one from the adapter. If we are animating, make sure there
 * is enough information in the view's layout parameters to animate from the
 * old to new positions.
 *
 * @param position Position in the gallery for the view to obtain
 * @param offset Offset from the selected position
 * @param y Y-coordinate indicating where this view should be placed. This
 *            will either be the top or bottom edge of the view, depending
 *            on the fromTop parameter
 * @param fromTop Are we positioning views based on the top edge? (i.e.,
 *            building from top to bottom)?
 * @return A view that has been added to the gallery
 */
private View makeAndAddView(int position, int offset, int y,
        boolean fromTop) {
    View child;
    if (!mDataChanged) {
        child = mRecycler.get(position);
        if (child != null) {
            // Can reuse an existing view
            int childTop = child.getTop();
            // Remember top and bottom edges of where views have been placed
            mBottomMost = Math.max(mBottomMost, childTop
                    + child.getMeasuredHeight());
            mTopMost = Math.min(mTopMost, childTop);
            // Position the view
            setUpChild(child, offset, y, fromTop);
            return child;
        }
    }
    // Nothing found in the recycler -- ask the adapter for a view
    // (null convertView: the recycler was empty for this position).
    child = mAdapter.getView(position, null, this);
    // Position the view
    setUpChild(child, offset, y, fromTop);
    return child;
}
/**
 * Helper for makeAndAddView to set the position of a view and fill out its
 * layout parameters: attaches the child, measures it, and lays it out.
 *
 * @param child The view to position
 * @param offset Offset from the selected position (0 marks the child as
 *            selected)
 * @param y Y-coordinate indicating where this view should be placed. This
 *            will either be the top or bottom edge of the view, depending
 *            on the fromTop parameter
 * @param fromTop Are we positioning views based on the top edge? (i.e.,
 *            building from top to bottom)?
 */
private void setUpChild(View child, int offset, int y, boolean fromTop) {
    // Respect layout params that are already in the view. Otherwise
    // make some up...
    ViewGroup.LayoutParams lp = (ViewGroup.LayoutParams)
            child.getLayoutParams();
    if (lp == null) {
        lp = (ViewGroup.LayoutParams) generateDefaultLayoutParams();
    }
    // Index -1 appends (building downwards); index 0 prepends (upwards).
    addViewInLayout(child, fromTop ? -1 : 0, lp);
    child.setSelected(offset == 0);
    // Get measure specs
    int childHeightSpec = ViewGroup.getChildMeasureSpec(mHeightMeasureSpec,
            mSpinnerPadding.top + mSpinnerPadding.bottom, lp.height);
    int childWidthSpec = ViewGroup.getChildMeasureSpec(mWidthMeasureSpec,
            mSpinnerPadding.left + mSpinnerPadding.right, lp.width);
    // Measure child
    child.measure(childWidthSpec, childHeightSpec);
    int childTop_1;
    int childBottom_1;
    // Position horizontally based on gravity setting
    int childLeft = calculateLeft(child, true);
    int childRight = childLeft + child.getMeasuredWidth();
    int height = child.getMeasuredHeight();
    // Interpret y as the top or bottom edge depending on build direction.
    if (fromTop) {
        childTop_1 = y;
        childBottom_1 = childTop_1 + height;
    } else {
        childTop_1 = y - height;
        childBottom_1 = y;
    }
    child.layout(childLeft, childTop_1, childRight, childBottom_1);
}
/**
 * Figure out horizontal placement based on mGravity.
 *
 * @param child Child to place
 * @param duringLayout whether we are mid-layout (use measured sizes) or
 *            after layout (use actual sizes)
 * @return Where the left edge of the child should be
 */
private int calculateLeft(View child, boolean duringLayout) {
    int myWidth = duringLayout ? getMeasuredWidth() : getWidth();
    int childWidth = duringLayout ? child.getMeasuredWidth() : child.getWidth();
    int childLeft = 0;
    switch (mGravity) {
        case Gravity.LEFT:
            childLeft = mSpinnerPadding.left;
            break;
        case Gravity.CENTER_HORIZONTAL:
            int availableSpace = myWidth - mSpinnerPadding.right
                    - mSpinnerPadding.left - childWidth;
            childLeft = mSpinnerPadding.left + (availableSpace / 2);
            break;
        case Gravity.BOTTOM:
            // NOTE(review): BOTTOM is used here to mean right-alignment --
            // presumably a leftover from converting the horizontal Gallery;
            // Gravity.RIGHT would be the expected label. Confirm callers
            // before changing.
            childLeft = myWidth - mSpinnerPadding.right - childWidth;
            break;
    }
    return childLeft;
}
@Override
public boolean onTouchEvent(MotionEvent event) {
    // Give everything to the gesture detector
    boolean retValue = mGestureDetector.onTouchEvent(event);
    int action = event.getAction();
    // GestureDetector does not report up/cancel, so handle them here.
    if (action == MotionEvent.ACTION_UP) {
        // Helper method for lifted finger
        onUp();
    } else if (action == MotionEvent.ACTION_CANCEL) {
        onCancel();
    }
    return retValue;
}
/**
 * {@inheritDoc}
 *
 * Tap-to-select is intentionally disabled in this implementation (the
 * original Gallery behavior is kept below, commented out, for reference):
 * a single tap neither scrolls to nor clicks the tapped item.
 */
public boolean onSingleTapUp(MotionEvent e) {
    // if (mDownTouchPosition >= 0) {
    //
    // // An item tap should make it selected, so scroll to this child.
    // scrollToChild(mDownTouchPosition - mFirstPosition);
    //
    // // Also pass the click so the client knows, if it wants to.
    // if (mShouldCallbackOnUnselectedItemClick || mDownTouchPosition == mSelectedPosition) {
    // performItemClick(mDownTouchView, mDownTouchPosition, mAdapter
    // .getItemId(mDownTouchPosition));
    // }
    //
    // return true;
    // }
    return false;
}
/**
 * {@inheritDoc}
 *
 * Starts a fling from the gesture's vertical velocity, optionally
 * suppressing selection callbacks until the fling settles.
 */
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
    if (!mShouldCallbackDuringFling) {
        // We want to suppress selection changes
        // Remove any future code to set mSuppressSelectionChanged = false
        removeCallbacks(mDisableSuppressSelectionChangedRunnable);
        // This will get reset once we scroll into slots
        if (!mSuppressSelectionChanged) mSuppressSelectionChanged = true;
    }
    // Fling the gallery! Sign flip converts finger direction to content
    // direction; 0.1 damps the raw velocity (tuning constant).
    mFlingRunnable.startUsingVelocity((int) -(velocityY * 0.1));
    return true;
}
/**
 * {@inheritDoc}
 *
 * Translates a scroll gesture into content motion, deferring selection
 * callbacks until it is clear the gesture is not the start of a fling.
 */
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
    if (localLOGV) Log.v(TAG, String.valueOf(e2.getY() - e1.getY()));
    /*
     * Now's a good time to tell our parent to stop intercepting our events!
     * The user has moved more than the slop amount, since GestureDetector
     * ensures this before calling this method. Also, if a parent is more
     * interested in this touch's events than we are, it would have
     * intercepted them by now (for example, we can assume when a Gallery is
     * in the ListView, a vertical scroll would not end up in this method
     * since a ListView would have intercepted it by now).
     */
    getParent().requestDisallowInterceptTouchEvent(true);
    // As the user scrolls, we want to callback selection changes so related-
    // info on the screen is up-to-date with the gallery's selection
    if (!mShouldCallbackDuringFling) {
        if (mIsFirstScroll) {
            /*
             * We're not notifying the client of selection changes during
             * the fling, and this scroll could possibly be a fling. Don't
             * do selection changes until we're sure it is not a fling.
             */
            if (!mSuppressSelectionChanged) mSuppressSelectionChanged = true;
            postDelayed(mDisableSuppressSelectionChangedRunnable, SCROLL_TO_FLING_UNCERTAINTY_TIMEOUT);
        }
    } else {
        if (mSuppressSelectionChanged) mSuppressSelectionChanged = false;
    }
    // Track the motion: sign flip converts finger direction to content
    // direction; 0.3 damps the scroll distance (tuning constant).
    trackMotionScroll(-1 * (int) (distanceY * 0.3));
    mIsFirstScroll = false;
    return true;
}
/**
 * {@inheritDoc}
 *
 * Stops any in-flight fling and records/presses the touched item.
 */
public boolean onDown(MotionEvent e) {
    // Kill any existing fling/scroll (do not snap into slots yet).
    mFlingRunnable.stop(false);
    // Get the item's view that was touched
    mDownTouchPosition = pointToPosition((int) e.getX(), (int) e.getY());
    if (mDownTouchPosition >= 0) {
        mDownTouchView = getChildAt(mDownTouchPosition - mFirstPosition);
        mDownTouchView.setPressed(true);
    }
    // Reset the multiple-scroll tracking state
    mIsFirstScroll = true;
    // Must return true to get matching events for this down event.
    return true;
}
/**
 * Called when a touch event's action is MotionEvent.ACTION_UP.
 * Snaps to the nearest slot (unless a fling is still running) and clears
 * all pressed state.
 */
void onUp() {
    if (mFlingRunnable.mScroller.isFinished()) {
        scrollIntoSlots();
    }
    dispatchUnpress();
}
/**
 * Called when a touch event's action is MotionEvent.ACTION_CANCEL.
 * Treated the same as a finger lift.
 */
void onCancel() {
    onUp();
}
/**
 * {@inheritDoc}
 *
 * Long-press on a touched item: haptic feedback, then dispatch to the
 * long-click listener / context menu machinery.
 */
public void onLongPress(MotionEvent e) {
    // Nothing was under the finger at ACTION_DOWN time.
    if (mDownTouchPosition < 0) {
        return;
    }
    performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
    long id = getItemIdAtPosition(mDownTouchPosition);
    dispatchLongPress(mDownTouchView, mDownTouchPosition, id);
}
// Unused methods from GestureDetector.OnGestureListener below
/**
 * {@inheritDoc}
 *
 * Intentionally empty: press feedback is handled in onDown/dispatchPress.
 */
public void onShowPress(MotionEvent e) {
}
// Unused methods from GestureDetector.OnGestureListener above
/**
 * Marks the gallery, and the touched child when one is known, as pressed.
 *
 * @param child the child under the finger, or null if none
 */
private void dispatchPress(View child) {
    setPressed(true);
    if (child != null) {
        child.setPressed(true);
    }
}
/**
 * Clears the pressed state from every child and from the gallery itself.
 */
private void dispatchUnpress() {
    final int count = getChildCount();
    for (int index = 0; index < count; index++) {
        getChildAt(index).setPressed(false);
    }
    setPressed(false);
}
@Override
public void dispatchSetSelected(boolean selected) {
    /*
     * We don't want to pass the selected state given from its parent to its
     * children since this widget itself has a selected state to give to its
     * children. (Intentionally empty override.)
     */
}
@Override
protected void dispatchSetPressed(boolean pressed) {
    // Show the pressed state on the selected child only, instead of
    // propagating it to every child as the default implementation does.
    if (mSelectedChild != null) {
        mSelectedChild.setPressed(pressed);
    }
}
@Override
protected ContextMenuInfo getContextMenuInfo() {
    // Populated by dispatchLongPress() just before the menu is shown.
    return mContextMenuInfo;
}
@Override
public boolean showContextMenuForChild(View originalView) {
    // Map the child view back to its adapter position; bail out if the
    // view is not (or no longer) one of our items.
    final int longPressPosition = getPositionForView(originalView);
    if (longPressPosition < 0) {
        return false;
    }
    final long longPressId = mAdapter.getItemId(longPressPosition);
    return dispatchLongPress(originalView, longPressPosition, longPressId);
}
@Override
public boolean showContextMenu() {
    // Keyboard/trackball-initiated context menu: act on the currently
    // selected item, but only while the gallery itself is pressed.
    if (isPressed() && mSelectedPosition >= 0) {
        int index = mSelectedPosition - mFirstPosition;
        View v = getChildAt(index);
        return dispatchLongPress(v, mSelectedPosition, mSelectedRowId);
    }
    return false;
}
/**
 * Dispatches a long press on the given item: first to any registered
 * OnItemLongClickListener, then, if unhandled, as a context-menu request.
 * Performs haptic feedback when the press is consumed.
 *
 * @param view the item view that was long-pressed
 * @param position adapter position of the item
 * @param id adapter row id of the item
 * @return true if the long press was handled
 */
private boolean dispatchLongPress(View view, int position, long id) {
    boolean handled = false;
    if (mOnItemLongClickListener != null) {
        // Bug fix: report the item actually passed in, not the stale
        // mDownTouchView/mDownTouchPosition -- this method is also reached
        // from non-touch paths (showContextMenu / showContextMenuForChild)
        // where no recent ACTION_DOWN has refreshed those fields.
        handled = mOnItemLongClickListener.onItemLongClick(this, view, position, id);
    }
    if (!handled) {
        mContextMenuInfo = new AdapterContextMenuInfo(view, position, id);
        handled = super.showContextMenuForChild(this);
    }
    if (handled) {
        performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
    }
    return handled;
}
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
    // Gallery steals all key events (they are never offered to the
    // focused child; see onKeyDown/onKeyUp for handling).
    return event.dispatch(this);
}
/**
 * Handles up, down, and clicking (this vertical variant navigates with
 * DPAD_UP/DPAD_DOWN rather than left/right).
 * @see android.view.View#onKeyDown
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    switch (keyCode) {
        case KeyEvent.KEYCODE_DPAD_UP:
            if (movePrevious()) {
                playSoundEffect(SoundEffectConstants.NAVIGATION_UP);
            }
            return true;
        case KeyEvent.KEYCODE_DPAD_DOWN:
            if (moveNext()) {
                playSoundEffect(SoundEffectConstants.NAVIGATION_DOWN);
            }
            return true;
        case KeyEvent.KEYCODE_DPAD_CENTER:
        case KeyEvent.KEYCODE_ENTER:
            // Arm the click; the actual item click fires in onKeyUp so we
            // only react to presses that started inside this view.
            mReceivedInvokeKeyDown = true;
            // fallthrough to default handling
    }
    return super.onKeyDown(keyCode, event);
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
    switch (keyCode) {
        case KeyEvent.KEYCODE_DPAD_CENTER:
        case KeyEvent.KEYCODE_ENTER: {
            // Only fire if the matching key-down happened in this view.
            if (mReceivedInvokeKeyDown) {
                if (mItemCount > 0) {
                    // Briefly show pressed feedback, then click the item.
                    dispatchPress(mSelectedChild);
                    postDelayed(new Runnable() {
                        public void run() {
                            dispatchUnpress();
                        }
                    }, ViewConfiguration.getPressedStateDuration());
                    int selectedIndex = mSelectedPosition - mFirstPosition;
                    performItemClick(getChildAt(selectedIndex), mSelectedPosition, mAdapter
                            .getItemId(mSelectedPosition));
                }
            }
            // Clear the flag
            mReceivedInvokeKeyDown = false;
            return true;
        }
    }
    return super.onKeyUp(keyCode, event);
}
/**
 * Scrolls the selection one item up.
 *
 * @return true if a move was possible (data present and not at item 0)
 */
boolean movePrevious() {
    final boolean canMove = mItemCount > 0 && mSelectedPosition > 0;
    if (canMove) {
        scrollToChild(mSelectedPosition - mFirstPosition - 1);
    }
    return canMove;
}
/**
 * Scrolls the selection one item down.
 *
 * @return true if a move was possible (data present and not at last item)
 */
boolean moveNext() {
    final boolean canMove = mItemCount > 0 && mSelectedPosition < mItemCount - 1;
    if (canMove) {
        scrollToChild(mSelectedPosition - mFirstPosition + 1);
    }
    return canMove;
}
/**
 * Smooth-scrolls so the given child (by child index, not adapter
 * position) is centered in the gallery.
 *
 * @return true if the child exists and a scroll was started
 */
private boolean scrollToChild(int childPosition) {
    View child = getChildAt(childPosition);
    if (child != null) {
        int distance = getCenterOfGallery() - getCenterOfView(child);
        mFlingRunnable.startUsingDistance(distance);
        return true;
    }
    return false;
}
@Override
void setSelectedPositionInt(int position) {
    super.setSelectedPositionInt(position);
    // Updates any metadata we keep about the selected item.
    updateSelectedItemMetadata();
}
/**
 * Points mSelectedChild at the view for the current selected position and
 * transfers selected/focusable state from the previously selected child.
 * No-op when the selected position is not currently on screen.
 */
private void updateSelectedItemMetadata() {
    View oldSelectedChild = mSelectedChild;
    View child = mSelectedChild = getChildAt(mSelectedPosition - mFirstPosition);
    if (child == null) {
        return;
    }
    child.setSelected(true);
    child.setFocusable(true);
    if (hasFocus()) {
        child.requestFocus();
    }
    // We unfocus the old child down here so the above hasFocus check
    // returns true
    if (oldSelectedChild != null) {
        // Make sure its drawable state doesn't contain 'selected'
        oldSelectedChild.setSelected(false);
        // Make sure it is not focusable anymore, since otherwise arrow keys
        // can make this one be focused
        oldSelectedChild.setFocusable(false);
    }
}
/**
 * Describes how the child views are aligned horizontally.
 *
 * @param gravity the new gravity value
 *
 * @attr ref android.R.styleable#Gallery_gravity
 */
public void setGravity(int gravity) {
    // A change in alignment requires a fresh layout pass; unchanged
    // values are ignored.
    if (mGravity == gravity) {
        return;
    }
    mGravity = gravity;
    requestLayout();
}
/**
 * Draws the selected (center) child last so it is rendered on top of its
 * neighbors, mirroring around it.
 *
 * NOTE(review): this relies on the framework calling with i ascending
 * from 0 on every drawing pass (mLastPosition is reset at i == 0), and it
 * assumes the selected item is currently visible (centerPosition >= 0) --
 * the original AOSP guard for a negative index, kept commented out below,
 * was dropped; confirm no-selection behavior before relying on it.
 */
@Override
protected int getChildDrawingOrder(int childCount, int i) {
    // int selectedIndex = mSelectedPosition - mFirstPosition;
    //
    // // Just to be safe
    // if (selectedIndex < 0) return i;
    //
    // if (i == childCount - 1) {
    // // Draw the selected child last
    // return selectedIndex;
    // } else if (i >= selectedIndex) {
    // // Move the children to the right of the selected child earlier one
    // return i + 1;
    // } else {
    // // Keep the children to the left of the selected child the same
    // return i;
    // }
    if(i == 0)
        mLastPosition = 0;
    int centerPosition = getSelectedItemPosition() - getFirstVisiblePosition();
    if (i == childCount - 1) {
        // Last draw slot: render the center child.
        return centerPosition;
    } else if (i >= centerPosition) {
        // Children after the center are drawn in reverse order so that
        // views closer to the center are painted later (on top).
        mLastPosition++;
        return childCount - mLastPosition;
    } else {
        // Children before the center keep their natural order.
        return i;
    }
}
@Override
protected void onFocusChanged(boolean gainFocus, int direction, Rect previouslyFocusedRect) {
    super.onFocusChanged(gainFocus, direction, previouslyFocusedRect);
    /*
     * The gallery shows focus by focusing the selected item. So, give
     * focus to our selected item instead. We steal keys from our
     * selected item elsewhere.
     */
    if (gainFocus && mSelectedChild != null) {
        mSelectedChild.requestFocus(direction);
    }
}
/**
 * Responsible for fling behavior. Use {@link #startUsingVelocity(int)} to
 * initiate a fling. Each frame of the fling is handled in {@link #run()}.
 * A FlingRunnable will keep re-posting itself until the fling is done.
 */
private class FlingRunnable implements Runnable {
    /**
     * Tracks the decay of a fling scroll
     */
    private Scroller mScroller;
    /**
     * Y value reported by mScroller on the previous fling frame
     */
    private int mLastFlingY;
    public FlingRunnable() {
        mScroller = new Scroller(getContext());
    }
    /** Common setup before any start*: clears pending frames of this runnable. */
    private void startCommon() {
        // Remove any pending flings
        removeCallbacks(this);
    }
    /**
     * Starts a fling with the given initial velocity (px/s, sign gives
     * direction). A zero velocity is a no-op.
     */
    public void startUsingVelocity(int initialVelocity) {
        if (initialVelocity == 0) return;
        startCommon();
        // Start from an extreme so the scroller has room to decay in
        // either direction.
        int initialY = initialVelocity < 0 ? Integer.MAX_VALUE : 0;
        mLastFlingY = initialY;
        mScroller.fling(0, initialY, 0, initialVelocity,
                0, Integer.MAX_VALUE, 0, Integer.MAX_VALUE);
        post(this);
    }
    /**
     * Starts an animated scroll over a fixed distance (px). A zero
     * distance is a no-op.
     */
    public void startUsingDistance(int distance) {
        if (distance == 0) return;
        startCommon();
        mLastFlingY = 0;
        mScroller.startScroll(0, 0, 0, -distance, mAnimationDuration);
        post(this);
    }
    /**
     * Stops the fling.
     *
     * @param scrollIntoSlots whether to snap the content into item slots
     */
    public void stop(boolean scrollIntoSlots) {
        removeCallbacks(this);
        endFling(scrollIntoSlots);
    }
    private void endFling(boolean scrollIntoSlots) {
        /*
         * Force the scroller's status to finished (without setting its
         * position to the end)
         */
        mScroller.forceFinished(true);
        if (scrollIntoSlots) scrollIntoSlots();
    }
    /** One animation frame: scroll by the scroller's delta, then re-post. */
    public void run() {
        if (mItemCount == 0) {
            endFling(true);
            return;
        }
        mShouldStopFling = false;
        final Scroller scroller = mScroller;
        boolean more = scroller.computeScrollOffset();
        final int y = scroller.getCurrY();
        // Flip sign to convert finger direction to list items direction
        // (e.g. finger moving down means list is moving towards the top)
        int delta = mLastFlingY - y;
        // Pretend that each frame of a fling scroll is a touch scroll
        if (delta > 0) {
            // Moving up. Use first view as mDownTouchPosition
            mDownTouchPosition = mFirstPosition;
            // Don't fling more than 1 screen
            delta = Math.min(getHeight() - getPaddingTop() - getPaddingBottom() - 1, delta);
        } else {
            // Moving down. Use last view as mDownTouchPosition
            int offsetToLast = getChildCount() - 1;
            mDownTouchPosition = mFirstPosition + offsetToLast;
            // Don't fling more than 1 screen
            delta = Math.max(-(getHeight() - getPaddingBottom() - getPaddingTop() - 1), delta);
        }
        trackMotionScroll(delta);
        if (more && !mShouldStopFling) {
            mLastFlingY = y;
            post(this);
        } else {
            endFling(true);
        }
    }
}
/**
 * Gallery extends LayoutParams to provide a place to hold current
 * Transformation information along with previous position/transformation
 * info. (Currently adds no fields; the subclass exists so children can be
 * type-checked and so state can be added later.)
 */
public static class LayoutParams extends ViewGroup.LayoutParams {
    /** Inflated from XML attributes. */
    public LayoutParams(Context c, AttributeSet attrs) {
        super(c, attrs);
    }
    /** Explicit width/height (use ViewGroup.LayoutParams constants). */
    public LayoutParams(int w, int h) {
        super(w, h);
    }
    /** Copy constructor from any other LayoutParams. */
    public LayoutParams(ViewGroup.LayoutParams source) {
        super(source);
    }
}
}
| apache-2.0 |
consulo/consulo | modules/base/lang-impl/src/main/java/com/intellij/application/options/codeStyle/arrangement/component/ArrangementCheckBoxUiComponent.java | 3963 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options.codeStyle.arrangement.component;
import com.intellij.application.options.codeStyle.arrangement.ArrangementConstants;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementAtomMatchCondition;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementMatchCondition;
import com.intellij.psi.codeStyle.arrangement.std.ArrangementSettingsToken;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.util.ui.GridBag;
import javax.annotation.Nonnull;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
/**
* @author Denis Zhdanov
* @since 3/11/13 10:25 AM
*/
/**
 * Arrangement-rule UI component rendered as a checkbox with a text label.
 * The component represents a single {@link ArrangementSettingsToken}; its
 * match condition is the atom condition for that token, and its selected
 * state mirrors the checkbox.
 */
public class ArrangementCheckBoxUiComponent extends AbstractArrangementUiComponent {
  @Nonnull
  private final JPanel myComponent = new JPanel(new GridBagLayout());
  @Nonnull
  private final ArrangementAtomMatchCondition myCondition;
  @Nonnull
  private final JBCheckBox myCheckBox;
  @Nonnull
  private final JLabel myTextLabel;
  public ArrangementCheckBoxUiComponent(@Nonnull ArrangementSettingsToken token) {
    super(token);
    myComponent.setOpaque(false);
    myCondition = new ArrangementAtomMatchCondition(token);
    myCheckBox = new JBCheckBox();
    myCheckBox.setOpaque(false);
    myTextLabel = new JLabel(token.getRepresentationValue());
    myCheckBox.addItemListener(new ItemListener() {
      @Override
      public void itemStateChanged(ItemEvent e) {
        myTextLabel.setEnabled(myCheckBox.isEnabled());
        fireStateChanged();
      }
    });
    // Clicking the label toggles the checkbox as well.
    myTextLabel.addMouseListener(new MouseAdapter() {
      @Override
      public void mousePressed(MouseEvent e) {
        myCheckBox.setSelected(!myCheckBox.isSelected());
      }
    });
    myComponent.add(myCheckBox, new GridBag().anchor(GridBagConstraints.WEST).insets(0, 0, 0, 2));
    myComponent.add(myTextLabel, new GridBag().anchor(GridBagConstraints.WEST).insets(0, 0, 0, ArrangementConstants.HORIZONTAL_GAP));
  }
  @Nonnull
  @Override
  public ArrangementSettingsToken getToken() {
    return myCondition.getType();
  }
  @Override
  public void chooseToken(@Nonnull ArrangementSettingsToken data) throws UnsupportedOperationException {
    // This component is bound to exactly one token; any other is a
    // programming error.
    if (!getToken().equals(data)) {
      throw new UnsupportedOperationException(String.format(
        "Can't choose '%s' data at the check box token with data '%s'", data, getToken()
      ));
    }
  }
  @Nonnull
  @Override
  public ArrangementMatchCondition getMatchCondition() {
    return myCondition;
  }
  @Override
  protected JComponent doGetUiComponent() {
    return myComponent;
  }
  @Override
  protected void doReset() {
    // No internal state beyond the checkbox; nothing to reset here.
  }
  @Override
  public boolean isEnabled() {
    return myCheckBox.isEnabled();
  }
  @Override
  public void setEnabled(boolean enabled) {
    myCheckBox.setEnabled(enabled);
    // Bug fix: keep the companion label's enabled state in sync right away.
    // Previously the label was only refreshed from the ItemListener, i.e.
    // on the NEXT selection change, so it looked enabled/disabled wrongly
    // in the meantime.
    myTextLabel.setEnabled(enabled);
  }
  @Override
  public boolean isSelected() {
    return myCheckBox.isSelected();
  }
  @Override
  public void setSelected(boolean selected) {
    myCheckBox.setSelected(selected);
  }
  @Override
  public int getBaselineToUse(int width, int height) {
    // Align on the label's text baseline, not the checkbox glyph.
    return myTextLabel.getBaseline(width, height);
  }
  @Override
  public void handleMouseClickOnSelected() {
    // Clicking an already-selected component deselects it.
    setSelected(false);
  }
}
| apache-2.0 |
JohnnySun/MusicPlayer | app/src/main/java/bob/sun/bender/model/MiBandDevice.java | 918 | package bob.sun.bender.model;
import android.bluetooth.BluetoothDevice;
import com.huami.mibandscan.MiBandScanResult;
/**
 * Value holder for a Mi Band discovered during a Bluetooth scan: the
 * band's MAC address, the signal strength (RSSI) observed at scan time,
 * the underlying Bluetooth device, and the raw scan result it came from.
 *
 * Created by bmy001 on 2017/04/01.
 */
public class MiBandDevice {

    private String bandMac;
    private int rssi;
    private BluetoothDevice device;
    private MiBandScanResult result;

    public String getBandMac() {
        return bandMac;
    }

    public void setBandMac(String bandMac) {
        this.bandMac = bandMac;
    }

    public int getRssi() {
        return rssi;
    }

    public void setRssi(int rssi) {
        this.rssi = rssi;
    }

    public BluetoothDevice getDevice() {
        return device;
    }

    public void setDevice(BluetoothDevice device) {
        this.device = device;
    }

    public MiBandScanResult getResult() {
        return result;
    }

    public void setResult(MiBandScanResult result) {
        this.result = result;
    }
}
| apache-2.0 |
phyokyaw/jaquapi | core/src/main/java/net/phyokyaw/jaquapi/core/model/Device.java | 3789 | package net.phyokyaw.jaquapi.core.model;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import net.phyokyaw.jaquapi.core.services.ScheduledService;
public class Device extends AbstractModel {
private static final Logger logger = LoggerFactory.getLogger(Device.class);
public static final String OVERRIDING_MODE_TIMEOUT = "overridingModeTimeOut";
private Mode mode = new OnOffMode(true);
private Operatable operableDevice;
private Mode overridingMode;
private final int id;
private String name;
@Autowired
private ScheduledService scheduledService;
public Device(int id) {
this.id = id;
}
public Mode getMode() {
return mode;
}
public Mode getActiveMode() {
return isOverridingModeScheduleActive() ? overridingMode : mode;
}
public void setMode(Mode mode) {
this.mode = mode;
}
private ScheduledFuture<?> overridingModeSchedule;
private ScheduledFuture<?> timeoutPublishSchedule;
private static final long INTERVAL = 100L;
public void setOverridingMode(Mode mode, long timeOutInMilliSec) {
cancelOverridingMode();
overridingMode = mode;
timeoutPublishSchedule = scheduledService.addScheduleAtFixrate(INTERVAL, new Runnable() {
@Override
public void run() {
if (isOverridingModeScheduleActive()) {
Device.this.firePropertyChange(OVERRIDING_MODE_TIMEOUT, null, overridingModeSchedule.getDelay(TimeUnit.MILLISECONDS));
}
}
}, INTERVAL);
createNewSchedule(timeOutInMilliSec);
}
private void createNewSchedule(long timeOutInMilliSec) {
overridingModeSchedule = scheduledService.addSchedule(timeOutInMilliSec, new Runnable() {
@Override
public void run() {
logger.info("Canceling");
cancelOverridingMode();
}
});
}
public boolean isOverridingModeScheduleActive() {
return (overridingModeSchedule != null && !overridingModeSchedule.isCancelled() && !overridingModeSchedule.isDone());
}
public void updateOverridingMode(long timeOutInMilliSec) {
synchronized (this) {
if (overridingMode == null) {
return;
}
long remainingTime = 0L;
if (isOverridingModeScheduleActive()) {
remainingTime = overridingModeSchedule.getDelay(TimeUnit.MILLISECONDS);
overridingModeSchedule.cancel(true);
setOverridingMode(overridingMode, remainingTime + timeOutInMilliSec);
}
createNewSchedule(remainingTime + timeOutInMilliSec);
}
}
public void cancelOverridingMode() {
synchronized (this) {
if (isOverridingModeScheduleActive()) {
overridingModeSchedule.cancel(true);
timeoutPublishSchedule.cancel(false);
overridingMode = null;
}
}
}
public long getOverridingModeTimeOut() {
return overridingModeSchedule.getDelay(TimeUnit.MILLISECONDS);
}
public void update() {
if (overridingMode != null) {
setOn(overridingMode.shouldBeOn());
} else {
setOn(mode.shouldBeOn());
}
}
private void setOn(boolean on) {
if (operableDevice != null && operableDevice.isReady()) {
try {
operableDevice.setOn(on);
} catch (Exception e) {
logger.error("Unable to set on off", e);
}
}
// logger.debug(name + " mode is " + on);
}
public boolean isOn() {
if (operableDevice != null && operableDevice.isReady()) {
try {
return operableDevice.isOn();
} catch (Exception e) {
logger.error("Unable to get on off", e);
}
}
return false;
}
public Operatable getOperatable() {
return operableDevice;
}
public void setOperatable(Operatable operableDevice) {
this.operableDevice = operableDevice;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getId() {
return id;
}
} | apache-2.0 |
ganyao114/SwiftAndroid | common/src/main/java/net/swiftos/common/exception/ExceptionEngine.java | 269 | package net.swiftos.common.exception;
/**
* 异常转换 转换为用户可读的异常信息
* Created by ganyao on 2016/11/4.
*/
public class ExceptionEngine {
public static Throwable handlerException(Throwable throwable){
return throwable;
}
}
| apache-2.0 |
vschs007/buck | src/com/facebook/buck/lua/AbstractNativeExecutableStarter.java | 11160 | /*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.lua;
import com.facebook.buck.cxx.AbstractCxxLibrary;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxLink;
import com.facebook.buck.cxx.CxxLinkableEnhancer;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPreprocessAndCompile;
import com.facebook.buck.cxx.CxxPreprocessables;
import com.facebook.buck.cxx.CxxPreprocessorDep;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.CxxSourceRuleFactory;
import com.facebook.buck.cxx.HeaderVisibility;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.Linkers;
import com.facebook.buck.cxx.NativeLinkTarget;
import com.facebook.buck.cxx.NativeLinkTargetMode;
import com.facebook.buck.cxx.NativeLinkable;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.file.WriteFile;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.WriteStringTemplateRule;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.immutables.BuckStyleTuple;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.google.common.base.Suppliers;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.io.Resources;
import org.immutables.value.Value;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
/**
* {@link Starter} implementation which builds a starter as a native executable.
*/
@Value.Immutable
@BuckStyleTuple
abstract class AbstractNativeExecutableStarter implements Starter, NativeLinkTarget {
private static final String NATIVE_STARTER_CXX_SOURCE =
"com/facebook/buck/lua/native-starter.cpp.in";
// Build-rule context for the generated starter.
abstract BuildRuleParams getBaseParams();
abstract BuildRuleResolver getRuleResolver();
abstract SourcePathResolver getPathResolver();
abstract SourcePathRuleFinder getRuleFinder();
// Toolchain/config knobs used to compile and link the starter.
abstract LuaConfig getLuaConfig();
abstract CxxBuckConfig getCxxBuckConfig();
abstract CxxPlatform getCxxPlatform();
// Identity and output location of the starter binary.
abstract BuildTarget getTarget();
abstract Path getOutput();
// Lua entry-point module name baked into the generated source.
abstract String getMainModule();
abstract Optional<BuildTarget> getNativeStarterLibrary();
// Optional run-time locations (relative to the output) substituted into
// the starter template; absent values become NULL in the generated C++.
abstract Optional<Path> getRelativeModulesDir();
abstract Optional<Path> getRelativePythonModulesDir();
abstract Optional<Path> getRelativeNativeLibsDir();
private String getNativeStarterCxxSourceTemplate() {
try {
return Resources.toString(Resources.getResource(NATIVE_STARTER_CXX_SOURCE), Charsets.UTF_8);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private CxxSource getNativeStarterCxxSource() {
BuildTarget target =
BuildTarget.builder(getBaseParams().getBuildTarget())
.addFlavors(InternalFlavor.of("native-starter-cxx-source"))
.build();
BuildRule rule;
Optional<BuildRule> maybeRule = getRuleResolver().getRuleOptional(target);
if (maybeRule.isPresent()) {
rule = maybeRule.get();
} else {
BuildTarget templateTarget =
BuildTarget.builder(getBaseParams().getBuildTarget())
.addFlavors(InternalFlavor.of("native-starter-cxx-source-template"))
.build();
WriteFile templateRule = getRuleResolver().addToIndex(
new WriteFile(
getBaseParams()
.withBuildTarget(templateTarget)
.copyReplacingDeclaredAndExtraDeps(
Suppliers.ofInstance(ImmutableSortedSet.of()),
Suppliers.ofInstance(ImmutableSortedSet.of())),
getNativeStarterCxxSourceTemplate(),
BuildTargets.getGenPath(
getBaseParams().getProjectFilesystem(),
templateTarget,
"%s/native-starter.cpp.in"),
/* executable */ false));
Path output =
BuildTargets.getGenPath(
getBaseParams().getProjectFilesystem(),
target,
"%s/native-starter.cpp");
rule = getRuleResolver().addToIndex(
WriteStringTemplateRule.from(
getBaseParams(),
getRuleFinder(),
target,
output,
templateRule.getSourcePathToOutput(),
ImmutableMap.of(
"MAIN_MODULE",
Escaper.escapeAsPythonString(getMainModule()),
"MODULES_DIR",
getRelativeModulesDir().isPresent() ?
Escaper.escapeAsPythonString(getRelativeModulesDir().get().toString()) :
"NULL",
"PY_MODULES_DIR",
getRelativePythonModulesDir().isPresent() ?
Escaper.escapeAsPythonString(getRelativePythonModulesDir().get().toString()) :
"NULL",
"EXT_SUFFIX",
Escaper.escapeAsPythonString(getCxxPlatform().getSharedLibraryExtension())),
/* executable */ false));
}
return CxxSource.of(
CxxSource.Type.CXX,
Preconditions.checkNotNull(rule.getSourcePathToOutput()),
ImmutableList.of());
}
private ImmutableList<CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
CxxPlatform cxxPlatform,
Iterable<? extends CxxPreprocessorDep> deps)
throws NoSuchBuildTargetException {
ImmutableList.Builder<CxxPreprocessorInput> inputs = ImmutableList.builder();
inputs.addAll(
CxxPreprocessables.getTransitiveCxxPreprocessorInput(
cxxPlatform,
FluentIterable.from(deps)
.filter(BuildRule.class)));
for (CxxPreprocessorDep dep :
Iterables.filter(deps, Predicates.not(BuildRule.class::isInstance))) {
inputs.add(dep.getCxxPreprocessorInput(cxxPlatform, HeaderVisibility.PUBLIC));
}
return inputs.build();
}
public Iterable<? extends AbstractCxxLibrary> getNativeStarterDeps() {
return ImmutableList.of(
getNativeStarterLibrary().isPresent() ?
getRuleResolver().getRuleWithType(
getNativeStarterLibrary().get(),
AbstractCxxLibrary.class) :
getLuaConfig().getLuaCxxLibrary(getRuleResolver()));
}
private NativeLinkableInput getNativeLinkableInput() throws NoSuchBuildTargetException {
Iterable<? extends AbstractCxxLibrary> nativeStarterDeps = getNativeStarterDeps();
ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects =
CxxSourceRuleFactory.requirePreprocessAndCompileRules(
getBaseParams(),
getRuleResolver(),
getPathResolver(),
getRuleFinder(),
getCxxBuckConfig(),
getCxxPlatform(),
ImmutableList.<CxxPreprocessorInput>builder()
.add(
CxxPreprocessorInput.builder()
.putAllPreprocessorFlags(
CxxSource.Type.CXX,
getNativeStarterLibrary().isPresent() ?
ImmutableList.of() :
ImmutableList.of("-DBUILTIN_NATIVE_STARTER"))
.build())
.addAll(getTransitiveCxxPreprocessorInput(getCxxPlatform(), nativeStarterDeps))
.build(),
ImmutableMultimap.of(),
Optional.empty(),
Optional.empty(),
ImmutableMap.of("native-starter.cpp", getNativeStarterCxxSource()),
CxxSourceRuleFactory.PicType.PDC,
Optional.empty());
return NativeLinkableInput.builder()
.addAllArgs(
getRelativeNativeLibsDir().isPresent() ?
StringArg.from(
Linkers.iXlinker(
"-rpath",
String.format(
"%s/%s",
getCxxPlatform().getLd().resolve(getRuleResolver()).origin(),
getRelativeNativeLibsDir().get().toString()))) :
ImmutableList.of())
.addAllArgs(SourcePathArg.from(objects.values()))
.build();
}
@Override
public SourcePath build() throws NoSuchBuildTargetException {
BuildTarget linkTarget = getTarget();
CxxLink linkRule = getRuleResolver().addToIndex(
CxxLinkableEnhancer.createCxxLinkableBuildRule(
getCxxBuckConfig(),
getCxxPlatform(),
getBaseParams(),
getRuleResolver(),
getPathResolver(),
getRuleFinder(),
linkTarget,
Linker.LinkType.EXECUTABLE,
Optional.empty(),
getOutput(),
Linker.LinkableDepType.SHARED,
/* thinLto */ false,
getNativeStarterDeps(),
Optional.empty(),
Optional.empty(),
ImmutableSet.of(),
getNativeLinkableInput()));
return linkRule.getSourcePathToOutput();
}
@Override
public BuildTarget getBuildTarget() {
return getBaseParams().getBuildTarget();
}
@Override
public NativeLinkTargetMode getNativeLinkTargetMode(CxxPlatform cxxPlatform) {
return NativeLinkTargetMode.executable();
}
@Override
public Iterable<? extends NativeLinkable> getNativeLinkTargetDeps(CxxPlatform cxxPlatform) {
return getNativeStarterDeps();
}
@Override
public NativeLinkableInput getNativeLinkTargetInput(CxxPlatform cxxPlatform)
throws NoSuchBuildTargetException {
return getNativeLinkableInput();
}
@Override
public Optional<Path> getNativeLinkTargetOutputPath(CxxPlatform cxxPlatform) {
return Optional.of(getOutput());
}
}
| apache-2.0 |
jk1/intellij-community | json/src/com/intellij/json/psi/impl/JsonPropertyNameReference.java | 2152 | package com.intellij.json.psi.impl;
import com.intellij.json.psi.JsonProperty;
import com.intellij.json.psi.JsonValue;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.ElementManipulators;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Self-reference from a JSON property's key element back to the property.
 *
 * @author Mikhail Golubev
 */
public class JsonPropertyNameReference implements PsiReference {
  private final JsonProperty myProperty;

  public JsonPropertyNameReference(@NotNull JsonProperty property) {
    myProperty = property;
  }

  @NotNull
  @Override
  public PsiElement getElement() {
    return myProperty;
  }

  @NotNull
  @Override
  public TextRange getRangeInElement() {
    // For quoted string keys this is the value with quotes stripped;
    // otherwise the element's full text range.
    return ElementManipulators.getValueTextRange(myProperty.getNameElement());
  }

  @Nullable
  @Override
  public PsiElement resolve() {
    // The property name always resolves to its own property.
    return myProperty;
  }

  @NotNull
  @Override
  public String getCanonicalText() {
    return myProperty.getName();
  }

  @Override
  public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException {
    return myProperty.setName(newElementName);
  }

  @Override
  public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException {
    return null;
  }

  @Override
  public boolean isReferenceTo(PsiElement element) {
    if (!(element instanceof JsonProperty)) {
      return false;
    }
    // For compatibility with the JavaScript JSON support, any *other* property
    // with the same name counts as a target of this reference.
    final JsonProperty candidate = (JsonProperty) element;
    return candidate.getName().equals(getCanonicalText()) && resolve() != candidate;
  }

  @NotNull
  @Override
  public Object[] getVariants() {
    return ArrayUtil.EMPTY_OBJECT_ARRAY;
  }

  @Override
  public boolean isSoft() {
    return true;
  }
}
| apache-2.0 |
philburk/jsyn | src/main/java/com/jsyn/unitgen/MorphingOscillatorBL.java | 2649 | /*
* Copyright 2009 Phil Burk, Mobileer Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jsyn.unitgen;
import com.jsyn.engine.MultiTable;
import com.jsyn.ports.UnitInputPort;
/**
 * Oscillator whose waveform morphs continuously between sine, sawtooth and pulse.
 *
 * @author Phil Burk (C) 2016 Mobileer Inc
 */
public class MorphingOscillatorBL extends PulseOscillatorBL {
    /**
     * Controls the shape of the waveform.
     * The shape varies continuously from a sine wave at -1.0,
     * to a sawtooth at 0.0 to a pulse wave at 1.0.
     */
    public UnitInputPort shape;

    public MorphingOscillatorBL() {
        shape = new UnitInputPort("Shape");
        addPort(shape);
        shape.setMinimum(-1.0);
        shape.setMaximum(1.0);
    }

    @Override
    protected double generateBL(MultiTable multiTable, double currentPhase,
            double positivePhaseIncrement, double flevel, int i) {
        // Per-sample morph amount; renamed locally to avoid shadowing the port field.
        final double morph = shape.getValues()[i];
        if (morph < 0.0) {
            // Negative morph squeezes the table level toward the pure sine table.
            flevel += flevel * morph;
            return multiTable.calculateSawtooth(currentPhase, positivePhaseIncrement, flevel);
        }
        // Non-negative morph: blend two out-of-phase sawtooths toward a pulse.
        double pulseWidth = width.getValues()[i];
        pulseWidth = (pulseWidth > 0.999) ? 0.999 : ((pulseWidth < -0.999) ? -0.999 : pulseWidth);
        final double saw1 = multiTable.calculateSawtooth(currentPhase, positivePhaseIncrement, flevel);
        // Second sawtooth, 180 degrees out of phase, offset by the pulse width.
        double shiftedPhase = currentPhase + 1.0 - pulseWidth;
        if (shiftedPhase >= 1.0) {
            shiftedPhase -= 2.0;
        }
        final double saw2 = multiTable.calculateSawtooth(shiftedPhase, positivePhaseIncrement, flevel);
        /*
         * Amplitude compensation based on the positive phase increment;
         * a little less than half at Nyquist/2.0.
         */
        final double scale = 1.0 - positivePhaseIncrement;
        return scale * (saw1 - ((saw2 + pulseWidth) * morph)); // apply shape morphing
    }
}
| apache-2.0 |
calvinaquino/LNReader-Android | app/src/main/java/com/erakk/lnreader/ui/activity/DisplaySettingsActivity.java | 68816 | package com.erakk.lnreader.ui.activity;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Dialog;
import android.content.ContentUris;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.preference.EditTextPreference;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceChangeListener;
import android.preference.Preference.OnPreferenceClickListener;
import android.preference.PreferenceManager;
import android.preference.PreferenceScreen;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.provider.Settings;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.Toolbar;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.style.ForegroundColorSpan;
import android.util.Log;
import android.util.SparseBooleanArray;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.Toast;
import com.erakk.lnreader.AlternativeLanguageInfo;
import com.erakk.lnreader.Constants;
import com.erakk.lnreader.LNReaderApplication;
import com.erakk.lnreader.R;
import com.erakk.lnreader.UIHelper;
import com.erakk.lnreader.adapter.FileListAdapter;
import com.erakk.lnreader.callback.ICallbackEventData;
import com.erakk.lnreader.callback.IExtendedCallbackNotifier;
import com.erakk.lnreader.dao.NovelsDao;
import com.erakk.lnreader.helper.DBHelper;
import com.erakk.lnreader.helper.Util;
import com.erakk.lnreader.service.AutoBackupScheduleReceiver;
import com.erakk.lnreader.service.AutoBackupService;
import com.erakk.lnreader.service.UpdateScheduleReceiver;
import com.erakk.lnreader.task.AsyncTaskResult;
import com.erakk.lnreader.task.CopyDBTask;
import com.erakk.lnreader.task.DeleteFilesTask;
import com.erakk.lnreader.task.RelinkImagesTask;
import com.erakk.lnreader.task.UnZipFilesTask;
import com.erakk.lnreader.task.ZipFilesTask;
import com.example.android.supportv7.app.AppCompatPreferenceActivity;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.logging.Logger;
//public class DisplaySettingsActivity extends PreferenceActivity implements IExtendedCallbackNotifier<AsyncTaskResult<?>> {
public class DisplaySettingsActivity extends AppCompatPreferenceActivity implements IExtendedCallbackNotifier<AsyncTaskResult<?>> {
private static final String TAG = DisplaySettingsActivity.class.toString();
private DeleteFilesTask deleteTask;
private ZipFilesTask zipTask;
private UnZipFilesTask unzipTask;
private RelinkImagesTask relinkTask;
private CopyDBTask copyDbTask;
private CopyDBTask restoreDbTask;
// Context context;
/**
 * ***********************************************************
 * Works around the known Android bug that leaves a child preference
 * screen without the custom theme: whenever a nested screen is clicked
 * we decorate its dialog ourselves.
 * ***************************************************************
 */
@Override
@SuppressWarnings("deprecation")
public boolean onPreferenceTreeClick(PreferenceScreen preferenceScreen, Preference preference) {
    super.onPreferenceTreeClick(preferenceScreen, preference);
    // instanceof is null-safe, so the separate null check is folded into it.
    if (preference instanceof PreferenceScreen) {
        setUpNestedScreen((PreferenceScreen) preference);
    }
    return false;
}
/**
 * Restores instance state, tolerating the known platform failure.
 * HACK: guards against android.os.BadParcelableException: ClassNotFoundException
 * when unmarshalling: android.support.v7.widget.Toolbar$SavedState
 */
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
    try {
        super.onRestoreInstanceState(savedInstanceState);
    } catch (Exception ex) {
        // FIX: log the throwable too — the old call dropped the stack trace,
        // making the swallowed failure impossible to diagnose.
        Log.e(TAG, "Failed to restore instance state.", ex);
    }
}
/**
 * Enable toolbar on child screen
 * http://stackoverflow.com/a/27455330
 *
 * @param preferenceScreen the nested screen whose dialog receives the toolbar
 */
public void setUpNestedScreen(PreferenceScreen preferenceScreen) {
    final Dialog dialog = preferenceScreen.getDialog();
    Toolbar bar = null;
    try {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            View tempView = dialog.findViewById(android.R.id.list);
            ViewParent viewParent = tempView.getParent();
            if (viewParent != null && viewParent instanceof LinearLayout) {
                LinearLayout root = (LinearLayout) viewParent;
                bar = (Toolbar) LayoutInflater.from(this).inflate(R.layout.settings_toolbar, root, false);
                root.addView(bar, 0); // insert at top
            } else {
                // FIX: this branch is also reached when viewParent is null; the old
                // log line dereferenced it unconditionally and threw an NPE that was
                // then mis-reported by the catch block below.
                Log.i(TAG, "setUpNestedScreen() using unknown Layout: "
                        + (viewParent == null ? "null" : viewParent.getClass().toString()));
            }
        } else {
            // Pre-ICS: rebuild the content view with the toolbar stacked above the list.
            ViewGroup root = (ViewGroup) dialog.findViewById(android.R.id.content);
            ListView content = (ListView) root.getChildAt(0);
            root.removeAllViews();
            bar = (Toolbar) LayoutInflater.from(this).inflate(R.layout.settings_toolbar, root, false);
            int height;
            TypedValue tv = new TypedValue();
            // Pad the list by the action-bar height so it is not hidden behind the toolbar.
            if (getTheme().resolveAttribute(R.attr.actionBarSize, tv, true)) {
                height = TypedValue.complexToDimensionPixelSize(tv.data, getResources().getDisplayMetrics());
            } else {
                height = bar.getHeight();
            }
            content.setPadding(0, height, 0, 0);
            root.addView(content);
            root.addView(bar);
        }
    } catch (Exception ex) {
        Log.w(TAG, "Failed to get Toolbar on Settings Page", ex);
    }
    if (bar != null) {
        bar.setTitle(preferenceScreen.getTitle());
        // Close the nested dialog when the navigation (back) icon is tapped.
        bar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
    }
}
/**
 * Enable toolbar on pref screen
 * http://stackoverflow.com/a/30281205
 */
private void setupActionBar() {
    Toolbar toolbar = null;
    try {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            // Walk up from the preference list to the screen root and insert the
            // toolbar on top. NOTE(review): the triple getParent() chain is tied to
            // the platform's internal layout — verify on new API levels.
            ViewGroup root = (ViewGroup) findViewById(android.R.id.list).getParent().getParent().getParent();
            toolbar = (Toolbar) LayoutInflater.from(this).inflate(R.layout.settings_toolbar, root, false);
            root.addView(toolbar, 0);
        } else {
            // Pre-ICS: rebuild the content view with the toolbar stacked above the list.
            ViewGroup root = (ViewGroup) findViewById(android.R.id.content);
            if (root.getChildAt(0) instanceof ListView) {
                ListView content = (ListView) root.getChildAt(0);
                root.removeAllViews();
                toolbar = (Toolbar) LayoutInflater.from(this).inflate(R.layout.settings_toolbar, root, false);
                int height;
                TypedValue tv = new TypedValue();
                // Pad the list by the action-bar height so it is not hidden behind the toolbar.
                if (getTheme().resolveAttribute(R.attr.actionBarSize, tv, true)) {
                    height = TypedValue.complexToDimensionPixelSize(tv.data, getResources().getDisplayMetrics());
                } else {
                    height = toolbar.getHeight();
                }
                content.setPadding(0, height, 0, 0);
                root.addView(content);
                root.addView(toolbar);
            }
        }
    } catch (Exception ex) {
        Log.w(TAG, "Failed to get Toolbar on Settings Page", ex);
    }
    // Only promote the toolbar to the support action bar when it was attached.
    if (toolbar != null) {
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }
}
/**
 * Builds the whole settings screen: inflates the preference XML, wires up each
 * preference section, and registers this activity as the listener for the
 * update and auto-backup background services.
 */
@Override
@SuppressLint("SdCardPath")
@SuppressWarnings("deprecation")
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setupActionBar();
    // addPreferencesFromResource is deprecated, but kept so the app can still run on older API levels.
    addPreferencesFromResource(R.xml.preferences);
    generalPreferences();
    updatePreferences();
    readingPreferences();
    storagePreferences();
    maintenancePreferences();
    // TOS activity: opens the copyright page in the content reader.
    Preference tos = findPreference("tos");
    tos.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            try {
                Intent intent = new Intent(getApplicationContext(), DisplayLightNovelContentActivity.class);
                intent.putExtra(Constants.EXTRA_PAGE, getResources().getString(R.string.copyright));
                startActivity(intent);
            } catch (Exception e) {
                Log.e(TAG, getResources().getString(R.string.not_copyright), e);
            }
            return false;
        }
    });
    // App version: shows "versionName (versionCode)" and opens the changelog on click.
    Preference appVersion = findPreference("app_version");
    String version = "N/A";
    try {
        version = getPackageManager().getPackageInfo(getPackageName(), 0).versionName + " (" + getPackageManager().getPackageInfo(getPackageName(), 0).versionCode + ")";
    } catch (PackageManager.NameNotFoundException e) {
        Log.e(TAG, "Cannot get version.", e);
    }
    appVersion.setSummary(version);
    appVersion.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            try {
                Intent intent = new Intent(getApplicationContext(), DisplayChangelogActivity.class);
                startActivity(intent);
            } catch (Exception e) {
                Log.e(TAG, getResources().getString(R.string.title_activity_display_changelog), e);
            }
            return false;
        }
    });
    // Credits activity.
    Preference credits = findPreference("credits");
    credits.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            try {
                Intent intent = new Intent(getApplicationContext(), DisplayCreditActivity.class);
                startActivity(intent);
            } catch (Exception e) {
                Log.e(TAG, getResources().getString(R.string.title_activity_display_credit), e);
            }
            return false;
        }
    });
    // Non-preference setup: receive progress/completion callbacks from the services.
    LNReaderApplication.getInstance().setUpdateServiceListener(this);
    LNReaderApplication.getInstance().setAutoBackupServiceListener(this);
}
/**
 * Wires up the maintenance preferences: the four "find missing" scans
 * (missing chapters, redlink chapters, empty books, empty novels) and the
 * external-temp cleanup action.
 */
@SuppressWarnings("deprecation")
private void maintenancePreferences() {
    // Each scan preference uses the same constant as both its preference key
    // and the FindMissingActivity mode, so one helper covers all four.
    bindFindMissingPreference(Constants.PREF_MISSING_CHAPTER);
    bindFindMissingPreference(Constants.PREF_REDLINK_CHAPTER);
    bindFindMissingPreference(Constants.PREF_EMPTY_BOOK);
    bindFindMissingPreference(Constants.PREF_EMPTY_NOVEL);

    Preference cleanExternalTemp = findPreference(Constants.PREF_CLEAR_EXTERNAL_TEMP);
    cleanExternalTemp.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            deleteExternalTemp();
            return false;
        }
    });
}

/**
 * Binds the preference whose key is {@code mode} to launch
 * {@link FindMissingActivity} in that same mode.
 *
 * @param mode shared preference key / EXTRA_FIND_MISSING_MODE value
 */
@SuppressWarnings("deprecation")
private void bindFindMissingPreference(final String mode) {
    Preference pref = findPreference(mode);
    pref.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            Intent intent = new Intent(getApplicationContext(), FindMissingActivity.class);
            intent.putExtra(Constants.EXTRA_FIND_MISSING_MODE, mode);
            startActivity(intent);
            return true;
        }
    });
}
/**
 * Wires up the "general" preference section: UI mode selection, application
 * language, alternate-language list, and screen orientation.
 */
@SuppressWarnings("deprecation")
private void generalPreferences() {
    // UI Selection: summary always mirrors the currently selected mode.
    final Preference uiMode = findPreference("ui_selection");
    final String[] uiSelectionArray = getResources().getStringArray(R.array.uiSelection);
    int uiSelectionValue = UIHelper.getIntFromPreferences(Constants.PREF_UI_SELECTION, 0);
    uiMode.setSummary(String.format(getResources().getString(R.string.selected_mode), uiSelectionArray[uiSelectionValue]));
    uiMode.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            // Fall back to index 0 on an unparsable value rather than crashing.
            int uiSelectionValue = Util.tryParseInt(newValue.toString(), 0);
            uiMode.setSummary(String.format(getResources().getString(R.string.selected_mode), uiSelectionArray[uiSelectionValue]));
            return true;
        }
    });
    setApplicationLanguage();
    setAlternateLanguageList();
    // Invert Color
    // Preference invertColors = findPreference(Constants.PREF_INVERT_COLOR);
    // invertColors.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
    //
    // @Override
    // public boolean onPreferenceClick(Preference p) {
    // recreateUI();
    // return true;
    // }
    // });
    // Orientation Selection: updates summary and applies the orientation immediately.
    final Preference orientation = findPreference(Constants.PREF_ORIENTATION);
    final String[] orientationArray = getResources().getStringArray(R.array.orientationSelection);
    int orientationIntervalValue = UIHelper.getIntFromPreferences(Constants.PREF_ORIENTATION, 0);
    orientation.setSummary(String.format(getResources().getString(R.string.orientation_summary), orientationArray[orientationIntervalValue]));
    orientation.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            int orientationIntervalValue = Util.tryParseInt(newValue.toString(), 0);
            // UpdateScheduleReceiver.reschedule(orientationIntervalValue);
            orientation.setSummary(String.format(getResources().getString(R.string.orientation_summary), orientationArray[orientationIntervalValue]));
            setOrientation();
            return true;
        }
    });
}
/**
 * Wires up the update-related preferences: update interval (reschedules the
 * background receiver on change), manual "run updates", network timeout, and
 * retry count. Each summary mirrors the current stored value.
 */
@SuppressWarnings("deprecation")
private void updatePreferences() {
    // Update interval: changing it reschedules the periodic update receiver.
    final Preference updatesInterval = findPreference(Constants.PREF_UPDATE_INTERVAL);
    final String[] updateIntervalArray = getResources().getStringArray(R.array.updateInterval);
    int updatesIntervalValue = UIHelper.getIntFromPreferences(Constants.PREF_UPDATE_INTERVAL, 0);
    updatesInterval.setSummary(String.format(getResources().getString(R.string.update_interval_summary), updateIntervalArray[updatesIntervalValue]));
    updatesInterval.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            int updatesIntervalInt = Util.tryParseInt(newValue.toString(), 0);
            UpdateScheduleReceiver.reschedule(preference.getContext(), updatesIntervalInt);
            updatesInterval.setSummary(String.format(getResources().getString(R.string.update_interval_summary), updateIntervalArray[updatesIntervalInt]));
            return true;
        }
    });
    // Run updates now; summary shows last run time and its status.
    Preference runUpdates = findPreference(Constants.PREF_RUN_UPDATES);
    runUpdates.setSummary(String.format(getResources().getString(R.string.last_run), runUpdates.getSharedPreferences().getString(Constants.PREF_RUN_UPDATES, getResources().getString(R.string.none)), runUpdates.getSharedPreferences().getString(Constants.PREF_RUN_UPDATES_STATUS, getResources().getString(R.string.unknown))));
    runUpdates.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference p) {
            runUpdate();
            return true;
        }
    });
    // Network timeout in seconds (default 60).
    final Preference timeout = findPreference(Constants.PREF_TIMEOUT);
    int timeoutValue = UIHelper.getIntFromPreferences(Constants.PREF_TIMEOUT, 60);
    timeout.setSummary(String.format(getResources().getString(R.string.pref_timeout_summary), timeoutValue));
    timeout.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            int timeoutValue = Util.tryParseInt(newValue.toString(), 60);
            timeout.setSummary(String.format(getResources().getString(R.string.pref_timeout_summary), timeoutValue));
            return true;
        }
    });
    // Download retry count (default 3).
    final Preference retry = findPreference(Constants.PREF_RETRY);
    int retryValue = UIHelper.getIntFromPreferences(Constants.PREF_RETRY, 3);
    retry.setSummary(String.format(getResources().getString(R.string.pref_retry_summary), retryValue));
    retry.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            int retryValue = Util.tryParseInt(newValue.toString(), 3);
            retry.setSummary(String.format(getResources().getString(R.string.pref_retry_summary), retryValue));
            return true;
        }
    });
}
/**
 * Wires up the reading preferences: scroll step size, the "reset zoom level"
 * action, and delegates to the CSS and TTS sub-sections.
 */
@SuppressWarnings("deprecation")
private void readingPreferences() {
    // Scroll step size (default 5); summary mirrors the stored value.
    final Preference scrollingSize = findPreference(Constants.PREF_SCROLL_SIZE);
    int scrollingSizeValue = UIHelper.getIntFromPreferences(Constants.PREF_SCROLL_SIZE, 5);
    scrollingSize.setSummary(String.format(getResources().getString(R.string.scroll_size_summary2), scrollingSizeValue));
    scrollingSize.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            int scrollingSizeValue = Util.tryParseInt(newValue.toString(), 5);
            scrollingSize.setSummary(String.format(getResources().getString(R.string.scroll_size_summary2), scrollingSizeValue));
            return true;
        }
    });
    // Reset the stored zoom level for all pages (null = all).
    final Preference resetZoom = findPreference(Constants.PREF_RESET_ZOOM);
    resetZoom.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            try {
                NovelsDao.getInstance().resetZoomLevel(null);
            } catch (Exception e) {
                Log.e(TAG, "Failed when resetting zoom level", e);
            }
            return true;
        }
    });
    setCssPreferences();
    setTtsPreferences();
}
/**
 * Wires up the text-to-speech preferences: the engine-settings shortcut,
 * pitch, speech rate, and whitespace delay. Each summary mirrors the stored
 * value; invalid user input is rejected by returning false from the listener.
 */
@SuppressWarnings("deprecation")
private void setTtsPreferences() {
    final Preference ttsEngine = findPreference(Constants.PREF_TTS_ENGINE);
    ttsEngine.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            loadTTSEngineSettings();
            return true;
        }
    });
    final Preference ttsPitch = findPreference(Constants.PREF_TTS_PITCH);
    float ttsPitchVal = UIHelper.getFloatFromPreferences(Constants.PREF_TTS_PITCH, 1.0f);
    ttsPitch.setSummary(getResources().getString(R.string.tts_pitch_summary, ttsPitchVal));
    ttsPitch.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            try {
                float val = Float.parseFloat(newValue.toString());
                ttsPitch.setSummary(getResources().getString(R.string.tts_pitch_summary, val));
            } catch (NumberFormatException ex) {
                return false;
            }
            return true;
        }
    });
    final Preference ttsSpeechRate = findPreference(Constants.PREF_TTS_SPEECH_RATE);
    float ttsSpeechRateVal = UIHelper.getFloatFromPreferences(Constants.PREF_TTS_SPEECH_RATE, 1.0f);
    ttsSpeechRate.setSummary(getResources().getString(R.string.tts_reading_speed_summary, ttsSpeechRateVal));
    ttsSpeechRate.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            try {
                float val = Float.parseFloat(newValue.toString());
                ttsSpeechRate.setSummary(getResources().getString(R.string.tts_reading_speed_summary, val));
            } catch (NumberFormatException ex) {
                return false;
            }
            return true;
        }
    });
    final Preference ttsDelay = findPreference(Constants.PREF_TTS_DELAY);
    // FIX: the delay is an int preference and the change listener below formats
    // an int; the initial summary previously passed a float, which throws
    // IllegalFormatConversionException if the summary uses a %d conversion and
    // at minimum renders inconsistently with the listener.
    int ttsDelayVal = UIHelper.getIntFromPreferences(Constants.PREF_TTS_DELAY, 500);
    ttsDelay.setSummary(getResources().getString(R.string.tts_whitespace_delay_summary, ttsDelayVal));
    ttsDelay.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            try {
                int val = Integer.parseInt(newValue.toString());
                ttsDelay.setSummary(getResources().getString(R.string.tts_whitespace_delay_summary, val));
            } catch (NumberFormatException ex) {
                return false;
            }
            return true;
        }
    });
}
/**
 * Opens the system Text-to-Speech engine settings screen, falling back to the
 * generic Settings screen if the TTS fragment cannot be launched on this
 * device/ROM.
 */
private void loadTTSEngineSettings() {
    try {
        Intent intent = new Intent(Settings.ACTION_SETTINGS);
        intent.putExtra(EXTRA_SHOW_FRAGMENT, "com.android.settings.tts.TextToSpeechSettings");
        // NOTE(review): this passes the intent's own extras (which at this point
        // only hold EXTRA_SHOW_FRAGMENT) as the fragment arguments — confirm
        // this is intended and not a mix-up with a dedicated args Bundle.
        intent.putExtra(EXTRA_SHOW_FRAGMENT_ARGUMENTS, intent.getExtras());
        startActivityForResult(intent, 0);
    } catch (Exception ex) {
        startActivityForResult(new Intent(Settings.ACTION_SETTINGS), 0);
    }
}
/**
 * Wires up every storage-related preference entry: database clear/backup/restore,
 * backup location, auto-backup scheduling, image-cache clearing, image save
 * location, and thumbnail backup/restore/relink. Destructive actions are gated
 * behind yes/no confirmation dialogs.
 * Uses the deprecated PreferenceActivity findPreference API, hence the suppression.
 */
@SuppressWarnings("deprecation")
private void storagePreferences() {
// Captured for use inside anonymous listeners, where 'this' would refer to the listener.
final DisplaySettingsActivity dsa = this;
// Clear DB
Preference clearDatabase = findPreference("clear_database");
clearDatabase.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference p) {
clearDB();
return true;
}
});
// DB Location
Preference defaultDbLocation = findPreference("db_location");
defaultDbLocation.setSummary(String.format(getResources().getString(R.string.novel_database_to), DBHelper.getDbPath(this)));
defaultDbLocation.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
checkDB();
// NOTE(review): returns false, so the click is not reported as handled — confirm intentional.
return false;
}
});
// Restore DB
Preference restoreDatabase = findPreference(Constants.PREF_RESTORE_DB);
restoreDatabase.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference p) {
// Quick fix, please revise as seen fit.
// Confirm task execution, useful during unintentional clicks.
UIHelper.createYesNoDialog(
dsa
, getResources().getString(R.string.restore_db_question)
, getResources().getString(R.string.restore_db_question2)
, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
showBackupsDB();
}
}
}).show();
return true;
}
});
// Backup DB
Preference backupDatabase = findPreference(Constants.PREF_BACKUP_DB);
backupDatabase.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference p) {
// Quick fix, please revise as seen fit.
// Confirm task execution, useful during unintentional clicks.
UIHelper.createYesNoDialog(
dsa
, getResources().getString(R.string.backup_db_question)
, getResources().getString(R.string.backup_db_question2)
, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
backupDB();
}
}
}).show();
return true;
}
});
// DB Backup Location: summary tracks the current value; new values are validated
// (and the directory created) by checkBackupStoragePath before being accepted.
final EditTextPreference backupLocation = (EditTextPreference) findPreference(Constants.PREF_BACKUP_LOCATION);
backupLocation.setText(UIHelper.getBackupRoot(this));
backupLocation.setSummary(getResources().getString(R.string.pref_db_backup_location_summary, UIHelper.getBackupRoot(this)));
backupLocation.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String newPath = (String) newValue;
boolean result = checkBackupStoragePath(newPath);
if (result)
backupLocation.setSummary(getResources().getString(R.string.pref_db_backup_location_summary, newPath));
// Returning false rejects the new value, so invalid paths are never persisted.
return result;
}
});
// Auto Backup DB: summary shows the last auto-backup timestamp; toggling on
// starts the backup service, toggling off removes the schedule.
Preference autoBackup = findPreference(Constants.PREF_AUTO_BACKUP_ENABLED);
autoBackup.setSummary(getResources().getString(R.string.pref_db_auto_backup_summary, new Date(autoBackup.getSharedPreferences().getLong(Constants.PREF_LAST_AUTO_BACKUP_TIME, 0))));
autoBackup.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
if ((Boolean) newValue) {
runAutoBackupService();
} else {
AutoBackupScheduleReceiver.removeSchedule(getApplicationContext());
}
return true;
}
});
// Auto Backup DB retention count
final Preference autoBackupCount = findPreference(Constants.PREF_AUTO_BACKUP_COUNT);
autoBackupCount.setSummary(getResources().getString(R.string.pref_db_auto_backup_count_summary, UIHelper.getIntFromPreferences(Constants.PREF_AUTO_BACKUP_COUNT, 0)));
autoBackupCount.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
autoBackupCount.setSummary(getResources().getString(R.string.pref_db_auto_backup_count_summary, (int) newValue));
return true;
}
});
// Clear Image
Preference clearImages = findPreference("clear_image_cache");
clearImages.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference p) {
clearImages();
return true;
}
});
// Image Location: click opens a folder picker (request code 102, handled in
// onActivityResult); the change listener validates and echoes the new path.
final Preference defaultSaveLocation = (Preference) findPreference("save_location");
// defaultSaveLocation.setText(UIHelper.getImageRoot(this));
defaultSaveLocation.setSummary(String.format(getResources().getString(R.string.download_image_to), UIHelper.getImageRoot(this)));
defaultSaveLocation.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
selectFolder(102);
return true;
}
});
defaultSaveLocation.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String newPath = (String) newValue;
boolean result = checkImageStoragePath(newPath);
if (result)
defaultSaveLocation.setSummary(String.format(getResources().getString(R.string.download_image_to), newPath));
Log.d(TAG, "Path Writeable? " + result + " ==> " + newPath);
return result;
}
});
// Backup Thumbs
Preference backupThumbs = findPreference(Constants.PREF_BACKUP_THUMB_IMAGES);
backupThumbs.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
// Quick fix, please revise as seen fit.
// Confirm task execution, useful during unintentional clicks.
UIHelper.createYesNoDialog(
dsa
, getResources().getString(R.string.backup_zip_question)
, getResources().getString(R.string.backup_zip_question2)
, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
backupThumbs();
}
}
}).show();
return true;
}
});
// Restore Thumbs
Preference restoreThumbs = findPreference(Constants.PREF_RESTORE_THUMB_IMAGES);
restoreThumbs.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
// Quick fix, please revise as seen fit.
// Confirm task execution, useful during unintentional clicks.
UIHelper.createYesNoDialog(
dsa
, getResources().getString(R.string.restore_zip_question)
, getResources().getString(R.string.restore_zip_question2)
, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
restoreThumbs();
}
}
}).show();
return true;
}
});
// relink thumbs
Preference relinkThumbs = findPreference(Constants.PREF_RELINK_THUMB_IMAGES);
relinkThumbs.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
// Quick fix, please revise as seen fit.
// Confirm task execution, useful during unintentional clicks.
UIHelper.createYesNoDialog(
dsa
, getResources().getString(R.string.relink_question, UIHelper.getImageRoot(LNReaderApplication.getInstance()))
, getResources().getString(R.string.relink_question2)
, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
relinkThumbs();
}
}
}).show();
return true;
}
});
}
/**
 * Validates the DB-backup directory path, creating the directory when missing.
 * <p>
 * Falls back to the current backup root when {@code newPath} is null/empty.
 *
 * @param newPath candidate backup directory path (may be null or empty)
 * @return true when the directory exists or was created; false otherwise
 *         (a toast is shown on failure)
 */
private boolean checkBackupStoragePath(String newPath) {
    if (Util.isStringNullOrEmpty(newPath)) {
        newPath = UIHelper.getBackupRoot(this);
    }
    File dir = new File(newPath);
    if (dir.exists()) {
        return true;
    }
    Log.e(TAG, String.format("Directory %s not exists, trying to create dir.", newPath));
    if (dir.mkdirs()) {
        Log.i(TAG, String.format("Directory %s created.", newPath));
        return true;
    }
    // Format the failure message once and reuse it for both the log and the toast
    // (the original formatted the identical string twice).
    String message = String.format("Directory %s cannot be created.", newPath);
    Log.e(TAG, message);
    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
    return false;
}
/** Delegates to the application singleton to start the auto-backup service. */
private void runAutoBackupService() {
LNReaderApplication.getInstance().runAutoBackupService(this);
}
/** Runs the DAO's database integrity check and shows its result message as a toast. */
private void checkDB() {
String result = NovelsDao.getInstance().checkDB();
Toast.makeText(this, result, Toast.LENGTH_SHORT).show();
}
/**
 * Starts (or re-attaches to) the background task that relinks thumbnail image
 * paths under the current image root. Bails out with a toast when a relink is
 * already running.
 */
@SuppressLint({"InlinedApi", "NewApi"})
private void relinkThumbs() {
if (RelinkImagesTask.getInstance() != null && RelinkImagesTask.getInstance().getStatus() == AsyncTask.Status.RUNNING) {
Toast.makeText(this, "Please wait until relink process completed.", Toast.LENGTH_SHORT).show();
return;
}
String rootPath = UIHelper.getImageRoot(this);
relinkTask = RelinkImagesTask.getInstance(rootPath, this, Constants.PREF_RELINK_THUMB_IMAGES);
// setupTaskList either starts this task or swaps in an already-registered instance.
String key = RelinkImagesTask.class.toString() + ":RelinkImage";
relinkTask = setupTaskList(relinkTask, key);
relinkTask.setCallback(this, Constants.PREF_RELINK_THUMB_IMAGES);
}
/**
 * Restores thumbnail images (or all images, per the process-all preference)
 * from a zip archive in external storage into the image root.
 */
@SuppressLint({"InlinedApi", "NewApi"})
private void restoreThumbs() {
// NOTE(review): this guards on the backup (ZipFilesTask) task only; a running
// UnZipFilesTask is not checked here — confirm whether that is intentional.
if (ZipFilesTask.getInstance() != null && ZipFilesTask.getInstance().getStatus() == AsyncTask.Status.RUNNING) {
Toast.makeText(this, "Please wait until all images are backed-up.", Toast.LENGTH_SHORT).show();
return;
}
String zipName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Backup_thumbs.zip";
String thumbRootPath = UIHelper.getImageRoot(this) + "/project/images/thumb";
if (getProcessAllImagesPreferences()) {
zipName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Backup_all_images.zip";
thumbRootPath = UIHelper.getImageRoot(this) + "/project/images";
}
unzipTask = UnZipFilesTask.getInstance(zipName, thumbRootPath, this, Constants.PREF_RESTORE_THUMB_IMAGES);
// NOTE(review): the key embeds the task object's default toString(), so it is not
// stable across instances — compare with the fixed-suffix keys used elsewhere.
String key = UnZipFilesTask.class.toString() + ":" + unzipTask;
unzipTask = setupTaskList(unzipTask, key);
unzipTask.setCallback(this, Constants.PREF_RESTORE_THUMB_IMAGES);
}
/**
 * Lists the available DB backup files in a chooser dialog; picking one
 * triggers a restore from that file.
 */
private void showBackupsDB() {
ArrayList<File> backups = AutoBackupService.getBackupFiles(this);
final FileListAdapter adapter = new FileListAdapter(this, R.layout.item_file, backups);
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Choose Backup File");
builder.setAdapter(adapter, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
File f = adapter.getItem(which);
restoreDB(f.getAbsolutePath());
}
});
builder.create().show();
}
/**
 * Restores the database from the given backup file via a CopyDBTask
 * (first ctor arg false = restore direction).
 *
 * @param filename absolute path of the backup file to restore from
 */
@SuppressLint("NewApi")
private void restoreDB(String filename) {
copyDbTask = new CopyDBTask(false, this, Constants.PREF_RESTORE_DB, filename);
// NOTE(review): the ":BackupDB" label looks inverted for a restore, but it matches
// the lookup key used for copyDbTask in onResume — keep the two in sync if renamed.
String key = CopyDBTask.class.toString() + ":BackupDB";
copyDbTask = setupTaskList(copyDbTask, key);
copyDbTask.setCallbackNotifier(this);
}
/**
 * Backs up the database via a CopyDBTask (first ctor arg true = backup direction).
 */
@SuppressLint("NewApi")
private void backupDB() {
restoreDbTask = new CopyDBTask(true, this, Constants.PREF_BACKUP_DB, null);
// NOTE(review): the ":RestoreDB" label looks inverted for a backup, but it matches
// the lookup key used for restoreDbTask in onResume — keep the two in sync if renamed.
String key = CopyDBTask.class.toString() + ":RestoreDB";
restoreDbTask = setupTaskList(restoreDbTask, key);
restoreDbTask.setCallbackNotifier(this);
}
/**
 * Zips thumbnail images (or all images, per the process-all preference)
 * into an archive in external storage.
 */
@SuppressLint({"InlinedApi", "NewApi"})
private void backupThumbs() {
if (ZipFilesTask.getInstance() != null && ZipFilesTask.getInstance().getStatus() == AsyncTask.Status.RUNNING) {
// NOTE(review): this toast says "restored" while restoreThumbs() says "backed-up";
// the two messages appear swapped — confirm against the string resources.
Toast.makeText(this, "Please wait until all images are restored.", Toast.LENGTH_SHORT).show();
return;
}
String zipName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Backup_thumbs.zip";
String thumbRootPath = UIHelper.getImageRoot(this) + "/project/images/thumb";
if (getProcessAllImagesPreferences()) {
zipName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Backup_all_images.zip";
thumbRootPath = UIHelper.getImageRoot(this) + "/project/images";
}
zipTask = ZipFilesTask.getInstance(zipName, thumbRootPath, this, Constants.PREF_BACKUP_THUMB_IMAGES);
// NOTE(review): key embeds the task object's default toString() — not a stable key.
String key = ZipFilesTask.class.toString() + ":" + zipTask;
zipTask = setupTaskList(zipTask, key);
zipTask.setCallback(this, Constants.PREF_BACKUP_THUMB_IMAGES);
}
/**
 * Deletes the external web-archive temp directory ({@code <imageRoot>/wac/temp})
 * via a background DeleteFilesTask registered under a stable key.
 */
private void deleteExternalTemp() {
String filename = UIHelper.getImageRoot(LNReaderApplication.getInstance().getApplicationContext()) + "/wac/temp";
deleteTask = new DeleteFilesTask(this, filename, Constants.PREF_CLEAR_EXTERNAL_TEMP);
String key = DeleteFilesTask.class.toString() + ":DeleteExternalTemp";
deleteTask = setupTaskList(deleteTask, key);
deleteTask.owner = this;
}
/**
 * Registers a background task under the given key and starts it, or — when a
 * task with that key is already registered — returns the existing instance so
 * the caller can re-attach to it instead of launching a duplicate.
 *
 * @param task freshly constructed task to register and run
 * @param key  application-wide registration key for the task
 * @return the running task: either {@code task} (now started) or the
 *         previously registered instance
 */
@SuppressWarnings("unchecked")
@SuppressLint("NewApi")
private <T extends AsyncTask<Void, ICallbackEventData, ?>> T setupTaskList(T task, String key) {
    if (!LNReaderApplication.getInstance().addTask(key, task)) {
        // Key already taken: hand back the tracked instance when available.
        T existing = (T) LNReaderApplication.getInstance().getTask(key);
        return existing != null ? existing : task;
    }
    // Newly registered: kick it off (parallel executor on Honeycomb and later).
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } else {
        task.execute();
    }
    return task;
}
/**
 * Builds the current on/off state for each alternative language from shared
 * preferences and wires the selection dialog to the preference entry.
 *
 * @freedomofkeima
 */
@SuppressWarnings("deprecation")
private void setAlternateLanguageList() {
Preference selectAlternativeLanguage = findPreference("select_alternative_language");
/* List of languages */
final boolean[] languageStatus = new boolean[AlternativeLanguageInfo.getAlternativeLanguageInfo().size()];
Iterator<Map.Entry<String, AlternativeLanguageInfo>> it = AlternativeLanguageInfo.getAlternativeLanguageInfo().entrySet().iterator();
int j = 0;
while (it.hasNext()) {
AlternativeLanguageInfo info = it.next().getValue();
/* Default value of unregistered Alternative language = false (preventing too much tabs) */
languageStatus[j] = PreferenceManager.getDefaultSharedPreferences(this).getBoolean(info.getLanguage(), false);
j++;
// NOTE(review): removes entries from the map returned by getAlternativeLanguageInfo()
// while iterating — safe only if that method returns a fresh copy each call; confirm.
it.remove();
}
/* End of list of languages */
selectAlternativeLanguage.setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference p) {
showLanguageSelection(languageStatus);
return true;
}
});
/* End of alternative languages list section */
}
/**
 * Shows the alternative-language multi-choice dialog pre-checked with the
 * given states; "Ok" persists the selection, "Cancel" dismisses.
 *
 * @param languageStatus current checked state per language, in iteration order
 */
private void showLanguageSelection(boolean[] languageStatus) {
final String[] languageChoice = new String[AlternativeLanguageInfo.getAlternativeLanguageInfo().size()];
Iterator<Map.Entry<String, AlternativeLanguageInfo>> it = AlternativeLanguageInfo.getAlternativeLanguageInfo().entrySet().iterator();
int j = 0;
while (it.hasNext()) {
AlternativeLanguageInfo info = it.next().getValue();
languageChoice[j] = info.getLanguage();
j++;
// NOTE(review): mutates the shared language-info map during iteration — see
// the matching note in setAlternateLanguageList().
it.remove();
}
/* Show checkBox to screen */
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getResources().getString(R.string.alternative_language_title));
builder.setMultiChoiceItems(languageChoice, languageStatus, new DialogInterface.OnMultiChoiceClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int item, boolean state) {
}
});
// NOTE(review): "Ok" is wired to the negative button and Cancel to the positive
// one (affects button placement only) — confirm this ordering is intentional.
builder.setNegativeButton("Ok", new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
setLanguageSelectionOKDialog(dialog);
}
});
builder.setPositiveButton(getResources().getString(R.string.cancel), new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
builder.create().show();
}
/**
 * Persists the alternative-language choices made in the multi-choice dialog
 * to shared preferences, then rebuilds the UI so the change takes effect.
 *
 * @param dialog the multi-choice AlertDialog whose checked items are read
 */
private void setLanguageSelectionOKDialog(DialogInterface dialog) {
// Renamed local from 'Checked' to follow lowerCamelCase naming.
SparseBooleanArray checked = ((AlertDialog) dialog).getListView().getCheckedItemPositions();
/* Save all choices to Shared Preferences */
Iterator<Map.Entry<String, AlternativeLanguageInfo>> it = AlternativeLanguageInfo.getAlternativeLanguageInfo().entrySet().iterator();
int j = 0;
while (it.hasNext()) {
AlternativeLanguageInfo info = it.next().getValue();
UIHelper.setAlternativeLanguagePreferences(this, info.getLanguage(), checked.get(j));
j++;
// NOTE(review): mutates the shared language-info map during iteration — safe only
// if getAlternativeLanguageInfo() returns a fresh copy each call; confirm.
it.remove();
}
recreateUI();
}
/**
 * Sets up the application-language preference: shows the currently selected
 * language in the summary (falling back to English for unknown locale codes)
 * and restarts the app when the selection changes.
 *
 * @freedomofkeima
 */
@SuppressWarnings("deprecation")
private void setApplicationLanguage() {
final Preference changeLanguages = findPreference(Constants.PREF_LANGUAGE);
final String[] languageSelectionArray = getResources().getStringArray(R.array.languageSelection);
final String[] localeArray = getResources().getStringArray(R.array.languageSelectionValues);
String languageSelectionValue = PreferenceManager.getDefaultSharedPreferences(this).getString(Constants.PREF_LANGUAGE, "en");
// construct the hash map with locale as the key and language as the
// value
final HashMap<String, String> langDict = new HashMap<String, String>();
for (int i = 0; i < languageSelectionArray.length; i++) {
langDict.put(localeArray[i], languageSelectionArray[i]);
}
// check if key exist, else fall back to en
if (langDict.containsKey(languageSelectionValue)) {
changeLanguages.setSummary(String.format(getResources().getString(R.string.selected_language), langDict.get(languageSelectionValue)));
} else {
changeLanguages.setSummary(String.format(getResources().getString(R.string.selected_language), langDict.get("en")));
}
changeLanguages.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
// Restarts the application, so returning true to persist happens just before restart.
handleLanguageChange(newValue);
return true;
}
});
/* End of language section */
}
/**
 * Applies the newly selected locale and restarts the application so all
 * activities pick up the new language.
 *
 * @param newValue the new locale code from the preference change (toString'd)
 */
private void handleLanguageChange(Object newValue) {
String newLocale = newValue.toString();
UIHelper.setLanguage(this, newLocale);
LNReaderApplication.getInstance().restartApplication();
}
/**
 * CSS Layout Behaviours
 * 1. When user's css sheet is used, disable the force justify, linespace and margin preferences
 * 2. When about to use user's css sheet, display a warning/message (NOT IMPLEMENTED)
 * 3. When linespace/margin is changed, update the summary text to reflect current value
 * <p>
 * Also wires the five CSS color preferences (background, foreground, link,
 * table border, table background) with summary + icon updates.
 * (Javadoc moved above the annotations so the javadoc tool attaches it.)
 */
@SuppressWarnings("deprecation")
@SuppressLint("SdCardPath")
private void setCssPreferences() {
final Preference user_cssPref = findPreference(Constants.PREF_USE_CUSTOM_CSS);
final Preference lineSpacePref = findPreference(Constants.PREF_LINESPACING);
final Preference justifyPref = findPreference(Constants.PREF_FORCE_JUSTIFIED);
final Preference customCssPathPref = findPreference(Constants.PREF_CUSTOM_CSS_PATH);
final Preference marginPref = findPreference(Constants.PREF_MARGINS);
final Preference headingFontPref = findPreference(Constants.PREF_HEADING_FONT);
final Preference contentFontPref = findPreference(Constants.PREF_CONTENT_FONT);
// Retrieve inital values stored
Boolean currUserCSS = getPreferenceScreen().getSharedPreferences().getBoolean(Constants.PREF_USE_CUSTOM_CSS, false);
String currLineSpacing = getPreferenceScreen().getSharedPreferences().getString(Constants.PREF_LINESPACING, "150");
String currMargin = getPreferenceScreen().getSharedPreferences().getString(Constants.PREF_MARGINS, "5");
String currHeadingFont = getPreferenceScreen().getSharedPreferences().getString(Constants.PREF_HEADING_FONT, "serif");
String currContentFont = getPreferenceScreen().getSharedPreferences().getString(Constants.PREF_CONTENT_FONT, "sans-serif");
// Behaviour 1 (Activity first loaded): custom-CSS toggle drives which entries are enabled.
marginPref.setEnabled(!currUserCSS);
lineSpacePref.setEnabled(!currUserCSS);
justifyPref.setEnabled(!currUserCSS);
customCssPathPref.setEnabled(currUserCSS);
headingFontPref.setEnabled(!currUserCSS);
contentFontPref.setEnabled(!currUserCSS);
// Behaviour 3 (Activity first loaded): show current values in summaries.
lineSpacePref.setSummary(getResources().getString(R.string.line_spacing_summary2) + " \n" + getResources().getString(R.string.current_value) + ": " + currLineSpacing + "%");
marginPref.setSummary(getResources().getString(R.string.margin_summary2) + " \n" + getResources().getString(R.string.current_value) + ": " + currMargin + "%");
headingFontPref.setSummary(getResources().getString(R.string.pref_css_heading_fontface_summary) + currHeadingFont);
contentFontPref.setSummary(getResources().getString(R.string.pref_css_content_fontface_summary) + currContentFont);
// Behaviour 1 (Updated Preference)
user_cssPref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
Boolean set = (Boolean) newValue;
marginPref.setEnabled(!set);
lineSpacePref.setEnabled(!set);
justifyPref.setEnabled(!set);
customCssPathPref.setEnabled(set);
headingFontPref.setEnabled(!set);
contentFontPref.setEnabled(!set);
return true;
}
});
String customCssPath = customCssPathPref.getSharedPreferences().getString(Constants.PREF_CUSTOM_CSS_PATH, "/mnt/sdcard/custom.css");
customCssPathPref.setSummary("Path: " + customCssPath);
customCssPathPref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
customCssPathPref.setSummary("Path: " + newValue.toString());
return true;
}
});
// Line Spacing Preference update for Screen
lineSpacePref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String set = (String) newValue;
preference.setSummary(getResources().getString(R.string.line_spacing_summary2) + " \n" + getResources().getString(R.string.current_value) + ": " + set + "%");
return true;
}
});
marginPref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String set = (String) newValue;
preference.setSummary(getResources().getString(R.string.margin_summary2) + " \n" + getResources().getString(R.string.current_value) + ": " + set + "%");
return true;
}
});
headingFontPref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String set = (String) newValue;
preference.setSummary(getResources().getString(R.string.pref_css_heading_fontface_summary) + set);
return true;
}
});
contentFontPref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String set = (String) newValue;
preference.setSummary(getResources().getString(R.string.pref_css_content_fontface_summary) + set);
return true;
}
});
// CSS color preferences: summary shows the hex value, icon is tinted to match,
// and all share a single validating change listener (colorChangeListener).
final Preference css_backColorPref = findPreference(Constants.PREF_CSS_BACKGROUND);
final Preference css_foreColorPref = findPreference(Constants.PREF_CSS_FOREGROUND);
final Preference css_linkColorPref = findPreference(Constants.PREF_CSS_LINK_COLOR);
final Preference css_tableBorderColorPref = findPreference(Constants.PREF_CSS_TABLE_BORDER);
final Preference css_tableBackPref = findPreference(Constants.PREF_CSS_TABLE_BACKGROUND);
css_backColorPref.setSummary(UIHelper.getBackgroundColor(this));
css_backColorPref.setOnPreferenceChangeListener(colorChangeListener);
setColorIcon(css_backColorPref, UIHelper.getBackgroundColor(this));
css_foreColorPref.setSummary(UIHelper.getForegroundColor(this));
css_foreColorPref.setOnPreferenceChangeListener(colorChangeListener);
setColorIcon(css_foreColorPref, UIHelper.getForegroundColor(this));
css_linkColorPref.setSummary(UIHelper.getLinkColor(this));
css_linkColorPref.setOnPreferenceChangeListener(colorChangeListener);
setColorIcon(css_linkColorPref, UIHelper.getLinkColor(this));
css_tableBorderColorPref.setSummary(UIHelper.getThumbBorderColor(this));
css_tableBorderColorPref.setOnPreferenceChangeListener(colorChangeListener);
setColorIcon(css_tableBorderColorPref, UIHelper.getThumbBorderColor(this));
css_tableBackPref.setSummary(UIHelper.getThumbBackgroundColor(this));
css_tableBackPref.setOnPreferenceChangeListener(colorChangeListener);
setColorIcon(css_tableBackPref, UIHelper.getThumbBackgroundColor(this));
}
/**
 * Shows the given hex color on a preference entry: on Honeycomb+ tints the
 * square icon drawable; on older devices (no Preference.setIcon) colors the
 * summary text instead.
 *
 * @param colorPref preference entry to decorate
 * @param hexColor  color string parseable by Color.parseColor (e.g. "#RRGGBB")
 */
private void setColorIcon(Preference colorPref, String hexColor) {
int c = Color.parseColor(hexColor);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
Drawable d1 = getResources().getDrawable(R.drawable.ic_square);
// mutate() so tinting does not affect other users of the shared drawable.
d1.mutate().setColorFilter(c, PorterDuff.Mode.MULTIPLY);
colorPref.setIcon(d1);
} else {
Spannable summary = new SpannableString(hexColor);
summary.setSpan(new ForegroundColorSpan(c), 0, summary.length(), 0);
colorPref.setSummary(summary);
}
}
/**
 * Shared change listener for the CSS color preferences: validates the new hex
 * string by parsing it, updates summary + tinted icon on success, and rejects
 * the value (returns false) with an error toast when parsing fails.
 */
private final OnPreferenceChangeListener colorChangeListener = new OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String set = (String) newValue;
try {
// Color.parseColor throws on malformed input; that is the validation.
int c = Color.parseColor(set);
preference.setSummary(set);
Drawable d = getResources().getDrawable(R.drawable.ic_square);
d.mutate().setColorFilter(c, PorterDuff.Mode.MULTIPLY);
preference.setIcon(d);
return true;
} catch (Exception ex) {
Toast.makeText(getApplicationContext(), getString(R.string.error_invalid_color, set), Toast.LENGTH_SHORT).show();
return false;
}
}
};
/**
 * Validates the image-download directory path, creating the directory when
 * missing. Falls back to the current image root when {@code newPath} is
 * null/empty.
 *
 * @param newPath candidate image directory path (may be null or empty)
 * @return true when the directory exists or was created; false otherwise
 *         (a toast is shown on failure)
 */
protected boolean checkImageStoragePath(String newPath) {
    if (Util.isStringNullOrEmpty(newPath)) {
        newPath = UIHelper.getImageRoot(this);
    }
    File dir = new File(newPath);
    if (dir.exists()) {
        return true;
    }
    Log.e(TAG, String.format("Directory %s not exists, trying to create dir.", newPath));
    if (dir.mkdirs()) {
        Log.i(TAG, String.format("Directory %s created.", newPath));
        return true;
    }
    // Format the failure message once and reuse it for both the log and the toast
    // (the original formatted the identical string twice).
    String message = String.format("Directory %s cannot be created.", newPath);
    Log.e(TAG, message);
    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
    return false;
}
// Recreates the activity so an orientation-preference change takes effect.
// NOTE(review): despite the name, this only delegates to UIHelper.Recreate — confirm
// the orientation itself is applied inside that helper.
private void setOrientation() {
UIHelper.Recreate(this);
}
/**
 * Asks for confirmation, then recursively deletes the image cache under the
 * image root (the novel database is skipped inside DeleteRecursive).
 */
private void clearImages() {
final String imageRoot = UIHelper.getImageRoot(this);
UIHelper.createYesNoDialog(this, getResources().getString(R.string.clear_image_question), getResources().getString(R.string.clear_image_question2), new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
// NOTE(review): deletion runs on the UI thread; large caches may block the UI.
Toast.makeText(getApplicationContext(), "Clearing Images...", Toast.LENGTH_SHORT).show();
DeleteRecursive(new File(imageRoot));
Toast.makeText(getApplicationContext(), "Image cache cleared!", Toast.LENGTH_SHORT).show();
}
}
}).show();
}
/**
 * Asks for confirmation, then wipes the novel database via the DAO.
 */
private void clearDB() {
UIHelper.createYesNoDialog(this, getResources().getString(R.string.clear_db_question), getResources().getString(R.string.clear_db_question2), new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON_POSITIVE) {
Toast.makeText(getApplicationContext(), getResources().getString(R.string.clear_database), Toast.LENGTH_SHORT).show();
NovelsDao.getInstance().deleteDB();
Toast.makeText(getApplicationContext(), getResources().getString(R.string.database_cleared), Toast.LENGTH_SHORT).show();
}
}
}).show();
}
/**
 * Kicks off the update service (forced) and marks the run-updates preference
 * summary as running; progress arrives via onProgressCallback.
 */
@SuppressWarnings("deprecation")
private void runUpdate() {
LNReaderApplication.getInstance().runUpdateService(true, this);
Preference runUpdates = findPreference(Constants.PREF_RUN_UPDATES);
runUpdates.setSummary(getResources().getString(R.string.running));
}
// No restart-specific work; override kept only to document the lifecycle hook.
@Override
protected void onRestart() {
super.onRestart();
}
/**
 * Re-attaches this activity to every background task and service that may
 * still be running (update/auto-backup services, zip/unzip/relink tasks,
 * delete and DB-copy tasks), so their callbacks target the live instance
 * after a rotation or navigation away and back.
 */
@Override
protected void onResume() {
super.onResume();
// relisting all handler
LNReaderApplication.getInstance().setUpdateServiceListener(this);
LNReaderApplication.getInstance().setAutoBackupServiceListener(this);
if (ZipFilesTask.getInstance() != null) {
zipTask = ZipFilesTask.getInstance();
if (zipTask.getStatus() == AsyncTask.Status.RUNNING) {
zipTask.setCallback(this, Constants.PREF_BACKUP_THUMB_IMAGES);
}
}
if (UnZipFilesTask.getInstance() != null) {
unzipTask = UnZipFilesTask.getInstance();
if (unzipTask.getStatus() == AsyncTask.Status.RUNNING) {
unzipTask.setCallback(this, Constants.PREF_RESTORE_THUMB_IMAGES);
}
}
if (RelinkImagesTask.getInstance() != null) {
relinkTask = RelinkImagesTask.getInstance();
if (relinkTask.getStatus() == AsyncTask.Status.RUNNING) {
relinkTask.setCallback(this, Constants.PREF_RELINK_THUMB_IMAGES);
}
}
// Keys below must stay in sync with the ones used when the tasks were registered.
String key = DeleteFilesTask.class.toString() + ":DeleteExternalTemp";
deleteTask = (DeleteFilesTask) LNReaderApplication.getInstance().getTask(key);
if (deleteTask != null)
deleteTask.owner = this;
key = CopyDBTask.class.toString() + ":BackupDB";
copyDbTask = (CopyDBTask) LNReaderApplication.getInstance().getTask(key);
if (copyDbTask != null)
copyDbTask.setCallbackNotifier(this);
key = CopyDBTask.class.toString() + ":RestoreDB";
restoreDbTask = (CopyDBTask) LNReaderApplication.getInstance().getTask(key);
if (restoreDbTask != null)
restoreDbTask.setCallbackNotifier(this);
UIHelper.CheckScreenRotation(this);
UIHelper.CheckKeepAwake(this);
}
// Detach service listeners so the background services do not call back into
// a stopped activity.
@Override
protected void onStop() {
super.onStop();
LNReaderApplication.getInstance().setUpdateServiceListener(null);
LNReaderApplication.getInstance().setAutoBackupServiceListener(null);
}
/**
 * Recursively deletes a file or directory tree, skipping the novel database.
 * <p>
 * Fixes from review:
 * <ul>
 * <li>the DB-path guard compared Strings with {@code ==} (reference identity),
 *     which is never true for distinct String objects — now uses equals();</li>
 * <li>plain files were never deleted: {@code listFiles()} returns null for
 *     non-directories and the method returned early before {@code delete()},
 *     which also left parent directories non-empty and undeletable. Children
 *     are now deleted first, then the node itself.</li>
 * </ul>
 * (Name kept as-is — it is called elsewhere in this file.)
 *
 * @param fileOrDirectory root of the tree to delete
 */
private void DeleteRecursive(File fileOrDirectory) {
    // Skip Database (exact-path guard plus a hard-coded safety net for pages.db).
    if (fileOrDirectory.getAbsolutePath().equals(DBHelper.getDbPath(this)))
        return;
    if (fileOrDirectory.getAbsolutePath().contains("databases/pages.db")) {
        Log.d(TAG, "Skippin DB!");
        return;
    }
    if (fileOrDirectory.isDirectory()) {
        Log.d(TAG, "Deleting Dir: " + fileOrDirectory.getAbsolutePath());
        File[] fileList = fileOrDirectory.listFiles();
        // listFiles() can return null on I/O error even for directories.
        if (fileList != null) {
            for (File child : fileList)
                DeleteRecursive(child);
        }
    }
    boolean result = fileOrDirectory.delete();
    if (!result)
        Log.e(TAG, "Failed to delete: " + fileOrDirectory.getAbsolutePath());
}
/**
 * Routes task/service progress into a preference summary: messages with no
 * source go to the run-updates entry; otherwise the source string is treated
 * as a preference key.
 *
 * @param message progress event carrying an optional source key and text
 */
@Override
@SuppressWarnings("deprecation")
public void onProgressCallback(ICallbackEventData message) {
// default goes to update
if (Util.isStringNullOrEmpty(message.getSource())) {
Preference runUpdates = findPreference(Constants.PREF_RUN_UPDATES);
runUpdates.setSummary("Status: " + message.getMessage());
} else {
Preference pref = findPreference(message.getSource());
if (pref != null)
pref.setSummary("Status: " + message.getMessage());
}
}
// Completion is surfaced the same way as progress; the task result itself is unused here.
@Override
public void onCompleteCallback(ICallbackEventData message, AsyncTaskResult<?> result) {
onProgressCallback(message);
}
/**
 * Rebuilds this activity by finishing and relaunching it with the same intent,
 * then re-applies rotation/keep-awake settings.
 */
private void recreateUI() {
// UIHelper.Recreate(this);
finish();
startActivity(getIntent());
UIHelper.CheckScreenRotation(this);
UIHelper.CheckKeepAwake(this);
}
// Whether thumbnail backup/restore should cover all images, not just thumbs (default false).
private boolean getProcessAllImagesPreferences() {
return PreferenceManager.getDefaultSharedPreferences(this).getBoolean(Constants.PREF_PROCESS_ALL_IMAGES, false);
}
/**
 * Treats the action-bar home/up affordance as a back press; all other menu
 * items fall through to the default handling.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == android.R.id.home) {
        super.onBackPressed();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
// Required by the download-callback interface; this screen has no download list,
// so the event is ignored (stub left as-is).
@Override
public boolean downloadListSetup(String taskId, String message, int setupType, boolean hasError) {
// TODO Auto-generated method stub
return false;
}
// No start-specific work; override kept only to document the lifecycle hook.
@Override
public void onStart() {
super.onStart();
}
/**
 * Launches the Storage Access Framework directory picker; the chosen tree
 * arrives in onActivityResult under the given request code.
 *
 * @param code request code to match in onActivityResult (102 = image save location)
 */
public void selectFolder(int code) {
Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT_TREE);
startActivityForResult(intent, code);
}
/**
 * Handles the folder-picker result for the image save location (request code
 * 102): resolves the picked document tree to a filesystem path and pushes it
 * through the save-location preference's change listener so validation and
 * summary update run in one place.
 * <p>
 * Fixes from review: calls through to super (was missing) and guards against
 * a null intent/uri/listener, which previously could NPE on a cancelled or
 * malformed result.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK || requestCode != 102 || data == null)
        return;
    Uri uri = data.getData();
    if (uri == null)
        return;
    Uri docUri = DocumentsContract.buildDocumentUriUsingTree(uri, DocumentsContract.getTreeDocumentId(uri));
    final Preference defaultSaveLocation = findPreference("save_location");
    String path = getPath(this, docUri);
    Log.d(TAG, "path = " + path);
    if (defaultSaveLocation != null && defaultSaveLocation.getOnPreferenceChangeListener() != null) {
        defaultSaveLocation.getOnPreferenceChangeListener().onPreferenceChange(defaultSaveLocation, path);
    }
}
// region https://gist.github.com/asifmujteba/d89ba9074bc941de1eaa#file-asfurihelper
/**
 * Best-effort translation of a content/document/file Uri into a filesystem
 * path, handling the well-known document providers (external storage,
 * downloads, media) plus generic content and file Uris.
 *
 * @param context used for content-resolver queries
 * @param uri     the Uri to resolve
 * @return the resolved filesystem path, or null when it cannot be determined
 */
@TargetApi(Build.VERSION_CODES.KITKAT)
public static String getPath(final Context context, final Uri uri) {
final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
// DocumentProvider
if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
// ExternalStorageProvider: docId is "<volume>:<relative path>"
if (isExternalStorageDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
if ("primary".equalsIgnoreCase(type)) {
return Environment.getExternalStorageDirectory() + "/" + split[1];
}
// TODO handle non-primary volumes
}
// DownloadsProvider
else if (isDownloadsDocument(uri)) {
final String id = DocumentsContract.getDocumentId(uri);
// NOTE(review): on some Android versions the downloads docId is prefixed
// with "raw:", which would make Long.valueOf throw — confirm and handle.
final Uri contentUri = ContentUris.withAppendedId(
Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
return getDataColumn(context, contentUri, null, null);
}
// MediaProvider: docId is "<media type>:<row id>"
else if (isMediaDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
Uri contentUri = null;
if ("image".equals(type)) {
contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
} else if ("video".equals(type)) {
contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
} else if ("audio".equals(type)) {
contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
}
final String selection = "_id=?";
final String[] selectionArgs = new String[] {
split[1]
};
return getDataColumn(context, contentUri, selection, selectionArgs);
}
}
// MediaStore (and general)
else if ("content".equalsIgnoreCase(uri.getScheme())) {
// Return the remote address
if (isGooglePhotosUri(uri))
return uri.getLastPathSegment();
return getDataColumn(context, uri, null, null);
}
// File
else if ("file".equalsIgnoreCase(uri.getScheme())) {
return uri.getPath();
}
return null;
}
/**
 * Queries a content Uri for its "_data" column (the backing file path).
 *
 * @param context       used to obtain the content resolver
 * @param uri           content Uri to query
 * @param selection     optional WHERE clause
 * @param selectionArgs optional WHERE arguments
 * @return the _data value for the first row, or null when absent
 */
public static String getDataColumn(Context context, Uri uri, String selection,
String[] selectionArgs) {
Cursor cursor = null;
final String column = "_data";
final String[] projection = {
column
};
try {
cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
null);
if (cursor != null && cursor.moveToFirst()) {
final int index = cursor.getColumnIndexOrThrow(column);
return cursor.getString(index);
}
} finally {
// Cursor closed on every path, including exceptions from the query.
if (cursor != null)
cursor.close();
}
return null;
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri authority is ExternalStorageProvider.
 */
public static boolean isExternalStorageDocument(Uri uri) {
    final String authority = uri.getAuthority();
    return "com.android.externalstorage.documents".equals(authority);
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri authority is DownloadsProvider.
 */
public static boolean isDownloadsDocument(Uri uri) {
    final String authority = uri.getAuthority();
    return "com.android.providers.downloads.documents".equals(authority);
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri authority is MediaProvider.
 */
public static boolean isMediaDocument(Uri uri) {
    final String authority = uri.getAuthority();
    return "com.android.providers.media.documents".equals(authority);
}
/**
 * @param uri The Uri to check.
 * @return Whether the Uri's authority belongs to the Google Photos content provider.
 */
public static boolean isGooglePhotosUri(Uri uri) {
    final String authority = uri.getAuthority();
    return "com.google.android.apps.photos.content".equals(authority);
}
// endregion
}
| apache-2.0 |
VISTALL/apache.velocity-engine | velocity-engine-core/src/main/java/org/apache/velocity/runtime/directive/contrib/For.java | 3590 | package org.apache.velocity.runtime.directive.contrib;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import org.apache.velocity.context.InternalContextAdapter;
import org.apache.velocity.exception.TemplateInitException;
import org.apache.velocity.runtime.RuntimeServices;
import org.apache.velocity.runtime.directive.Foreach;
import org.apache.velocity.runtime.directive.MacroParseException;
import org.apache.velocity.runtime.parser.ParseException;
import org.apache.velocity.runtime.parser.ParserTreeConstants;
import org.apache.velocity.runtime.parser.Token;
import org.apache.velocity.runtime.parser.node.ASTReference;
import org.apache.velocity.runtime.parser.node.Node;
/**
* The #for directive provides the behavior of the #foreach directive but also
* provides an 'index' keyword that allows the user to define an optional index variable
* that tracks the loop iterations. e.g.; #for($user in $users index $i).
* As $user iterates through $users the index reference $i will be equal to
* 0, 1, 2, etc..
* @see org.apache.velocity.runtime.directive.Foreach
*/
public class For extends Foreach
{
    /** Name of the optional index reference, e.g. "i" for "index $i"; null when not declared. */
    protected String counterName;

    /** Starting value of the index; always 0 when an index variable is used. */
    protected int counterInitialValue;

    @Override
    public String getName()
    {
        return "for";
    }

    @Override
    public int getType()
    {
        return BLOCK;
    }

    /**
     * Initializes the directive, capturing the optional index variable name.
     * The parse tree for "#for($a in $b index $c)" places the index reference
     * at child position 4, so more than 4 children means an index was declared.
     */
    @Override
    public void init(RuntimeServices rs, InternalContextAdapter context, Node node)
        throws TemplateInitException
    {
        super.init(rs, context, node);

        if (node.jjtGetNumChildren() > 4)
        {
            // The index variable reference is at position 4.
            counterName = ((ASTReference) node.jjtGetChild(4)).getRootString();

            // The count value always starts at 0 when using an index.
            counterInitialValue = 0;
        }
    }

    /**
     * Renders the loop. Saves any pre-existing value bound to the index name,
     * seeds the index, delegates to #foreach, and restores or removes the
     * binding afterwards so the surrounding context is left untouched.
     */
    @Override
    public boolean render(InternalContextAdapter context, Writer writer, Node node)
        throws IOException
    {
        if (counterName == null)
        {
            // No index variable was declared, so behave exactly like #foreach
            // and avoid touching the context with a null key.
            return super.render(context, writer, node);
        }

        // Remember any pre-existing value so it can be restored afterwards.
        Object saved = context.get(counterName);
        context.put(counterName, counterInitialValue);
        try
        {
            return super.render(context, writer, node);
        }
        finally
        {
            if (saved != null)
            {
                context.put(counterName, saved);
            }
            else
            {
                context.remove(counterName);
            }
        }
    }

    /**
     * Renders one iteration of the loop body, incrementing the index variable
     * (when one was declared) before delegating to the #foreach block renderer.
     */
    @Override
    protected void renderBlock(InternalContextAdapter context, Writer writer, Node node)
        throws IOException
    {
        if (counterName != null)
        {
            Object count = context.get(counterName);
            if (count instanceof Number)
            {
                context.put(counterName, ((Number) count).intValue() + 1);
            }
        }
        super.renderBlock(context, writer, node);
    }

    /**
     * We do not allow a word token in any other arg position except for the 2nd
     * since we are looking for the pattern #foreach($foo in $bar).
     */
    @Override
    public void checkArgs(ArrayList<Integer> argtypes, Token t,
                          String templateName) throws ParseException
    {
        super.checkArgs(argtypes, t, templateName);

        // If #foreach is defining an index variable make sure it has the
        // 'index $var' combo: a word at position 3 followed by a reference.
        if (argtypes.size() > 3)
        {
            if (argtypes.get(3) != ParserTreeConstants.JJTWORD)
            {
                throw new MacroParseException(
                    "Expected word 'index' at argument position 4 in #foreach",
                    templateName, t);
            }
            else if (argtypes.size() == 4
                || argtypes.get(4) != ParserTreeConstants.JJTREFERENCE)
            {
                throw new MacroParseException(
                    "Expected a reference after 'index' in #foreach", templateName, t);
            }
        }
    }
}
| apache-2.0 |
pablogrisafi1975/pinframework | src/main/java/com/pinframework/render/PinRenderPassing.java | 521 | package com.pinframework.render;
import java.io.BufferedWriter;
import java.io.IOException;
import com.pinframework.PinRenderType;
public class PinRenderPassing extends PinAbstractRender {

    /** Identifies this renderer as the pass-through type. */
    @Override
    public String getType() {
        return PinRenderType.PASSING;
    }

    /** Pass-through rendering leaves the response content type unchanged. */
    @Override
    protected String getNewContentType() {
        return null;
    }

    /** Writes the object's own string form without any transformation. */
    @Override
    protected void writeNonNullObject(BufferedWriter writer, Object obj) throws IOException {
        final String text = obj.toString();
        writer.write(text);
    }
}
| apache-2.0 |
ckclark/leetcode | java/leetcode/serialize_and_deserialize_binary_tree/Codec.java | 1914 | package leetcode.serialize_and_deserialize_binary_tree;
import common.TreeNode;
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
public class Codec {

    /** Pairs a decoded subtree with the index just past its serialized form. */
    public static class MyPair {
        public TreeNode mTreeNode;
        public int mNextStart;

        public MyPair(TreeNode node, int next) {
            mTreeNode = node;
            mNextStart = next;
        }
    }

    /**
     * Encodes a tree to a single string using the grammar
     * subtree := "(" [value subtree subtree] ")", where "()" is an empty tree.
     */
    public String serialize(TreeNode root) {
        StringBuilder out = new StringBuilder();
        appendSubtree(out, root);
        return out.toString();
    }

    /** Appends the parenthesized encoding of one subtree to the builder. */
    private void appendSubtree(StringBuilder out, TreeNode node) {
        out.append('(');
        if (node != null) {
            out.append(node.val);
            appendSubtree(out, node.left);
            appendSubtree(out, node.right);
        }
        out.append(')');
    }

    /**
     * Parses the subtree whose opening '(' is at {@code begin} and returns the
     * decoded node together with the index just past its closing ')'.
     */
    private MyPair doDeserialize(String data, int begin) {
        // Scan past the (possibly negative) value up to the next structural char.
        int cursor = begin + 1;
        while (data.charAt(cursor) != '(' && data.charAt(cursor) != ')') {
            cursor++;
        }
        if (cursor == begin + 1) {
            // "()" — an empty subtree occupying two characters.
            return new MyPair(null, begin + 2);
        }
        TreeNode root = new TreeNode(Integer.parseInt(data.substring(begin + 1, cursor)));
        MyPair left = doDeserialize(data, cursor);
        root.left = left.mTreeNode;
        MyPair right = doDeserialize(data, left.mNextStart);
        root.right = right.mTreeNode;
        // Skip this subtree's closing ')'.
        return new MyPair(root, right.mNextStart + 1);
    }

    /** Decodes a string produced by {@link #serialize(TreeNode)} back into a tree. */
    public TreeNode deserialize(String data) {
        return doDeserialize(data, 0).mTreeNode;
    }
}
// Your Codec object will be instantiated and called as such:
// Codec codec = new Codec();
// codec.deserialize(codec.serialize(root));
| apache-2.0 |
paulseawa/p4ic4idea | idea150/src/net/groboclown/idea/p4ic/compat/idea150/VcsCompat150.java | 1390 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.groboclown.idea.p4ic.compat.idea150;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.vcs.log.VcsUserRegistry;
import net.groboclown.idea.p4ic.compat.VcsCompat;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
public class VcsCompat150 extends VcsCompat {

    /**
     * Warms up the {@link VcsUserRegistry} project service so the user
     * registry is read before the commit dialog is opened.
     */
    @Override
    public void setupPlugin(@NotNull Project project) {
        // Requesting the service is sufficient; instantiating it reads the registry.
        ServiceManager.getService(project, VcsUserRegistry.class);
    }

    @Override
    public void refreshFiles(@NotNull final Project project, final Collection<VirtualFile> affectedFiles) {
        // Intentionally a no-op: file refresh is not supported in IDEA 15.
    }
}
| apache-2.0 |
1Evgeny/java-a-to-z | chapter_005_Lite/src/main/java/by/vorokhobko/generalization/tracker/abstractclass/Input.java | 535 | package by.vorokhobko.generalization.tracker.abstractclass;
/**
 * Input.
 *
 * Interface describing how user input is obtained for the tracker
 * (part 002, lesson 5).
 * @author Evgeny Vorokhobko (vorokhobko2011@yandex.ru).
 * @since 20.03.2017.
 * @version 1.
 */
public interface Input {
/**
 * Asks the user a question and returns the raw textual answer.
 * @param question - question shown to the user.
 * @return the user's answer.
 */
String ask(String question);
/**
 * Asks the user a question and returns a numeric answer restricted
 * to the given set of allowed values.
 * @param question - question shown to the user.
 * @param range - allowed values for the answer.
 * @return the chosen value.
 */
int ask(String question, int[] range);
} | apache-2.0 |
javerdy/ja_verdy | mantis-tests/src/test/java/ru/pack/mantis/appmanager/NavigationHelper.java | 582 | package ru.pack.mantis.appmanager;
import org.openqa.selenium.By;
/**
* Created by Goblik on 16.10.2016.
*/
public class NavigationHelper extends HelperBase {

    public NavigationHelper(ApplicationManadger app) {
        super(app);
    }

    /** Opens the management overview page and then the user management page. */
    public void manageUsersPage() {
        managePage();
        By usersLink = By.xpath("//a[contains(@href, 'manage_user_page.php')]");
        click(usersLink);
    }

    /** Opens the management overview page. */
    private void managePage() {
        By overviewLink = By.xpath("//a[contains(@href, 'manage_overview_page.php')]");
        click(overviewLink);
    }

    /** Opens the edit page of the user with the given id. */
    public void editUserPage(int id) {
        By userLink = By.xpath("//a[contains(@href, 'user_id=" + id + "')]");
        click(userLink);
    }
}
| apache-2.0 |
richkadel/flip.tv | cc2nlp/src/main/java/com/knowbout/cc2nlp/CaptionTypeChangeEvent.java | 2748 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.knowbout.cc2nlp;
import java.io.Serializable;
import com.knowbout.cc4j.XDSData;
import com.knowbout.cc4j.XDSField;
import com.knowbout.cc4j.CaptionType;
import com.knowbout.epg.service.ScheduledProgram;
/**
* @author Jake Fear
* @author Rich Kadel
* @author $Author$
* @version $Rev$ $Date$
*/
public class CaptionTypeChangeEvent extends CCEvent implements Serializable {

    private static final long serialVersionUID = -607863981372213815L;

    /** The caption type in effect after this change. */
    private CaptionType captionType;

    /**
     * Creates an empty event to fill.
     */
    public CaptionTypeChangeEvent() {
    }

    /**
     * Creates an event with the given values.
     * @param headendID the identifier for the callsign provider.
     * @param callsign the TV callsign from which these captions were read.
     * @param scheduledProgram the program airing when the change was observed.
     * @param timestamp the system time at which this event was generated, which
     * should be immediately after the capture software recognized the
     * caption type change.
     * @param newCaptionType the caption type now in effect.
     */
    public CaptionTypeChangeEvent(String headendID, String callsign, ScheduledProgram scheduledProgram, long timestamp,
            CaptionType newCaptionType) {
        super(headendID, callsign, scheduledProgram, timestamp);
        this.captionType = newCaptionType;
    }

    /**
     * Creates an event with the given headendID and callsign. The other values
     * need to be set before sending this event. This creates an event that
     * can easily be reused by setting and changing the values.
     * @param headendID the identifier for the callsign provider.
     * @param callsign the TV callsign from which these captions were read.
     */
    public CaptionTypeChangeEvent(String headendID, String callsign) {
        super(headendID, callsign);
    }

    public CaptionType getCaptionType() {
        return captionType;
    }

    public void setCaptionType(CaptionType captionType) {
        this.captionType = captionType;
    }

    @Override
    public String toString() {
        return "CaptionType=" + captionType;
    }
}
| apache-2.0 |
PurelyApplied/geode | geode-core/src/main/java/org/apache/geode/management/internal/configuration/domain/XmlEntity.java | 21040 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.configuration.domain;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.xpath.XPathExpressionException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.apache.geode.DataSerializer;
import org.apache.geode.InternalGemFireError;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.internal.Assert;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.VersionedDataSerializable;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.xmlcache.CacheXml;
import org.apache.geode.internal.cache.xmlcache.CacheXmlGenerator;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.management.internal.configuration.utils.XmlUtils;
import org.apache.geode.management.internal.configuration.utils.XmlUtils.XPathContext;
/**
* Domain class for defining a GemFire entity in XML.
*/
public class XmlEntity implements VersionedDataSerializable {
  private static final long serialVersionUID = 1L;
  private static final Logger logger = LogService.getLogger();

  // Supplies the cache used to generate XML; transient and volatile so it is
  // rebuilt after deserialization and safely published across threads.
  private transient volatile CacheProvider cacheProvider;
  // Element name of this entity, e.g. CacheXml.REGION.
  private String type;
  // Element name of the parent entity when this entity is addressed as a child.
  @SuppressWarnings("unused")
  private String parentType;
  // Attribute name/value pairs used to identify the element, e.g. name/id.
  private Map<String, String> attributes = new HashMap<>();
  // The XML text of the entity as extracted from the cache configuration.
  private String xmlDefinition;
  // XPath expression used to locate the element in the configuration document.
  private String searchString;
  private String prefix = CacheXml.PREFIX;
  private String namespace = CacheXml.GEODE_NAMESPACE;
  // Namespace prefix/URI of the child element; null unless a child-aware
  // constructor was used.
  private String childPrefix;
  private String childNamespace;

  /**
   * Produce a new XmlEntityBuilder.
   *
   * @return new XmlEntityBuilder.
   * @since GemFire 8.1
   */
  public static XmlEntityBuilder builder() {
    return new XmlEntityBuilder();
  }

  private static CacheProvider createDefaultCacheProvider() {
    return () -> ((InternalCache) CacheFactory.getAnyInstance())
        .getCacheForProcessingClientRequests();
  }

  /**
   * Default constructor for serialization only.
   *
   * @deprecated Use {@link XmlEntity#builder()}.
   */
  @Deprecated
  public XmlEntity() {
    cacheProvider = createDefaultCacheProvider();
  }

  /**
   * Construct a new XmlEntity while creating XML from the cache using the element which has a type
   * and attribute matching those given.
   *
   * @param type Type of the XML element to search for. Should be one of the constants from the
   *        {@link CacheXml} class. For example, CacheXml.REGION.
   * @param key Key of the attribute to match, for example, "name" or "id".
   * @param value Value of the attribute to match.
   */
  public XmlEntity(final String type, final String key, final String value) {
    cacheProvider = createDefaultCacheProvider();
    this.type = type;
    attributes.put(key, value);
    init();
  }

  /**
   * Construct a new XmlEntity while creating Xml from the cache using the element which has
   * attributes matching those given
   *
   * @param parentType Parent type of the XML element to search for. Should be one of the constants
   *        from the {@link CacheXml} class. For example, CacheXml.REGION.
   *
   * @param parentKey Identifier for the parent elements such "name/id"
   * @param parentValue Value of the identifier
   * @param childType Child type of the XML element to search for within the parent . Should be one
   *        of the constants from the {@link CacheXml} class. For example, CacheXml.INDEX.
   * @param childKey Identifier for the child element such as "name/id"
   * @param childValue Value of the child element identifier
   */
  public XmlEntity(final String parentType, final String parentKey, final String parentValue,
      final String childType, final String childKey, final String childValue) {
    cacheProvider = createDefaultCacheProvider();
    this.parentType = parentType;
    type = childType;
    initializeSearchString(parentKey, parentValue, prefix, childKey, childValue);
  }

  /**
   * Construct a new XmlEntity while creating Xml from the cache using the element which has
   * attributes matching those given
   *
   * @param parentType Parent type of the XML element to search for. Should be one of the constants
   *        from the {@link CacheXml} class. For example, CacheXml.REGION.
   *
   * @param parentKey Identifier for the parent elements such "name/id"
   * @param parentValue Value of the identifier
   * @param childPrefix Namespace prefix for the child element such as "lucene"
   * @param childNamespace Namespace for the child element such as
   *        "http://geode.apache.org/schema/lucene"
   * @param childType Child type of the XML element to search for within the parent . Should be one
   *        of the constants from the {@link CacheXml} class. For example, CacheXml.INDEX.
   * @param childKey Identifier for the child element such as "name/id"
   * @param childValue Value of the child element identifier
   */
  public XmlEntity(final String parentType, final String parentKey, final String parentValue,
      final String childPrefix, final String childNamespace, final String childType,
      final String childKey, final String childValue) {
    // Note: Do not invoke init
    cacheProvider = createDefaultCacheProvider();
    this.parentType = parentType;
    type = childType;
    this.childPrefix = childPrefix;
    this.childNamespace = childNamespace;
    initializeSearchString(parentKey, parentValue, childPrefix, childKey, childValue);
  }

  public XmlEntity(final CacheProvider cacheProvider, final String parentType,
      final String childPrefix, final String childNamespace, final String childType,
      final String key, final String value) {
    this.cacheProvider = cacheProvider;
    this.parentType = parentType;
    type = childType;
    prefix = childPrefix;
    namespace = childNamespace;
    this.childPrefix = childPrefix;
    this.childNamespace = childNamespace;
    attributes.put(key, value);
    searchString = "//" + this.parentType + '/' + childPrefix + ':' + type;
    xmlDefinition = parseXmlForDefinition();
  }

  /**
   * Generates the current cache configuration and extracts the first element matching this
   * entity's child namespace and type via the DOM (no XPath).
   *
   * @return XML string for the entity, or null if no matching element exists.
   */
  private String parseXmlForDefinition() {
    final Cache cache = cacheProvider.getCache();
    final StringWriter stringWriter = new StringWriter();
    final PrintWriter printWriter = new PrintWriter(stringWriter);
    CacheXmlGenerator.generate(cache, printWriter, false, false);
    printWriter.close();
    InputSource inputSource = new InputSource(new StringReader(stringWriter.toString()));
    try {
      Document document = XmlUtils.getDocumentBuilder().parse(inputSource);
      Node element = document.getElementsByTagNameNS(childNamespace, type).item(0);
      if (null != element) {
        return XmlUtils.elementToString(element);
      }
    } catch (IOException | ParserConfigurationException | RuntimeException | SAXException
        | TransformerException e) {
      throw new InternalGemFireError("Could not parse XML when creating XMLEntity", e);
    }
    logger.warn("No XML definition could be found with name={} and attributes={}", type,
        attributes);
    return null;
  }

  /**
   * Builds the XPath search string of the form
   * {@code //prefix:parentType[@parentKey='parentValue']/childPrefix:type[@childKey='childValue']},
   * omitting a predicate when its key or value is blank.
   */
  private void initializeSearchString(final String parentKey, final String parentValue,
      final String childPrefix, final String childKey, final String childValue) {
    StringBuilder sb = new StringBuilder();
    sb.append("//").append(prefix).append(':').append(parentType);
    if (StringUtils.isNotBlank(parentKey) && StringUtils.isNotBlank(parentValue)) {
      sb.append("[@").append(parentKey).append("='").append(parentValue).append("']");
    }
    sb.append('/').append(childPrefix).append(':').append(type);
    if (StringUtils.isNotBlank(childKey) && StringUtils.isNotBlank(childValue)) {
      sb.append("[@").append(childKey).append("='").append(childValue).append("']");
    }
    searchString = sb.toString();
  }

  /**
   * Initialize new instances. Called from {@link #XmlEntity(String, String, String)} and
   * {@link XmlEntityBuilder#build()}.
   *
   * @since GemFire 8.1
   */
  private void init() {
    Assert.assertTrue(StringUtils.isNotBlank(type));
    Assert.assertTrue(StringUtils.isNotBlank(prefix));
    Assert.assertTrue(StringUtils.isNotBlank(namespace));
    Assert.assertTrue(attributes != null);
    if (null == xmlDefinition) {
      xmlDefinition = loadXmlDefinition();
    }
  }

  /**
   * Use the CacheXmlGenerator to create XML from the entity associated with the current cache.
   *
   * @return XML string representation of the entity.
   */
  private String loadXmlDefinition() {
    final Cache cache = cacheProvider.getCache();
    final StringWriter stringWriter = new StringWriter();
    final PrintWriter printWriter = new PrintWriter(stringWriter);
    CacheXmlGenerator.generate(cache, printWriter, false, false);
    printWriter.close();
    return loadXmlDefinition(stringWriter.toString());
  }

  /**
   * Used supplied xmlDocument to extract the XML for the defined XmlEntity.
   *
   * @param xmlDocument to extract XML from.
   * @return XML for XmlEntity if found, otherwise {@code null}.
   * @since GemFire 8.1
   */
  private String loadXmlDefinition(final String xmlDocument) {
    try {
      InputSource inputSource = new InputSource(new StringReader(xmlDocument));
      return loadXmlDefinition(XmlUtils.getDocumentBuilder().parse(inputSource));
    } catch (IOException | SAXException | ParserConfigurationException | XPathExpressionException
        | TransformerFactoryConfigurationError | TransformerException e) {
      throw new InternalGemFireError("Could not parse XML when creating XMLEntity", e);
    }
  }

  /**
   * Used supplied XML {@link Document} to extract the XML for the defined XmlEntity.
   *
   * @param document to extract XML from.
   * @return XML for XmlEntity if found, otherwise {@code null}.
   * @since GemFire 8.1
   */
  private String loadXmlDefinition(final Document document)
      throws XPathExpressionException, TransformerFactoryConfigurationError, TransformerException {
    searchString = createQueryString(prefix, type, attributes);
    logger.info("XmlEntity:searchString: {}", searchString);
    if (document != null) {
      XPathContext xpathContext = new XPathContext();
      xpathContext.addNamespace(prefix, namespace);
      // Only register the child namespace when one was actually supplied; entities
      // created via the builder or the simple constructors have no child namespace.
      if (childPrefix != null && childNamespace != null) {
        xpathContext.addNamespace(childPrefix, childNamespace);
      }
      Node element = XmlUtils.querySingleElement(document, searchString, xpathContext);
      // Must copy to preserve namespaces.
      if (null != element) {
        return XmlUtils.elementToString(element);
      }
    }
    logger.warn("No XML definition could be found with name={} and attributes={}", type,
        attributes);
    return null;
  }

  /**
   * Create an XmlPath query string from the given element name and attributes.
   *
   * @param element Name of the XML element to search for.
   * @param attributes Attributes of the element that should match, for example "name" or "id" and
   *        the value they should equal. This list may be empty.
   *
   * @return An XmlPath query string.
   */
  private String createQueryString(final String prefix, final String element,
      final Map<String, String> attributes) {
    StringBuilder queryStringBuilder = new StringBuilder();
    Iterator<Entry<String, String>> attributeIter = attributes.entrySet().iterator();
    queryStringBuilder.append("//").append(prefix).append(':').append(element);
    if (!attributes.isEmpty()) {
      queryStringBuilder.append('[');
      Entry<String, String> attrEntry = attributeIter.next();
      queryStringBuilder.append('@').append(attrEntry.getKey()).append("='")
          .append(attrEntry.getValue()).append('\'');
      while (attributeIter.hasNext()) {
        attrEntry = attributeIter.next();
        queryStringBuilder.append(" and @").append(attrEntry.getKey()).append("='")
            .append(attrEntry.getValue()).append('\'');
      }
      queryStringBuilder.append(']');
    }
    return queryStringBuilder.toString();
  }

  public String getSearchString() {
    return searchString;
  }

  public String getType() {
    return type;
  }

  public Map<String, String> getAttributes() {
    return attributes;
  }

  /**
   * Return the value of a single attribute.
   *
   * @param key Key of the attribute whose while will be returned.
   *
   * @return The value of the attribute.
   */
  public String getAttribute(String key) {
    return attributes.get(key);
  }

  /**
   * A convenience method to get a name or id attributes from the list of attributes if one of them
   * has been set. Name takes precedence.
   *
   * @return The name or id attribute or null if neither is found.
   */
  public String getNameOrId() {
    if (attributes.containsKey("name")) {
      return attributes.get("name");
    }
    return attributes.get("id");
  }

  public String getXmlDefinition() {
    return xmlDefinition;
  }

  /**
   * Gets the namespace for the element. Defaults to {@link CacheXml#GEODE_NAMESPACE} if not set.
   *
   * @return XML element namespace
   * @since GemFire 8.1
   */
  public String getNamespace() {
    return namespace;
  }

  /**
   * Gets the prefix for the element. Defaults to {@link CacheXml#PREFIX} if not set.
   *
   * @return XML element prefix
   * @since GemFire 8.1
   */
  public String getPrefix() {
    return prefix;
  }

  /**
   * Gets the prefix for the child element.
   *
   * @return XML element prefix for the child element
   */
  public String getChildPrefix() {
    return childPrefix;
  }

  /**
   * Gets the namespace for the child element.
   *
   * @return XML element namespace for the child element
   */
  public String getChildNamespace() {
    return childNamespace;
  }

  @Override
  public Version[] getSerializationVersions() {
    return new Version[] {Version.GEODE_1_1_1};
  }

  @Override
  public String toString() {
    return "XmlEntity [namespace=" + namespace + ", type=" + type + ", attributes="
        + attributes + ", xmlDefinition=" + xmlDefinition + ']';
  }

  // Note: hashCode uses a subset (type, attributes) of the fields compared in
  // equals (type, attributes, namespace); equal objects still hash equally.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((attributes == null) ? 0 : attributes.hashCode());
    result = prime * result + ((type == null) ? 0 : type.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    XmlEntity other = (XmlEntity) obj;
    if (attributes == null) {
      if (other.attributes != null)
        return false;
    } else if (!attributes.equals(other.attributes))
      return false;
    if (namespace == null) {
      if (other.namespace != null)
        return false;
    } else if (!namespace.equals(other.namespace))
      return false;
    if (type == null) {
      return other.type == null;
    } else {
      return type.equals(other.type);
    }
  }

  @Override
  public void toData(DataOutput out) throws IOException {
    toDataPre_GEODE_1_1_1_0(out);
    // Child prefix/namespace were added in Geode 1.1.1 and serialize last.
    DataSerializer.writeString(childPrefix, out);
    DataSerializer.writeString(childNamespace, out);
  }

  public void toDataPre_GEODE_1_1_1_0(DataOutput out) throws IOException {
    DataSerializer.writeString(type, out);
    DataSerializer.writeObject(attributes, out);
    DataSerializer.writeString(xmlDefinition, out);
    DataSerializer.writeString(searchString, out);
    DataSerializer.writeString(prefix, out);
    DataSerializer.writeString(namespace, out);
  }

  @Override
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    fromDataPre_GEODE_1_1_1_0(in);
    childPrefix = DataSerializer.readString(in);
    childNamespace = DataSerializer.readString(in);
  }

  public void fromDataPre_GEODE_1_1_1_0(DataInput in) throws IOException, ClassNotFoundException {
    type = DataSerializer.readString(in);
    attributes = DataSerializer.readObject(in);
    xmlDefinition = DataSerializer.readString(in);
    searchString = DataSerializer.readString(in);
    prefix = DataSerializer.readString(in);
    namespace = DataSerializer.readString(in);
    // The provider is transient; restore the default after deserialization.
    cacheProvider = createDefaultCacheProvider();
  }

  /**
   * Defines how XmlEntity gets a reference to the Cache.
   */
  public interface CacheProvider {
    InternalCache getCache();
  }

  /**
   * Builder for XmlEntity. Default values are as described in XmlEntity.
   *
   * @since GemFire 8.1
   */
  public static class XmlEntityBuilder {
    private XmlEntity xmlEntity;

    /**
     * Private constructor.
     *
     * @since GemFire 8.1
     */
    @SuppressWarnings("deprecation")
    XmlEntityBuilder() {
      xmlEntity = new XmlEntity();
    }

    /**
     * Produce an XmlEntity with the supplied values. Builder is reset after #build() is called.
     * Subsequent calls will produce a new XmlEntity.
     *
     * You are required to at least call {@link #withType(String)}.
     *
     * @since GemFire 8.1
     */
    @SuppressWarnings("deprecation")
    public XmlEntity build() {
      xmlEntity.init();
      final XmlEntity built = xmlEntity;
      xmlEntity = new XmlEntity();
      return built;
    }

    /**
     * Sets the type or element name value as returned by {@link XmlEntity#getType()}
     *
     * @param type Name of element type.
     * @return this XmlEntityBuilder
     * @since GemFire 8.1
     */
    public XmlEntityBuilder withType(final String type) {
      xmlEntity.type = type;
      return this;
    }

    /**
     * Sets the element prefix and namespace as returned by {@link XmlEntity#getPrefix()} and
     * {@link XmlEntity#getNamespace()} respectively. Defaults are {@link CacheXml#PREFIX} and
     * {@link CacheXml#GEODE_NAMESPACE} respectively.
     *
     * @param prefix Prefix of element
     * @param namespace Namespace of element
     * @return this XmlEntityBuilder
     * @since GemFire 8.1
     */
    public XmlEntityBuilder withNamespace(final String prefix, final String namespace) {
      xmlEntity.prefix = prefix;
      xmlEntity.namespace = namespace;
      return this;
    }

    /**
     * Adds an attribute for the given <code>name</code> and <code>value</code> to the attributes
     * map returned by {@link XmlEntity#getAttributes()} or {@link XmlEntity#getAttribute(String)}.
     *
     * @param name Name of attribute to set.
     * @param value Value of attribute to set.
     * @return this XmlEntityBuilder
     * @since GemFire 8.1
     */
    public XmlEntityBuilder withAttribute(final String name, final String value) {
      xmlEntity.attributes.put(name, value);
      return this;
    }

    /**
     * Replaces all attributes with the supplied attributes {@link Map}.
     *
     * @param attributes {@link Map} to use.
     * @return this XmlEntityBuilder
     * @since GemFire 8.1
     */
    public XmlEntityBuilder withAttributes(final Map<String, String> attributes) {
      xmlEntity.attributes = attributes;
      return this;
    }

    /**
     * Sets a config xml document source to get the entity XML Definition from as returned by
     * {@link XmlEntity#getXmlDefinition()}. Defaults to current active configuration for
     * {@link Cache}.
     *
     * <b>Should only be used for testing.</b>
     *
     * @param document Config XML {@link Document}.
     * @return this XmlEntityBuilder
     * @since GemFire 8.1
     */
    public XmlEntityBuilder withConfig(final Document document) throws XPathExpressionException,
        TransformerFactoryConfigurationError, TransformerException {
      xmlEntity.xmlDefinition = xmlEntity.loadXmlDefinition(document);
      return this;
    }
  }
}
| apache-2.0 |
eSDK/esdk_cloud_fm_r3_native_java | source/FM/V1R5/esdk_fm_neadp_1.5_native_java/src/test/java/com/huawei/esdk/fusionmanager/local/impl/resources/storage/VolumeResourceImplTest.java | 6846 | package com.huawei.esdk.fusionmanager.local.impl.resources.storage;
import java.util.ArrayList;
import java.util.HashMap;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.gson.Gson;
import com.huawei.esdk.fusionmanager.local.ServiceFactory;
import com.huawei.esdk.fusionmanager.local.model.ClientProviderBean;
import com.huawei.esdk.fusionmanager.local.model.common.AsyncResp;
import com.huawei.esdk.fusionmanager.local.model.storage.CreateVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.CreateVolumeResp;
import com.huawei.esdk.fusionmanager.local.model.storage.DeleteVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.DeleteVolumeResp;
import com.huawei.esdk.fusionmanager.local.model.storage.ExpandVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.ExpandVolumeReqEx;
import com.huawei.esdk.fusionmanager.local.model.storage.ListVolumeResp;
import com.huawei.esdk.fusionmanager.local.model.storage.ModifyVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.ModifyVolumeReqEx;
import com.huawei.esdk.fusionmanager.local.model.storage.MountVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.MountVolumeReqEx;
import com.huawei.esdk.fusionmanager.local.model.storage.QueryVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.RemoveVolumeReqEx;
import com.huawei.esdk.fusionmanager.local.model.storage.UnmountVolumeReq;
import com.huawei.esdk.fusionmanager.local.model.storage.UnmountVolumeReqEx;
import com.huawei.esdk.fusionmanager.local.model.storage.VolumeStatisticsResp;
import com.huawei.esdk.fusionmanager.local.model.user.AuthenticateReq;
import com.huawei.esdk.fusionmanager.local.model.user.AuthenticateResp;
import com.huawei.esdk.fusionmanager.local.resources.storage.VolumeResource;
import com.huawei.esdk.fusionmanager.local.resources.user.AuthenticateResource;
/**
 * Integration-style tests for {@link VolumeResource}: authenticates once against a
 * FusionManager endpoint, then exercises volume create/modify/remove/list/count/
 * expand/mount/unmount operations.
 *
 * <p>NOTE(review): these tests contact a live server (166.166.40.150) with
 * hard-coded credentials, make no assertions, and only print responses for manual
 * inspection — they are environment-dependent rather than self-verifying.
 */
public class VolumeResourceImplTest {

    private static VolumeResource service;

    /** Shared Gson instance used to dump responses; Gson instances are thread-safe. */
    private static final Gson g = new Gson();

    /**
     * Authenticates against the test server and obtains the {@link VolumeResource}
     * service all test methods share.
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        ClientProviderBean bean = new ClientProviderBean();
        bean.setServerIp("166.166.40.150");
        bean.setServerPort("543");
        AuthenticateResource authenticateResource = ServiceFactory.getService(
                AuthenticateResource.class, bean);
        AuthenticateReq request = new AuthenticateReq();
        request.setUserName("syfm03");
        request.setPassword("Huawei@123");
        request.setLoginScene("2");
        AuthenticateResp resp = authenticateResource.authenticate(request);
        System.err.println(resp.getTokenId());
        // Fix: removed the down-cast to VolumeResourceImpl. The factory only
        // promises a VolumeResource; casting to the implementation class would
        // throw ClassCastException if the factory ever returns a proxy/decorator.
        service = ServiceFactory.getService(VolumeResource.class, bean);
    }

    /** Creates a volume in availability zone 4616189618054758401 and prints the response. */
    @Test
    public final void testCreate() {
        CreateVolumeReq req = new CreateVolumeReq();
        req.setName("Vol_Create");
        req.setSize(100);
        req.setAvailableZoneId("4616189618054758401");
        req.setMediaType("SAN-Any");
        req.setType("normal");
        req.setConfigType(1);
        String vdcId = "9";
        String vpcId = "-1";
        String cloudInfra = "34";
        CreateVolumeResp resp = service.create(req, vdcId, vpcId, cloudInfra);
        System.err.println(g.toJson(resp));
    }

    /** Renames an existing volume; the operation is asynchronous. */
    @Test
    public final void testModify() {
        ModifyVolumeReq req = new ModifyVolumeReq();
        req.setName("Vol_Modify");
        ModifyVolumeReqEx reqEx = new ModifyVolumeReqEx();
        reqEx.setVdcId("9");
        reqEx.setVpcId("-1");
        reqEx.setVolumeId("4629700416936869889$urn:sites:3E090887:volumes:451");
        reqEx.setCloudInfraId("34");
        AsyncResp resp = service.modify(req, reqEx);
        System.err.println(g.toJson(resp));
    }

    /** Removes a single volume; the operation is asynchronous. */
    @Test
    public final void testRemove() {
        RemoveVolumeReqEx reqEx = new RemoveVolumeReqEx();
        reqEx.setVdcId("9");
        reqEx.setVpcId("-1");
        reqEx.setVolumeId("4629700416936869889$urn:sites:3E090887:volumes:451");
        reqEx.setCloudInfraId("34");
        AsyncResp resp = service.remove(reqEx);
        System.err.println(g.toJson(resp));
    }

    /** Lists volumes attached to one VM, filtered by a search condition. */
    @Test
    public final void testlist() {
        QueryVolumeReq req = new QueryVolumeReq();
        req.setStart(0);
        req.setLimit(2);
        req.setVmId("4629700416936869889$urn:sites:3E090887:vms:i-000000E1");
        // Fix: plain HashMap instead of double-brace initialization. The anonymous
        // subclass captured the enclosing test instance and forced a spurious
        // serialVersionUID for no benefit.
        HashMap<String, String> searchCondition = new HashMap<String, String>();
        searchCondition.put("ALL", "4629700416936869889$urn:sites:3E090887:volumes:450");
        req.setSearchCondition(searchCondition);
        req.setAzId("4616189618054758401");
        ListVolumeResp resp = service.list("9", "-1", "34", req);
        System.err.println(g.toJson(resp));
    }

    /** Batch-removes volumes by id list; safeDelete is disabled. */
    @Test
    public final void testBatchRemove() {
        DeleteVolumeReq req = new DeleteVolumeReq();
        // Fix: plain ArrayList instead of double-brace initialization (see testlist).
        ArrayList<String> volumeIds = new ArrayList<String>();
        volumeIds.add("4629700416936869889$urn:sites:31460622:volumes:286");
        // volumeIds.add("4629700416936869889$urn:sites:31460622:volumes:285");
        req.setVolumnIds(volumeIds);
        req.setSafeDelete(false);
        DeleteVolumeResp resp = service.batchRemove(req, "2", "4792750811720056832", "34");
        System.err.println(g.toJson(resp));
    }

    /** Counts volumes grouped by attach status. */
    @Test
    public final void testCount() {
        VolumeStatisticsResp resp = service.count("9", "-1", "34", "attachstatus");
        System.err.println(g.toJson(resp));
    }

    /** Expands a volume to 6 (units defined by the API); asynchronous. */
    @Test
    public final void testExpand() {
        ExpandVolumeReq req = new ExpandVolumeReq();
        req.setVmId("4629700416936869889$urn:sites:31460622:vms:i-0000005F");
        req.setSize(6);
        ExpandVolumeReqEx reqEx = new ExpandVolumeReqEx();
        reqEx.setVdcId("2");
        reqEx.setVpcId("4792750811720056832");
        reqEx.setVolumeId("4629700416936869889$urn:sites:31460622:volumes:287");
        reqEx.setCloudInfraId("34");
        AsyncResp resp = service.expand(req, reqEx);
        System.err.println(g.toJson(resp));
    }

    /**
     * Mounts a volume onto a VM; asynchronous.
     * NOTE(review): the vmId below carries a ":volumes:" URN, which looks like a
     * copy-paste slip from another test — confirm against the server data.
     */
    @Test
    public final void testMount() {
        MountVolumeReq req = new MountVolumeReq();
        req.setVmId("4629700416936869889$urn:sites:31460622:volumes:277");
        MountVolumeReqEx reqEx = new MountVolumeReqEx();
        reqEx.setVdcId("1");
        reqEx.setVpcId("-1");
        reqEx.setVolumeId("4629700416936869889$urn:sites:3E090887:volumes:452");
        reqEx.setCloudInfraId("34");
        AsyncResp resp = service.mount(req, reqEx);
        System.err.println(g.toJson(resp));
    }

    /** Unmounts a volume from a VM; asynchronous. */
    @Test
    public final void testUnmount() {
        UnmountVolumeReq req = new UnmountVolumeReq();
        req.setVmId("4629700416936869889$urn:sites:3E090887:vms:i-000000E1");
        UnmountVolumeReqEx reqEx = new UnmountVolumeReqEx();
        reqEx.setVdcId("9");
        reqEx.setVpcId("-1");
        reqEx.setVolumeId("4629700416936869889$urn:sites:3E090887:volumes:452");
        reqEx.setCloudInfraId("34");
        AsyncResp resp = service.unmount(req, reqEx);
        System.err.println(g.toJson(resp));
    }
}
| apache-2.0 |
drowning/netty | codec-http/src/test/java/io/netty/handler/codec/http/websocketx/extensions/WebSocketClientExtensionHandlerTest.java | 11810 | /*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http.websocketx.extensions;
import static io.netty.handler.codec.http.websocketx.extensions.WebSocketExtensionTestUtil.*;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.*;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.CodecException;
import io.netty.handler.codec.http.HttpHeaders.Names;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.websocketx.WebSocketFrame;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
/**
 * Tests for the client-side WebSocket extension negotiation handler.
 *
 * <p>Each test wires a {@code WebSocketClientExtensionHandler} with two EasyMock
 * handshakers ("main" and "fallback") into an {@link EmbeddedChannel}, sends an
 * upgrade request outbound, feeds an upgrade response inbound, and then checks
 * which extension encoders/decoders were installed in the pipeline.
 */
public class WebSocketClientExtensionHandlerTest {
    WebSocketClientExtensionHandshaker mainHandshakerMock =
            createMock("mainHandshaker", WebSocketClientExtensionHandshaker.class);
    WebSocketClientExtensionHandshaker fallbackHandshakerMock =
            createMock("fallbackHandshaker", WebSocketClientExtensionHandshaker.class);
    WebSocketClientExtension mainExtensionMock =
            createMock("mainExtension", WebSocketClientExtension.class);
    WebSocketClientExtension fallbackExtensionMock =
            createMock("fallbackExtension", WebSocketClientExtension.class);

    /**
     * Server accepts only "main": both extensions are offered in the request, and
     * only main's codec pair ends up in the pipeline.
     */
    @Test
    public void testMainSuccess() {
        // initialize
        expect(mainHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("main", Collections.<String, String>emptyMap())).once();
        expect(mainHandshakerMock.handshakeExtension(
                anyObject(WebSocketExtensionData.class))).andReturn(mainExtensionMock).once();
        replay(mainHandshakerMock);
        expect(fallbackHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("fallback", Collections.<String, String>emptyMap())).once();
        replay(fallbackHandshakerMock);
        expect(mainExtensionMock.rsv()).andReturn(WebSocketExtension.RSV1).anyTimes();
        expect(mainExtensionMock.newExtensionEncoder()).andReturn(new DummyEncoder()).once();
        expect(mainExtensionMock.newExtensionDecoder()).andReturn(new DummyDecoder()).once();
        replay(mainExtensionMock);

        // execute: outbound request collects offers, inbound response selects "main"
        EmbeddedChannel ch = new EmbeddedChannel(new WebSocketClientExtensionHandler(
                mainHandshakerMock, fallbackHandshakerMock));
        HttpRequest req = newUpgradeRequest(null);
        ch.writeOutbound(req);
        HttpRequest req2 = ch.readOutbound();
        List<WebSocketExtensionData> reqExts = WebSocketExtensionUtil.extractExtensions(
                req2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));
        HttpResponse res = newUpgradeResponse("main");
        ch.writeInbound(res);
        HttpResponse res2 = ch.readInbound();
        List<WebSocketExtensionData> resExts = WebSocketExtensionUtil.extractExtensions(
                res2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));

        // test: both offered, only "main" negotiated, main codec installed
        assertEquals(2, reqExts.size());
        assertEquals("main", reqExts.get(0).name());
        assertEquals("fallback", reqExts.get(1).name());
        assertEquals(1, resExts.size());
        assertEquals("main", resExts.get(0).name());
        assertTrue(resExts.get(0).parameters().isEmpty());
        assertTrue(ch.pipeline().get(DummyDecoder.class) != null);
        assertTrue(ch.pipeline().get(DummyEncoder.class) != null);
    }

    /**
     * Server accepts only "fallback": the main handshaker declines (returns null)
     * and the fallback extension's codec pair is installed instead.
     */
    @Test
    public void testFallbackSuccess() {
        // initialize
        expect(mainHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("main", Collections.<String, String>emptyMap())).once();
        expect(mainHandshakerMock.handshakeExtension(
                anyObject(WebSocketExtensionData.class))).andReturn(null).once();
        replay(mainHandshakerMock);
        expect(fallbackHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("fallback", Collections.<String, String>emptyMap())).once();
        expect(fallbackHandshakerMock.handshakeExtension(
                anyObject(WebSocketExtensionData.class))).andReturn(fallbackExtensionMock).once();
        replay(fallbackHandshakerMock);
        expect(fallbackExtensionMock.rsv()).andReturn(WebSocketExtension.RSV1).anyTimes();
        expect(fallbackExtensionMock.newExtensionEncoder()).andReturn(new DummyEncoder()).once();
        expect(fallbackExtensionMock.newExtensionDecoder()).andReturn(new DummyDecoder()).once();
        replay(fallbackExtensionMock);

        // execute
        EmbeddedChannel ch = new EmbeddedChannel(new WebSocketClientExtensionHandler(
                mainHandshakerMock, fallbackHandshakerMock));
        HttpRequest req = newUpgradeRequest(null);
        ch.writeOutbound(req);
        HttpRequest req2 = ch.readOutbound();
        List<WebSocketExtensionData> reqExts = WebSocketExtensionUtil.extractExtensions(
                req2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));
        HttpResponse res = newUpgradeResponse("fallback");
        ch.writeInbound(res);
        HttpResponse res2 = ch.readInbound();
        List<WebSocketExtensionData> resExts = WebSocketExtensionUtil.extractExtensions(
                res2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));

        // test: both offered, only "fallback" negotiated, fallback codec installed
        assertEquals(2, reqExts.size());
        assertEquals("main", reqExts.get(0).name());
        assertEquals("fallback", reqExts.get(1).name());
        assertEquals(1, resExts.size());
        assertEquals("fallback", resExts.get(0).name());
        assertTrue(resExts.get(0).parameters().isEmpty());
        assertTrue(ch.pipeline().get(DummyDecoder.class) != null);
        assertTrue(ch.pipeline().get(DummyEncoder.class) != null);
    }

    /**
     * Server accepts both extensions with distinct RSV bits (RSV1 and RSV2):
     * both codec pairs are installed in the pipeline.
     */
    @Test
    public void testAllSuccess() {
        // initialize: each handshaker only recognizes its own extension name
        expect(mainHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("main", Collections.<String, String>emptyMap())).once();
        expect(mainHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("main"))).andReturn(mainExtensionMock).anyTimes();
        expect(mainHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("fallback"))).andReturn(null).anyTimes();
        replay(mainHandshakerMock);
        expect(fallbackHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("fallback", Collections.<String, String>emptyMap())).once();
        expect(fallbackHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("main"))).andReturn(null).anyTimes();
        expect(fallbackHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("fallback"))).andReturn(fallbackExtensionMock).anyTimes();
        replay(fallbackHandshakerMock);
        DummyEncoder mainEncoder = new DummyEncoder();
        DummyDecoder mainDecoder = new DummyDecoder();
        expect(mainExtensionMock.rsv()).andReturn(WebSocketExtension.RSV1).anyTimes();
        expect(mainExtensionMock.newExtensionEncoder()).andReturn(mainEncoder).once();
        expect(mainExtensionMock.newExtensionDecoder()).andReturn(mainDecoder).once();
        replay(mainExtensionMock);
        Dummy2Encoder fallbackEncoder = new Dummy2Encoder();
        Dummy2Decoder fallbackDecoder = new Dummy2Decoder();
        expect(fallbackExtensionMock.rsv()).andReturn(WebSocketExtension.RSV2).anyTimes();
        expect(fallbackExtensionMock.newExtensionEncoder()).andReturn(fallbackEncoder).once();
        expect(fallbackExtensionMock.newExtensionDecoder()).andReturn(fallbackDecoder).once();
        replay(fallbackExtensionMock);

        // execute: server response negotiates both extensions
        EmbeddedChannel ch = new EmbeddedChannel(new WebSocketClientExtensionHandler(
                mainHandshakerMock, fallbackHandshakerMock));
        HttpRequest req = newUpgradeRequest(null);
        ch.writeOutbound(req);
        HttpRequest req2 = ch.readOutbound();
        List<WebSocketExtensionData> reqExts = WebSocketExtensionUtil.extractExtensions(
                req2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));
        HttpResponse res = newUpgradeResponse("main, fallback");
        ch.writeInbound(res);
        HttpResponse res2 = ch.readInbound();
        List<WebSocketExtensionData> resExts = WebSocketExtensionUtil.extractExtensions(
                res2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));

        // test: both negotiated, all four codecs present
        assertEquals(2, reqExts.size());
        assertEquals("main", reqExts.get(0).name());
        assertEquals("fallback", reqExts.get(1).name());
        assertEquals(2, resExts.size());
        assertEquals("main", resExts.get(0).name());
        assertEquals("fallback", resExts.get(1).name());
        assertTrue(ch.pipeline().context(mainEncoder) != null);
        assertTrue(ch.pipeline().context(mainDecoder) != null);
        assertTrue(ch.pipeline().context(fallbackEncoder) != null);
        assertTrue(ch.pipeline().context(fallbackDecoder) != null);
    }

    /**
     * Two negotiated extensions claiming the same RSV bit (both RSV1) conflict:
     * handling the inbound response must raise a {@link CodecException}.
     */
    @Test(expected = CodecException.class)
    public void testIfMainAndFallbackUseRSV1WillFail() {
        // initialize: both mocks report RSV1, creating the conflict
        expect(mainHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("main", Collections.<String, String>emptyMap())).once();
        expect(mainHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("main"))).andReturn(mainExtensionMock).anyTimes();
        expect(mainHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("fallback"))).andReturn(null).anyTimes();
        replay(mainHandshakerMock);
        expect(fallbackHandshakerMock.newRequestData()).
                andReturn(new WebSocketExtensionData("fallback", Collections.<String, String>emptyMap())).once();
        expect(fallbackHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("main"))).andReturn(null).anyTimes();
        expect(fallbackHandshakerMock.handshakeExtension(
                webSocketExtensionDataEqual("fallback"))).andReturn(fallbackExtensionMock).anyTimes();
        replay(fallbackHandshakerMock);
        expect(mainExtensionMock.rsv()).andReturn(WebSocketExtension.RSV1).anyTimes();
        replay(mainExtensionMock);
        expect(fallbackExtensionMock.rsv()).andReturn(WebSocketExtension.RSV1).anyTimes();
        replay(fallbackExtensionMock);

        // execute: writeInbound is expected to throw CodecException
        EmbeddedChannel ch = new EmbeddedChannel(new WebSocketClientExtensionHandler(
                mainHandshakerMock, fallbackHandshakerMock));
        HttpRequest req = newUpgradeRequest(null);
        ch.writeOutbound(req);
        HttpRequest req2 = ch.readOutbound();
        List<WebSocketExtensionData> reqExts = WebSocketExtensionUtil.extractExtensions(
                req2.headers().getAndConvert(Names.SEC_WEBSOCKET_EXTENSIONS));
        HttpResponse res = newUpgradeResponse("main, fallback");
        ch.writeInbound(res);

        // test (only reached if no exception was thrown; the annotation then fails the test)
        assertEquals(2, reqExts.size());
        assertEquals("main", reqExts.get(0).name());
        assertEquals("fallback", reqExts.get(1).name());
    }
}
| apache-2.0 |
zauberlabs/gnip4j | core/src/main/java/com/zaubersoftware/gnip4j/api/model/MediaUrls.java | 2834 | /**
* Copyright (c) 2011-2016 Zauber S.A. <http://flowics.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaubersoftware.gnip4j.api.model;
import org.codehaus.jackson.annotate.JsonProperty;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*/
/**
 * Jackson-mapped model for a Twitter media entity ("media" inside "entities"),
 * extending {@link Urls} with media-specific fields (id, media URLs, type,
 * sizes, video info).
 *
 * <p>NOTE(review): this class exposes duplicate accessor pairs for the same
 * fields (e.g. {@code getMediaURL()} / {@code getMediaUrl()}). Both are kept
 * for backward compatibility; prefer one pair consistently in new code.
 */
public class MediaUrls extends Urls {
    private static final long serialVersionUID = 1L;

    // Media entity id, mapped from the string form of the numeric id.
    @JsonProperty(value = "id_str")
    private String id;

    // Present when the media belongs to another status (e.g. a retweet source).
    @JsonProperty(value = "source_status_id_str")
    private String sourceStatusIdStr;

    // Plain-HTTP URL of the media resource.
    @JsonProperty(value = "media_url")
    private String mediaUrl;

    // HTTPS URL of the media resource.
    @JsonProperty("media_url_https")
    private String mediaUrlHttps;

    // Media type (e.g. "photo", "video") as delivered by the API.
    private String type;

    // Available rendering sizes for the media.
    private Sizes sizes;

    // Only populated for video/animated media.
    @JsonProperty(value = "video_info")
    private VideoInfo videoInfo;

    /** @return the media entity id (string form); no setter is exposed. */
    public String getId() {
        return id;
    }

    /** @return the plain-HTTP media URL (same field as {@link #getMediaUrl()}). */
    public String getMediaURL() {
        return mediaUrl;
    }

    public void setMediaURL(final String mediaUrl) {
        this.mediaUrl = mediaUrl;
    }

    /** @return the HTTPS media URL (same field as {@link #getMediaUrlHttps()}). */
    public String getMediaURLHttps() {
        return mediaUrlHttps;
    }

    public void setMediaURLHttps(final String mediaUrlHttps) {
        this.mediaUrlHttps = mediaUrlHttps;
    }

    public String getType() {
        return type;
    }

    public void setType(final String type) {
        this.type = type;
    }

    public Sizes getSizes() {
        return sizes;
    }

    public void setSizes(final Sizes sizes) {
        this.sizes = sizes;
    }

    public String getSourceStatusIdStr() {
        return sourceStatusIdStr;
    }

    public void setSourceStatusIdStr(final String sourceStatusIdStr) {
        this.sourceStatusIdStr = sourceStatusIdStr;
    }

    public VideoInfo getVideoInfo() {
        return videoInfo;
    }

    public void setVideoInfo(final VideoInfo videoInfo) {
        this.videoInfo = videoInfo;
    }

    /** Duplicate of {@link #getMediaURL()}; kept for backward compatibility. */
    public String getMediaUrl() {
        return mediaUrl;
    }

    /** Duplicate of {@link #setMediaURL(String)}; kept for backward compatibility. */
    public void setMediaUrl(final String mediaUrl) {
        this.mediaUrl = mediaUrl;
    }

    /** Duplicate of {@link #getMediaURLHttps()}; kept for backward compatibility. */
    public String getMediaUrlHttps() {
        return mediaUrlHttps;
    }

    /** Duplicate of {@link #setMediaURLHttps(String)}; kept for backward compatibility. */
    public void setMediaUrlHttps(final String mediaUrlHttps) {
        this.mediaUrlHttps = mediaUrlHttps;
    }
}
| apache-2.0 |
prasser/swtbrowser | src/main/de/linearbits/swt/simplebrowser/tags/HTMLContentContainer.java | 2641 | /* ******************************************************************************
* Copyright (c) 2015 Fabian Prasser.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Fabian Prasser - initial API and implementation
* ****************************************************************************
*/
package de.linearbits.swt.simplebrowser.tags;
import java.net.URL;
import java.util.List;
import java.util.Map;
import org.eclipse.swt.custom.StyleRange;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import de.linearbits.swt.simplebrowser.HTMLStyle;
/**
* A HTML element that implements a container for text
* @author Fabian Prasser
*/
/**
 * A HTML element that implements a container for text: subclasses render the
 * element's textual content into the SWT {@link Composite} produced when the
 * element itself is rendered.
 *
 * @author Fabian Prasser
 */
public abstract class HTMLContentContainer extends HTMLElement {

    // The composite created by render(URL, ...); later passed to the subclass's
    // content-rendering hook when the element's text arrives.
    private Composite container;

    /**
     * Constructor.
     * @param tag the HTML tag name this element handles
     */
    protected HTMLContentContainer(String tag) {
        // NOTE(review): the three boolean flags are defined by HTMLElement;
        // their meaning is not visible from this file.
        super(tag, true, false, false);
    }

    /** Content containers apply no trailing style range. */
    @Override
    public StyleRange endStyle(StringBuilder builder) {
        return null;
    }

    /** Content containers have no intrinsic content of their own. */
    @Override
    public String getContent() {
        return null;
    }

    /** Delegates text rendering to the subclass hook, supplying the cached composite. */
    @Override
    public void render(HTMLElement parent, String content, HTMLStyle style, List<StyleRange> styles) {
        render(parent, container, content, style, styles);
    }

    /** Creates the element's composite via the superclass and remembers it for content rendering. */
    @Override
    public Composite render(URL base, HTMLElement previous, Composite parent, Map<String, String> attributes, HTMLStyle style) {
        this.container = super.render(base, previous, parent, attributes, style);
        return container;
    }

    /** Content containers apply no leading style. */
    @Override
    public void startStyle(StringBuilder builder) {
        // Nothing to do
    }

    /**
     * Subclass hook: render the given text content into the container composite.
     */
    protected abstract void render(HTMLElement parent, Composite container, String content, HTMLStyle style, List<StyleRange> styles);

    /**
     * Applies a font to the control and ties the font's lifetime to the control:
     * SWT fonts are OS resources, so the dispose listener frees the font when
     * the control is disposed, preventing a resource leak.
     */
    protected void setFont(Control control, FontData data) {
        final Font font = new Font(control.getDisplay(), data);
        control.setFont(font);
        control.addDisposeListener(new DisposeListener(){
            @Override
            public void widgetDisposed(DisposeEvent arg0) {
                if (font != null && !font.isDisposed()) {
                    font.dispose();
                }
            }
        });
    }
}
| apache-2.0 |
nextreports/nextreports-designer | src/ro/nextreports/designer/action/report/layout/cell/InsertHyperlinkAction.java | 2377 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.nextreports.designer.action.report.layout.cell;
import ro.nextreports.engine.util.ObjectCloner;
import ro.nextreports.engine.band.BandElement;
import ro.nextreports.engine.band.HyperlinkBandElement;
import javax.swing.*;
import ro.nextreports.designer.BandUtil;
import ro.nextreports.designer.Globals;
import ro.nextreports.designer.LayoutHelper;
import ro.nextreports.designer.ReportGrid;
import ro.nextreports.designer.grid.SelectionModel;
import ro.nextreports.designer.util.I18NSupport;
import java.awt.event.ActionEvent;
/**
* User: mihai.panaitescu
* Date: 01-Mar-2010
* Time: 12:57:43
*/
/**
 * Swing action that inserts a hyperlink band element into the currently
 * selected cell of the report grid, snapshotting the previous layout so the
 * insertion can be undone, then opening the cell for in-place editing.
 */
public class InsertHyperlinkAction extends AbstractAction {

    // Placeholder shown for both the hyperlink text and its target until edited.
    private static final String DEFAULT_TEXT = "?";

    public InsertHyperlinkAction() {
        super();
        putValue(Action.NAME, I18NSupport.getString("insert.hyperlink.action.name"));
    }

    @Override
    public void actionPerformed(ActionEvent event) {
        ReportGrid reportGrid = Globals.getReportGrid();
        SelectionModel selection = reportGrid.getSelectionModel();

        // Build the new hyperlink element with placeholder text/target and
        // carry over the formatting of the element currently in the cell.
        BandElement hyperlink = new HyperlinkBandElement(DEFAULT_TEXT, DEFAULT_TEXT);
        BandUtil.copySettings(reportGrid.getBandElement(selection.getSelectedCell()), hyperlink);

        // Remember the pre-insertion layout (deep copy) for undo support.
        reportGrid.putClientProperty("layoutBeforeInsert", ObjectCloner.silenceDeepCopy(LayoutHelper.getReportLayout()));

        // Insert at the selected coordinates and start editing the cell.
        int selectedRow = selection.getSelectedCell().getRow();
        int selectedColumn = selection.getSelectedCell().getColumn();
        BandUtil.insertElement(hyperlink, selectedRow, selectedColumn);
        reportGrid.editCellAt(selectedRow, selectedColumn, event);
    }
}
| apache-2.0 |
leapframework/framework | data/orm/src/test/java/leap/orm/dao/FindTest.java | 3978 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package leap.orm.dao;
import leap.core.exception.RecordNotFoundException;
import leap.orm.OrmTestCase;
import leap.orm.tested.model.petclinic.Owner;
import org.junit.Test;
/**
 * Tests for the Dao find/findOrNull/findList/findListIfExists overloads:
 * existing ids must resolve, missing ids must throw RecordNotFoundException
 * (find variants) or yield null/empty results (OrNull/IfExists variants).
 *
 * <p>NOTE(review): "tetFind", "tetFindOrNull" and "testFindListIfExiss" look
 * like typos of "testFind", "testFindOrNull" and "testFindListIfExists";
 * renaming them would change the reported test names, so they are only
 * flagged here.
 */
public class FindTest extends OrmTestCase {

    @Test
    public void tetFind() {
        deleteAll(Owner.class);
        // Persist one record so a positive lookup is possible.
        Object id1 = new Owner().setFullName("a","b").save().id();
        assertNotNull(dao.find(Owner.class, id1));
        // Every find(...) overload must throw for a non-existent id (-1).
        try {
            dao.find(Owner.class, -1);
            fail("Should throw RecordNotFoundException");
        } catch (RecordNotFoundException e) {
            // expected
        }
        try {
            dao.find("Owner", -1);
            fail("Should throw RecordNotFoundException");
        } catch (RecordNotFoundException e) {
            // expected
        }
        try {
            dao.find("Owner", Owner.class, -1);
            fail("Should throw RecordNotFoundException");
        } catch (RecordNotFoundException e) {
            // expected
        }
        try {
            dao.find(Owner.metamodel(), Owner.class, -1);
            fail("Should throw RecordNotFoundException");
        } catch (RecordNotFoundException e) {
            // expected
        }
    }

    @Test
    public void tetFindOrNull() {
        deleteAll(Owner.class);
        Object id1 = new Owner().setFullName("a","b").save().id();
        assertNotNull(dao.find(Owner.class, id1));
        // Unlike find(...), the OrNull overloads return null instead of throwing.
        assertNull(dao.findOrNull(Owner.class, -1));
        assertNull(dao.findOrNull("Owner", -1));
        assertNull(dao.findOrNull("Owner", Owner.class, -1));
        assertNull(dao.findOrNull(Owner.metamodel(),Owner.class, -1));
    }

    @Test
    public void testFindList() {
        deleteAll(Owner.class);
        Object id1 = new Owner().setFullName("a","b").save().id();
        Object id2 = new Owner().setFullName("c","d").save().id();
        Object[] ids = new Object[]{id1,id2};
        // All findList overloads must return both persisted records.
        assertEquals(2,dao.findList(Owner.class, ids).size());
        assertEquals(2,dao.findList("Owner", ids).size());
        assertEquals(2,dao.findList("Owner", Owner.class, ids).size());
        assertEquals(2,Owner.findList(ids).size());
        assertEquals(1,dao.findList(Owner.class, new Object[]{id1}).size());
        // A missing id in the batch makes findList throw.
        try {
            dao.findList(Owner.class, new Object[]{-1});
            fail("Should throw RecordNotFoundException");
        } catch (RecordNotFoundException e) {
            // expected
        }
    }

    @Test
    public void testFindListIfExiss() {
        deleteAll(Owner.class);
        Object id1 = new Owner().setFullName("a","b").save().id();
        Object id2 = new Owner().setFullName("c","d").save().id();
        Object[] ids = new Object[]{id1,id2};
        assertEquals(2,dao.findListIfExists(Owner.class, ids).size());
        assertEquals(2,dao.findListIfExists("Owner", ids).size());
        assertEquals(2,dao.findListIfExists("Owner", Owner.class, ids).size());
        assertEquals(2,Owner.findListIfExists(ids).size());
        assertEquals(1,dao.findListIfExists(Owner.class, new Object[]{id1}).size());
        // IfExists variants silently skip missing ids instead of throwing.
        assertEquals(0,dao.findListIfExists(Owner.class, new Object[]{-1}).size());
        assertEquals(0,Owner.findListIfExists(new Object[]{-1}).size());
        assertEquals(1,Owner.findListIfExists(new Object[]{id1,-1}).size());
    }
}
| apache-2.0 |
amoAHCP/vertx3-talk | vertx-demo/src/main/java/ch/trivadis/verticles/demo1/Ping.java | 1263 | package ch.trivadis.verticles.demo1;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.DeploymentOptions;
import io.vertx.core.Future;
import io.vertx.core.Vertx;
import io.vertx.core.eventbus.EventBus;
/**
* Created by Andy Moncsek on 15.02.16.
*/
public class Ping extends AbstractVerticle {
@Override
public void start(Future<Void> startFuture) throws Exception {
final EventBus eventBus = vertx.eventBus();
System.out.println("start Ping");
vertx.deployVerticle(Pong.class.getName(), completionHandler -> {
if (completionHandler.succeeded()) {
sendMessage(eventBus);
startFuture.complete();
} else {
vertx.close();
}
});
}
private void sendMessage(EventBus eventBus) {
eventBus.send("pong.addr", "ping", handler -> {
if (handler.succeeded()) {
System.out.println("Ping: "+handler.result().body().toString());
vertx.close();
}
});
}
public static void main(String[] args) {
DeploymentOptions options = new DeploymentOptions().setInstances(1);
Vertx.vertx().deployVerticle(Ping.class.getName(), options);
}
} | apache-2.0 |
msebire/intellij-community | platform/platform-api/src/com/intellij/openapi/editor/actionSystem/EditorAction.java | 6560 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.actionSystem;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.util.List;
import static com.intellij.openapi.actionSystem.CommonDataKeys.EDITOR;
import static com.intellij.openapi.actionSystem.CommonDataKeys.PROJECT;
public abstract class EditorAction extends AnAction implements DumbAware, UpdateInBackground {
private static final Logger LOG = Logger.getInstance(EditorAction.class);
private EditorActionHandler myHandler;
private boolean myHandlersLoaded;
public final EditorActionHandler getHandler() {
ensureHandlersLoaded();
return myHandler;
}
protected EditorAction(EditorActionHandler defaultHandler) {
myHandler = defaultHandler;
setEnabledInModalContext(true);
}
public final EditorActionHandler setupHandler(@NotNull EditorActionHandler newHandler) {
ensureHandlersLoaded();
EditorActionHandler tmp = myHandler;
myHandler = newHandler;
myHandler.setWorksInInjected(isInInjectedContext());
return tmp;
}
private void ensureHandlersLoaded() {
if (!myHandlersLoaded) {
myHandlersLoaded = true;
final String id = ActionManager.getInstance().getId(this);
List<EditorActionHandlerBean> extensions = EditorActionHandlerBean.EP_NAME.getExtensionList();
for (int i = extensions.size() - 1; i >= 0; i--) {
final EditorActionHandlerBean handlerBean = extensions.get(i);
if (handlerBean.action.equals(id)) {
myHandler = handlerBean.getHandler(myHandler);
myHandler.setWorksInInjected(isInInjectedContext());
}
}
}
}
@Override
public void setInjectedContext(boolean worksInInjected) {
super.setInjectedContext(worksInInjected);
// we assume that this method is called in constructor at the point
// where the chain of handlers is not initialized yet
// and it's enough to pass the flag to the default handler only
myHandler.setWorksInInjected(isInInjectedContext());
}
@Override
public final void actionPerformed(@NotNull AnActionEvent e) {
DataContext dataContext = e.getDataContext();
Editor editor = getEditor(dataContext);
if (this instanceof LatencyAwareEditorAction && editor != null) {
String actionId = ActionManager.getInstance().getId(this);
InputEvent inputEvent = e.getInputEvent();
if (actionId != null && inputEvent != null) {
LatencyRecorder.getInstance().recordLatencyAwareAction(editor, actionId, inputEvent.getWhen());
}
}
actionPerformed(editor, dataContext);
}
@Nullable
protected Editor getEditor(@NotNull DataContext dataContext) {
return EDITOR.getData(dataContext);
}
public final void actionPerformed(final Editor editor, @NotNull final DataContext dataContext) {
if (editor == null) return;
if (editor.isDisposed()) {
VirtualFile file = FileDocumentManager.getInstance().getFile(editor.getDocument());
LOG.error("Action " + this + " invoked on a disposed editor" + (file == null ? "" : " for file " + file));
return;
}
final EditorActionHandler handler = getHandler();
Runnable command = () -> handler.execute(editor, null, getProjectAwareDataContext(editor, dataContext));
if (!handler.executeInCommand(editor, dataContext)) {
command.run();
return;
}
String commandName = getTemplatePresentation().getText();
if (commandName == null) commandName = "";
CommandProcessor.getInstance().executeCommand(editor.getProject(),
command,
commandName,
handler.getCommandGroupId(editor),
UndoConfirmationPolicy.DEFAULT,
editor.getDocument());
}
public void update(Editor editor, Presentation presentation, DataContext dataContext) {
presentation.setEnabled(getHandler().isEnabled(editor, null, dataContext));
}
public void updateForKeyboardAccess(Editor editor, Presentation presentation, DataContext dataContext) {
update(editor, presentation, dataContext);
}
  /**
   * In injected-context mode, refreshes injected-fragment data for the host
   * editor's carets before the regular pre-action update runs.
   */
  @Override
  public void beforeActionPerformedUpdate(@NotNull AnActionEvent e) {
    if (isInInjectedContext()) {
      Editor editor = CommonDataKeys.HOST_EDITOR.getData(e.getDataContext());
      if (editor != null) {
        // Stop at the first caret for which ensureInjectionUpToDate reports true.
        for (Caret caret : editor.getCaretModel().getAllCarets()) {
          if (EditorActionHandler.ensureInjectionUpToDate(caret)) {
            break;
          }
        }
      }
    }
    super.beforeActionPerformedUpdate(e);
  }
@Override
public void update(@NotNull AnActionEvent e) {
Presentation presentation = e.getPresentation();
DataContext dataContext = e.getDataContext();
Editor editor = getEditor(dataContext);
if (editor == null) {
presentation.setEnabled(false);
}
else {
if (editor.isDisposed()) {
LOG.error("Disposed editor in " + dataContext + " for " + this);
presentation.setEnabled(false);
}
else {
if (e.getInputEvent() instanceof KeyEvent) {
updateForKeyboardAccess(editor, presentation, dataContext);
}
else {
update(editor, presentation, dataContext);
}
}
}
}
  /**
   * Wraps {@code original} so that PROJECT lookups resolve to the editor's
   * project. When the original context already yields the editor's project,
   * a {@code DialogAwareDataContext} copy of it is returned instead.
   */
  private static DataContext getProjectAwareDataContext(final Editor editor, @NotNull final DataContext original) {
    if (PROJECT.getData(original) == editor.getProject()) {
      return new DialogAwareDataContext(original);
    }
    // Otherwise answer PROJECT queries with the editor's project (when set),
    // falling through to the original context for everything else.
    return dataId -> {
      if (PROJECT.is(dataId)) {
        final Project project = editor.getProject();
        if (project != null) {
          return project;
        }
      }
      return original.getData(dataId);
    };
  }
}
| apache-2.0 |
jreijn/hippo-addon-restful-webservices | src/test/java/org/onehippo/forge/webservices/jaxrs/PropertiesIntegrationTest.java | 7652 | /*
* Copyright 2014 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onehippo.forge.webservices.jaxrs;
import java.util.ArrayList;

import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.junit.Test;
import org.onehippo.forge.webservices.WebservicesIntegrationTest;
import org.onehippo.forge.webservices.jaxrs.jcr.model.JcrProperty;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for the JCR properties REST resource: reading, creating,
 * updating and deleting properties through the webservice endpoint.
 */
public class PropertiesIntegrationTest extends WebservicesIntegrationTest {

    /**
     * Builds a single-valued (non-multiple) String {@link JcrProperty} with the
     * given name and value. Factored out of the tests, which previously
     * duplicated this construction verbatim.
     */
    private static JcrProperty createStringProperty(final String name, final String value) {
        final ArrayList<String> values = new ArrayList<String>(1);
        values.add(value);
        final JcrProperty jcrProperty = new JcrProperty();
        jcrProperty.setName(name);
        jcrProperty.setType("String");
        jcrProperty.setMultiple(false);
        jcrProperty.setValues(values);
        return jcrProperty;
    }

    @Test
    public void testGetPropertyFromNode() {
        final JcrProperty response = client.path("properties/jcr:primaryType")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .get(JcrProperty.class);
        // assertEquals/assertNotNull report expected vs. actual on failure,
        // unlike the bare assertTrue comparisons they replace.
        assertNotNull(response);
        assertEquals("jcr:primaryType", response.getName());
        assertEquals("rep:root", response.getValues().get(0));
    }

    @Test
    public void testNotFoundOnGetProperty() {
        final Response response = client.path("properties/jcr:someProperty")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .get(Response.class);
        assertEquals(Response.Status.NOT_FOUND.getStatusCode(), response.getStatus());
    }

    @Test
    public void testAddProperty() throws RepositoryException {
        session.getRootNode().addNode("test", "nt:unstructured");
        session.save();
        final JcrProperty jcrProperty = createStringProperty("myproperty", "test");
        final Response response = client
                .path("properties/test")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .post(jcrProperty);
        assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus());
        assertEquals(HTTP_ENDPOINT_ADDRESS + "/properties/test/myproperty",
                response.getMetadata().getFirst("Location"));
        session.getRootNode().getNode("test").remove();
        session.save();
    }

    @Test
    public void testAddPropertyNotFoundWithIncorrectPath() throws RepositoryException {
        final JcrProperty jcrProperty = createStringProperty("myproperty", "test");
        final Response response = client
                .path("properties/test12355")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .post(jcrProperty);
        assertEquals(Response.Status.NOT_FOUND.getStatusCode(), response.getStatus());
    }

    @Test
    public void testAddPropertyFailsWithIncorrectInput() throws RepositoryException {
        session.getRootNode().addNode("test", "nt:unstructured");
        session.save();
        // An empty property name must be rejected.
        final JcrProperty jcrProperty = createStringProperty("", "test");
        final Response response = client
                .path("properties/test")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .post(jcrProperty);
        assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), response.getStatus());
        client.reset();
        // An empty property type must be rejected as well.
        jcrProperty.setName("name");
        jcrProperty.setType("");
        final Response typedResponse = client
                .path("properties/test")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .post(jcrProperty);
        assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), typedResponse.getStatus());
        session.getRootNode().getNode("test").remove();
        session.save();
    }

    @Test
    public void testUpdateProperty() throws RepositoryException {
        session.getRootNode().addNode("test", "nt:unstructured");
        session.save();
        // Built inline (not via the helper) because the test later mutates the
        // backing values list the property was created with.
        final ArrayList<String> values = new ArrayList<String>(1);
        values.add("test");
        final JcrProperty jcrProperty = new JcrProperty();
        jcrProperty.setName("test");
        jcrProperty.setType("String");
        jcrProperty.setMultiple(false);
        jcrProperty.setValues(values);
        final Response response = client
                .path("properties/test")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .post(jcrProperty);
        assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus());
        client.reset();
        values.remove("test");
        values.add("test2");
        final Response updateResponse = client
                .path("properties/test/test")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .put(jcrProperty);
        assertEquals(Response.Status.NO_CONTENT.getStatusCode(), updateResponse.getStatus());
        client.reset();
        final JcrProperty newValuedProperty = client.path("properties/test/test")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .get(JcrProperty.class);
        assertEquals("test2", newValuedProperty.getValues().get(0));
        session.getRootNode().getNode("test").remove();
        session.save();
    }

    @Test
    public void testDeleteProperty() throws RepositoryException {
        final Node test = session.getRootNode().addNode("test", "nt:unstructured");
        test.setProperty("propname", "propvalue");
        session.save();
        // Deleting without a property path should report not found.
        final Response emptyPathResponse = client
                .path("properties/")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .delete();
        assertEquals(Response.Status.NOT_FOUND.getStatusCode(), emptyPathResponse.getStatus());
        client.reset();
        final Response response = client
                .path("properties/test/propname")
                .accept(MediaType.APPLICATION_JSON)
                .type(MediaType.APPLICATION_JSON)
                .delete();
        assertEquals(Response.Status.NO_CONTENT.getStatusCode(), response.getStatus());
        assertFalse(session.getNode("/test").hasProperty("propname"));
        session.getNode("/test").remove();
        session.save();
    }
}
| apache-2.0 |
mhus/mhus-inka | de.mhus.aqua/de.mhus.aqua/src/de/mhus/aqua/cao/AquaDriver.java | 407 | package de.mhus.aqua.cao;
import de.mhus.aqua.api.AquaSession;
import de.mhus.lib.cao.CaoConnection;
import de.mhus.lib.cao.CaoDriver;
import de.mhus.lib.cao.CaoException;
import de.mhus.lib.cao.CaoForm;
/**
 * Base CAO driver for aqua sessions; concrete subclasses supply the connection
 * handling, while this class fixes the configuration form to {@link AquaConfig}.
 */
public abstract class AquaDriver extends CaoDriver<AquaSession> {

	// Well-known application key; semantics not visible here — presumably
	// identifies the "aaa" application, confirm against callers.
	public static final String APP_AAA = "app_aaa";

	@Override
	public CaoForm createConfiguration() {
		return new AquaConfig();
	}

}
| apache-2.0 |
quarkusio/quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsCdiEntityManagerTest.java | 2670 | package io.quarkus.hibernate.orm.multiplepersistenceunits;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.transaction.Transactional;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import io.quarkus.hibernate.orm.PersistenceUnit;
import io.quarkus.hibernate.orm.multiplepersistenceunits.model.config.DefaultEntity;
import io.quarkus.hibernate.orm.multiplepersistenceunits.model.config.inventory.Plane;
import io.quarkus.hibernate.orm.multiplepersistenceunits.model.config.user.User;
import io.quarkus.test.QuarkusUnitTest;
/**
 * Verifies CDI injection of {@link EntityManager} instances when several
 * persistence units are configured: the unqualified default unit plus the
 * named "users" and "inventory" units selected with {@link PersistenceUnit}.
 */
public class MultiplePersistenceUnitsCdiEntityManagerTest {

    // Test deployment: the three entity classes plus a properties file that
    // declares the default, "users" and "inventory" persistence units.
    @RegisterExtension
    static QuarkusUnitTest runner = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClass(DefaultEntity.class)
                    .addClass(User.class)
                    .addClass(Plane.class)
                    .addAsResource("application-multiple-persistence-units.properties", "application.properties"));

    // Unqualified injection point: bound to the default persistence unit.
    @Inject
    EntityManager defaultEntityManager;

    // Bound to the "users" persistence unit.
    @Inject
    @PersistenceUnit("users")
    EntityManager usersEntityManager;

    // Bound to the "inventory" persistence unit.
    @Inject
    @PersistenceUnit("inventory")
    EntityManager inventoryEntityManager;

    /** Persists and re-reads a DefaultEntity through the default unit. */
    @Test
    @Transactional
    public void testDefault() {
        DefaultEntity defaultEntity = new DefaultEntity("default");
        defaultEntityManager.persist(defaultEntity);
        DefaultEntity savedDefaultEntity = defaultEntityManager.find(DefaultEntity.class, defaultEntity.getId());
        assertEquals(defaultEntity.getName(), savedDefaultEntity.getName());
    }

    /** Persists and re-reads a User through the "users" unit. */
    @Test
    @Transactional
    public void testUser() {
        User user = new User("gsmet");
        usersEntityManager.persist(user);
        User savedUser = usersEntityManager.find(User.class, user.getId());
        assertEquals(user.getName(), savedUser.getName());
    }

    /** Persists and re-reads a Plane through the "inventory" unit. */
    @Test
    @Transactional
    public void testPlane() {
        Plane plane = new Plane("Airbus A380");
        inventoryEntityManager.persist(plane);
        Plane savedPlane = inventoryEntityManager.find(Plane.class, plane.getId());
        assertEquals(plane.getName(), savedPlane.getName());
    }

    /**
     * Persisting a User through the "inventory" unit must fail with
     * IllegalArgumentException("Unknown entity"): the entity does not belong
     * to that persistence unit.
     */
    @Test
    @Transactional
    public void testUserInInventoryEntityManager() {
        User user = new User("gsmet");
        assertThatThrownBy(() -> inventoryEntityManager.persist(user)).isInstanceOf(IllegalArgumentException.class)
                .hasMessageContaining("Unknown entity");
    }
}
| apache-2.0 |
mifos/1.5.x | application/src/main/java/org/mifos/framework/hibernate/helper/QueryResultSearchDTOImpl.java | 5670 | /*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.framework.hibernate.helper;
import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;
import org.mifos.customers.util.helpers.Param;
import org.mifos.framework.components.logger.LoggerConstants;
import org.mifos.framework.components.logger.MifosLogManager;
import org.mifos.framework.components.logger.MifosLogger;
import org.mifos.framework.exceptions.HibernateSearchException;
/**
* This is the class that is returned on a search operation. Search would
* typically result in a set of search result objects , these search result
* objects would be obtained through hibernate scroll for pagination in the
* front end , the associate hibernate session would be held in this object , a
* call to close from the front end on this interface would result in the
* hibernate session object getting closed.
*/
public class QueryResultSearchDTOImpl extends QueryResultDTOImpl {

    // NOTE(review): never read or written by this class; retained because it has
    // default (package) visibility and removing it could break package neighbours.
    java.util.List list = new java.util.ArrayList();

    private final MifosLogger logger = MifosLogManager.getLogger(LoggerConstants.COLLECTIONSHEETLOGGER);

    /**
     * Records the inputs used by {@link #get(int, int)} and {@link #getSize()}.
     *
     * @param queryInputs the query definition; must not be {@code null}
     * @throws HibernateSearchException when {@code queryInputs} is {@code null}
     */
    @Override
    public void setQueryInputs(QueryInputs queryInputs) throws HibernateSearchException {
        if (queryInputs == null) {
            throw new HibernateSearchException(HibernateConstants.SEARCH_INPUTNULL);
        }
        // NOTE(review): inputs are retained only when a DTO must be built; when
        // getBuildDTO() is false, this.queryInputs stays null and later calls fail
        // their null checks. Preserved as-is — confirm whether this is intentional.
        if (queryInputs.getBuildDTO()) {
            this.queryInputs = queryInputs;
            dtoBuilder = new DTOBuilder();
            this.buildDTO = queryInputs.getBuildDTO();
        }
    }

    /**
     * Returns the requested page of search results.
     *
     * @param position zero-based index of the first result to fetch
     * @param noOfObjects maximum number of results to return
     * @return the page of results (DTOs when DTO building was requested)
     * @throws HibernateSearchException when the query cannot be executed
     */
    @Override
    public java.util.List get(int position, int noOfObjects) throws HibernateSearchException {
        Session session = null;
        try {
            session = QuerySession.openSession();
            Query query = prepareQuery(session, queryInputs.getQueryStrings()[1]);
            query.setFirstResult(position);
            query.setMaxResults(noOfObjects);
            java.util.List rows = query.list();
            if (rows == null) {
                // The original code dereferenced rows before its own null check,
                // which made that check unreachable; treat null as "no results".
                rows = new java.util.ArrayList();
            }
            logger.debug("\n\nInside get of QueryResultSearchDTOImpl.java . size of main query=" + rows.size());
            this.queryInputs.setTypes(query.getReturnTypes());
            dtoBuilder.setInputs(queryInputs);
            java.util.List returnList = new java.util.ArrayList();
            for (int i = 0; i < rows.size(); i++) {
                if (buildDTO) {
                    returnList.add(buildDTO((Object[]) rows.get(i)));
                } else if (i < noOfObjects) {
                    returnList.add(rows.get(i));
                }
            }
            return returnList;
        } catch (Exception e) {
            throw new HibernateSearchException(HibernateConstants.SEARCH_FAILED, e);
        } finally {
            // Always release the session, including on failure: the original
            // implementation leaked it whenever an exception was thrown.
            closeQuietly(session);
        }
    }

    /**
     * Returns the total number of records matching the query (via the count
     * variant of the named query pair).
     *
     * @return the result count, or the previously-known size when the count
     *         query yields no positive value
     * @throws HibernateSearchException when the query cannot be executed
     */
    @Override
    public int getSize() throws HibernateSearchException {
        Session session = null;
        try {
            session = QuerySession.openSession();
            if (this.queryInputs == null) {
                throw new HibernateSearchException(HibernateConstants.SEARCH_INPUTNULL);
            }
            Query query = prepareQuery(session, queryInputs.getQueryStrings()[0]);
            // Check for null before unboxing: the original code called
            // intValue() first, so its own null check could never trigger.
            Number resultSetCount = (Number) query.uniqueResult();
            logger.debug("\n\nInside get of QueryResultSearchDTOImpl.java . size of count query=" + resultSetCount);
            this.queryInputs.setTypes(query.getReturnTypes());
            dtoBuilder.setInputs(queryInputs);
            if (resultSetCount != null && resultSetCount.intValue() > 0) {
                size = resultSetCount.intValue();
            }
        } catch (Exception e) {
            throw new HibernateSearchException(HibernateConstants.SEARCH_FAILED, e);
        } finally {
            // Always release the session, including on failure (leak fix).
            closeQuietly(session);
        }
        return size;
    }

    /**
     * Resolves the named query and binds all non-null parameters from the
     * query inputs.
     *
     * @param session the Hibernate session to resolve the query in
     * @param namedQuery the name of the query to prepare
     * @return the prepared query with parameters bound
     * @throws HibernateSearchException when no query inputs have been set
     */
    public Query prepareQuery(Session session, String namedQuery) throws HibernateSearchException {
        if (this.queryInputs == null) {
            throw new HibernateSearchException(HibernateConstants.SEARCH_INPUTNULL);
        }
        Query query = session.getNamedQuery(namedQuery);
        List<Param> paramList = queryInputs.getParamList();
        if (paramList != null) {
            for (Param param : paramList) {
                if (param != null) {
                    query.setParameter(param.getName(), param.getValue());
                }
            }
        }
        return query;
    }

    /** Closes the session if it was actually opened; used from finally blocks. */
    private static void closeQuietly(final Session session) {
        if (session != null) {
            QuerySession.closeSession(session);
        }
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-pinpointemail/src/main/java/com/amazonaws/services/pinpointemail/model/GetDedicatedIpsRequest.java | 9358 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpointemail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* A request to obtain more information about dedicated IP pools.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-email-2018-07-26/GetDedicatedIps" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetDedicatedIpsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the IP pool that the dedicated IP address is associated with. */
    private String poolName;

    /**
     * A token from a previous <code>GetDedicatedIps</code> call marking the position of the
     * dedicated IP pool in the list of IP pools.
     */
    private String nextToken;

    /**
     * The maximum number of results per call to <code>GetDedicatedIpsRequest</code>; when more
     * results exist, the response carries a <code>NextToken</code> element for fetching them.
     */
    private Integer pageSize;

    /**
     * Sets the name of the IP pool that the dedicated IP address is associated with.
     *
     * @param poolName the IP pool name
     */
    public void setPoolName(String poolName) {
        this.poolName = poolName;
    }

    /**
     * Returns the name of the IP pool that the dedicated IP address is associated with.
     *
     * @return the IP pool name
     */
    public String getPoolName() {
        return poolName;
    }

    /**
     * Fluent variant of {@link #setPoolName(String)}.
     *
     * @param poolName the IP pool name
     * @return this request, for call chaining
     */
    public GetDedicatedIpsRequest withPoolName(String poolName) {
        this.poolName = poolName;
        return this;
    }

    /**
     * Sets the pagination token from a previous <code>GetDedicatedIps</code> call.
     *
     * @param nextToken the token marking the position of the dedicated IP pool in the list of IP pools
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token from a previous <code>GetDedicatedIps</code> call.
     *
     * @return the token marking the position of the dedicated IP pool in the list of IP pools
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken the token marking the position of the dedicated IP pool in the list of IP pools
     * @return this request, for call chaining
     */
    public GetDedicatedIpsRequest withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Sets the maximum number of results per call; when more results exist, the response
     * includes a <code>NextToken</code> element for obtaining the remainder.
     *
     * @param pageSize the number of results to show in a single call
     */
    public void setPageSize(Integer pageSize) {
        this.pageSize = pageSize;
    }

    /**
     * Returns the maximum number of results per call.
     *
     * @return the number of results to show in a single call
     */
    public Integer getPageSize() {
        return pageSize;
    }

    /**
     * Fluent variant of {@link #setPageSize(Integer)}.
     *
     * @param pageSize the number of results to show in a single call
     * @return this request, for call chaining
     */
    public GetDedicatedIpsRequest withPageSize(Integer pageSize) {
        this.pageSize = pageSize;
        return this;
    }

    /**
     * Returns a string rendering of this request for testing and debugging; sensitive
     * fields are redacted with a placeholder value.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPoolName() != null) {
            sb.append("PoolName: ").append(getPoolName()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getPageSize() != null) {
            sb.append("PageSize: ").append(getPageSize());
        }
        return sb.append("}").toString();
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetDedicatedIpsRequest)) {
            return false;
        }
        GetDedicatedIpsRequest other = (GetDedicatedIpsRequest) obj;
        return fieldEquals(getPoolName(), other.getPoolName())
                && fieldEquals(getNextToken(), other.getNextToken())
                && fieldEquals(getPageSize(), other.getPageSize());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getPoolName() == null ? 0 : getPoolName().hashCode());
        result = prime * result + (getNextToken() == null ? 0 : getNextToken().hashCode());
        result = prime * result + (getPageSize() == null ? 0 : getPageSize().hashCode());
        return result;
    }

    @Override
    public GetDedicatedIpsRequest clone() {
        return (GetDedicatedIpsRequest) super.clone();
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/ModifyFleetResult.java | 4105 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceResult;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ModifyFleetResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Is <code>true</code> if the request succeeds, and an error otherwise. */
    private Boolean returnValue;

    /**
     * Sets the success flag.
     *
     * @param returnValue <code>true</code> if the request succeeds, and an error otherwise
     */
    public void setReturn(Boolean returnValue) {
        this.returnValue = returnValue;
    }

    /**
     * Returns the success flag.
     *
     * @return <code>true</code> if the request succeeds, and an error otherwise
     */
    public Boolean getReturn() {
        return returnValue;
    }

    /**
     * Fluent variant of {@link #setReturn(Boolean)}.
     *
     * @param returnValue <code>true</code> if the request succeeds, and an error otherwise
     * @return this result, for call chaining
     */
    public ModifyFleetResult withReturn(Boolean returnValue) {
        this.returnValue = returnValue;
        return this;
    }

    /**
     * Returns the success flag.
     *
     * @return <code>true</code> if the request succeeds, and an error otherwise
     */
    public Boolean isReturn() {
        return returnValue;
    }

    /**
     * Returns a string rendering of this result for testing and debugging; sensitive
     * fields are redacted with a placeholder value.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getReturn() != null) {
            sb.append("Return: ").append(getReturn());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ModifyFleetResult)) {
            return false;
        }
        ModifyFleetResult other = (ModifyFleetResult) obj;
        return getReturn() == null ? other.getReturn() == null : getReturn().equals(other.getReturn());
    }

    @Override
    public int hashCode() {
        // Same value as the conventional 31-based accumulation over one field.
        return 31 + (getReturn() == null ? 0 : getReturn().hashCode());
    }

    @Override
    public ModifyFleetResult clone() {
        try {
            return (ModifyFleetResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| apache-2.0 |
gawkermedia/googleads-java-lib | examples/dfp_axis/src/main/java/dfp/axis/v201511/lineitemservice/CreateVideoLineItems.java | 8455 | // Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dfp.axis.v201511.lineitemservice;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.dfp.axis.factory.DfpServices;
import com.google.api.ads.dfp.axis.utils.v201511.DateTimes;
import com.google.api.ads.dfp.axis.v201511.AdUnitTargeting;
import com.google.api.ads.dfp.axis.v201511.CompanionDeliveryOption;
import com.google.api.ads.dfp.axis.v201511.ContentMetadataKeyHierarchyTargeting;
import com.google.api.ads.dfp.axis.v201511.ContentTargeting;
import com.google.api.ads.dfp.axis.v201511.CostType;
import com.google.api.ads.dfp.axis.v201511.CreativePlaceholder;
import com.google.api.ads.dfp.axis.v201511.CreativeRotationType;
import com.google.api.ads.dfp.axis.v201511.EnvironmentType;
import com.google.api.ads.dfp.axis.v201511.Goal;
import com.google.api.ads.dfp.axis.v201511.GoalType;
import com.google.api.ads.dfp.axis.v201511.InventoryTargeting;
import com.google.api.ads.dfp.axis.v201511.LineItem;
import com.google.api.ads.dfp.axis.v201511.LineItemServiceInterface;
import com.google.api.ads.dfp.axis.v201511.LineItemType;
import com.google.api.ads.dfp.axis.v201511.Money;
import com.google.api.ads.dfp.axis.v201511.Size;
import com.google.api.ads.dfp.axis.v201511.StartDateTimeType;
import com.google.api.ads.dfp.axis.v201511.Targeting;
import com.google.api.ads.dfp.axis.v201511.VideoPosition;
import com.google.api.ads.dfp.axis.v201511.VideoPositionTarget;
import com.google.api.ads.dfp.axis.v201511.VideoPositionTargeting;
import com.google.api.ads.dfp.axis.v201511.VideoPositionType;
import com.google.api.ads.dfp.lib.client.DfpSession;
import com.google.api.client.auth.oauth2.Credential;
import org.joda.time.Duration;
import org.joda.time.Instant;
import java.util.Random;
/**
* This example creates a new line item for a video environment. To determine
* which line items exist, run GetAllLineItems.java. To determine which orders
* exist, run GetAllOrders.java. To determine which content metadata key
* hierarchies exist, run GetAllContentMetadataKeyHierarchies.java. To determine
* which ad units exist, run GetAllAdUnits.java.
*
* Credentials and properties in {@code fromFile()} are pulled from the
* "ads.properties" file. See README for more info.
*/
public class CreateVideoLineItems {

  // Set the ID of the order that the line item will belong to.
  private static final String ORDER_ID = "INSERT_ORDER_ID_HERE";

  // Set the ID of the ad unit that the line item will target.
  private static final String TARGETED_VIDEO_AD_UNIT_ID = "INSERT_VIDEO_AD_UNIT_ID_HERE";

  // Set the custom targeting value ID representing the metadata
  // on the content to target. This would typically be from a key representing
  // a "genre" and the value representing something like "comedy". The value
  // must be from a key in a content metadata key hierarchy.
  private static final String CONTENT_CUSTOM_TARGETING_VALUE_ID =
      "INSERT_CUSTOM_TARGETING_VALUE_ID_HERE";

  /**
   * Creates one pre-roll video line item under the given order, targeting the
   * given video ad unit and content metadata value, then prints the ID and
   * name of every line item the server returns.
   *
   * @param dfpServices factory for DFP service stubs
   * @param session authenticated DFP session
   * @param orderId ID of the order the line item will belong to
   * @param targetedVideoAdUnitId ID of the video ad unit to target
   * @param contentCustomTargetingValueId custom targeting value ID describing
   *        the content metadata to target
   * @throws Exception if the remote API call fails
   */
  public static void runExample(DfpServices dfpServices, DfpSession session, long orderId,
      String targetedVideoAdUnitId, long contentCustomTargetingValueId) throws Exception {
    // Get the LineItemService.
    LineItemServiceInterface lineItemService =
        dfpServices.get(session, LineItemServiceInterface.class);

    // Create content targeting.
    ContentMetadataKeyHierarchyTargeting contentMetadataTargeting =
        new ContentMetadataKeyHierarchyTargeting();
    contentMetadataTargeting.setCustomTargetingValueIds(
        new long[] {contentCustomTargetingValueId});

    ContentTargeting contentTargeting = new ContentTargeting();
    contentTargeting.setTargetedContentMetadata(
        new ContentMetadataKeyHierarchyTargeting[] {contentMetadataTargeting});

    // Create inventory targeting (include child ad units via the 'true' flag).
    InventoryTargeting inventoryTargeting = new InventoryTargeting();
    inventoryTargeting.setTargetedAdUnits(
        new AdUnitTargeting[] {new AdUnitTargeting(targetedVideoAdUnitId, true)});

    // Create video position targeting (pre-roll slot only).
    VideoPosition videoPosition = new VideoPosition();
    videoPosition.setPositionType(VideoPositionType.PREROLL);
    VideoPositionTarget videoPositionTarget = new VideoPositionTarget();
    videoPositionTarget.setVideoPosition(videoPosition);
    VideoPositionTargeting videoPositionTargeting = new VideoPositionTargeting();
    videoPositionTargeting.setTargetedPositions(
        new VideoPositionTarget[] {videoPositionTarget});

    // Combine all three targeting dimensions into one Targeting object.
    Targeting targeting = new Targeting();
    targeting.setContentTargeting(contentTargeting);
    targeting.setInventoryTargeting(inventoryTargeting);
    targeting.setVideoPositionTargeting(videoPositionTargeting);

    // Create local line item object. A random suffix keeps names unique across runs.
    LineItem lineItem = new LineItem();
    lineItem.setName("Video line item #" + new Random().nextInt(Integer.MAX_VALUE));
    lineItem.setOrderId(orderId);
    lineItem.setTargeting(targeting);

    // Allow the line item to be booked even if there is not enough inventory.
    lineItem.setAllowOverbook(true);

    // Set the line item type to SPONSORSHIP.
    lineItem.setLineItemType(LineItemType.SPONSORSHIP);

    // Set the environment type to video.
    lineItem.setEnvironmentType(EnvironmentType.VIDEO_PLAYER);

    // Set the creative rotation type to optimized.
    lineItem.setCreativeRotationType(CreativeRotationType.OPTIMIZED);

    // Create the master creative placeholder (640x360 video slot).
    CreativePlaceholder creativeMasterPlaceholder = new CreativePlaceholder();
    creativeMasterPlaceholder.setSize(new Size(640, 360, false));

    // Create companion creative placeholders (300x250 display companion).
    CreativePlaceholder companionCreativePlaceholder = new CreativePlaceholder();
    companionCreativePlaceholder.setSize(new Size(300, 250, false));

    // Set companion creative placeholders.
    creativeMasterPlaceholder.setCompanions(
        new CreativePlaceholder[] {companionCreativePlaceholder});

    // Set the size of creatives that can be associated with this line item.
    lineItem.setCreativePlaceholders(new CreativePlaceholder[] {creativeMasterPlaceholder});

    // Set delivery of video companions to optional.
    lineItem.setCompanionDeliveryOption(CompanionDeliveryOption.OPTIONAL);

    // Set the length of the line item to run: starts immediately, ends in 30 days.
    lineItem.setStartDateTimeType(StartDateTimeType.IMMEDIATELY);
    lineItem.setEndDateTime(
        DateTimes.toDateTime(Instant.now().plus(Duration.standardDays(30L)), "America/New_York"));

    // Set the cost per day to $1 (1,000,000 appears to be micro-units;
    // confirm against the DFP Money documentation).
    lineItem.setCostType(CostType.CPD);
    lineItem.setCostPerUnit(new Money("USD", 1000000L));

    // Set the percentage to be 100%.
    // NOTE(review): units=100 with a DAILY goal — presumably interpreted as 100%
    // for SPONSORSHIP line items; verify against the Goal documentation.
    Goal goal = new Goal();
    goal.setGoalType(GoalType.DAILY);
    goal.setUnits(100L);
    lineItem.setPrimaryGoal(goal);

    // Create the line item on the server.
    LineItem[] lineItems = lineItemService.createLineItems(new LineItem[] {lineItem});

    for (LineItem createdLineItem : lineItems) {
      System.out.printf("A video line item with ID %d and name '%s' was created.%n",
          createdLineItem.getId(), createdLineItem.getName());
    }
  }

  /**
   * Entry point: builds OAuth2 credentials and a DFP session from the
   * "ads.properties" file, then runs the example with the constants above.
   *
   * @param args unused
   * @throws Exception if credential generation or the API call fails
   */
  public static void main(String[] args) throws Exception {
    // Generate a refreshable OAuth2 credential.
    Credential oAuth2Credential = new OfflineCredentials.Builder()
        .forApi(Api.DFP)
        .fromFile()
        .build()
        .generateCredential();

    // Construct a DfpSession.
    DfpSession session = new DfpSession.Builder()
        .fromFile()
        .withOAuth2Credential(oAuth2Credential)
        .build();

    DfpServices dfpServices = new DfpServices();

    runExample(dfpServices,
        session,
        Long.parseLong(ORDER_ID),
        TARGETED_VIDEO_AD_UNIT_ID,
        Long.parseLong(CONTENT_CUSTOM_TARGETING_VALUE_ID));
  }
}
| apache-2.0 |
jacktomcat/spark2.0 | src/main/java/com/gochinatv/spark/kafka/KafkaSendMessage.java | 2789 | package com.gochinatv.spark.kafka;
import java.util.Date;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
/**
 * Simple Kafka producer demo.
 *
 * <p>Create the topic:
 * {@code kafka-topics.bat --create --topic spark-test --zookeeper localhost:2181 --partitions 3 --replication-factor 1}
 *
 * <p>Describe the topic:
 * {@code kafka-topics.bat --describe --zookeeper localhost:2181 --topic spark-test}
 */
public class KafkaSendMessage {

    /** Kafka bootstrap servers; public and mutable so callers can point at another broker. */
    public static String servers = "localhost:9092";

    public static void main(String[] args) throws Exception {
        sendStringMessage();
        //sendWrapperMessage();
    }

    /**
     * Builds the producer configuration shared by both send methods; only the
     * value serializer differs between them.
     *
     * @param valueSerializer fully qualified class name of the value serializer
     * @return the producer configuration
     */
    private static Properties producerProps(String valueSerializer) {
        Properties props = new Properties();
        props.put("bootstrap.servers", servers);
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", valueSerializer);
        return props;
    }

    /**
     * Sends 1000 plain string messages, one per second, to topic
     * {@code NL_U_APP_ALARM_APP_STRING}.
     *
     * @throws Exception if interrupted while sleeping or if sending fails
     */
    public static void sendStringMessage() throws Exception {
        // try-with-resources: the producer is closed even when an exception escapes
        // the loop (the original version leaked it on failure).
        try (Producer<String, String> producer =
                new KafkaProducer<>(producerProps("org.apache.kafka.common.serialization.StringSerializer"))) {
            for (int i = 0; i < 1000; i++) {
                Thread.sleep(1000L);
                String message = "zhangsan" + i;
                producer.send(new ProducerRecord<>("NL_U_APP_ALARM_APP_STRING", message));
                // Flush after each send so the one-message-per-second cadence is real.
                producer.flush();
            }
        }
    }

    /**
     * Sends 1000 {@code WrapperAppMessage} objects (custom serializer), one per
     * second, to topic {@code NL_U_APP_ALARM_APP}.
     *
     * @throws Exception if interrupted while sleeping or if sending fails
     */
    public static void sendWrapperMessage() throws Exception {
        try (Producer<String, WrapperAppMessage> producer =
                new KafkaProducer<>(producerProps("com.gochinatv.spark.kafka.SerializedMessage"))) {
            // No explicit partitioning: messages go wherever the default partitioner puts them.
            for (int i = 0; i < 1000; i++) {
                Thread.sleep(1000L);
                WrapperAppMessage message = new WrapperAppMessage();
                message.setAgreeId((i + 1) % 5);
                message.setCityId((i + 1) % 3);
                message.setConnectType((i + 1) % 4);
                message.setCount((i + 100) % 10);
                message.setInstanceId((i + 1) % 6);
                message.setProvinceId((i + 1) % 4);
                message.setTimestamp(System.currentTimeMillis());
                message.setValue((float) ((i + 200) % 4));
                producer.send(new ProducerRecord<>("NL_U_APP_ALARM_APP", message));
                System.out.println(message.toString());
                producer.flush();
            }
        }
    }
}
| apache-2.0 |
dgrlucky/Awesome | app/src/main/java/com/dgrlucky/extend/view/widget/OverwatchView.java | 5141 | package com.dgrlucky.extend.view.widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.view.View;
import com.dgrlucky.extend.R;
import com.nineoldandroids.animation.Animator;
import java.util.List;
/**
 * Overwatch-style loading view: seven tiled hexagons drawn with a shared
 * {@link Path}, animated while visible.
 *
 * @author dgrlucky
 * @date 2016/6/25 0:48
 * @company dgrlucky
 */
public class OverwatchView extends View {

    private int color;                  // fill color of the hexagons
    private float width;                // hexagon side length in px
    private int DEFAULT_WIDTH = 8;      // default side length in dp
    private Path mPath;                 // shared path the hexagons are appended to
    private Paint mPaint;               // fill paint
    private List<Animator> mAnimators;  // animator set; NOTE(review): never assigned in this
                                        // class — animating() is a no-op unless a subclass
                                        // or other code populates it; confirm.

    /**
     * Animation lifecycle states.
     */
    public enum AnimState {
        START, END, CANCEL
    }

    public OverwatchView(Context context) {
        super(context);
        init(null);
    }

    public OverwatchView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(attrs);
    }

    /** Reads side length/color from XML attributes and sets up path and paint. */
    private void init(AttributeSet attrs) {
        TypedArray typedArray = getContext().obtainStyledAttributes(attrs, R.styleable
                .OverwatchView);
        width = typedArray.getDimension(R.styleable.OverwatchView_side_width,
                dp2px(DEFAULT_WIDTH));
        color = typedArray.getColor(R.styleable.OverwatchView_side_color, Color.LTGRAY);
        typedArray.recycle();
        mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPaint.setColor(color);
        mPaint.setStyle(Paint.Style.FILL);
        mPath = new Path();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Renamed locals so they no longer shadow the 'width' field.
        int measuredWidth = measureDimension(dp2px(6 * DEFAULT_WIDTH), widthMeasureSpec);
        int measuredHeight = measureDimension(dp2px(6 * DEFAULT_WIDTH), heightMeasureSpec);
        setMeasuredDimension(measuredWidth, measuredHeight);
    }

    /** Standard MeasureSpec resolution: honor EXACTLY, cap at AT_MOST, else use default. */
    private int measureDimension(int original, int measureSpec) {
        int result;
        int mode = MeasureSpec.getMode(measureSpec);
        int size = MeasureSpec.getSize(measureSpec);
        if (mode == MeasureSpec.AT_MOST) {
            result = Math.min(original, size);
        } else if (mode == MeasureSpec.EXACTLY) {
            result = size;
        } else {
            result = original;
        }
        return result;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Fix: clear the path before re-appending the hexagons. Without this the
        // same contours accumulated on every redraw and the path grew unboundedly.
        mPath.reset();
        float height = (float) (Math.sqrt(3) * width);
        // Center of the flower layout; the +/-2 and +/-4 offsets are small gaps
        // between tiles.
        int x = getWidth() / 2 - (int) (width / 2) - 2;
        int y = getHeight() / 2 - (int) (3 * height / 2) - 4;
        // Six hexagons around the ring, then one in the middle.
        drawSix(canvas, height, x, y);
        drawSix(canvas, height, x + (int) (1.5 * width) + 2, y + height / 2 + 2);
        drawSix(canvas, height, x + (int) (1.5 * width) + 2, y + (int) (height * 1.5) + 4);
        drawSix(canvas, height, x, y + 2 * height + 4);
        drawSix(canvas, height, x - (int) (1.5 * width) - 2, y + (int) (height * 1.5) + 4);
        drawSix(canvas, height, x - (int) (1.5 * width) - 2, y + height / 2 + 2);
        drawSix(canvas, height, x, y + height + 2);
        canvas.drawPath(mPath, mPaint);
    }

    @Override
    public void setVisibility(int v) {
        if (getVisibility() != v) {
            super.setVisibility(v);
            // Start the animation when shown, end it when hidden.
            if (v == GONE || v == INVISIBLE) {
                animating(AnimState.END);
            } else {
                animating(AnimState.START);
            }
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Cancel animations so detached views do not leak animator callbacks.
        animating(AnimState.CANCEL);
    }

    /**
     * Applies the requested lifecycle transition to every registered animator.
     */
    private void animating(AnimState state) {
        if (mAnimators == null) {
            return;
        }
        int count = mAnimators.size();
        for (int i = 0; i < count; i++) {
            Animator animator = mAnimators.get(i);
            boolean isRunning = animator.isRunning();
            switch (state) {
                case START:
                    if (!isRunning) {
                        animator.start();
                    }
                    break;
                case END:
                    if (isRunning) {
                        animator.end();
                    }
                    break;
                case CANCEL:
                    if (isRunning) {
                        animator.cancel();
                    }
                    break;
            }
        }
    }

    /**
     * Appends one hexagon (top-left corner at {@code (x, y)}, side {@code width},
     * height {@code height}) to the shared path.
     */
    private synchronized void drawSix(Canvas canvas, float height, float x, float y) {
        mPath.moveTo(x, y);
        mPath.lineTo(x - width / 2, height / 2 + y);
        mPath.lineTo(x, height + y);
        mPath.lineTo(x + width, height + y);
        mPath.lineTo((float) (x + 1.5 * width), height / 2 + y);
        mPath.lineTo(x + width, y);
        mPath.lineTo(x, y);
        mPath.close();
    }

    /**
     * Converts dp to px using the display density.
     */
    private int dp2px(int dpValue) {
        return Math.round(getContext().getResources().getDisplayMetrics().density * dpValue);
    }
}
| apache-2.0 |
yy13003/im003 | im003/src/com/example/im003/config/BaseActivity.java | 473 | package com.example.im003.config;
import cn.bmob.im.BmobChat;
import android.app.Activity;
import android.os.Bundle;
/**
 * Base Activity: activities in this app extend this class so the Bmob IM SDK
 * is initialised before the screen is used.
 * (Original comments were mojibake-encoded Chinese; translated to English.)
 *
 * @author Administrator
 */
public class BaseActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Initialise the Bmob SDK with the application key from Config.
        BmobChat.getInstance(BaseActivity.this).init(Config.AppId);
    }
}
| apache-2.0 |
lexjoy/lex-utils | src/me/lexjoy/widget/fill_drawable/FillStateListDrawable.java | 1643 | package me.lexjoy.widget.fill_drawable;
import me.lexjoy.utils.ColorUtils;
import me.lexjoy.utils._assert.AssertUtils;
import android.content.res.ColorStateList;
import android.content.res.Resources;
import android.graphics.drawable.StateListDrawable;
/**
 * A {@link StateListDrawable} whose per-state drawables are produced by tinting
 * ("filling") a base drawable with colors taken from a color state list.
 */
public class FillStateListDrawable extends StateListDrawable {

    /**
     * Creates an empty fill-state drawable; states are added later via
     * {@link #addState(int[], int, int)}.
     *
     * @param resources resources used to resolve drawables and colors; must not be empty
     */
    public FillStateListDrawable(Resources resources) {
        AssertUtils.checkNotEmpty(resources, "invalid resources");
        this.resources = resources;
    }

    /**
     * Creates a drawable with one entry per state in {@code StateList.COMMON_STATES}
     * whose color is defined in the given color state list; undefined states are skipped.
     *
     * @param resources  resources used to resolve drawables and colors
     * @param colorListId resource ID of the color state list
     * @param drawableId  resource ID of the base drawable to tint
     * @param fitDark     whether to adjust the fill for dark colors (see FillDrawableFactory)
     */
    public FillStateListDrawable(Resources resources, int colorListId, int drawableId, boolean fitDark) {
        this(resources);
        FillDrawableFactory drawableHelper = new FillDrawableFactory(resources, drawableId);
        ColorStateList colorStateList = resources.getColorStateList(colorListId);
        AssertUtils.checkNotEmpty(colorStateList, "invalid color state list, id: " + colorListId);
        int stateColor;
        for (int[] state : StateList.COMMON_STATES) {
            // UNSET_COLOR acts as a "no color defined for this state" sentinel.
            // NOTE(review): a color list that genuinely contains #00000001 would be
            // skipped as well — confirm this collision cannot occur in practice.
            stateColor = colorStateList.getColorForState(state, UNSET_COLOR);
            if (stateColor == UNSET_COLOR) {
                continue;
            }
            this.addState(state, drawableHelper.getDrawable(stateColor, fitDark));
        }
    }

    /**
     * Adds a tinted state drawable, defaulting {@code fitDark} to true.
     */
    public FillStateListDrawable addState(int[] stateSet, int drawableId, int colorId) {
        return this.addState(stateSet, drawableId, colorId, true);
    }

    /**
     * Adds a tinted state drawable built via {@code ColorUtils.fillByResources}.
     *
     * @return this, for chaining
     */
    public FillStateListDrawable addState(int[] stateSet, int drawableId, int colorId, boolean fitDark) {
        this.addState(stateSet, ColorUtils.fillByResources(this.resources, drawableId, colorId, fitDark));
        return this;
    }

    private Resources resources;

    // Near-transparent color (#00000001) used as the "not defined" sentinel above.
    private static final int UNSET_COLOR = 0x00000001;//#00 000001
}
| apache-2.0 |
darrenshao/crossing | src/main/java/club/jmint/crossing/server/Bootstrap.java | 1215 | /*
* Copyright 2016 The Crossing Project
*
* The Crossing Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package club.jmint.crossing.server;
import club.jmint.crossing.wizard.WizardManager;
/**
* @author shc
*
*/
public class Bootstrap {

    /**
     * Application entry point: parses the command line, brings up all
     * wizard-managed components, runs the Crossing server, then tears the
     * components down once the server returns.
     *
     * @param args command-line arguments handed to {@link CommandLine}
     */
    public static void main(String[] args) {
        // Parse command-line options first so configuration is available below.
        CommandLine commandLine = new CommandLine(args);
        commandLine.parse();

        // Initialise and start every registered component.
        WizardManager.initWizard();
        WizardManager.startupWizard();

        // Run the server.
        CrossingServer server = new CrossingServer();
        server.start();

        // Stop all components after the server has finished.
        WizardManager.shutdownWizard();
    }
}
| apache-2.0 |
isparkes/OpenRate | src/main/java/OpenRate/transaction/ITMClient.java | 1867 | package OpenRate.transaction;
/**
* The ITMClient interface registers a class as a client of
* the transaction manager, meaning that the processing is conducted by the
* centralised transaction manager class, to allow processing to be committed
* or rolled back based on the overall status of the processing.
*
* The interface defines a number of control variables, and the methods that
* must be hooked to perform the interaction with the TM.
*/
public interface ITMClient
{
  /**
   * This is used to inform the client that an update has taken place to the
   * status of the transaction, and that we are now in the flush phase.
   * Implementations should persist or release buffered work for the transaction.
   *
   * @param transactionNumber The number of the transaction to update
   * @return true if the flush was ok, otherwise false if there was an error
   *         (a false return is expected to trigger a rollback by the manager)
   */
  public boolean updateTransactionStatusFlush(int transactionNumber);

  /**
   * This is used to inform the client that an update has taken place to the
   * status of the transaction, and that we are now in the commit/rollback phase
   * and the transaction was processed correctly. Implementations should make
   * the transaction's effects permanent.
   *
   * @param transactionNumber The number of the transaction to update
   */
  public void updateTransactionStatusCommit(int transactionNumber);

  /**
   * This is used to inform the client that an update has taken place to the
   * status of the transaction, and that we are now in the close phase
   * and the transaction was processed with an error. Implementations should
   * discard the transaction's effects.
   *
   * @param transactionNumber The number of the transaction to update
   */
  public void updateTransactionStatusRollback(int transactionNumber);

  /**
   * This is used to inform the client that an update has taken place to the
   * status of the transaction, and that we are now in the close phase.
   * Called after commit or rollback; implementations should release any
   * per-transaction resources here.
   *
   * @param transactionNumber The number of the transaction to update
   */
  public void updateTransactionStatusClose(int transactionNumber);
}
| apache-2.0 |
rocye/wx-idk | src/main/java/org/wx/sdk/card/request/CardCodeDepositRequest.java | 2913 | package org.wx.sdk.card.request;
import org.wx.sdk.base.Request;
import org.wx.sdk.card.respone.CardCodeDepositRespone;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Request object for the WeChat card "code deposit" (import code) API.
 *
 * <p>After a custom-code card has been created and approved, the agreed number
 * of custom codes must be imported into the WeChat backend through this API;
 * WeChat then stores and dispenses the codes on the developer's behalf.
 *
 * <ul>
 *   <li>At most 100 codes per call.</li>
 *   <li>No code may be an empty string.</li>
 *   <li>After import, WeChat checks that the declared stock matches the number
 *       of imported codes.</li>
 *   <li>Failed imports can be retried until they succeed.</li>
 * </ul>
 *
 * @author Rocye
 * @version 2017.12.26
 */
public class CardCodeDepositRequest implements Request<CardCodeDepositRespone> {

    /** WeChat public-platform access token (the unique interface credential). */
    private String accessToken;

    /** Map holding the request parameters that are serialized to JSON. */
    private Map<String, Object> wxHashMap = new HashMap<String, Object>();

    /** ID of the card the codes are imported for. */
    private String card_id;

    /** Custom codes to import into the WeChat backend; at most 100 per call. */
    private List<String> code;

    /**
     * Constructor.
     *
     * @param cardId ID of the card the codes are imported for
     * @param code   custom codes to import; at most 100 per call
     */
    public CardCodeDepositRequest(String cardId, List<String> code) {
        this.card_id = cardId;
        this.code = code;
    }

    /**
     * Returns the request URL of the API.
     *
     * <p>Fix: the scheme was {@code http://}; the WeChat API endpoints are
     * served over HTTPS only.
     */
    public String getApiUrl() {
        String url = "https://api.weixin.qq.com/card/code/deposit?access_token=" + this.accessToken;
        return url;
    }

    /**
     * Returns the response class this request maps to.
     */
    public Class<CardCodeDepositRespone> getResponseClass() {
        return CardCodeDepositRespone.class;
    }

    /**
     * Returns the request parameters as a map (serialized to JSON by the caller).
     */
    public Map<String, Object> getWxHashMap() {
        wxHashMap.put("card_id", this.card_id);
        wxHashMap.put("code", this.code);
        return wxHashMap;
    }

    /**
     * Request type: 1 = plain GET, 2 = download GET, 3 = plain POST,
     * 4 = download POST, 5 = upload without params, 6 = upload with params.
     */
    public int getReqType() {
        return 3;
    }

    /**
     * Parameter format ("kv", "json" or "xml").
     */
    public String getParamFormat() {
        return "json";
    }

    /**
     * Sets the access token used to build the request URL.
     */
    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    public String getCard_id() {
        return card_id;
    }

    public void setCard_id(String card_id) {
        this.card_id = card_id;
    }

    public List<String> getCode() {
        return code;
    }

    public void setCode(List<String> code) {
        this.code = code;
    }
}
| apache-2.0 |
fkrafi/swing-xml | src/main/java/com/aboutrafi/sxml/component/MenuBar.java | 282 | package com.aboutrafi.sxml.component;
import javax.swing.JMenuBar;
import org.w3c.dom.NamedNodeMap;
public class MenuBar extends Component<JMenuBar> {

    /**
     * Builds a {@link JMenuBar} and applies the supplied XML node attributes
     * to it via {@code decorate}.
     *
     * @param mProperties the XML attributes to apply
     * @return the decorated menu bar
     */
    @Override
    public JMenuBar create(NamedNodeMap mProperties) {
        JMenuBar menuBar = new JMenuBar();
        return (JMenuBar) decorate(menuBar, mProperties);
    }
}
| apache-2.0 |
youtongluan/sumk | src/main/java/org/yx/rpc/transport/DataBuffer.java | 884 | /**
* Copyright (C) 2016 - 2030 youtongluan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yx.rpc.transport;
import org.yx.common.codec.DataStream;
/**
 * A writable data buffer used by the RPC transport layer; extends
 * {@link DataStream} with bulk-write and buffer-management operations.
 * (Original comments were in Chinese; translated to English.)
 */
public interface DataBuffer extends DataStream {

	/**
	 * Writes the given byte array into the buffer.
	 *
	 * @param bs
	 *            the bytes to write; must not be null
	 */
	void writeBytes(byte[] bs);

	/**
	 * Flips the buffer — presumably switching it from write mode to read mode,
	 * ByteBuffer-style; confirm against the implementations.
	 */
	void flip();

	/**
	 * Returns whether {@code length} bytes are available.
	 * (Note: "avilable" is a typo for "available", kept because renaming an
	 * interface method would break all implementors and callers.)
	 *
	 * @param length number of bytes required
	 * @return true if {@code length} bytes are available
	 */
	boolean avilable(int length);
}
| apache-2.0 |
asakusafw/asakusafw | operator/core/src/main/java/com/asakusafw/operator/description/ClassDescription.java | 3595 | /**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.operator.description;
import java.text.MessageFormat;
import java.util.Objects;
/**
 * Represents a class or interface type by its binary name.
 */
public class ClassDescription extends ReifiableTypeDescription {

    /** The binary name (e.g. {@code com.example.Outer$Inner}); never null. */
    private final String name;

    /**
     * Creates a new instance.
     * @param name the binary name; must not be null
     */
    public ClassDescription(String name) {
        this.name = Objects.requireNonNull(name);
    }

    /**
     * Returns an instance for the given reflective object.
     * @param aClass the reflective object; must be a class or interface type
     * @return the related instance
     * @throws IllegalArgumentException if {@code aClass} is an array or primitive type
     */
    public static ClassDescription of(Class<?> aClass) {
        if (aClass.isArray() || aClass.isPrimitive()) {
            throw new IllegalArgumentException("must be class or interface type"); //$NON-NLS-1$
        }
        return new ClassDescription(aClass.getName());
    }

    @Override
    public TypeKind getTypeKind() {
        return TypeKind.CLASS;
    }

    @Override
    public ClassDescription getErasure() {
        return this;
    }

    /**
     * Returns the fully qualified class name ({@code $} replaced with {@code .}).
     * @return the fully qualified class name
     */
    public String getClassName() {
        return name.replace('$', '.');
    }

    /**
     * Returns the binary name.
     * @return the binary name
     */
    public String getBinaryName() {
        return name;
    }

    /**
     * Returns the JVM internal name ({@code .} replaced with {@code /}).
     * @return the internal name
     */
    public String getInternalName() {
        return name.replace('.', '/');
    }

    /**
     * Returns the binary name (alias of {@link #getBinaryName()}).
     * @return the binary name
     */
    public String getName() {
        return getBinaryName();
    }

    /**
     * Returns the class simple name: the text after the last {@code .} or
     * {@code $}, or the whole name if neither occurs.
     * @return the class simple name
     */
    public String getSimpleName() {
        int index = Math.max(name.lastIndexOf('.'), name.lastIndexOf('$'));
        return index < 0 ? name : name.substring(index + 1);
    }

    /**
     * Returns the package name of the class, or {@code null} for the default
     * package.
     * @return the package name, or {@code null}
     */
    public String getPackageName() {
        int index = name.lastIndexOf('.');
        return index <= 0 ? null : name.substring(0, index);
    }

    @Override
    public int hashCode() {
        // Algebraically identical to the classic 31 * 1 + name.hashCode() expansion,
        // so hash values are unchanged from the previous implementation.
        return 31 + name.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return name.equals(((ClassDescription) obj).name);
    }

    @Override
    public String toString() {
        return MessageFormat.format(
                "Class({0})", //$NON-NLS-1$
                name);
    }
}
| apache-2.0 |
twitter/hraven | hraven-etl/src/main/java/com/twitter/hraven/mapreduce/HadoopCompat.java | 4994 | /**
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.hraven.mapreduce;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
/*
* This is based on ContextFactory.java from hadoop-2.0.x sources.
*/
/**
 * Utility methods to allow applications to deal with inconsistencies between MapReduce Context
 * Objects API between Hadoop 1.x and 2.x. All version-dependent members are resolved
 * reflectively once, in the static initializer, and cached.
 */
public class HadoopCompat {

  // True when the Hadoop 2.x class hierarchy is on the classpath.
  private static final boolean useV21;

  // Constructor of the runtime's counter class: (String name, String displayName, long value).
  private static final Constructor<?> GENERIC_COUNTER_CONSTRUCTOR;

  // getCounter(String, String) resolved on the appropriate context class.
  private static final Method GET_COUNTER_METHOD;

  // Counter#increment(long) resolved reflectively.
  private static final Method INCREMENT_COUNTER_METHOD;

  static {
    // Probe for a class that only exists in Hadoop 2.x to detect the runtime version.
    boolean v21 = true;
    final String PACKAGE = "org.apache.hadoop.mapreduce";
    try {
      Class.forName(PACKAGE + ".task.JobContextImpl");
    } catch (ClassNotFoundException cnfe) {
      v21 = false;
    }
    useV21 = v21;
    // Pick the counter implementation class for this Hadoop version.
    Class<?> genericCounterCls;
    try {
      if (v21) {
        genericCounterCls = Class.forName(PACKAGE + ".counters.GenericCounter");
      } else {
        genericCounterCls = Class.forName("org.apache.hadoop.mapred.Counters$Counter");
      }
    } catch (ClassNotFoundException e) {
      throw new IllegalArgumentException("Can't find class", e);
    }
    try {
      GENERIC_COUNTER_CONSTRUCTOR =
          genericCounterCls.getDeclaredConstructor(String.class, String.class, Long.TYPE);
      // The constructor is not public in all versions, so force accessibility.
      GENERIC_COUNTER_CONSTRUCTOR.setAccessible(true);
      if (useV21) {
        // In Hadoop 2 getCounter moved to TaskAttemptContext; fall back to
        // TaskInputOutputContext for intermediate releases where it is not there.
        Method get_counter;
        try {
          get_counter =
              Class.forName(PACKAGE + ".TaskAttemptContext").getMethod("getCounter", String.class,
                  String.class);
        } catch (Exception e) {
          get_counter =
              Class.forName(PACKAGE + ".TaskInputOutputContext").getMethod("getCounter",
                  String.class, String.class);
        }
        GET_COUNTER_METHOD = get_counter;
      } else {
        GET_COUNTER_METHOD =
            Class.forName(PACKAGE + ".TaskInputOutputContext").getMethod("getCounter",
                String.class, String.class);
      }
      INCREMENT_COUNTER_METHOD =
          Class.forName(PACKAGE + ".Counter").getMethod("increment", Long.TYPE);
    } catch (SecurityException e) {
      throw new IllegalArgumentException("Can't run constructor ", e);
    } catch (NoSuchMethodException e) {
      throw new IllegalArgumentException("Can't find constructor ", e);
    } catch (ClassNotFoundException e) {
      throw new IllegalArgumentException("Can't find class", e);
    }
  }

  /**
   * True if runtime Hadoop version is 2.x, false otherwise.
   */
  public static boolean isVersion2x() {
    return useV21;
  }

  /**
   * Creates a counter instance of whichever counter class the runtime provides.
   *
   * @return with Hadoop 2 : <code>new GenericCounter(args)</code>,<br>
   *         with Hadoop 1 : <code>new Counter(args)</code>
   */
  public static Counter newGenericCounter(String name, String displayName, long value) {
    try {
      return (Counter) GENERIC_COUNTER_CONSTRUCTOR.newInstance(name, displayName, value);
    } catch (InstantiationException e) {
      throw new IllegalArgumentException("Can't instantiate Counter", e);
    } catch (IllegalAccessException e) {
      throw new IllegalArgumentException("Can't instantiate Counter", e);
    } catch (InvocationTargetException e) {
      throw new IllegalArgumentException("Can't instantiate Counter", e);
    }
  }

  /**
   * Invokes a method and rethrows any exception as runtime exceptions.
   */
  private static Object invoke(Method method, Object obj, Object... args) {
    try {
      return method.invoke(obj, args);
    } catch (IllegalAccessException e) {
      throw new IllegalArgumentException("Can't invoke method " + method.getName(), e);
    } catch (InvocationTargetException e) {
      throw new IllegalArgumentException("Can't invoke method " + method.getName(), e);
    }
  }

  /**
   * Invoke getCounter() on TaskInputOutputContext. Works with both Hadoop 1 and 2.
   */
  public static Counter getCounter(TaskInputOutputContext context, String groupName,
      String counterName) {
    return (Counter) invoke(GET_COUNTER_METHOD, context, groupName, counterName);
  }

  /**
   * Increment the counter. Works with both Hadoop 1 and 2.
   */
  public static void incrementCounter(Counter counter, long increment) {
    invoke(INCREMENT_COUNTER_METHOD, counter, increment);
  }
}
| apache-2.0 |
lofei117/AndroidWorks | Torch/src/info/lofei/android/torch/receiver/TorchAppWidgetProvider.java | 3587 | package info.lofei.android.torch.receiver;
import android.annotation.TargetApi;
import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.widget.RemoteViews;
import info.lofei.android.torch.MainActivity;
import info.lofei.android.torch.R;
import info.lofei.android.torch.Util.TorchUtil;
import info.lofei.android.torch.service.TorchAppWidgetService;
import info.lofei.android.torch.service.TorchToggleService;
/**
* Created by lofei on 14/10/20.
*/
public class TorchAppWidgetProvider extends AppWidgetProvider {

    /**
     * Pushes the current torch state to every instance of this home-screen
     * widget: sets the bulb icon, wires the tap to TorchToggleService, and
     * re-publishes the RemoteViews.
     */
    public static void updateWidgets(Context context){
        AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(context);
        int[] appWidgetIds = appWidgetManager.getAppWidgetIds(new ComponentName(context, TorchAppWidgetProvider.class));
        // First pass: reset every widget to the bare layout.
        // NOTE(review): this pass is immediately overwritten by the update at the
        // bottom of the method — it looks redundant; confirm before removing.
        for ( int id : appWidgetIds ) {
            RemoteViews views = new RemoteViews(context.getPackageName(), R.layout.main_appwidget);
            appWidgetManager.updateAppWidget(id, views);
        }
        // Get the widget layout (translated from the original Chinese comment).
        RemoteViews remoteViews=new RemoteViews(context.getPackageName(), R.layout.main_appwidget);
        // Show the "on" bulb when the torch is lit, the plain bulb otherwise.
        remoteViews.setImageViewResource(R.id.btn_torch_state,
                TorchUtil.isTorchOn() ? R.drawable.ic_lightbulb_on : R.drawable.ic_lightbulb);
        // Tapping the bulb sends TOGGLE_ACTION to TorchToggleService.
        final ComponentName activityName = new ComponentName(context, TorchToggleService.class);
        Intent action = new Intent(TorchToggleService.TOGGLE_ACTION);
        action.setComponent(activityName);
        PendingIntent pendingIntent = PendingIntent.getService(context, 0, action, 0);
        remoteViews.setOnClickPendingIntent(R.id.btn_torch_state, pendingIntent);
        Intent intent = new Intent(context, TorchAppWidgetService.class);
        intent.setData(Uri.parse(intent.toUri(Intent.URI_INTENT_SCHEME)));
        // NOTE(review): setRemoteAdapter is meant for collection views
        // (ListView/GridView/StackView), but btn_torch_state appears to be an
        // image view above — verify this call is intentional.
        remoteViews.setRemoteAdapter(R.id.btn_torch_state, intent);
        appWidgetManager.updateAppWidget(appWidgetIds, remoteViews);
    }

    @Override
    public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
        super.onUpdate(context, appWidgetManager, appWidgetIds);
        // Refresh all widget instances whenever the system asks for an update.
        updateWidgets(context);
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public void onAppWidgetOptionsChanged(Context context, AppWidgetManager appWidgetManager,
            int appWidgetId, Bundle newOptions) {
        super.onAppWidgetOptionsChanged(context, appWidgetManager, appWidgetId, newOptions);
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        // Logged for debugging; all actual handling is delegated to the superclass.
        Log.d(TorchAppWidgetService.class.getSimpleName(), " onReceive intent= " + intent);
        super.onReceive(context, intent);
    }

    @Override
    public void onEnabled(Context context) {
        // First widget instance added: start the toggle service.
        context.startService(new Intent(context, TorchToggleService.class));
        super.onEnabled(context);
    }

    @Override
    public void onDisabled(Context context) {
        // Stop the service that updates the widget (translated comment).
        context.stopService(new Intent(context, TorchToggleService.class));
        super.onDisabled(context);
    }

    @Override
    public void onDeleted(Context context, int[] appWidgetIds) {
        // Stop the service that updates the widget (translated comment).
        // NOTE(review): onDeleted fires when ANY instance is removed, not the
        // last one — stopping the service here may be premature; confirm.
        context.stopService(new Intent(context, TorchToggleService.class));
        super.onDeleted(context, appWidgetIds);
    }
}
| apache-2.0 |
idailylife/CreativeCrowd | src/main/java/edu/inlab/service/MicroTaskServiceImpl.java | 1030 | package edu.inlab.service;
import edu.inlab.models.Microtask;
import edu.inlab.models.Task;
import edu.inlab.repo.MicroTaskRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* Created by inlab-dell on 2016/5/16.
*/
@Service("microTaskService")
public class MicroTaskServiceImpl implements MicroTaskService {

    @Autowired
    MicroTaskRepository microTaskRepository;

    /** Loads a microtask by its primary key (delegates to the repository). */
    @Override
    public Microtask getById(int id) {
        return microTaskRepository.getById(id);
    }

    /** Persists a new microtask. */
    @Override
    public void save(Microtask microtask) {
        microTaskRepository.save(microtask);
    }

    /** Removes the given microtask. */
    @Override
    public void delete(Microtask microtask) {
        microTaskRepository.remove(microtask);
    }

    /**
     * Updates the given microtask.
     * NOTE(review): despite the name this delegates to {@code update} only — it
     * does not insert missing rows; confirm whether the repository's update
     * handles the save-or-update case.
     */
    @Override
    public void saveOrUpdate(Microtask microtask) {
        microTaskRepository.update(microtask);
    }

    /** Returns the single microtask belonging to the given task. */
    @Override
    public Microtask getUniqueByTask(Task task) {
        return microTaskRepository.getFirstMtaskByTask(task);
    }
}
| apache-2.0 |
dmundra/cis650completestreets | src/edu/uoregon/TakePictureView.java | 3222 | package edu.uoregon;
import java.io.IOException;
import edu.uoregon.db.GeoDBConnector;
import edu.uoregon.db.IGeoDB;
import edu.uoregon.log.CSLog;
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
/**
* Inspired by the API demos.
*/
/**
 * Full-screen camera activity: shows a live preview, takes one picture on tap,
 * stores it against the geo stamp passed in via the "geoStampID" intent extra,
 * then finishes. (Inspired by the Android API demos.)
 */
public class TakePictureView extends Activity {
	private CameraPreview mPreview;   // full-screen preview surface
	private IGeoDB db;                // geo-stamp database, closed in onDestroy
	private int geoStampID;           // geo stamp the picture is attached to; -1 if absent

	// Used for logging
	private static final String TAG = "TakePictureViewLog";

	// Guards against taking more than one picture per activity instance.
	private boolean pictureTaken = false;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		CSLog.i(TAG, "Camera view started.");

		// Hide the window title.
		requestWindowFeature(Window.FEATURE_NO_TITLE);

		// Set up the db
		db = GeoDBConnector.open(this);

		// Get the geoStampID from the intent (-1 when the extra is missing,
		// in which case the picture is discarded in onPictureTaken).
		geoStampID = getIntent().getIntExtra("geoStampID", -1);

		// Create our Preview view and set it as the content of our activity.
		mPreview = new CameraPreview(this);
		setContentView(mPreview);
	}

	@Override
	protected void onDestroy() {
		super.onDestroy();
		CSLog.i(TAG, "Camera view closed.");
		db.close();
	}

	/**
	 * SurfaceView that owns the camera for its surface's lifetime: opens the
	 * camera on surface creation, releases it on destruction, and takes a
	 * picture on the first tap.
	 */
	private class CameraPreview extends SurfaceView implements
			SurfaceHolder.Callback, Camera.PictureCallback,
			View.OnClickListener {
		SurfaceHolder mHolder;
		Camera mCamera;

		CameraPreview(Context context) {
			super(context);

			// Install a SurfaceHolder.Callback so we get notified when the
			// underlying surface is created and destroyed.
			mHolder = getHolder();
			mHolder.addCallback(this);
			mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
			this.setOnClickListener(this);
		}

		public void surfaceCreated(SurfaceHolder holder) {
			// The Surface has been created, acquire the camera and tell it
			// where
			// to draw.
			mCamera = Camera.open();
			try {
				mCamera.setPreviewDisplay(holder);
			} catch (IOException exception) {
				// Preview could not be attached; release the camera so other
				// apps can use it. NOTE(review): the activity keeps running
				// with mCamera == null — a later tap would NPE in onClick.
				mCamera.release();
				mCamera = null;
				// TODO: add more exception handling logic here
			}
		}

		public void surfaceDestroyed(SurfaceHolder holder) {
			// Surface will be destroyed when we return, so stop the preview.
			// Because the CameraDevice object is not a shared resource, it's
			// very
			// important to release it when the activity is paused.
			mCamera.stopPreview();
			mCamera.release();
			mCamera = null;
		}

		public void surfaceChanged(SurfaceHolder holder, int format, int w,
				int h) {
			// Now that the size is known, set up the camera parameters and
			// begin
			// the preview.
			Camera.Parameters parameters = mCamera.getParameters();
			parameters.setPreviewSize(w, h);
			mCamera.setParameters(parameters);
			mCamera.startPreview();
		}

		@Override
		public void onPictureTaken(byte[] data, Camera camera) {
			CSLog.i(TAG, "Picture taken.");
			// Persist the JPEG bytes against the geo stamp, then close the
			// activity; -1 means no stamp was supplied and the data is dropped.
			if (geoStampID != -1) {
				db.addPictureToGeoStamp(geoStampID, data);
			}
			finish();
		}

		@Override
		public void onClick(View v) {
			// Only the first tap triggers a capture; takePicture must not be
			// called twice on the same camera session.
			if (!pictureTaken) {
				pictureTaken = true;
				mCamera.takePicture(null, null, this);
			}
		}
	}
}
gravitee-io/gravitee-management-rest-api | gravitee-rest-api-model/src/main/java/io/gravitee/rest/api/model/analytics/TopHitsAnalytics.java | 1331 | /**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.rest.api.model.analytics;
import java.util.Map;
/**
* @author Titouan COMPIEGNE (titouan.compiegne at graviteesource.com)
* @author GraviteeSource Team
*/
/**
 * Analytics payload for "top hits" queries: a map from bucket key to hit
 * count, plus optional per-key metadata (key -> attribute map).
 *
 * @author Titouan COMPIEGNE (titouan.compiegne at graviteesource.com)
 * @author GraviteeSource Team
 */
public class TopHitsAnalytics implements Analytics {

    private Map<String, Long> values;
    private Map<String, Map<String, String>> metadata;

    /** Per-key metadata attached to the hit counts, if any. */
    public Map<String, Map<String, String>> getMetadata() {
        return this.metadata;
    }

    public void setMetadata(Map<String, Map<String, String>> metadata) {
        this.metadata = metadata;
    }

    /** Bucket key to hit count. */
    public Map<String, Long> getValues() {
        return this.values;
    }

    public void setValues(Map<String, Long> values) {
        this.values = values;
    }
}
| apache-2.0 |
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_5848.java | 151 | package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
/**
 * Intentionally empty type annotated with {@code Annotation_001}; one of a
 * large number of such classes used to exercise annotation processing over a
 * massive count of annotated types (see the enclosing package name).
 */
@Annotation_001
public class Class_5848 {
}
| apache-2.0 |
javier-tarazaga/instasearch-app-android | domain/src/main/java/com/javiertarazaga/instasearch/domain/interactor/SaveMaxDistance.java | 2361 | /**
* Copyright (C) 2017 Javier Tarazaga Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.javiertarazaga.instasearch.domain.interactor;
import com.fernandocejas.arrow.checks.Preconditions;
import com.javiertarazaga.instasearch.domain.exception.preferences.PreferenceException;
import com.javiertarazaga.instasearch.domain.executor.PostExecutionThread;
import com.javiertarazaga.instasearch.domain.executor.ThreadExecutor;
import com.javiertarazaga.instasearch.domain.repository.PreferencesRepository;
import io.reactivex.Observable;
import javax.inject.Inject;
/**
 * This class is an implementation of {@link UseCase} that represents a use case for
 * saving the distance to be used while searching for medias.
 */
public class SaveMaxDistance extends UseCase<Integer, SaveMaxDistance.Params> {

    private final PreferencesRepository preferencesRepository;

    @Inject SaveMaxDistance(PreferencesRepository preferencesRepository, ThreadExecutor threadExecutor,
        PostExecutionThread postExecutionThread) {
        super(threadExecutor, postExecutionThread);
        this.preferencesRepository = preferencesRepository;
    }

    /**
     * Builds the {@link UseCase} observable.
     *
     * @param params holder carrying the distance to persist (must not be null)
     * @return an {@link Observable} that will emit the maximum distance to be used
     *         while searching for medias nearby
     * @throws NullPointerException if {@code params} is null
     * <ul>
     *   <li>the stream errors with {@link PreferenceException} if an unknown error occurs</li>
     * </ul>
     */
    @Override Observable<Integer> buildUseCaseObservable(SaveMaxDistance.Params params) {
        Preconditions.checkNotNull(params);
        return this.preferencesRepository.saveDistance(params.distance);
    }

    /** Immutable parameter holder; create instances via {@link #forDistance(int)}. */
    public static final class Params {

        private final int distance;

        private Params(int distance) {
            this.distance = distance;
        }

        /** Factory for a {@code Params} carrying the max distance to save. */
        public static Params forDistance(int distance) {
            return new Params(distance);
        }
    }
}
| apache-2.0 |
Gaia3D/mago3d | mago3d-admin/src/main/java/gaia3d/service/impl/DataObjectAttributeServiceImpl.java | 4161 | package gaia3d.service.impl;
import gaia3d.domain.data.DataInfo;
import gaia3d.domain.data.DataObjectAttribute;
import gaia3d.domain.data.DataObjectAttributeFileInfo;
import gaia3d.parser.DataObjectAttributeFileParser;
import gaia3d.parser.impl.DataObjectAttributeFileJsonParser;
import gaia3d.persistence.DataObjectAttributeMapper;
import gaia3d.service.DataObjectAttributeService;
import gaia3d.service.DataService;
import gaia3d.support.LogMessageSupport;
import gaia3d.utils.FileUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Map;
/**
 * Manages data-object attributes: lookup of the attribute record for a data
 * item and bulk registration of attributes parsed from an uploaded file.
 *
 * @author jeongdae
 */
@Slf4j
@Service
public class DataObjectAttributeServiceImpl implements DataObjectAttributeService {

    @Autowired
    private DataService dataService;

    @Autowired
    private DataObjectAttributeMapper dataObjectAttributeMapper;

    /**
     * Fetches the object-attribute record for the given data id.
     *
     * @param dataId data identifier
     * @return the attribute record (null when none has been registered)
     */
    @Transactional(readOnly=true)
    public DataObjectAttribute getDataObjectAttribute(Long dataId) {
        return dataObjectAttributeMapper.getDataObjectAttribute(dataId);
    }

    /**
     * Registers object attributes for a data item from an uploaded attribute
     * file, then records parse/insert statistics back onto the file-info row.
     *
     * @param dataId data identifier
     * @param dataObjectAttributeFileInfo upload metadata of the attribute file
     * @return the same file-info instance, updated with the result counters
     */
    @Transactional
    public DataObjectAttributeFileInfo insertDataObjectAttribute(Long dataId, DataObjectAttributeFileInfo dataObjectAttributeFileInfo) {
        // Persist the upload history first.
        dataObjectAttributeMapper.insertDataObjectAttributeFileInfo(dataObjectAttributeFileInfo);

        DataObjectAttributeFileParser dataObjectAttributeFileParser;
        // NOTE(review): both branches construct the JSON parser, so non-JSON
        // extensions are currently parsed as JSON too - confirm this is intended.
        if(FileUtils.EXTENSION_JSON.equals(dataObjectAttributeFileInfo.getFileExt())) {
            dataObjectAttributeFileParser = new DataObjectAttributeFileJsonParser();
        } else {
            dataObjectAttributeFileParser = new DataObjectAttributeFileJsonParser();
        }

        Map<String, Object> map = dataObjectAttributeFileParser.parse(dataId, dataObjectAttributeFileInfo);
        String attribute = (String) map.get("attribute");

        int insertSuccessCount = 0;
        int updateSuccessCount = 0;
        int insertErrorCount = 0;
        try {
            // Insert a fresh attribute row when none exists, otherwise update it.
            DataObjectAttribute dataObjectAttribute = dataObjectAttributeMapper.getDataObjectAttribute(dataId);
            if(dataObjectAttribute == null) {
                dataObjectAttribute = new DataObjectAttribute();
                dataObjectAttribute.setDataId(dataId);
                dataObjectAttribute.setAttributes(attribute);
                dataObjectAttributeMapper.insertDataObjectAttribute(dataObjectAttribute);
                insertSuccessCount++;
            } else {
                dataObjectAttribute.setAttributes(attribute);
                dataObjectAttributeMapper.updateDataObjectAttribute(dataObjectAttribute);
                updateSuccessCount++;
            }
        } catch(DataAccessException e) {
            LogMessageSupport.printMessage(e, "@@@@@@@@@@@@ dataAccess exception. message = {}", e.getClass().getName());
            insertErrorCount++;
        } catch(RuntimeException e) {
            LogMessageSupport.printMessage(e, "@@@@@@@@@@@@ runtime exception. message = {}", e.getClass().getName());
            insertErrorCount++;
        } catch(Exception e) {
            LogMessageSupport.printMessage(e, "@@@@@@@@@@@@ exception. message = {}", e.getClass().getName());
            insertErrorCount++;
        }

        // Copy parse statistics and the insert/update counters back onto the
        // file-info row and persist them.
        dataObjectAttributeFileInfo.setTotalCount((Integer) map.get("totalCount"));
        dataObjectAttributeFileInfo.setParseSuccessCount((Integer) map.get("parseSuccessCount"));
        dataObjectAttributeFileInfo.setParseErrorCount((Integer) map.get("parseErrorCount"));
        dataObjectAttributeFileInfo.setInsertSuccessCount(insertSuccessCount);
        dataObjectAttributeFileInfo.setUpdateSuccessCount(updateSuccessCount);
        dataObjectAttributeFileInfo.setInsertErrorCount(insertErrorCount);
        dataObjectAttributeMapper.updateDataObjectAttributeFileInfo(dataObjectAttributeFileInfo);

        // Flag on the data row that object attributes now exist.
        DataInfo dataInfo = new DataInfo();
        dataInfo.setDataId(dataObjectAttributeFileInfo.getDataId());
        dataInfo.setObjectAttributeExist(true);
        dataService.updateData(dataInfo);

        return dataObjectAttributeFileInfo;
    }
}
| apache-2.0 |
taktos/ea2ddl | ea2ddl-dao/src/main/java/jp/sourceforge/ea2ddl/dao/cbean/bs/BsTPackageCB.java | 10158 | package jp.sourceforge.ea2ddl.dao.cbean.bs;
import java.util.Map;
import org.seasar.dbflute.cbean.AbstractConditionBean;
import org.seasar.dbflute.cbean.ConditionBean;
import org.seasar.dbflute.cbean.ConditionQuery;
import org.seasar.dbflute.cbean.SubQuery;
import org.seasar.dbflute.cbean.UnionQuery;
import org.seasar.dbflute.cbean.sqlclause.SqlClause;
import org.seasar.dbflute.dbmeta.DBMetaProvider;
import jp.sourceforge.ea2ddl.dao.allcommon.DBFluteConfig;
import jp.sourceforge.ea2ddl.dao.allcommon.DBMetaInstanceHandler;
import jp.sourceforge.ea2ddl.dao.allcommon.ImplementedSqlClauseCreator;
import jp.sourceforge.ea2ddl.dao.cbean.*;
import jp.sourceforge.ea2ddl.dao.cbean.cq.*;
/**
 * The base condition-bean of t_package.
 *
 * NOTE: auto-generated by DBFlute; hand edits here will be lost on the next
 * code generation run. Extend the concrete TPackageCB for customization.
 *
 * @author DBFlute(AutoGenerator)
 */
public class BsTPackageCB extends AbstractConditionBean {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    private final DBMetaProvider _dbmetaProvider = new DBMetaInstanceHandler();
    protected TPackageCQ _conditionQuery;

    // ===================================================================================
    //                                                                           SqlClause
    //                                                                           =========
    @Override
    protected SqlClause createSqlClause() {
        return new ImplementedSqlClauseCreator().createSqlClause(this);
    }

    // ===================================================================================
    //                                                                     DBMeta Provider
    //                                                                     ===============
    @Override
    protected DBMetaProvider getDBMetaProvider() {
        return _dbmetaProvider;
    }

    // ===================================================================================
    //                                                                          Table Name
    //                                                                          ==========
    public String getTableDbName() {
        return "t_package";
    }

    public String getTableSqlName() {
        return "t_package";
    }

    // ===================================================================================
    //                                                                      PrimaryKey Map
    //                                                                      ==============
    // t_package has no primary key, so all PK-based operations fail fast.
    public void acceptPrimaryKeyMap(Map<String, ? extends Object> primaryKeyMap) {
        String msg = "This table has no primary-keys: " + getTableDbName();
        throw new IllegalStateException(msg);
    }

    // ===================================================================================
    //                                                                     OrderBy Setting
    //                                                                     ===============
    public ConditionBean addOrderBy_PK_Asc() {
        String msg = "This table has no primary-keys: " + getTableDbName();
        throw new IllegalStateException(msg);
    }

    public ConditionBean addOrderBy_PK_Desc() {
        String msg = "This table has no primary-keys: " + getTableDbName();
        throw new IllegalStateException(msg);
    }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    public TPackageCQ query() {
        return getConditionQuery();
    }

    // Lazily creates the root condition-query bound to the local table alias.
    public TPackageCQ getConditionQuery() {
        if (_conditionQuery == null) {
            _conditionQuery = new TPackageCQ(null, getSqlClause(), getSqlClause().getLocalTableAliasName(), 0);
        }
        return _conditionQuery;
    }

    /**
     * {@inheritDoc}
     * @return The conditionQuery of the local table as interface. (NotNull)
     */
    public ConditionQuery localCQ() {
        return getConditionQuery();
    }

    // ===================================================================================
    //                                                                               Union
    //                                                                               =====
    /**
     * Set up 'union'.
     * <pre>
     * cb.query().union(new UnionQuery&lt;TPackageCB&gt;() {
     *     public void query(TPackageCB unionCB) {
     *         unionCB.query().setXxx...
     *     }
     * });
     * </pre>
     * @param unionQuery The query of 'union'. (NotNull)
     */
    public void union(UnionQuery<TPackageCB> unionQuery) {
        final TPackageCB cb = new TPackageCB(); cb.xsetupForUnion(); unionQuery.query(cb);
        final TPackageCQ cq = cb.query(); query().xsetUnionQuery(cq);
    }

    /**
     * Set up 'union all'.
     * <pre>
     * cb.query().unionAll(new UnionQuery&lt;TPackageCB&gt;() {
     *     public void query(TPackageCB unionCB) {
     *         unionCB.query().setXxx...
     *     }
     * });
     * </pre>
     * @param unionQuery The query of 'union'. (NotNull)
     */
    public void unionAll(UnionQuery<TPackageCB> unionQuery) {
        final TPackageCB cb = new TPackageCB(); cb.xsetupForUnion(); unionQuery.query(cb);
        final TPackageCQ cq = cb.query(); query().xsetUnionAllQuery(cq);
    }

    public boolean hasUnionQueryOrUnionAllQuery() {
        return query().hasUnionQueryOrUnionAllQuery();
    }

    // ===================================================================================
    //                                                                        Setup Select
    //                                                                        ============

    // [DBFlute-0.7.4]
    // ===================================================================================
    //                                                                             Specify
    //                                                                             =======
    protected Specification _specification;

    // Lazily creates the column-specification helper for this bean.
    public Specification specify() {
        if (_specification == null) { _specification = new Specification(this, new SpQyCall<TPackageCQ>() {
            public boolean has() { return true; } public TPackageCQ qy() { return query(); } }, _forDerivedReferrer, _forScalarSelect, _forScalarSubQuery, getDBMetaProvider()); }
        return _specification;
    }

    // One doColumn(...) method per t_package column, named after the column.
    public static class Specification extends AbstractSpecification<TPackageCQ> {
        protected SpQyCall<TPackageCQ> _myQyCall;
        public Specification(ConditionBean baseCB, SpQyCall<TPackageCQ> qyCall
                           , boolean forDeriveReferrer, boolean forScalarSelect, boolean forScalarSubQuery
                           , DBMetaProvider dbmetaProvider)
        { super(baseCB, qyCall, forDeriveReferrer, forScalarSelect, forScalarSubQuery, dbmetaProvider); _myQyCall = qyCall; }
        public void columnPackageId() { doColumn("Package_ID"); }
        public void columnName() { doColumn("Name"); }
        public void columnParentId() { doColumn("Parent_ID"); }
        public void columnCreateddate() { doColumn("CreatedDate"); }
        public void columnModifieddate() { doColumn("ModifiedDate"); }
        public void columnNotes() { doColumn("Notes"); }
        public void columnEaGuid() { doColumn("ea_guid"); }
        public void columnXmlpath() { doColumn("XMLPath"); }
        public void columnIscontrolled() { doColumn("IsControlled"); }
        public void columnLastloaddate() { doColumn("LastLoadDate"); }
        public void columnLastsavedate() { doColumn("LastSaveDate"); }
        public void columnVersion() { doColumn("Version"); }
        public void columnPkgowner() { doColumn("PkgOwner"); }
        public void columnUmlversion() { doColumn("UMLVersion"); }
        public void columnUsedtd() { doColumn("UseDTD"); }
        public void columnLogxml() { doColumn("LogXML"); }
        public void columnCodepath() { doColumn("CodePath"); }
        public void columnNamespace() { doColumn("Namespace"); }
        public void columnTpos() { doColumn("TPos"); }
        public void columnPackageflags() { doColumn("PackageFlags"); }
        public void columnBatchsave() { doColumn("BatchSave"); }
        public void columnBatchload() { doColumn("BatchLoad"); }
        protected void doSpecifyRequiredColumn() {
        }
        protected String getTableDbName() { return "t_package"; }
    }

    // ===================================================================================
    //                                                                         Display SQL
    //                                                                         ===========
    @Override
    protected String getLogDateFormat() { return DBFluteConfig.getInstance().getLogDateFormat(); }
    @Override
    protected String getLogTimestampFormat() { return DBFluteConfig.getInstance().getLogTimestampFormat(); }

    // ===================================================================================
    //                                                                            Internal
    //                                                                            ========
    // Very Internal (for Suppressing Warn about 'Not Use Import')
    protected String getConditionBeanClassNameInternally() { return TPackageCB.class.getName(); }
    protected String getConditionQueryClassNameInternally() { return TPackageCQ.class.getName(); }
    protected String getSubQueryClassNameInternally() { return SubQuery.class.getName(); }
}
| apache-2.0 |
jason-rhodes/bridgepoint | src/org.xtuml.bp.xtext.masl.parent/org.xtuml.bp.xtext.masl/emf-gen/org/xtuml/bp/xtext/masl/masl/types/ArrayTypeReference.java | 1628 | /**
* generated by Xtext 2.9.2
*/
package org.xtuml.bp.xtext.masl.masl.types;
import org.xtuml.bp.xtext.masl.masl.behavior.Expression;
/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Array Type Reference</b></em>'.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * </p>
 * <ul>
 *   <li>{@link org.xtuml.bp.xtext.masl.masl.types.ArrayTypeReference#getExpression <em>Expression</em>}</li>
 * </ul>
 *
 * @see org.xtuml.bp.xtext.masl.masl.types.TypesPackage#getArrayTypeReference()
 * @model
 * @generated
 */
public interface ArrayTypeReference extends AbstractCollectionTypeReference {

    /**
     * Returns the value of the '<em><b>Expression</b></em>' containment reference.
     * <!-- begin-user-doc -->
     * <p>
     * The contained expression - presumably the array's bound/size expression
     * as parsed from the MASL source (TODO confirm against the MASL grammar).
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>Expression</em>' containment reference.
     * @see #setExpression(Expression)
     * @see org.xtuml.bp.xtext.masl.masl.types.TypesPackage#getArrayTypeReference_Expression()
     * @model containment="true"
     * @generated
     */
    Expression getExpression();

    /**
     * Sets the value of the '{@link org.xtuml.bp.xtext.masl.masl.types.ArrayTypeReference#getExpression <em>Expression</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Expression</em>' containment reference.
     * @see #getExpression()
     * @generated
     */
    void setExpression(Expression value);

} // ArrayTypeReference
| apache-2.0 |
TonyWang-UMU/TFG-TWang | opencds-parent/opencds-vmr-1_0/opencds-vmr-1_0-internal/src/main/java/org/opencds/vmr/v1_0/internal/concepts/ObservationCodedValueConcept.java | 1034 | /**
* Copyright 2011 OpenCDS.org
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.opencds.vmr.v1_0.internal.concepts;
public class ObservationCodedValueConcept extends VmrOpenCdsConcept {
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "ObservationCodedValueConcept [Id=" + Id + ", conceptTargetId="
+ conceptTargetId + ", openCdsConceptCode="
+ openCdsConceptCode + ", determinationMethodCode="
+ determinationMethodCode + "]";
}
} | apache-2.0 |
itgeeker/jdk | src/com/sun/org/apache/bcel/internal/generic/CALOAD.java | 3731 | /*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
package com.sun.org.apache.bcel.internal.generic;
/* ====================================================================
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Apache" and "Apache Software Foundation" and
* "Apache BCEL" must not be used to endorse or promote products
* derived from this software without prior written permission. For
* written permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* "Apache BCEL", nor may "Apache" appear in their name, without
* prior written permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
/**
 * CALOAD - Load char from array
 * <PRE>Stack: ..., arrayref, index -> ..., value</PRE>
 *
 * @author <A HREF="mailto:markus.dahm@berlin.de">M. Dahm</A>
 */
public class CALOAD extends ArrayInstruction implements StackProducer {

    /** Load char from array
     * (registers this instruction with the CALOAD opcode constant).
     */
    public CALOAD() {
        super(com.sun.org.apache.bcel.internal.Constants.CALOAD);
    }

    /**
     * Call corresponding visitor method(s). The order is:
     * Call visitor methods of implemented interfaces first, then
     * call methods according to the class hierarchy in descending order,
     * i.e., the most specific visitXXX() call comes last.
     *
     * @param v Visitor object
     */
    public void accept(Visitor v) {
        v.visitStackProducer(this);
        v.visitExceptionThrower(this);
        v.visitTypedInstruction(this);
        v.visitArrayInstruction(this);
        v.visitCALOAD(this);
    }
}
| apache-2.0 |
rcarlosdasilva/weixin | src/test/java/io/github/rcarlosdasilva/weixin/test/Property.java | 511 | package io.github.rcarlosdasilva.weixin.test;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Read-only access to test configuration loaded once from the classpath
 * resource {@code sample.properties}.
 */
public class Property {

    private static final Properties properties = new Properties();

    static {
        // FIX: try-with-resources closes the stream (it previously leaked),
        // and a missing resource no longer crashes class initialization with
        // a NullPointerException from Properties.load(null).
        try (InputStream in = ClassLoader.getSystemResourceAsStream("sample.properties")) {
            if (in != null) {
                properties.load(in);
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Returns the configured value for {@code key}, or null when the key is
     * absent (or the properties file could not be loaded).
     */
    public static String get(String key) {
        return properties.getProperty(key);
    }
}
| apache-2.0 |
consulo/consulo-python | plugin/src/test/java/com/jetbrains/python/PySurroundWithTest.java | 1456 | package com.jetbrains.python;
import com.intellij.codeInsight.generation.surroundWith.SurroundWithHandler;
import com.intellij.lang.surroundWith.Surrounder;
import com.intellij.openapi.command.WriteCommandAction;
import com.jetbrains.python.fixtures.PyTestCase;
import com.jetbrains.python.refactoring.surround.surrounders.statements.PyWithIfSurrounder;
import com.jetbrains.python.refactoring.surround.surrounders.statements.PyWithTryExceptSurrounder;
import com.jetbrains.python.refactoring.surround.surrounders.statements.PyWithWhileSurrounder;
/**
 * Tests for the "Surround With" actions on Python code: each test applies one
 * surrounder to a fixture file and compares against an expected result file.
 *
 * @author yole
 */
public abstract class PySurroundWithTest extends PyTestCase {

    public void testSurroundWithIf() throws Exception {
        doTest(new PyWithIfSurrounder());
    }

    public void testSurroundWithWhile() throws Exception {
        doTest(new PyWithWhileSurrounder());
    }

    // Leading underscore keeps the JUnit 3 runner from picking this up;
    // the try/except case is deliberately disabled.
    public void _testSurroundWithTryExcept() throws Exception {
        doTest(new PyWithTryExceptSurrounder());
    }

    /**
     * Applies {@code surrounder} to /surround/{testName}.py and checks the
     * result against /surround/{testName}_after.py.
     */
    private void doTest(final Surrounder surrounder) throws Exception {
        String baseName = "/surround/" + getTestName(false);
        myFixture.configureByFile(baseName + ".py");
        // The surround action modifies the document, so it must run inside a
        // write command action.
        new WriteCommandAction.Simple(myFixture.getProject()) {
            @Override
            protected void run() throws Throwable {
                SurroundWithHandler.invoke(myFixture.getProject(), myFixture.getEditor(), myFixture.getFile(), surrounder);
            }
        }.execute();
        myFixture.checkResultByFile(baseName + "_after.py", true);
    }
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202102/ActivityGroupStatus.java | 1412 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202102;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for ActivityGroup.Status.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 * <p>
 * <pre>
 * &lt;simpleType name="ActivityGroup.Status"&gt;
 *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"&gt;
 *     &lt;enumeration value="ACTIVE"/&gt;
 *     &lt;enumeration value="INACTIVE"/&gt;
 *   &lt;/restriction&gt;
 * &lt;/simpleType&gt;
 * </pre>
 *
 */
@XmlType(name = "ActivityGroup.Status")
@XmlEnum
public enum ActivityGroupStatus {

    ACTIVE,
    INACTIVE;

    /** Returns the XML wire value for this constant (same as the enum name). */
    public String value() {
        return name();
    }

    /** Parses an XML wire value back into the matching enum constant. */
    public static ActivityGroupStatus fromValue(String v) {
        return valueOf(v);
    }
}
| apache-2.0 |
NessComputing/components-ness-jmx | src/main/java/com/nesscomputing/jmx/starter/guice/JmxStarterConfig.java | 1401 | /**
* Copyright (C) 2012 Ness Computing, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nesscomputing.jmx.starter.guice;
import org.skife.config.Config;
import org.skife.config.Default;
import org.skife.config.DefaultNull;
/**
 * Configuration bean for the JMX starter. Values are bound from the
 * {@code ness.jmx.*} configuration keys via the {@code @Config} annotations;
 * each method body supplies the fallback used when no binding exists.
 */
public abstract class JmxStarterConfig
{
    /** Whether the JMX starter is enabled at all (default: true). */
    @Config("ness.jmx.enabled")
    @Default("true")
    public boolean isEnabled()
    {
        return true;
    }

    /** Path to the JMX access file, or null for none (default). */
    @Config("ness.jmx.access-file")
    @DefaultNull
    public String getAccessFile()
    {
        return null;
    }

    /** Path to the JMX password file, or null for none (default). */
    @Config("ness.jmx.password-file")
    @DefaultNull
    public String getPasswordFile()
    {
        return null;
    }

    /** Address to bind the JMX connector to, or null for unspecified (default). */
    @Config("ness.jmx.bind-address")
    @DefaultNull
    public String getBindAddress()
    {
        return null;
    }

    /** Port to bind the JMX connector to, or null for unspecified (default). */
    @Config("ness.jmx.bind-port")
    @DefaultNull
    public Integer getBindPort()
    {
        return null;
    }
}
| apache-2.0 |
easyaspi314/Xposed-TouchWizToggles | app/src/main/java/com/devin/touchwiztoggles/Preferences.java | 353 | package com.devin.touchwiztoggles;
import android.app.*;
import android.os.*;
import android.widget.*;
import de.robv.android.xposed.XposedHelpers;
public class Preferences extends Activity
{
private ListView lv;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
lv =
}
}
| apache-2.0 |
photon-infotech/phresco-pom | src/test/java/com/phresco/pom/test/DependencySystemPathTest.java | 2979 | /**
* Phresco Pom
*
* Copyright (C) 1999-2014 Photon Infotech Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.phresco.pom.test;
import java.io.File;
import java.util.List;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.phresco.pom.exception.PhrescoPomException;
import com.phresco.pom.model.Dependency;
import com.phresco.pom.util.PomProcessor;
public class DependencySystemPathTest {

    /** Removes any leftover pom file so each test starts from a clean slate. */
    @Before
    public void prepare() {
        File file = new File("pomTest.xml");
        if (file.exists()) {
            file.delete();
        }
    }

    /** Setting the system path on a matching dependency should persist it. */
    @Test
    public void validSetDependencySystemPath() throws PhrescoPomException {
        String systemPath = "";
        PomProcessor processor = new PomProcessor(new File("pomTest.xml"));
        processor.addDependency("com.photon.phresco.test", "com.photon.phresco.test", "1.2.0");
        processor.setDependencySystemPath("com.photon.phresco.test", "com.photon.phresco.test", "SystemPath");
        processor.save();
        List<Dependency> dependencyList = processor.getModel().getDependencies().getDependency();
        for (Dependency dependency : dependencyList) {
            systemPath = dependency.getSystemPath();
        }
        // FIX: assert directly instead of going through redundant
        // actual/expected locals.
        Assert.assertEquals("SystemPath", systemPath);
    }

    /**
     * Setting a system path when no dependency exists should leave the
     * dependencies section absent entirely.
     */
    @Test
    public void validSetDependencySystemPathNull() throws PhrescoPomException {
        // FIX: removed the unused "systemPath" local from the original.
        PomProcessor processor = new PomProcessor(new File("pomTest.xml"));
        processor.setDependencySystemPath("com.photon.phresco.test11", "com.photon.phresco.test", "SystemPath");
        processor.save();
        Assert.assertNull(processor.getModel().getDependencies());
    }

    /**
     * Setting a system path with a non-matching artifactId must not touch the
     * existing dependency, whose system path therefore stays null.
     */
    @Test
    public void invalidSetDependencySystemPath() throws PhrescoPomException {
        String systemPath = "";
        PomProcessor processor = new PomProcessor(new File("pomTest.xml"));
        processor.addDependency("com.photon.phresco.test", "com.photon.phresco.test", "1.2.0");
        processor.setDependencySystemPath("com.photon.phresco.test", "com.photon.phresco", "SystemPath");
        processor.save();
        List<Dependency> dependencyList = processor.getModel().getDependencies().getDependency();
        for (Dependency dependency : dependencyList) {
            systemPath = dependency.getSystemPath();
        }
        // FIX: assertNull is clearer than assertEquals(null, ...).
        Assert.assertNull(systemPath);
    }

    /** Cleans up the pom file written by the test. */
    @After
    public void delete() {
        File file = new File("pomTest.xml");
        if (file.exists()) {
            file.delete();
        }
    }
}
| apache-2.0 |
sedmelluq/lavaplayer | main/src/main/java/com/sedmelluq/discord/lavaplayer/container/ogg/OggPageScanner.java | 2913 | package com.sedmelluq.discord.lavaplayer.container.ogg;
import java.nio.ByteBuffer;
/**
* Scanner for determining OGG stream information by seeking around in it.
*/
public class OggPageScanner {
  /** The "OggS" capture pattern that begins every OGG page, packed as a big-endian int. */
  private static final int OGG_PAGE_HEADER_INT = ByteBuffer.wrap(new byte[] { 0x4F, 0x67, 0x67, 0x53 }).getInt(0);

  /** Absolute stream position that data[0] corresponds to. */
  private final long absoluteOffset;
  private final byte[] data;
  private final int dataLength;

  // Fields of the most recently parsed page header, filled in by attemptReadHeader().
  private int flags;              // header_type byte (offset 5); bit flags such as FLAG_LAST_PAGE
  private long reversedPosition;  // granule position (offset 6) read big-endian; the field itself is
                                  // little-endian, so callers apply Long.reverseBytes() before use
  private int pageSize;           // length in bytes of the parsed page (header + segments)
  private long byteStreamPosition; // absolute stream offset of the parsed page's first byte

  /**
   * @param absoluteOffset Current position of the stream in bytes.
   * @param data Byte array with data starting at that position.
   * @param dataLength Length of data.
   */
  public OggPageScanner(long absoluteOffset, byte[] data, int dataLength) {
    this.absoluteOffset = absoluteOffset;
    this.data = data;
    this.dataLength = dataLength;
  }

  /**
   * Scans the buffered data for an OGG page header and, following consecutive pages from there,
   * looks for the page flagged as the last page of the stream to derive stream size information.
   *
   * @param firstPageOffset Absolute position of the first page in the stream.
   * @param sampleRate Sample rate of the track in the stream.
   * @return If the data contains the header of the last page in the OGG stream, then stream size information,
   *         otherwise <code>null</code>.
   */
  public OggStreamSizeInfo scanForSizeInfo(long firstPageOffset, int sampleRate) {
    // Too short to contain a full 27-byte header plus the 4-byte rolling window; the loop below
    // would not run anyway, and buffer.getInt(0) would throw for dataLength < 4.
    if (dataLength < 28) {
      return null;
    }

    ByteBuffer buffer = ByteBuffer.wrap(data, 0, dataLength);
    // Rolling 32-bit window over the data; at iteration i it holds bytes data[i..i+3].
    int head = buffer.getInt(0);

    for (int i = 0; i < dataLength - 27; i++) {
      if (head == OGG_PAGE_HEADER_INT) {
        buffer.position(i);

        if (attemptReadHeader(buffer)) {
          // Walk consecutive page headers until the end-of-stream page is found or parsing fails.
          do {
            if ((flags & OggPageHeader.FLAG_LAST_PAGE) != 0) {
              return new OggStreamSizeInfo((byteStreamPosition - firstPageOffset) + pageSize,
                  Long.reverseBytes(reversedPosition), firstPageOffset, byteStreamPosition, sampleRate);
            }
          } while (attemptReadHeader(buffer));
        }
      }

      // Slide the window forward by one byte.
      head <<= 8;
      head |= data[i + 4] & 0xFF;
    }

    return null;
  }

  /**
   * Attempts to parse one page header at the buffer's current position. On success, stores the
   * header fields in this instance and advances the buffer position to the end of the page.
   *
   * @param buffer buffer positioned at a candidate page start
   * @return true if a complete, valid page (header + all segments) is present in the buffer
   */
  private boolean attemptReadHeader(ByteBuffer buffer) {
    int start = buffer.position();
    if (buffer.limit() < start + 27) {
      // Not even the fixed 27-byte header part is available.
      return false;
    } else if (buffer.getInt(start) != OGG_PAGE_HEADER_INT) {
      // Capture pattern "OggS" missing.
      return false;
    } else if (buffer.get(start + 4) != 0) {
      // Stream structure version must be 0.
      return false;
    }

    // Segment count (offset 26) tells how many lacing values follow the fixed header.
    int segmentCount = buffer.get(start + 26) & 0xFF;
    int minimumCapacity = start + segmentCount + 27;
    if (buffer.limit() < minimumCapacity) {
      return false;
    }

    // Sum the lacing values so minimumCapacity becomes the buffer offset just past the page.
    int segmentBase = start + 27;
    for (int i = 0; i < segmentCount; i++) {
      minimumCapacity += buffer.get(segmentBase + i) & 0xFF;
    }
    if (buffer.limit() < minimumCapacity) {
      return false;
    }

    flags = buffer.get(start + 5) & 0xFF;
    reversedPosition = buffer.getLong(start + 6);
    byteStreamPosition = absoluteOffset + start;
    // BUGFIX: previously stored minimumCapacity (the absolute end offset within the buffer),
    // which includes `start` and therefore inflated the total-bytes calculation in
    // scanForSizeInfo whenever the page did not begin at buffer offset 0. The page size is
    // the distance from the page start to its end.
    pageSize = minimumCapacity - start;
    buffer.position(minimumCapacity);
    return true;
  }
}
| apache-2.0 |
RobWin/javaslang-circuitbreaker | resilience4j-rxjava3/src/main/java/io/github/resilience4j/rxjava3/ratelimiter/operator/CompletableRateLimiter.java | 2613 | /*
* Copyright 2019 Robert Winkler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.resilience4j.rxjava3.ratelimiter.operator;
import io.github.resilience4j.rxjava3.AbstractCompletableObserver;
import io.github.resilience4j.ratelimiter.RateLimiter;
import io.github.resilience4j.ratelimiter.RequestNotPermitted;
import io.reactivex.rxjava3.core.Completable;
import io.reactivex.rxjava3.core.CompletableObserver;
import io.reactivex.rxjava3.internal.disposables.EmptyDisposable;
import io.vavr.control.Either;
import java.util.concurrent.TimeUnit;
/**
 * A {@link Completable} decorator that guards subscription to an upstream source with a
 * {@link RateLimiter}. A permission is reserved at subscription time; the upstream is only
 * subscribed once the reserved wait (if any) has elapsed. When no permission can be reserved,
 * the downstream observer receives a {@link RequestNotPermitted} error instead.
 */
class CompletableRateLimiter extends Completable {

    private final Completable upstream;
    private final RateLimiter rateLimiter;

    CompletableRateLimiter(Completable upstream, RateLimiter rateLimiter) {
        this.upstream = upstream;
        this.rateLimiter = rateLimiter;
    }

    @Override
    protected void subscribeActual(CompletableObserver downstream) {
        long delayNanos = rateLimiter.reservePermission();

        if (delayNanos < 0) {
            // Permission refused: never touch the upstream, just signal the rejection.
            downstream.onSubscribe(EmptyDisposable.INSTANCE);
            downstream.onError(RequestNotPermitted.createRequestNotPermitted(rateLimiter));
            return;
        }

        if (delayNanos == 0) {
            // Permission immediately available: subscribe straight away.
            upstream.subscribe(new RateLimiterCompletableObserver(downstream));
        } else {
            // Defer the upstream subscription until the reserved wait has elapsed.
            Completable.timer(delayNanos, TimeUnit.NANOSECONDS)
                .subscribe(() -> upstream.subscribe(
                    new RateLimiterCompletableObserver(downstream)));
        }
    }

    /** Forwards upstream signals downstream, reporting errors to the rate limiter. */
    class RateLimiterCompletableObserver extends AbstractCompletableObserver {

        RateLimiterCompletableObserver(CompletableObserver downstreamObserver) {
            super(downstreamObserver);
        }

        @Override
        protected void hookOnComplete() {
            // NoOp
        }

        @Override
        protected void hookOnError(Throwable e) {
            // Record the failure against the rate limiter's metrics/state.
            rateLimiter.onError(e);
        }

        @Override
        protected void hookOnCancel() {
            // NoOp
        }
    }
}
| apache-2.0 |
christophd/citrus | tools/restdocs/src/main/java/com/consol/citrus/restdocs/soap/CitrusRestDocSoapConfigurer.java | 4451 | /*
* Copyright 2006-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.restdocs.soap;
import com.consol.citrus.TestCase;
import com.consol.citrus.report.TestListener;
import com.consol.citrus.restdocs.util.RestDocTestNameFormatter;
import org.springframework.restdocs.*;
import org.springframework.restdocs.config.RestDocumentationConfigurer;
import org.springframework.ws.client.WebServiceClientException;
import org.springframework.ws.client.support.interceptor.ClientInterceptor;
import org.springframework.ws.context.MessageContext;
import java.util.HashMap;
import java.util.Map;
/**
* Interceptor that configures RestDoc with snippet and documentation configuration. After configuration has been built
* the interceptor uses a special Http request wrapper for next interceptors in line. These interceptors can then read the
* RestDoc configuration and context from the request wrapper implementation.
*
* @author Christoph Deppisch
* @since 2.6
*/
public class CitrusRestDocSoapConfigurer extends RestDocumentationConfigurer<CitrusSnippetConfigurer, CitrusSnippetConfigurer, CitrusRestDocSoapConfigurer>
        implements ClientInterceptor, TestListener {

    /** Message context property key under which the built RestDoc configuration map is stored. */
    public static final String REST_DOC_SOAP_CONFIGURATION = "com.consol.citrus.restdocs.soap.configuration";

    // Single configurer instance returned for both snippet and operation-preprocessor setup.
    private final CitrusSnippetConfigurer snippetConfigurer = new CitrusSnippetConfigurer(this);

    // Provides the per-operation documentation context; may be a ManualRestDocumentation,
    // in which case the test listener callbacks below drive its before/after lifecycle.
    private final RestDocumentationContextProvider contextProvider;

    /**
     * @param contextProvider provider used to open a RestDoc context for each SOAP operation
     */
    public CitrusRestDocSoapConfigurer(RestDocumentationContextProvider contextProvider) {
        this.contextProvider = contextProvider;
    }

    @Override
    public CitrusSnippetConfigurer snippets() {
        return this.snippetConfigurer;
    }

    /** Note: returns the same configurer instance as {@link #snippets()}. */
    @Override
    public CitrusSnippetConfigurer operationPreprocessors() {
        return this.snippetConfigurer;
    }

    /**
     * Opens a RestDoc context before the SOAP request goes out, builds the configuration map
     * (via the inherited {@code apply(...)}) and stores both as message context properties so
     * that interceptors later in the chain can read them.
     *
     * @return always {@code true} to continue the interceptor chain
     */
    @Override
    public boolean handleRequest(MessageContext messageContext) throws WebServiceClientException {
        RestDocumentationContext context = this.contextProvider.beforeOperation();
        Map<String, Object> configuration = new HashMap<>();
        apply(configuration, context);
        messageContext.setProperty(RestDocumentationContext.class.getName(), context);
        messageContext.setProperty(REST_DOC_SOAP_CONFIGURATION, configuration);
        return true;
    }

    /** No-op for responses; always continues the interceptor chain. */
    @Override
    public boolean handleResponse(MessageContext messageContext) throws WebServiceClientException {
        return true;
    }

    /** No-op for SOAP faults; always continues the interceptor chain. */
    @Override
    public boolean handleFault(MessageContext messageContext) throws WebServiceClientException {
        return true;
    }

    /** No cleanup required after message exchange completion. */
    @Override
    public void afterCompletion(MessageContext messageContext, Exception ex) throws WebServiceClientException {
    }

    /**
     * Starts the manual RestDoc lifecycle for the given test when the context provider
     * supports it; the test class and formatted test name identify the documentation run.
     */
    @Override
    @SuppressWarnings("all")
    public void onTestStart(TestCase test) {
        if (contextProvider instanceof ManualRestDocumentation) {
            try {
                ((ManualRestDocumentation) contextProvider).beforeTest(test.getTestClass(), RestDocTestNameFormatter.format(test.getTestClass(), test.getName()));
            } catch (IllegalStateException e) {
                // ignore as someone else has already called before test.
            }
        }
    }

    /** Ends the manual RestDoc lifecycle for the finished test, if applicable. */
    @Override
    public void onTestFinish(TestCase test) {
        if (contextProvider instanceof ManualRestDocumentation) {
            ((ManualRestDocumentation) contextProvider).afterTest();
        }
    }

    /** No action on test success; lifecycle is handled in {@link #onTestFinish(TestCase)}. */
    @Override
    public void onTestSuccess(TestCase test) {
    }

    /** No action on test failure; lifecycle is handled in {@link #onTestFinish(TestCase)}. */
    @Override
    public void onTestFailure(TestCase test, Throwable cause) {
    }

    /** No action for skipped tests. */
    @Override
    public void onTestSkipped(TestCase test) {
    }

    /**
     * Gets the value of the contextProvider property.
     *
     * @return the contextProvider
     */
    public RestDocumentationContextProvider getContextProvider() {
        return contextProvider;
    }
}
| apache-2.0 |
balajiboggaram/algorithms | src/me/learn/personal/month1/DisappearedNumbersArray.java | 1497 | package me.learn.personal.month1;
import java.util.ArrayList;
import java.util.List;
/**
*
* Title 448 : Given an array of integers where 1 ≤ a[i] ≤ n (n = size of
* array), some elements appear twice and others appear once.
*
* Find all the elements of [1, n] inclusive that do not appear in this array.
*
* Could you do it without extra space and in O(n) runtime? You may assume the
* returned list does not count as extra space.
*
* @author bramanarayan
* @date May 20, 2020
*/
public class DisappearedNumbersArray {

    public static void main(String[] args) {
        DisappearedNumbersArray solution = new DisappearedNumbersArray();
        // Expected output: [5, 6]
        System.out.println(solution.findDisappearedNumbers(new int[] { 4, 3, 2, 7, 8, 2, 3, 1 }));
    }

    /**
     * Returns all values in [1, n] that do not appear in {@code nums}, where n = nums.length
     * and every element is assumed to lie in [1, n].
     *
     * Strategy: use the array itself as a presence table. For each value v seen, negate
     * nums[v - 1] (taking abs() because that slot may already have been negated by an earlier
     * duplicate). Afterwards, any index whose value is still positive corresponds to a missing
     * number (index + 1). O(n) time, O(1) extra space beyond the result list.
     *
     * Note: the input array is mutated (elements of present values are negated).
     *
     * @param nums array of length n with elements in [1, n]; may be empty
     * @return list of missing values in ascending order (empty list for empty input)
     */
    public List<Integer> findDisappearedNumbers(int[] nums) {
        List<Integer> result = new ArrayList<Integer>();
        int n = nums.length;

        // Mark presence: negate the slot addressed by each value. The original code
        // special-cased index 0 before the loop; a single uniform loop with abs() covers
        // every element, including the first, identically.
        for (int i = 0; i < n; i++) {
            int value = Math.abs(nums[i]);
            if (nums[value - 1] > 0) {
                nums[value - 1] = -nums[value - 1];
            }
        }

        // Any slot left positive was never addressed, so its (index + 1) is missing.
        for (int i = 0; i < n; i++) {
            if (nums[i] > 0) {
                result.add(i + 1);
            }
        }
        return result;
    }
}
| apache-2.0 |
eccosolutions/osaf-jcr-server | webdav/src/java/org/apache/jackrabbit/webdav/version/LabelSetProperty.java | 1865 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.webdav.version;
import org.apache.log4j.Logger;
import org.apache.jackrabbit.webdav.xml.DomUtil;
import org.apache.jackrabbit.webdav.property.AbstractDavProperty;
import org.w3c.dom.Element;
import org.w3c.dom.Document;
/**
* <code>LabelSetProperty</code>...
*/
/**
 * <code>LabelSetProperty</code>: the DAV label-name-set property of a version resource,
 * exposing the set of labels attached to that version. Serialized as one
 * DAV:label-name child element per label.
 */
public class LabelSetProperty extends AbstractDavProperty {

    private static Logger log = Logger.getLogger(LabelSetProperty.class);

    // Label names carried by this property; each becomes a DAV:label-name element in toXml().
    private final String[] value;

    /**
     * Create a new <code>LabelSetProperty</code>.
     *
     * @param labels label names of the version resource; not copied (see getValue note)
     */
    public LabelSetProperty(String[] labels) {
        super(VersionResource.LABEL_NAME_SET, true);
        this.value = labels;
    }

    /**
     * Returns the label name array backing this property.
     * NOTE(review): the internal array is returned directly with no defensive copy, so a
     * caller can mutate this property's state — confirm whether that exposure is intended.
     *
     * @return the label names as a <code>String[]</code>
     */
    public Object getValue() {
        return value;
    }

    /**
     * Serializes this property: the property's own element with one DAV:label-name child
     * (in the DeltaV namespace) appended per label.
     *
     * @see org.apache.jackrabbit.webdav.xml.XmlSerializable#toXml(Document)
     * @param document document used to create the elements
     */
    public Element toXml(Document document) {
        Element elem = getName().toXml(document);
        for (int i = 0; i < value.length; i++) {
            DomUtil.addChildElement(elem, DeltaVConstants.XML_LABEL_NAME, DeltaVConstants.NAMESPACE, value[i]);
        }
        return elem;
    }
} | apache-2.0 |
Polina97/travelia | Build11/app/src/main/java/admin/build1/ui/adapter/ParkAdapter.java | 2284 | package admin.build1.ui.adapter;
import android.database.Cursor;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import admin.build1.R;
/**
* Created by User on 19.05.2016.
*/
/**
 * RecyclerView adapter rendering park rows backed by a database {@link Cursor}.
 * Each row shows an image and a caption; clicking a row forwards the row's "_id"
 * column value to the supplied {@link ParkOnClickListener}.
 */
public class ParkAdapter extends RecyclerView.Adapter<ParkAdapter.ParkViewHolder> {

    private final Cursor mCursor;
    private final ParkOnClickListener mListener;

    public ParkAdapter(Cursor cursor, ParkOnClickListener listener) {
        mCursor = cursor;
        mListener = listener;
    }

    @Override
    public ParkViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        View itemView = inflater.inflate(R.layout.adapter_sights, parent, false);
        return new ParkViewHolder(itemView, mListener);
    }

    @Override
    public void onBindViewHolder(ParkViewHolder holder, int position) {
        // Position the cursor on the requested row, then pull the row's image and caption.
        mCursor.moveToPosition(position);
        int imageResId = mCursor.getInt(mCursor.getColumnIndex("IMAGE_RESOURCE_ID"));
        String name = mCursor.getString(mCursor.getColumnIndex("NAME"));
        holder.populateView(imageResId, name);
    }

    @Override
    public int getItemCount() {
        return mCursor.getCount();
    }

    /** Holds one row's image and caption views and wires up the click callback. */
    class ParkViewHolder extends RecyclerView.ViewHolder {

        ImageView mImage;
        TextView mText;

        public ParkViewHolder(View itemView, final ParkOnClickListener listener) {
            super(itemView);
            mImage = (ImageView) itemView.findViewById(R.id.image);
            mText = (TextView) itemView.findViewById(R.id.text);
            itemView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Resolve the clicked row's database id before notifying the listener.
                    mCursor.moveToPosition(getAdapterPosition());
                    listener.onParkClick(mCursor.getInt(mCursor.getColumnIndex("_id")));
                }
            });
        }

        /** Applies the given image resource and caption text to this row's views. */
        public void populateView(int imageResId, String text) {
            mImage.setBackgroundResource(imageResId);
            mText.setText(text);
        }
    }

    /** Callback for row clicks; receives the "_id" column value of the selected park. */
    public interface ParkOnClickListener {
        void onParkClick(int id);
    }
}
| apache-2.0 |